Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 41 additions & 22 deletions backend/app/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,34 +7,42 @@

BACKEND_ROOT = Path(__file__).resolve().parents[1]
REPO_ROOT = BACKEND_ROOT.parent
TMP_ROOT = Path("/tmp")


def _resolve_writable_dir(*, env_var: str, default_dir_name: str) -> Path:
configured_dir = os.getenv(env_var)
if configured_dir:
configured_path = Path(configured_dir)
if not configured_path.is_absolute():
return TMP_ROOT / configured_path
return configured_path
return TMP_ROOT / default_dir_name


def get_data_dir() -> Path:
    """Return the directory used for persistent application data.

    Resolves the DATA_DIR override (absolute, or relative to /tmp) with a
    /tmp default, then rejects any candidate that falls inside the repository
    checkout — the repo is read-only on serverless hosts — by falling back
    to the /tmp default.

    Fix: the diff interleaving had left both the old inline env-var
    resolution and a duplicated dead ``return`` in place; this keeps only
    the post-merge logic built on ``_resolve_writable_dir``.
    """
    candidate = _resolve_writable_dir(env_var="DATA_DIR", default_dir_name="gitplant-data")

    try:
        # relative_to raises ValueError when candidate is OUTSIDE the repo,
        # which is exactly the safe (returnable) case.
        candidate.relative_to(REPO_ROOT)
    except ValueError:
        return candidate

    return TMP_ROOT / "gitplant-data"


def ensure_dir(path: Path) -> Path:
    """Create *path* (including any missing parents) and return it unchanged.

    Idempotent: an already-existing directory is left untouched.
    """
    path.mkdir(parents=True, exist_ok=True)
    return path


def ensure_data_dir() -> Path:
    """Create (if needed) and return the data directory, with a /tmp fallback.

    An OSError from the preferred location (e.g. a read-only filesystem)
    triggers a fallback to the always-writable /tmp/gitplant-data.

    Fix: the diff interleaving had left the old inline ``mkdir`` calls and
    unreachable duplicate ``return`` statements in both branches; this keeps
    only the post-merge ``ensure_dir``-based logic.
    """
    path = get_data_dir()
    try:
        return ensure_dir(path)
    except OSError:
        fallback = TMP_ROOT / "gitplant-data"
        return ensure_dir(fallback)


# Vercel serverless functions run on a read-only filesystem except for /tmp.
Expand Down Expand Up @@ -107,29 +115,40 @@ def sanitize_database_url(database_url: str) -> str:
def _resolve_path(path: str) -> Path:
configured = Path(path)
if not configured.is_absolute():
configured = (BACKEND_ROOT / configured).resolve()
configured = (TMP_ROOT / configured).resolve()
return configured


def _ensure_path(path: str) -> Path:
    """Resolve *path* (see ``_resolve_path``) and make sure the directory exists.

    Fix: the diff interleaving had left the old inline ``mkdir`` + return
    followed by an unreachable duplicate return; this keeps only the
    post-merge ``ensure_dir`` delegation.
    """
    configured = _resolve_path(path)
    return ensure_dir(configured)


def get_storage_dir(*, ensure_exists: bool = True) -> Path:
    """Return the blob-storage root (STORAGE_DIR override or /tmp default).

    ensure_exists: when True (the default) the directory is created on
    demand before being returned.
    """
    storage_root = _resolve_writable_dir(
        env_var="STORAGE_DIR", default_dir_name="gitplant-storage"
    )
    if not ensure_exists:
        return storage_root
    return ensure_dir(storage_root)


def get_document_storage_path(*, ensure_exists: bool = True) -> Path:
    """Return the directory where uploaded documents are stored.

    With the default setting the path is a "documents" subdirectory of the
    data dir; an explicit ``settings.document_storage_dir`` override is
    resolved on its own. ``ensure_exists`` controls whether directories are
    created as a side effect.

    Fix: the diff interleaving had left the old inline ``mkdir`` block and
    an unreachable duplicate return; only the post-merge ``ensure_dir``
    form is kept.
    """
    if settings.document_storage_dir == str(DEFAULT_STORAGE_DIR):
        base_path = ensure_data_dir() / "documents" if ensure_exists else get_data_dir() / "documents"
        return ensure_dir(base_path) if ensure_exists else base_path
    return _ensure_path(settings.document_storage_dir) if ensure_exists else _resolve_path(settings.document_storage_dir)


def get_plant_storage_path(*, ensure_exists: bool = True) -> Path:
    """Return the directory where plant files are stored.

    With the default setting the path is a "plant" subdirectory of the data
    dir; an explicit ``settings.plant_storage_dir`` override is resolved on
    its own. ``ensure_exists`` controls whether directories are created as
    a side effect.

    Fix: the diff interleaving had left the old inline ``mkdir`` block and
    an unreachable duplicate return; only the post-merge ``ensure_dir``
    form is kept.
    """
    if settings.plant_storage_dir == str(DEFAULT_PLANT_STORAGE_DIR):
        base_path = ensure_data_dir() / "plant" if ensure_exists else get_data_dir() / "plant"
        return ensure_dir(base_path) if ensure_exists else base_path
    return _ensure_path(settings.plant_storage_dir) if ensure_exists else _resolve_path(settings.plant_storage_dir)


def get_working_storage_path(*, ensure_exists: bool = True) -> Path:
    """Return the directory for in-progress project working files.

    A WORKING_STORAGE_DIR override wins outright; otherwise a "projects"
    subdirectory of the storage root is used. ``ensure_exists`` controls
    whether directories are created as a side effect.
    """
    override = os.getenv("WORKING_STORAGE_DIR")
    if override:
        if ensure_exists:
            return _ensure_path(override)
        return _resolve_path(override)

    projects_dir = get_storage_dir(ensure_exists=ensure_exists) / "projects"
    if ensure_exists:
        return ensure_dir(projects_dir)
    return projects_dir
1 change: 1 addition & 0 deletions backend/app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ async def hardening_middleware(request: Request, call_next):
@app.on_event("startup")
def on_startup() -> None:
init_db()
health.run_runtime_storage_check()


@app.get("/", tags=["root"])
Expand Down
50 changes: 50 additions & 0 deletions backend/app/routers/health.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,14 @@
import os

from fastapi import APIRouter
from sqlmodel import Session, select

from app.config import (
ensure_data_dir,
get_data_dir,
get_document_storage_path,
get_storage_dir,
get_working_storage_path,
resolve_sqlite_path,
sanitize_database_url,
settings,
Expand All @@ -17,6 +23,44 @@ def health_check():
return {"status": "ok"}


def run_runtime_storage_check() -> dict:
    """Probe every runtime storage location and report its health.

    Returns a mapping of check name -> {path, under_tmp, exists, writable}.
    Used by the /health/storage endpoint, health_info, and at startup so a
    misconfigured (read-only) storage location surfaces immediately instead
    of on the first upload.

    Fixes:
    - strips review-comment text that had been pasted into the dict literal;
    - ``writable`` now probes real write permission via ``os.access`` —
      ``path.is_dir()`` alone is True even for read-only mounts;
    - ``get_data_dir()`` is called once instead of twice.
    """
    checks: dict[str, dict[str, str | bool]] = {}
    env_overrides = {
        "DATA_DIR": bool(os.getenv("DATA_DIR")),
        "STORAGE_DIR": bool(os.getenv("STORAGE_DIR")),
        "WORKING_STORAGE_DIR": bool(os.getenv("WORKING_STORAGE_DIR")),
    }

    for name, resolver in {
        "data_dir": ensure_data_dir,
        "document_storage_dir": get_document_storage_path,
        "storage_dir": get_storage_dir,
        "working_storage_dir": get_working_storage_path,
    }.items():
        path = resolver()
        path_str = str(path)
        checks[name] = {
            "path": path_str,
            "under_tmp": path_str.startswith("/tmp"),
            "exists": path.exists(),
            # Check actual write permission, not just directory existence:
            # a read-only mount is still a directory but uploads will fail.
            "writable": path.is_dir() and os.access(path, os.W_OK),
        }

    configured_data_dir = str(get_data_dir())
    checks["configured_data_dir"] = {
        "path": configured_data_dir,
        "under_tmp": configured_data_dir.startswith("/tmp"),
        "exists": True,
        "writable": True,
    }
    checks["env_overrides_present"] = {
        "path": ", ".join(key for key, enabled in env_overrides.items() if enabled) or "none",
        "under_tmp": True,
        "exists": True,
        "writable": True,
    }
    return checks


@router.get("/info", summary="Runtime environment and storage info")
def health_info():
sqlite_path = resolve_sqlite_path(settings.database_url)
Expand All @@ -25,9 +69,15 @@ def health_info():
"database_url": sanitize_database_url(settings.database_url),
"sqlite_path": str(sqlite_path) if sqlite_path else None,
"document_storage_dir": str(get_document_storage_path()),
"storage_checks": run_runtime_storage_check(),
}


@router.get("/storage", summary="Writable storage runtime check")
def storage_check():
    """Expose the runtime storage health checks as a dedicated endpoint."""
    storage_checks = run_runtime_storage_check()
    return {"status": "ok", "checks": storage_checks}


@router.get("/live", summary="Liveness probe")
def liveness_probe():
    """Minimal liveness endpoint: confirms the process is up and serving."""
    return {"status": "alive"}
Expand Down
10 changes: 2 additions & 8 deletions backend/app/routers/projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from fastapi import APIRouter, Depends, File, HTTPException, Query, UploadFile
from sqlmodel import Session, col, select

from app.config import BACKEND_ROOT
from app.config import get_working_storage_path
from app.db import get_session
from app.models import (
AuditEvent,
Expand All @@ -29,11 +29,6 @@
router = APIRouter(prefix="/projects", tags=["projects"])

ACTIVE_WORKING_STATUSES = {"WORKING", "READY"}
WORKING_STORAGE_DIR = (BACKEND_ROOT / "storage" / "projects").resolve()


def ensure_working_storage_dir() -> None:
WORKING_STORAGE_DIR.mkdir(parents=True, exist_ok=True)


def _to_working_response(
Expand Down Expand Up @@ -293,8 +288,7 @@ def upload_working_revision_file(
if not filename.endswith(".pdf"):
raise HTTPException(status_code=400, detail="Only PDF files are supported")

ensure_working_storage_dir()
destination = WORKING_STORAGE_DIR / f"{project.id}-{working.id}.pdf"
destination = get_working_storage_path() / f"{project.id}-{working.id}.pdf"
file.file.seek(0)
destination.write_bytes(file.file.read())

Expand Down
22 changes: 2 additions & 20 deletions backend/index.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,5 @@
"""Vercel entrypoint for FastAPI app.

Vercel's Python runtime looks for a module-level ``app``; re-export the
FastAPI instance directly so that an import failure surfaces with its real
traceback instead of being masked by a fallback import loop.

Fix: the diff interleaving had left both the old multi-candidate
``import_module`` loop and the new direct import in the file; only the
post-merge direct import is kept.
"""

from app.main import app

__all__ = ["app"]
6 changes: 5 additions & 1 deletion backend/tests/test_health.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,17 @@ def test_health_check():
assert response.json()["status"] == "ok"


def test_live_ready_and_storage_checks():
    """The liveness, readiness and storage probes all answer successfully.

    Fix: the diff interleaving had left two stacked ``def`` headers (old and
    new test name), which is a syntax error; only the post-merge name is kept.
    """
    live = client.get("/health/live")
    ready = client.get("/health/ready")
    storage = client.get("/health/storage")

    assert live.status_code == 200
    assert live.json()["status"] == "alive"
    assert ready.status_code == 200
    assert ready.json()["status"] in {"ready", "not_ready"}
    assert storage.status_code == 200
    assert storage.json()["status"] == "ok"


def test_health_info_includes_storage_paths():
Expand All @@ -30,4 +33,5 @@ def test_health_info_includes_storage_paths():
assert payload["status"] == "ok"
assert "database_url" in payload
assert "document_storage_dir" in payload
assert "storage_checks" in payload
assert payload["sqlite_path"] is None or payload["sqlite_path"].endswith(".db")
38 changes: 38 additions & 0 deletions backend/tests/test_import_safety.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import importlib
import pathlib
import sys


def test_backend_imports_without_mkdir_calls(monkeypatch):
    """Importing the app must not create directories as an import side effect.

    Serverless filesystems are read-only outside /tmp, so any ``mkdir`` at
    module import time would crash the function before a request is served.
    """
    observed_mkdirs: list[pathlib.Path] = []
    real_mkdir = pathlib.Path.mkdir

    def spying_mkdir(self, *args, **kwargs):
        # Record the target, then delegate so real behavior is unchanged.
        observed_mkdirs.append(self)
        return real_mkdir(self, *args, **kwargs)

    monkeypatch.setattr(pathlib.Path, "mkdir", spying_mkdir)

    # Drop cached modules so the import below re-runs module top level.
    for module_name in (
        "app.config",
        "app.db",
        "app.routers.documents",
        "app.routers.projects",
        "app.routers.health",
        "app.main",
    ):
        sys.modules.pop(module_name, None)

    importlib.import_module("app.main")

    assert observed_mkdirs == []


def test_default_storage_dir_targets_tmp(monkeypatch):
    """Without env overrides the storage root must resolve under /tmp."""
    monkeypatch.delenv("DATA_DIR", raising=False)
    monkeypatch.delenv("STORAGE_DIR", raising=False)

    import app.config as config

    resolved = config.get_storage_dir(ensure_exists=False)
    assert str(resolved).startswith("/tmp/")