diff --git a/.github/workflows/catalog-contract.yml b/.github/workflows/catalog-contract.yml
new file mode 100644
index 00000000..d46f2402
--- /dev/null
+++ b/.github/workflows/catalog-contract.yml
@@ -0,0 +1,41 @@
+name: Catalog API Contract Drift
+
+on:
+ push:
+ branches: [main]
+ paths:
+ - "src/daemon/NKS.WebDevConsole.Daemon/Binaries/CatalogClient.cs"
+ - "scripts/check-catalog-drift.mjs"
+ - ".github/workflows/catalog-contract.yml"
+ pull_request:
+ paths:
+ - "src/daemon/NKS.WebDevConsole.Daemon/Binaries/CatalogClient.cs"
+ - "scripts/check-catalog-drift.mjs"
+ - ".github/workflows/catalog-contract.yml"
+ workflow_dispatch:
+ inputs:
+ catalog_version:
+ description: "Pinned catalog-api version (e.g. 0.2.0)"
+ required: false
+
+concurrency:
+ group: catalog-contract-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ check:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - uses: actions/setup-node@v4
+ with:
+ node-version: "20"
+
+ - name: Run drift check
+ env:
+ # Pin the catalog-api release whose openapi.json we validate
+ # the C# CatalogClient.cs against. Bump this whenever the
+ # catalog service ships a new contract version.
+ CATALOG_API_VERSION: ${{ inputs.catalog_version || '0.2.0' }}
+ run: node scripts/check-catalog-drift.mjs
diff --git a/scripts/check-catalog-drift.mjs b/scripts/check-catalog-drift.mjs
new file mode 100644
index 00000000..c33777af
--- /dev/null
+++ b/scripts/check-catalog-drift.mjs
@@ -0,0 +1,139 @@
+#!/usr/bin/env node
+/**
+ * Catalog API contract drift checker.
+ *
+ * Fetches ``openapi.json`` from a pinned ``wdc-catalog-api`` GitHub
+ * release (or a local file via --spec) and verifies the endpoints /
+ * required fields the C# ``CatalogClient.cs`` depends on are still
+ * present with the expected shape. Emits a non-zero exit when the
+ * contract drifts so CI can block a breaking merge.
+ *
+ * Usage (CI):
+ * CATALOG_API_VERSION=0.2.0 node scripts/check-catalog-drift.mjs
+ *
+ * Usage (local):
+ * node scripts/check-catalog-drift.mjs --spec /path/to/openapi.json
+ */
+
+import fs from "node:fs";
+import path from "node:path";
+import { fileURLToPath } from "node:url";
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url));
+const REPO_ROOT = path.resolve(__dirname, "..");
+
+// Endpoints + DTOs the C# daemon consumes. Add to this list when
+// ``CatalogClient.cs`` grows a new dependency.
+const REQUIRED_PATHS = [
+ "/healthz",
+ "/api/v1/catalog",
+ "/api/v1/catalog/{app_name}",
+ "/api/v1/sync/config",
+ "/api/v1/sync/config/{device_id}",
+];
+
+// Wire-format fields on the auth / config-sync DTOs the daemon
+// consumes. Values are snake_case because ``CatalogClient.cs``
+// serializes with ``JsonNamingPolicy.SnakeCaseLower``.
+const REQUIRED_FIELDS_BY_SCHEMA = {
+ TokenResponse: ["token", "email"],
+ ConfigSyncEntry: ["device_id", "updated_at", "payload"],
+ ConfigSyncUploadRequest: ["device_id", "payload"],
+};
+
+function fail(msg) {
+ console.error(`[drift-check] ${msg}`);
+ process.exitCode = 1;
+}
+
+function ok(msg) {
+ console.log(`[drift-check] ${msg}`);
+}
+
+async function loadSpec(args) {
+ const explicit = args.indexOf("--spec");
+ if (explicit !== -1 && args[explicit + 1]) {
+ const file = args[explicit + 1];
+ ok(`Loading spec from ${file}`);
+ return JSON.parse(fs.readFileSync(file, "utf-8"));
+ }
+ const version = process.env.CATALOG_API_VERSION;
+ if (!version) {
+ fail("CATALOG_API_VERSION not set (and no --spec passed).");
+ fail("Either export CATALOG_API_VERSION=x.y.z or pass --spec path.");
+ process.exit(2);
+ }
+ const tag = version.startsWith("v") ? version : `v${version}`;
+ const url =
+ `https://github.com/nks-hub/wdc-catalog-api/releases/download/` +
+ `${tag}/openapi.json`;
+ ok(`Fetching spec: ${url}`);
+ const res = await fetch(url);
+ if (!res.ok) {
+ fail(`Failed to fetch ${url}: ${res.status} ${res.statusText}`);
+ process.exit(2);
+ }
+ return await res.json();
+}
+
+function checkPaths(spec) {
+ const present = spec.paths ?? {};
+ for (const p of REQUIRED_PATHS) {
+ if (!(p in present)) {
+ fail(`Missing required path: ${p}`);
+ } else {
+ ok(`Path present: ${p}`);
+ }
+ }
+}
+
+function checkSchemas(spec) {
+ const schemas = spec.components?.schemas ?? {};
+ for (const [name, fields] of Object.entries(REQUIRED_FIELDS_BY_SCHEMA)) {
+ const schema = schemas[name];
+ if (!schema) {
+ fail(`Missing schema: ${name}`);
+ continue;
+ }
+ const props = schema.properties ?? {};
+ for (const field of fields) {
+ if (!(field in props)) {
+ fail(`Schema ${name} is missing field '${field}'`);
+ }
+ }
+      ok(`Schema ${name} checked for fields [${fields.join(", ")}]`);
+ }
+}
+
+function checkCSharpStillCompiles() {
+ // Light sanity check: the C# client file still exists and mentions
+ // the critical DTO class names. Full type-level check needs dotnet
+ // build, which the CI's downstream job already runs.
+ const clientPath = path.join(
+ REPO_ROOT,
+ "src/daemon/NKS.WebDevConsole.Daemon/Binaries/CatalogClient.cs",
+ );
+ if (!fs.existsSync(clientPath)) {
+ fail(`Missing CatalogClient.cs at ${clientPath}`);
+ return;
+ }
+ const source = fs.readFileSync(clientPath, "utf-8");
+ for (const token of ["CatalogDocument", "ReleaseDoc", "DownloadDoc"]) {
+ if (!source.includes(token)) {
+ fail(`CatalogClient.cs no longer references '${token}'`);
+ }
+ }
+ ok("CatalogClient.cs references CatalogDocument/ReleaseDoc/DownloadDoc");
+}
+
+const args = process.argv.slice(2);
+const spec = await loadSpec(args);
+checkPaths(spec);
+checkSchemas(spec);
+checkCSharpStillCompiles();
+
+if (process.exitCode) {
+ console.error("[drift-check] FAILED — catalog-api contract has drifted.");
+ process.exit(1);
+}
+ok("All contract checks passed.");
diff --git a/services/catalog-api-MOVED.md b/services/catalog-api-MOVED.md
new file mode 100644
index 00000000..1fd2f312
--- /dev/null
+++ b/services/catalog-api-MOVED.md
@@ -0,0 +1,34 @@
+# catalog-api has moved
+
+This service was extracted into its own public repository:
+
+**https://github.com/nks-hub/wdc-catalog-api**
+
+## Why
+
+- Zero code coupling with the rest of the monorepo — only a JSON wire contract
+- Separate tech stack (Python/FastAPI/Docker vs C#/.NET/Electron)
+- Independent release cadence — catalog scraping iterates weekly, desktop app monthly
+- Dedicated CI pipeline (ruff + mypy + pytest + pip-audit) instead of cross-stack overhead
+- Cleaner contributor surface — backend contributors don't need to clone .NET + Electron
+
+## Contract
+
+The desktop daemon consumes the public JSON at `https://wdc.nks-hub.cz/api/v1/catalog`.
+The new repo publishes `openapi.json` as a GitHub Release asset on every
+tagged version — this repo's CI pins a catalog schema version via
+`CATALOG_API_VERSION` and validates the spec's shape, failing the build
+if the hand-maintained `CatalogClient.cs` drifts from the contract.
+
+## History
+
+The pre-split monorepo state is preserved under tag
+[`catalog-api-pre-split`](https://github.com/nks-hub/webdev-console/releases/tag/catalog-api-pre-split).
+The new repo was created via `git filter-repo --subdirectory-filter services/catalog-api`
+so all 700+ commits retain their original authors and dates.
+
+## Deployment
+
+The public instance `wdc.nks-hub.cz` is deployed from
+`ghcr.io/nks-hub/wdc-catalog-api:latest`. See the new repo's README for
+operational runbooks.
diff --git a/services/catalog-api/.dockerignore b/services/catalog-api/.dockerignore
deleted file mode 100644
index 5d616798..00000000
--- a/services/catalog-api/.dockerignore
+++ /dev/null
@@ -1,13 +0,0 @@
-__pycache__
-*.pyc
-*.pyo
-.venv
-venv
-.pytest_cache
-state
-.tox
-.mypy_cache
-.ruff_cache
-build
-dist
-*.egg-info
diff --git a/services/catalog-api/.gitignore b/services/catalog-api/.gitignore
deleted file mode 100644
index 33123779..00000000
--- a/services/catalog-api/.gitignore
+++ /dev/null
@@ -1,12 +0,0 @@
-.venv/
-venv/
-__pycache__/
-*.pyc
-*.pyo
-state/
-.pytest_cache/
-.ruff_cache/
-.mypy_cache/
-build/
-dist/
-*.egg-info/
diff --git a/services/catalog-api/Dockerfile b/services/catalog-api/Dockerfile
deleted file mode 100644
index 0e007d56..00000000
--- a/services/catalog-api/Dockerfile
+++ /dev/null
@@ -1,38 +0,0 @@
-# ─────────────────────────────────────────────────────────────────────────
-# NKS WDC Catalog API — minimal FastAPI container
-#
-# Build:
-# docker build -t nks-wdc-catalog-api services/catalog-api
-# Run:
-# docker run -p 8765:8765 -v nks-wdc-catalog-state:/state \
-# -e NKS_WDC_CATALOG_STATE_DIR=/state nks-wdc-catalog-api
-# ─────────────────────────────────────────────────────────────────────────
-FROM python:3.12-slim AS base
-
-ENV PYTHONDONTWRITEBYTECODE=1 \
- PYTHONUNBUFFERED=1 \
- PIP_DISABLE_PIP_VERSION_CHECK=1 \
- PIP_NO_CACHE_DIR=1
-
-WORKDIR /srv/app
-
-# Install dependencies first so the requirements layer stays cacheable
-# across app code edits — only re-runs when requirements.txt changes.
-COPY requirements.txt .
-RUN pip install --no-cache-dir -r requirements.txt
-
-# Copy the package + seed catalog data. State is volume-mounted so
-# config-sync uploads persist across container restarts.
-COPY app ./app
-
-# State dir for config-sync JSON files (override via env when mounting
-# an external volume in production).
-ENV NKS_WDC_CATALOG_STATE_DIR=/state
-RUN mkdir -p /state
-
-EXPOSE 8765
-
-HEALTHCHECK --interval=30s --timeout=5s --start-period=10s \
- CMD python -c "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8765/healthz').read()" || exit 1
-
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8765"]
diff --git a/services/catalog-api/README.md b/services/catalog-api/README.md
deleted file mode 100644
index 08042132..00000000
--- a/services/catalog-api/README.md
+++ /dev/null
@@ -1,150 +0,0 @@
-# NKS WDC Catalog API
-
-Cloud-hosted binary catalog + per-device config sync for **NKS WebDev
-Console**. The C# daemon pulls release metadata (Apache, PHP, MySQL,
-Redis, Caddy, cloudflared, …) from this service and backs up local
-site/service configuration so a fresh install hydrates from the last
-known good snapshot.
-
-## Features
-
-- **Public JSON catalog** — `/api/v1/catalog` served in the exact shape
- `CatalogClient.cs` expects. Drop in a URL and the daemon refreshes
- on startup.
-- **Admin UI** with bcrypt-hashed session login at `/login` → `/admin`.
-- **URL auto-generators** — one click scrapes the upstream release
- listing (GitHub Releases API for cloudflared / caddy / mailpit /
- redis-windows, HTML listings for php.net / apachelounge / nginx.org)
- and inserts new releases into SQLite.
-- **Config sync** — per-device JSON upload/download keyed by device ID
- for seamless re-install.
-- **SQLite by default**, swappable to Postgres via `DATABASE_URL`.
-- **Docker-ready** with `Dockerfile` + `docker-compose.yml`.
-
-## Quickstart (local)
-
-```cmd
-run.cmd
-```
-
-That creates a venv in `.venv/`, installs dependencies, and starts
-uvicorn on `http://127.0.0.1:8765` with hot-reload. First run bootstraps
-an `admin` / `admin` account (dev mode — env var
-`NKS_WDC_CATALOG_DEV=1` is set by the script).
-
-POSIX / macOS:
-
-```bash
-python3 -m venv .venv
-source .venv/bin/activate
-pip install -r requirements.txt
-NKS_WDC_CATALOG_DEV=1 uvicorn app.main:app --host 127.0.0.1 --port 8765
-```
-
-## Quickstart (Docker)
-
-```bash
-cd services/catalog-api
-docker compose up -d
-```
-
-Service listens on `http://localhost:8765`. Catalog data mounts from
-`./app/data/apps` so you can edit JSONs and `POST /api/v1/catalog/reload`
-without rebuilding.
-
-## Pointing NKS WDC at this service
-
-In NKS WebDev Console → Settings → Advanced:
-
-- **Catalog URL**: `http://127.0.0.1:8765` (local)
- or `https://catalog.wdc.nks-hub.cz` (when deployed)
-
-Or via env var when launching the daemon:
-
-```
-NKS_WDC_CATALOG_URL=http://127.0.0.1:8765
-```
-
-The daemon's `CatalogClient` fetches `/api/v1/catalog` on startup and
-caches the full release list in memory for the session. Use
-`POST /api/binaries/catalog/refresh` (authenticated) to pull a newer
-version without restarting the daemon.
-
-## Environment variables
-
-| Variable | Default | Description |
-| --- | --- | --- |
-| `DATABASE_URL` | `sqlite:///state/catalog.db` | SQLAlchemy connection string |
-| `NKS_WDC_CATALOG_STATE_DIR` | `./state` | Where SQLite + uploads live |
-| `NKS_WDC_CATALOG_ADMIN_USER` | `admin` | Bootstrap admin username |
-| `NKS_WDC_CATALOG_ADMIN_PASS` | — (dev: `admin`) | Bootstrap admin password |
-| `NKS_WDC_CATALOG_DEV` | — | `1` enables `admin`/`admin` fallback + verbose logs |
-| `NKS_WDC_CATALOG_SECRET` | dev fallback | `itsdangerous` signer key for session cookies |
-| `NKS_WDC_CATALOG_ALLOW_CORS` | — | `1` emits permissive CORS headers |
-
-## API
-
-### Catalog (public)
-
-```
-GET /healthz
-GET /api/v1/catalog
-GET /api/v1/catalog/{app_name}
-```
-
-### Config sync (public, runs behind reverse-proxy auth in prod)
-
-```
-POST /api/v1/sync/config body: { device_id, payload }
-GET /api/v1/sync/config/{device_id}
-GET /api/v1/sync/config/{device_id}/exists
-DELETE /api/v1/sync/config/{device_id}
-```
-
-### Admin UI (session cookie auth)
-
-```
-GET /login
-POST /login
-POST /logout
-GET /admin list all apps
-GET /admin/new new-app form
-POST /admin/new
-GET /admin/apps/{app_id} app + releases
-GET /admin/apps/{app_id}/edit edit form
-POST /admin/apps/{app_id}/edit
-POST /admin/apps/{app_id}/delete
-POST /admin/apps/{app_id}/releases add release (manual)
-POST /admin/apps/{app_id}/auto-generate scrape upstream + insert
-POST /admin/releases/{id}/delete
-POST /admin/releases/{id}/downloads add download URL
-POST /admin/downloads/{id}/delete
-```
-
-## Supported auto-generators
-
-| App | Source |
-| --- | --- |
-| cloudflared | `github.com/cloudflare/cloudflared` releases |
-| mailpit | `github.com/axllent/mailpit` releases |
-| caddy | `github.com/caddyserver/caddy` releases |
-| redis | `github.com/redis-windows/redis-windows` releases |
-| php | `windows.php.net/downloads/releases/` HTML listing |
-| apache | `www.apachelounge.com/download/` HTML listing |
-| nginx | `nginx.org/en/download.html` HTML listing |
-
-MySQL / MariaDB generators are TODO (their download pages gate by
-session cookies so scraping is fragile — for now use the seed JSON
-`app/data/apps/{mysql,mariadb}.json` which ships with known versions).
-
-## Deployment
-
-For production:
-
-1. Set `NKS_WDC_CATALOG_ADMIN_PASS` and `NKS_WDC_CATALOG_SECRET` (random 32+ chars).
-2. Run behind a TLS-terminating reverse proxy (Caddy, Traefik, nginx).
-3. Mount `/state` as a persistent volume so the SQLite DB and config
- snapshots survive container restarts.
-4. Restrict `/api/v1/sync/config*` behind an API key / Cloudflare
- Access header unless you want every device on the internet to
- write to your store.
diff --git a/services/catalog-api/app/__init__.py b/services/catalog-api/app/__init__.py
deleted file mode 100644
index d314a814..00000000
--- a/services/catalog-api/app/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-"""NKS WDC cloud catalog + config sync FastAPI service."""
-
-__version__ = "0.1.0"
diff --git a/services/catalog-api/app/auth.py b/services/catalog-api/app/auth.py
deleted file mode 100644
index cf64bf28..00000000
--- a/services/catalog-api/app/auth.py
+++ /dev/null
@@ -1,126 +0,0 @@
-"""Session-cookie auth for the admin UI.
-
-Password hashing uses bcrypt (purpose-built for this, 12 rounds default).
-Session identification uses itsdangerous signed cookies — no server-side
-session store required because all state fits into the username.
-
-The admin account is bootstrapped from two env vars at startup:
- NKS_WDC_CATALOG_ADMIN_USER (default: "admin")
- NKS_WDC_CATALOG_ADMIN_PASS (required — service refuses to start
- if unset in non-dev mode)
-
-Dev mode: when `NKS_WDC_CATALOG_DEV=1` a fallback password "admin" is
-used so `run.cmd` boots without friction. NEVER set that flag in prod.
-"""
-
-from __future__ import annotations
-
-import logging
-import os
-import secrets
-from typing import Annotated
-
-import bcrypt
-from fastapi import Cookie, Depends, HTTPException, status
-from itsdangerous import BadSignature, TimestampSigner
-from sqlalchemy import select
-from sqlalchemy.orm import Session
-
-from .db import User, session_factory
-
-log = logging.getLogger(__name__)
-
-SESSION_COOKIE = "nks_wdc_catalog_session"
-SESSION_MAX_AGE = 60 * 60 * 24 * 7 # 1 week
-
-
-def _secret_key() -> str:
- env = os.environ.get("NKS_WDC_CATALOG_SECRET")
- if env:
- return env
- # Dev fallback — persist so restarts don't log everyone out.
- return "dev-only-secret-change-me-in-production-32-chars"
-
-
-_signer = TimestampSigner(_secret_key())
-
-
-def hash_password(plain: str) -> str:
- return bcrypt.hashpw(plain.encode("utf-8"), bcrypt.gensalt(rounds=12)).decode("ascii")
-
-
-def verify_password(plain: str, hashed: str) -> bool:
- try:
- return bcrypt.checkpw(plain.encode("utf-8"), hashed.encode("ascii"))
- except ValueError:
- return False
-
-
-def issue_session(username: str) -> str:
- return _signer.sign(username.encode("utf-8")).decode("ascii")
-
-
-def read_session(cookie_value: str | None) -> str | None:
- if not cookie_value:
- return None
- try:
- raw = _signer.unsign(cookie_value.encode("ascii"), max_age=SESSION_MAX_AGE)
- return raw.decode("utf-8")
- except BadSignature:
- return None
-
-
-def ensure_admin_user() -> None:
- """Bootstrap a single admin account on first run.
-
- Subsequent runs are no-ops. If the user exists but the password env
- var was changed, we do NOT overwrite the hash — admins should rotate
- explicitly via the UI instead of env var games.
- """
- username = os.environ.get("NKS_WDC_CATALOG_ADMIN_USER", "admin")
- password = os.environ.get("NKS_WDC_CATALOG_ADMIN_PASS")
-
- if not password:
- if os.environ.get("NKS_WDC_CATALOG_DEV") == "1":
- password = "admin"
- log.warning("NKS_WDC_CATALOG_DEV=1 → using fallback admin/admin credentials")
- else:
- log.warning(
- "NKS_WDC_CATALOG_ADMIN_PASS not set — admin UI will accept "
- "no logins. Set the env var or NKS_WDC_CATALOG_DEV=1 for dev."
- )
- return
-
- with session_factory() as db:
- existing = db.scalar(select(User).where(User.username == username))
- if existing is None:
- db.add(User(username=username, password_hash=hash_password(password)))
- db.commit()
- log.info("Bootstrap admin user created: %s", username)
-
-
-# ── FastAPI dependency ─────────────────────────────────────────────────
-
-def current_user(
- session_cookie: Annotated[str | None, Cookie(alias=SESSION_COOKIE)] = None,
-) -> str:
- username = read_session(session_cookie)
- if username is None:
- raise HTTPException(
- status_code=status.HTTP_302_FOUND,
- detail="Not authenticated",
- headers={"Location": "/login"},
- )
- return username
-
-
-def optional_user(
- session_cookie: Annotated[str | None, Cookie(alias=SESSION_COOKIE)] = None,
-) -> str | None:
- return read_session(session_cookie)
-
-
-# ── Token-free random helper for CSRF etc. ─────────────────────────────
-
-def random_token(nbytes: int = 24) -> str:
- return secrets.token_urlsafe(nbytes)
diff --git a/services/catalog-api/app/catalog.py b/services/catalog-api/app/catalog.py
deleted file mode 100644
index 51488617..00000000
--- a/services/catalog-api/app/catalog.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""In-memory catalog loader backed by a directory of per-app JSON files.
-
-Each file under `app/data/apps/*.json` is parsed into an `AppDoc`. The
-filename (without extension) is the canonical app id and doubles as the
-dictionary key in the served `CatalogDocument`. No database — editing a
-JSON file and reloading the process is all that's needed to publish a
-new release, which keeps this service deploy-friendly on tiny VPS boxes.
-
-Thread-safety: the loader uses a single mutable dict under a re-entrant
-lock. `reload()` swaps the dict atomically after parsing so readers
-either see the old catalog or the new one, never a partial merge.
-"""
-
-from __future__ import annotations
-
-import json
-import logging
-import threading
-from datetime import datetime, timezone
-from pathlib import Path
-
-from .schemas import AppDoc, CatalogDocument
-
-log = logging.getLogger(__name__)
-
-
-class CatalogStore:
- def __init__(self, data_dir: Path) -> None:
- self._data_dir = data_dir
- self._lock = threading.RLock()
- self._doc = CatalogDocument(schema_version="1", apps={})
- self._last_loaded: datetime | None = None
-
- @property
- def last_loaded(self) -> datetime | None:
- return self._last_loaded
-
- @property
- def data_dir(self) -> Path:
- return self._data_dir
-
- def reload(self) -> int:
- """Rescan `data/apps/*.json` and replace the in-memory catalog.
-
- Returns the number of apps successfully loaded. Bad files are
- logged and skipped — one broken JSON never kills the whole
- catalog, so a typo in one file doesn't take the service down.
- """
- apps: dict[str, AppDoc] = {}
- if not self._data_dir.is_dir():
- log.warning("Catalog data dir does not exist: %s", self._data_dir)
- with self._lock:
- self._doc = CatalogDocument(schema_version="1", apps=apps)
- self._last_loaded = datetime.now(timezone.utc)
- return 0
-
- for path in sorted(self._data_dir.glob("*.json")):
- try:
- raw = json.loads(path.read_text(encoding="utf-8"))
- app = AppDoc.model_validate(raw)
- if not app.name:
- app.name = path.stem
- apps[app.name] = app
- except Exception as exc: # noqa: BLE001 — log + continue
- log.error("Failed to parse %s: %s", path, exc)
-
- with self._lock:
- self._doc = CatalogDocument(
- schema_version="1",
- generated_at=datetime.now(timezone.utc).isoformat(),
- apps=apps,
- )
- self._last_loaded = datetime.now(timezone.utc)
- log.info("Catalog reloaded: %d apps from %s", len(apps), self._data_dir)
- return len(apps)
-
- def document(self) -> CatalogDocument:
- with self._lock:
- return self._doc
-
- def app(self, name: str) -> AppDoc | None:
- with self._lock:
- return self._doc.apps.get(name.lower()) or self._doc.apps.get(name)
diff --git a/services/catalog-api/app/data/apps/apache.json b/services/catalog-api/app/data/apps/apache.json
deleted file mode 100644
index 75f42d96..00000000
--- a/services/catalog-api/app/data/apps/apache.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
- "name": "apache",
- "display_name": "Apache HTTP Server",
- "category": "webserver",
- "description": "Battle-tested open-source HTTP server. The NKS WDC daemon uses httpd.exe (Windows) with mod_fcgid for PHP routing.",
- "homepage": "https://httpd.apache.org",
- "license": "Apache-2.0",
- "releases": [
- {
- "version": "2.4.66",
- "major_minor": "2.4",
- "channel": "stable",
- "downloads": [
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-apache-2.4.66/httpd-2.4.66-windows-x64.zip",
- "os": "windows", "arch": "x64", "archive_type": "zip", "source": "nks-hub" },
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-apache-2.4.66/httpd-2.4.66-linux-x64.tar.xz",
- "os": "linux", "arch": "x64", "archive_type": "tar.xz", "source": "nks-hub" },
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-apache-2.4.66/httpd-2.4.66-macos-arm64.tar.xz",
- "os": "macos", "arch": "arm64", "archive_type": "tar.xz", "source": "nks-hub" }
- ]
- },
- {
- "version": "2.4.65",
- "major_minor": "2.4",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://www.apachelounge.com/download/VS18/binaries/httpd-2.4.65-250401-win64-VS18.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "apachelounge",
- "headers": { "User-Agent": "NKS-WebDevConsole/1.0 (compatible)" }
- }
- ]
- },
- {
- "version": "2.4.62",
- "major_minor": "2.4",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://www.apachelounge.com/download/VS17/binaries/httpd-2.4.62-240718-win64-VS17.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "apachelounge",
- "headers": { "User-Agent": "NKS-WebDevConsole/1.0 (compatible)" }
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/caddy.json b/services/catalog-api/app/data/apps/caddy.json
deleted file mode 100644
index a6cbf10c..00000000
--- a/services/catalog-api/app/data/apps/caddy.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "caddy",
- "display_name": "Caddy",
- "category": "webserver",
- "description": "Modern HTTP/2+3 web server written in Go with automatic HTTPS via Let's Encrypt. A lightweight alternative to Apache.",
- "homepage": "https://caddyserver.com",
- "license": "Apache-2.0",
- "releases": [
- {
- "version": "2.10.2",
- "major_minor": "2.10",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-caddy-2.10.2/caddy-2.10.2-windows-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-caddy-2.10.2/caddy-2.10.2-linux-x64.tar.gz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.gz",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-caddy-2.10.2/caddy-2.10.2-macos-arm64.tar.gz",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "tar.gz",
- "source": "nks-hub-binaries"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/cloudflared.json b/services/catalog-api/app/data/apps/cloudflared.json
deleted file mode 100644
index 6d39854f..00000000
--- a/services/catalog-api/app/data/apps/cloudflared.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
- "name": "cloudflared",
- "display_name": "Cloudflare Tunnel Connector",
- "category": "tools",
- "description": "cloudflared runs a Cloudflare Tunnel on a local machine, exposing origin services to the Cloudflare edge network without inbound firewall rules.",
- "homepage": "https://github.com/cloudflare/cloudflared",
- "license": "Apache-2.0",
- "releases": [
- {
- "version": "2026.3.0",
- "major_minor": "2026.3",
- "channel": "stable",
- "released_at": "2026-03-05",
- "downloads": [
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-cloudflared-2026.3.0/cloudflared-2026.3.0-windows-x64.exe",
- "os": "windows",
- "arch": "x64",
- "archive_type": "exe",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-cloudflared-2026.3.0/cloudflared-2026.3.0-linux-x64",
- "os": "linux",
- "arch": "x64",
- "archive_type": "binary",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-cloudflared-2026.3.0/cloudflared-2026.3.0-linux-arm64",
- "os": "linux",
- "arch": "arm64",
- "archive_type": "binary",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-cloudflared-2026.3.0/cloudflared-2026.3.0-macos-x64",
- "os": "macos",
- "arch": "x64",
- "archive_type": "binary",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-cloudflared-2026.3.0/cloudflared-2026.3.0-macos-arm64",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "binary",
- "source": "nks-hub-binaries"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/mailpit.json b/services/catalog-api/app/data/apps/mailpit.json
deleted file mode 100644
index e97d6642..00000000
--- a/services/catalog-api/app/data/apps/mailpit.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "mailpit",
- "display_name": "Mailpit",
- "category": "mail",
- "description": "Email testing tool that catches SMTP from local apps and serves a web UI to inspect every message. Replaces MailHog.",
- "homepage": "https://mailpit.axllent.org",
- "license": "MIT",
- "releases": [
- {
- "version": "1.29.6",
- "major_minor": "1.29",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-mailpit-1.29.6/mailpit-1.29.6-windows-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-mailpit-1.29.6/mailpit-1.29.6-linux-x64.tar.gz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.gz",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-mailpit-1.29.6/mailpit-1.29.6-macos-arm64.tar.gz",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "tar.gz",
- "source": "nks-hub-binaries"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/mariadb.json b/services/catalog-api/app/data/apps/mariadb.json
deleted file mode 100644
index d96be439..00000000
--- a/services/catalog-api/app/data/apps/mariadb.json
+++ /dev/null
@@ -1,49 +0,0 @@
-{
- "name": "mariadb",
- "display_name": "MariaDB",
- "category": "database",
- "description": "Community-driven MySQL fork with a drop-in compatible wire protocol. Uses the same mysqld.exe-style binary layout.",
- "homepage": "https://mariadb.org",
- "license": "GPL-2.0",
- "releases": [
- {
- "version": "11.4.4",
- "major_minor": "11.4",
- "channel": "lts",
- "downloads": [
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-mariadb-11.4.4/mariadb-11.4.4-windows-x64.msi",
- "os": "windows", "arch": "x64", "archive_type": "msi", "source": "nks-hub" },
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-mariadb-11.4.4/mariadb-11.4.4-linux-x64.tar.gz",
- "os": "linux", "arch": "x64", "archive_type": "tar.gz", "source": "nks-hub" }
- ]
- },
- {
- "version": "12.3.1",
- "major_minor": "12.3",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://archive.mariadb.org/mariadb-12.3.1/winx64-packages/mariadb-12.3.1-winx64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "mariadb.org"
- }
- ]
- },
- {
- "version": "11.8.3",
- "major_minor": "11.8",
- "channel": "lts",
- "downloads": [
- {
- "url": "https://archive.mariadb.org/mariadb-11.8.3/winx64-packages/mariadb-11.8.3-winx64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "mariadb.org"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/mkcert.json b/services/catalog-api/app/data/apps/mkcert.json
deleted file mode 100644
index 948cd5d2..00000000
--- a/services/catalog-api/app/data/apps/mkcert.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
- "name": "mkcert",
- "display_name": "mkcert",
- "category": "tools",
- "description": "Zero-config local CA for generating trusted development certificates. WDC uses it to issue HTTPS certs for local .loc vhosts.",
- "homepage": "https://github.com/FiloSottile/mkcert",
- "license": "MIT",
- "releases": [
- {
- "version": "1.4.4",
- "major_minor": "1.4",
- "channel": "stable",
- "released_at": "2022-07-14",
- "downloads": [
- {
- "url": "https://github.com/FiloSottile/mkcert/releases/download/v1.4.4/mkcert-v1.4.4-windows-amd64.exe",
- "os": "windows",
- "arch": "x64",
- "archive_type": "exe",
- "source": "github"
- },
- {
- "url": "https://github.com/FiloSottile/mkcert/releases/download/v1.4.4/mkcert-v1.4.4-linux-amd64",
- "os": "linux",
- "arch": "x64",
- "archive_type": "bin",
- "source": "github"
- },
- {
- "url": "https://github.com/FiloSottile/mkcert/releases/download/v1.4.4/mkcert-v1.4.4-linux-arm64",
- "os": "linux",
- "arch": "arm64",
- "archive_type": "bin",
- "source": "github"
- },
- {
- "url": "https://github.com/FiloSottile/mkcert/releases/download/v1.4.4/mkcert-v1.4.4-darwin-amd64",
- "os": "macos",
- "arch": "x64",
- "archive_type": "bin",
- "source": "github"
- },
- {
- "url": "https://github.com/FiloSottile/mkcert/releases/download/v1.4.4/mkcert-v1.4.4-darwin-arm64",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "bin",
- "source": "github"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/mysql.json b/services/catalog-api/app/data/apps/mysql.json
deleted file mode 100644
index a8f6c9e2..00000000
--- a/services/catalog-api/app/data/apps/mysql.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "mysql",
- "display_name": "MySQL",
- "category": "database",
- "description": "Oracle MySQL community edition. WDC uses mysqld.exe with a generated my.ini for local development databases.",
- "homepage": "https://dev.mysql.com/downloads/mysql/",
- "license": "GPL-2.0",
- "releases": [
- {
- "version": "8.4.8",
- "major_minor": "8.4",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://dev.mysql.com/get/Downloads/MySQL-8.4/mysql-8.4.8-winx64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "mysql.com"
- }
- ]
- },
- {
- "version": "8.0.43",
- "major_minor": "8.0",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://dev.mysql.com/get/Downloads/MySQL-8.0/mysql-8.0.43-winx64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "mysql.com"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/nginx.json b/services/catalog-api/app/data/apps/nginx.json
deleted file mode 100644
index 053345b1..00000000
--- a/services/catalog-api/app/data/apps/nginx.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "name": "nginx",
- "display_name": "Nginx",
- "category": "webserver",
- "description": "High-performance reverse proxy and HTTP server. Not wired into WDC by default but available for custom plugins.",
- "homepage": "https://nginx.org",
- "license": "BSD-2-Clause",
- "releases": [
- {
- "version": "1.27.3",
- "major_minor": "1.27",
- "channel": "stable",
- "downloads": [
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-nginx-1.27.3/nginx-1.27.3-windows-x64.zip",
- "os": "windows", "arch": "x64", "archive_type": "zip", "source": "nks-hub" },
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-nginx-1.27.3/nginx-1.27.3-linux-x64.tar.xz",
- "os": "linux", "arch": "x64", "archive_type": "tar.xz", "source": "nks-hub" },
- { "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-nginx-1.27.3/nginx-1.27.3-macos-arm64.tar.xz",
- "os": "macos", "arch": "arm64", "archive_type": "tar.xz", "source": "nks-hub" }
- ]
- },
- {
- "version": "1.29.2",
- "major_minor": "1.29",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://nginx.org/download/nginx-1.29.2.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "nginx.org"
- }
- ]
- },
- {
- "version": "1.28.1",
- "major_minor": "1.28",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://nginx.org/download/nginx-1.28.1.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "nginx.org"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/php.json b/services/catalog-api/app/data/apps/php.json
deleted file mode 100644
index 7545ea53..00000000
--- a/services/catalog-api/app/data/apps/php.json
+++ /dev/null
@@ -1,283 +0,0 @@
-{
- "name": "php",
- "display_name": "PHP",
- "category": "language",
- "description": "PHP interpreter. WDC installs each major.minor side-by-side so sites can pick per-host versions at runtime. Linux/macOS binaries are built from official php.net source by NKS WDC CI; Windows binaries are repackaged from windows.php.net upstream.",
- "homepage": "https://www.php.net",
- "license": "PHP-3.01",
- "releases": [
- {
- "version": "8.5.5",
- "major_minor": "8.5",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/php-8.5.5-nts-Win32-vs17-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.5.5/php-8.5.5-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.5.5/php-8.5.5-macos-arm64.tar.xz",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "8.4.20",
- "major_minor": "8.4",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/php-8.4.20-nts-Win32-vs17-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.4.20/php-8.4.20-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.4.20/php-8.4.20-macos-arm64.tar.xz",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "8.3.25",
- "major_minor": "8.3",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/php-8.3.25-nts-Win32-vs16-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.3.25/php-8.3.25-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.3.25/php-8.3.25-macos-arm64.tar.xz",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "8.2.30",
- "major_minor": "8.2",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/php-8.2.30-nts-Win32-vs16-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.2.30/php-8.2.30-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.2.30/php-8.2.30-macos-arm64.tar.xz",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "8.1.33",
- "major_minor": "8.1",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-8.1.33-nts-Win32-vs16-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.1.33/php-8.1.33-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.1.33/php-8.1.33-macos-arm64.tar.xz",
- "os": "macos",
- "arch": "arm64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "8.0.30",
- "major_minor": "8.0",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-8.0.30-nts-Win32-vs16-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-8.0.30/php-8.0.30-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "7.4.33",
- "major_minor": "7.4",
- "channel": "eol",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-7.4.33-nts-Win32-vc15-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-7.4.33/php-7.4.33-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "7.3.33",
- "major_minor": "7.3",
- "channel": "eol",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-7.3.33-nts-Win32-VC15-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-7.3.33/php-7.3.33-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "7.2.34",
- "major_minor": "7.2",
- "channel": "eol",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-7.2.34-nts-Win32-VC15-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-7.2.34/php-7.2.34-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "7.1.33",
- "major_minor": "7.1",
- "channel": "eol",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-7.1.33-nts-Win32-VC14-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- },
- {
- "url": "https://github.com/nks-hub/webdev-console-binaries/releases/download/binaries-php-7.1.33/php-7.1.33-linux-x64.tar.xz",
- "os": "linux",
- "arch": "x64",
- "archive_type": "tar.xz",
- "source": "nks-hub-binaries"
- }
- ]
- },
- {
- "version": "7.0.33",
- "major_minor": "7.0",
- "channel": "eol",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-7.0.33-nts-Win32-VC14-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- }
- ]
- },
- {
- "version": "5.6.40",
- "major_minor": "5.6",
- "channel": "eol",
- "downloads": [
- {
- "url": "https://windows.php.net/downloads/releases/archives/php-5.6.40-nts-Win32-VC11-x64.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "php.net"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/data/apps/redis.json b/services/catalog-api/app/data/apps/redis.json
deleted file mode 100644
index 6190463d..00000000
--- a/services/catalog-api/app/data/apps/redis.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "redis",
- "display_name": "Redis",
- "category": "cache",
- "description": "In-memory key-value store used for caching, queues, and pub/sub.",
- "homepage": "https://redis.io",
- "license": "BSD-3-Clause",
- "releases": [
- {
- "version": "8.2.2",
- "major_minor": "8.2",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://github.com/redis-windows/redis-windows/releases/download/8.2.2/Redis-8.2.2-Windows-x64-msys2.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "github/redis-windows"
- }
- ]
- },
- {
- "version": "7.4.2",
- "major_minor": "7.4",
- "channel": "stable",
- "downloads": [
- {
- "url": "https://github.com/redis-windows/redis-windows/releases/download/7.4.2/Redis-7.4.2-Windows-x64-msys2.zip",
- "os": "windows",
- "arch": "x64",
- "archive_type": "zip",
- "source": "github/redis-windows"
- }
- ]
- }
- ]
-}
diff --git a/services/catalog-api/app/db.py b/services/catalog-api/app/db.py
deleted file mode 100644
index c2ef6831..00000000
--- a/services/catalog-api/app/db.py
+++ /dev/null
@@ -1,208 +0,0 @@
-"""SQLAlchemy setup + ORM models for the catalog API.
-
-Schema:
- apps — one row per application (id = canonical name)
- releases — versioned release per app
- downloads — per-platform download URL for a release
- users — admin users with bcrypt-hashed passwords
- config_sync — per-device config snapshots
-
-SQLite is the default backend (`catalog.db` under the state dir). Switch to
-Postgres via `DATABASE_URL=postgresql+psycopg://...` when the install
-outgrows a single file. The ORM layer and queries are DB-agnostic so the
-migration is a one-line change.
-"""
-
-from __future__ import annotations
-
-import os
-from datetime import datetime, timezone
-from pathlib import Path
-from typing import Iterator
-
-from sqlalchemy import (
- JSON,
- Column,
- DateTime,
- ForeignKey,
- Integer,
- String,
- UniqueConstraint,
- create_engine,
-)
-from sqlalchemy.orm import (
- DeclarativeBase,
- Mapped,
- mapped_column,
- relationship,
- Session,
- sessionmaker,
-)
-
-
-def _database_url() -> str:
- env = os.environ.get("DATABASE_URL")
- if env:
- return env
- state_dir = Path(
- os.environ.get("NKS_WDC_CATALOG_STATE_DIR")
- or (Path(__file__).parent.parent / "state")
- ).resolve()
- state_dir.mkdir(parents=True, exist_ok=True)
- return f"sqlite:///{state_dir / 'catalog.db'}"
-
-
-_engine = create_engine(
- _database_url(),
- connect_args={"check_same_thread": False} if "sqlite" in _database_url() else {},
- echo=False,
- future=True,
-)
-_SessionLocal = sessionmaker(bind=_engine, autoflush=False, autocommit=False, future=True)
-
-
-class Base(DeclarativeBase):
- pass
-
-
-def _utc_now() -> datetime:
- return datetime.now(timezone.utc)
-
-
-class App(Base):
- __tablename__ = "apps"
-
- id: Mapped[str] = mapped_column(String(64), primary_key=True)
- display_name: Mapped[str] = mapped_column(String(128), default="")
- category: Mapped[str] = mapped_column(String(32), default="other")
- description: Mapped[str] = mapped_column(String(2048), default="")
- homepage: Mapped[str | None] = mapped_column(String(512), nullable=True)
- license: Mapped[str | None] = mapped_column(String(64), nullable=True)
- created_at: Mapped[datetime] = mapped_column(DateTime, default=_utc_now)
- updated_at: Mapped[datetime] = mapped_column(DateTime, default=_utc_now, onupdate=_utc_now)
-
- releases: Mapped[list["Release"]] = relationship(
- "Release",
- back_populates="app",
- cascade="all, delete-orphan",
- order_by="Release.version.desc()",
- )
-
-
-class Release(Base):
- __tablename__ = "releases"
- __table_args__ = (UniqueConstraint("app_id", "version", name="uq_release_version"),)
-
- id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
- app_id: Mapped[str] = mapped_column(ForeignKey("apps.id", ondelete="CASCADE"), index=True)
- version: Mapped[str] = mapped_column(String(64))
- major_minor: Mapped[str] = mapped_column(String(32), default="")
- channel: Mapped[str] = mapped_column(String(32), default="stable")
- released_at: Mapped[str | None] = mapped_column(String(32), nullable=True)
- created_at: Mapped[datetime] = mapped_column(DateTime, default=_utc_now)
-
- app: Mapped[App] = relationship("App", back_populates="releases")
- downloads: Mapped[list["Download"]] = relationship(
- "Download",
- back_populates="release",
- cascade="all, delete-orphan",
- order_by="Download.os, Download.arch",
- )
-
-
-class Download(Base):
- __tablename__ = "downloads"
- __table_args__ = (
- UniqueConstraint(
- "release_id", "os", "arch", "archive_type",
- name="uq_download_platform",
- ),
- )
-
- id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
- release_id: Mapped[int] = mapped_column(
- ForeignKey("releases.id", ondelete="CASCADE"), index=True
- )
- url: Mapped[str] = mapped_column(String(1024))
- os: Mapped[str] = mapped_column(String(16), default="windows")
- arch: Mapped[str] = mapped_column(String(16), default="x64")
- archive_type: Mapped[str] = mapped_column(String(16), default="zip")
- source: Mapped[str] = mapped_column(String(64), default="unknown")
- headers: Mapped[dict | None] = mapped_column(JSON, nullable=True)
- sha256: Mapped[str | None] = mapped_column(String(64), nullable=True)
- size_bytes: Mapped[int | None] = mapped_column(Integer, nullable=True)
-
- release: Mapped[Release] = relationship("Release", back_populates="downloads")
-
-
-class User(Base):
- __tablename__ = "users"
-
- id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
- username: Mapped[str] = mapped_column(String(64), unique=True)
- password_hash: Mapped[str] = mapped_column(String(128))
- created_at: Mapped[datetime] = mapped_column(DateTime, default=_utc_now)
- last_login_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-
-
-class Account(Base):
- __tablename__ = "accounts"
-
- id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
- email: Mapped[str] = mapped_column(String(128), unique=True)
- password_hash: Mapped[str] = mapped_column(String(128))
- created_at: Mapped[datetime] = mapped_column(DateTime, default=_utc_now)
- last_login_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
-
-
-class DeviceConfig(Base):
- __tablename__ = "device_configs"
-
- device_id: Mapped[str] = mapped_column(String(64), primary_key=True)
- user_id: Mapped[int | None] = mapped_column(Integer, ForeignKey("accounts.id", ondelete="SET NULL"), nullable=True, index=True)
- name: Mapped[str | None] = mapped_column(String(128), nullable=True)
- os: Mapped[str | None] = mapped_column(String(16), nullable=True)
- arch: Mapped[str | None] = mapped_column(String(16), nullable=True)
- site_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
- last_seen_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
- updated_at: Mapped[datetime] = mapped_column(DateTime, default=_utc_now, onupdate=_utc_now)
- payload: Mapped[dict] = mapped_column(JSON)
-
-
-# ── Session helper ──────────────────────────────────────────────────────
-
-def create_all() -> None:
- """Idempotent schema creation — run on app startup.
-
- Uses ``checkfirst=True`` (the SQLAlchemy default) so ``CREATE TABLE``
- is skipped when the table already exists. Wrapped in a catch-all
- because some SQLite builds or concurrent-startup races can still
- raise ``OperationalError: table X already exists`` even with
- checkfirst — treating it as benign is the safest recovery since
- the table is already there.
- """
- try:
- Base.metadata.create_all(_engine, checkfirst=True)
- except Exception as exc:
- import logging
- logging.getLogger(__name__).warning(
- "create_all raised (likely tables already exist, continuing): %s", exc
- )
-
-
-def get_session() -> Iterator[Session]:
- """FastAPI dependency that yields a scoped session per request."""
- session: Session = _SessionLocal()
- try:
- yield session
- session.commit()
- except Exception:
- session.rollback()
- raise
- finally:
- session.close()
-
-
-def session_factory() -> Session:
- """Direct factory for code paths that aren't FastAPI routes."""
- return _SessionLocal()
diff --git a/services/catalog-api/app/devices.py b/services/catalog-api/app/devices.py
deleted file mode 100644
index 3cf6f3f2..00000000
--- a/services/catalog-api/app/devices.py
+++ /dev/null
@@ -1,270 +0,0 @@
-"""Account registration, JWT auth, and device management endpoints.
-
-Provides a user-scoped device management layer on top of the existing
-config-sync store. Accounts authenticate via email+password → JWT.
-Devices automatically link to the account on the first authenticated
-sync push, so there's no explicit "register device" step.
-
-JWT secrets default to a dev fallback — set NKS_WDC_CATALOG_SECRET in
-production. Tokens expire after 30 days so Electron clients don't need
-frequent re-auth.
-"""
-
-from __future__ import annotations
-
-import os
-import logging
-from datetime import datetime, timezone, timedelta
-from typing import Annotated
-
-from fastapi import APIRouter, Depends, HTTPException, status
-from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
-from jose import JWTError, jwt
-from pydantic import BaseModel, EmailStr
-from sqlalchemy import select
-from sqlalchemy.orm import Session
-
-from .auth import hash_password, verify_password
-from .db import Account, DeviceConfig, get_session
-
-log = logging.getLogger(__name__)
-
-router = APIRouter(prefix="/api/v1", tags=["accounts", "devices"])
-
-JWT_SECRET = os.environ.get("NKS_WDC_CATALOG_SECRET", "")
-if not JWT_SECRET:
- if os.environ.get("NKS_WDC_CATALOG_DEV") == "1":
- JWT_SECRET = "dev-only-jwt-secret-change-in-prod"
- log.warning("NKS_WDC_CATALOG_DEV=1 → using insecure JWT secret")
- else:
- JWT_SECRET = "dev-only-jwt-secret-change-in-prod"
- log.warning("NKS_WDC_CATALOG_SECRET not set — using insecure default. Set it in production!")
-JWT_ALGORITHM = "HS256"
-JWT_EXPIRE_DAYS = 30
-
-security = HTTPBearer(auto_error=False)
-
-
-# ── Schemas ─────────────────────────────────────────────────────────────
-
-class RegisterRequest(BaseModel):
- email: str
- password: str
-
-
-class LoginRequest(BaseModel):
- email: str
- password: str
-
-
-class TokenResponse(BaseModel):
- token: str
- email: str
-
-
-class DeviceInfo(BaseModel):
- device_id: str
- name: str | None = None
- os: str | None = None
- arch: str | None = None
- site_count: int | None = None
- last_seen_at: str | None = None
- updated_at: str | None = None
- online: bool = False
- is_current: bool = False
-
-
-class PushConfigRequest(BaseModel):
- source_device_id: str
-
-
-# ── JWT helpers ─────────────────────────────────────────────────────────
-
-def create_token(account_id: int, email: str) -> str:
- expire = datetime.now(timezone.utc) + timedelta(days=JWT_EXPIRE_DAYS)
- return jwt.encode(
- {"sub": str(account_id), "email": email, "exp": expire},
- JWT_SECRET,
- algorithm=JWT_ALGORITHM,
- )
-
-
-def decode_token(token: str) -> dict:
- return jwt.decode(token, JWT_SECRET, algorithms=[JWT_ALGORITHM])
-
-
-# ── Dependencies ────────────────────────────────────────────────────────
-
-def get_current_account(
- credentials: Annotated[HTTPAuthorizationCredentials | None, Depends(security)] = None,
- db: Session = Depends(get_session),
-) -> Account:
- if credentials is None:
- raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Authentication required")
- try:
- payload = decode_token(credentials.credentials)
- account_id = int(payload["sub"])
- except (JWTError, KeyError, ValueError) as exc:
- raise HTTPException(status.HTTP_401_UNAUTHORIZED, f"Invalid token: {exc}")
- account = db.get(Account, account_id)
- if account is None:
- raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Account not found")
- return account
-
-
-def optional_account(
- credentials: Annotated[HTTPAuthorizationCredentials | None, Depends(security)] = None,
- db: Session = Depends(get_session),
-) -> Account | None:
- if credentials is None:
- return None
- try:
- payload = decode_token(credentials.credentials)
- account_id = int(payload["sub"])
- return db.get(Account, account_id)
- except Exception:
- return None
-
-
-# ── Auth endpoints ──────────────────────────────────────────────────────
-
-@router.post("/auth/register", response_model=TokenResponse)
-def register(body: RegisterRequest, db: Session = Depends(get_session)) -> TokenResponse:
- email = body.email.strip().lower()
- if not email or len(email) < 5:
- raise HTTPException(status.HTTP_400_BAD_REQUEST, "Invalid email")
- if len(body.password) < 8:
- raise HTTPException(status.HTTP_400_BAD_REQUEST, "Password must be at least 8 characters")
- existing = db.scalar(select(Account).where(Account.email == email))
- if existing:
- raise HTTPException(status.HTTP_409_CONFLICT, "Email already registered")
- account = Account(
- email=email,
- password_hash=hash_password(body.password),
- )
- db.add(account)
- db.flush()
- token = create_token(account.id, email)
- return TokenResponse(token=token, email=email)
-
-
-@router.post("/auth/login", response_model=TokenResponse)
-def login(body: LoginRequest, db: Session = Depends(get_session)) -> TokenResponse:
- email = body.email.strip().lower()
- account = db.scalar(select(Account).where(Account.email == email))
- if account is None or not verify_password(body.password, account.password_hash):
- raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid email or password")
- account.last_login_at = datetime.now(timezone.utc)
- token = create_token(account.id, email)
- return TokenResponse(token=token, email=email)
-
-
-@router.get("/auth/me")
-def auth_me(account: Account = Depends(get_current_account)) -> dict:
- return {
- "id": account.id,
- "email": account.email,
- "created_at": account.created_at.isoformat() if account.created_at else None,
- }
-
-
-# ── Device endpoints ────────────────────────────────────────────────────
-
-@router.get("/devices", response_model=list[DeviceInfo])
-def list_devices(
- current_device_id: str | None = None,
- account: Account = Depends(get_current_account),
- db: Session = Depends(get_session),
-) -> list[DeviceInfo]:
- """List all devices registered to the authenticated account.
-
- Optional ``current_device_id`` query param lets the caller tag which
- row represents their own device so the UI can render a "this device"
- badge. Without this, ``is_current`` stays False for every row (old
- behaviour) — callers can omit the param and still get a valid list.
- """
- devices = db.scalars(
- select(DeviceConfig).where(DeviceConfig.user_id == account.id)
- ).all()
- # SQLite stores datetimes as naive (no tzinfo). Use naive UTC now so
- # the subtraction doesn't throw "can't subtract offset-naive and
- # offset-aware datetimes".
- now = datetime.now(timezone.utc).replace(tzinfo=None)
- current = (current_device_id or "").strip().lower()
- return [
- DeviceInfo(
- device_id=d.device_id,
- name=d.name,
- os=d.os,
- arch=d.arch,
- site_count=d.site_count,
- last_seen_at=d.last_seen_at.isoformat() if d.last_seen_at else None,
- updated_at=d.updated_at.isoformat() if d.updated_at else None,
- online=d.last_seen_at is not None and (now - d.last_seen_at).total_seconds() < 300,
- is_current=bool(current) and d.device_id == current,
- )
- for d in devices
- ]
-
-
-@router.put("/devices/{device_id}")
-def update_device(
- device_id: str,
- name: str | None = None,
- account: Account = Depends(get_current_account),
- db: Session = Depends(get_session),
-) -> dict:
- device = db.get(DeviceConfig, device_id)
- if device is None or device.user_id != account.id:
- raise HTTPException(status.HTTP_404_NOT_FOUND, "Device not found")
- if name is not None:
- device.name = name
- return {"ok": True, "device_id": device_id}
-
-
-@router.delete("/devices/{device_id}")
-def delete_device(
- device_id: str,
- account: Account = Depends(get_current_account),
- db: Session = Depends(get_session),
-) -> dict:
- device = db.get(DeviceConfig, device_id)
- if device is None or device.user_id != account.id:
- raise HTTPException(status.HTTP_404_NOT_FOUND, "Device not found")
- db.delete(device)
- return {"ok": True, "removed": device_id}
-
-
-@router.get("/devices/{device_id}/config")
-def get_device_config(
- device_id: str,
- account: Account = Depends(get_current_account),
- db: Session = Depends(get_session),
-) -> dict:
- device = db.get(DeviceConfig, device_id)
- if device is None or device.user_id != account.id:
- raise HTTPException(status.HTTP_404_NOT_FOUND, "Device not found")
- return {
- "device_id": device.device_id,
- "name": device.name,
- "payload": device.payload,
- "updated_at": device.updated_at.isoformat() if device.updated_at else None,
- }
-
-
-@router.post("/devices/{device_id}/push-config")
-def push_config_to_device(
- device_id: str,
- body: PushConfigRequest,
- account: Account = Depends(get_current_account),
- db: Session = Depends(get_session),
-) -> dict:
- source = db.get(DeviceConfig, body.source_device_id)
- target = db.get(DeviceConfig, device_id)
- if source is None or source.user_id != account.id:
- raise HTTPException(status.HTTP_404_NOT_FOUND, "Source device not found")
- if target is None or target.user_id != account.id:
- raise HTTPException(status.HTTP_404_NOT_FOUND, "Target device not found")
- target.payload = source.payload
- target.updated_at = datetime.now(timezone.utc)
- return {"ok": True, "pushed_from": body.source_device_id, "pushed_to": device_id}
diff --git a/services/catalog-api/app/generators.py b/services/catalog-api/app/generators.py
deleted file mode 100644
index 7df08655..00000000
--- a/services/catalog-api/app/generators.py
+++ /dev/null
@@ -1,570 +0,0 @@
-"""Upstream URL auto-generators.
-
-Each generator fetches the canonical release listing for one upstream
-source and produces a list of `AppDoc` releases ready to import into the
-catalog DB. No JSON editing required — click "Auto-generate" in the
-admin UI for an app and the service calls the right generator to pull
-the latest versions + platform downloads directly from the vendor.
-
-Sources
--------
-php → https://windows.php.net/downloads/releases/ (HTML listing)
-apache → https://www.apachelounge.com/download/ (HTML listing)
-mysql → https://dev.mysql.com/downloads/mysql/ (static versions)
-mariadb → https://archive.mariadb.org (static versions)
-redis → github.com/redis-windows/redis-windows (GitHub releases API)
-mailpit → github.com/axllent/mailpit (GitHub releases API)
-caddy → github.com/caddyserver/caddy (GitHub releases API)
-nginx → https://nginx.org/en/download.html (HTML listing)
-cloudflared → github.com/cloudflare/cloudflared (GitHub releases API)
-
-Every generator is best-effort: upstream changes will break scraping.
-Failures log a warning and return an empty list so the UI surfaces
-"0 releases found" instead of a 500.
-"""
-
-from __future__ import annotations
-
-import logging
-import re
-from dataclasses import dataclass, field
-from typing import Iterable
-
-import httpx
-
-log = logging.getLogger(__name__)
-
-HTTP_TIMEOUT = httpx.Timeout(20.0, connect=10.0)
-DEFAULT_UA = "NKS-WebDevConsole-Catalog/0.1 (+https://github.com/nks-hub/webdev-console)"
-
-
-@dataclass
-class GenDownload:
- url: str
- os: str = "windows"
- arch: str = "x64"
- archive_type: str = "zip"
- source: str = "auto"
- headers: dict[str, str] | None = None
-
-
-@dataclass
-class GenRelease:
- version: str
- major_minor: str = ""
- channel: str = "stable"
- released_at: str | None = None
- downloads: list[GenDownload] = field(default_factory=list)
-
-
-# ── GitHub helper ───────────────────────────────────────────────────────
-
-def _github_releases(repo: str, limit: int = 10) -> list[dict]:
- """Fetch the last `limit` releases from a public GitHub repo."""
- url = f"https://api.github.com/repos/{repo}/releases?per_page={limit}"
- headers = {
- "Accept": "application/vnd.github+json",
- "User-Agent": DEFAULT_UA,
- "X-GitHub-Api-Version": "2022-11-28",
- }
- try:
- r = httpx.get(url, headers=headers, timeout=HTTP_TIMEOUT)
- r.raise_for_status()
- return r.json()
- except Exception as exc: # noqa: BLE001
- log.warning("GitHub fetch failed for %s: %s", repo, exc)
- return []
-
-
-def _major_minor(version: str) -> str:
- parts = version.split(".")
- return ".".join(parts[:2]) if len(parts) >= 2 else version
-
-
-# ── cloudflared ─────────────────────────────────────────────────────────
-
-def generate_cloudflared(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- for rel in _github_releases("cloudflare/cloudflared", limit=limit):
- tag = rel.get("tag_name", "").lstrip("v")
- if not tag:
- continue
- downloads: list[GenDownload] = []
- for asset in rel.get("assets", []):
- name: str = asset.get("name", "")
- url: str = asset.get("browser_download_url", "")
- if not url:
- continue
- if name.endswith("-windows-amd64.exe"):
- downloads.append(GenDownload(url, "windows", "x64", "exe", "github"))
- elif name.endswith("-windows-386.exe"):
- downloads.append(GenDownload(url, "windows", "x86", "exe", "github"))
- elif name == "cloudflared-linux-amd64":
- downloads.append(GenDownload(url, "linux", "x64", "bin", "github"))
- elif name == "cloudflared-linux-arm64":
- downloads.append(GenDownload(url, "linux", "arm64", "bin", "github"))
- elif name.endswith("-darwin-amd64.tgz"):
- downloads.append(GenDownload(url, "macos", "x64", "tgz", "github"))
- elif name.endswith("-darwin-arm64.tgz"):
- downloads.append(GenDownload(url, "macos", "arm64", "tgz", "github"))
- if downloads:
- releases.append(GenRelease(
- version=tag,
- major_minor=_major_minor(tag),
- released_at=(rel.get("published_at") or "")[:10] or None,
- downloads=downloads,
- ))
- return releases
-
-
-# ── mailpit ────────────────────────────────────────────────────────────
-
-def generate_mailpit(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- for rel in _github_releases("axllent/mailpit", limit=limit):
- tag = rel.get("tag_name", "").lstrip("v")
- if not tag:
- continue
- downloads: list[GenDownload] = []
- for asset in rel.get("assets", []):
- name: str = asset.get("name", "")
- url: str = asset.get("browser_download_url", "")
- if not url:
- continue
- if name == "mailpit-windows-amd64.zip":
- downloads.append(GenDownload(url, "windows", "x64", "zip", "github"))
- elif name == "mailpit-linux-amd64.tar.gz":
- downloads.append(GenDownload(url, "linux", "x64", "tar.gz", "github"))
- elif name == "mailpit-darwin-arm64.tar.gz":
- downloads.append(GenDownload(url, "macos", "arm64", "tar.gz", "github"))
- if downloads:
- releases.append(GenRelease(
- version=tag,
- major_minor=_major_minor(tag),
- released_at=(rel.get("published_at") or "")[:10] or None,
- downloads=downloads,
- ))
- return releases
-
-
-# ── caddy ──────────────────────────────────────────────────────────────
-
-def generate_caddy(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- for rel in _github_releases("caddyserver/caddy", limit=limit):
- tag = rel.get("tag_name", "").lstrip("v")
- if not tag:
- continue
- downloads: list[GenDownload] = []
- for asset in rel.get("assets", []):
- name: str = asset.get("name", "")
- url: str = asset.get("browser_download_url", "")
- if not url:
- continue
- if name.endswith("_windows_amd64.zip"):
- downloads.append(GenDownload(url, "windows", "x64", "zip", "github"))
- elif name.endswith("_linux_amd64.tar.gz"):
- downloads.append(GenDownload(url, "linux", "x64", "tar.gz", "github"))
- elif name.endswith("_mac_arm64.tar.gz"):
- downloads.append(GenDownload(url, "macos", "arm64", "tar.gz", "github"))
- if downloads:
- releases.append(GenRelease(
- version=tag,
- major_minor=_major_minor(tag),
- released_at=(rel.get("published_at") or "")[:10] or None,
- downloads=downloads,
- ))
- return releases
-
-
-# ── redis (redis-windows fork) ─────────────────────────────────────────
-
-def generate_redis(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- for rel in _github_releases("redis-windows/redis-windows", limit=limit):
- tag = rel.get("tag_name", "").lstrip("v")
- if not tag:
- continue
- downloads: list[GenDownload] = []
- for asset in rel.get("assets", []):
- name: str = asset.get("name", "")
- url: str = asset.get("browser_download_url", "")
- if not name or not url:
- continue
- if "Windows" in name and name.endswith(".zip"):
- downloads.append(GenDownload(url, "windows", "x64", "zip", "github/redis-windows"))
- break
- if downloads:
- releases.append(GenRelease(
- version=tag,
- major_minor=_major_minor(tag),
- released_at=(rel.get("published_at") or "")[:10] or None,
- downloads=downloads,
- ))
- return releases
-
-
-# ── PHP (windows.php.net) ───────────────────────────────────────────────
-
-_PHP_ROWS = (
-    # (list URL, regex capturing the zip filename and version)
- (
- "https://windows.php.net/downloads/releases/",
- re.compile(r'href="(php-(\d+\.\d+\.\d+)-nts-Win32-vs\d+-x64\.zip)"'),
- ),
-)
-
-
-def generate_php(limit: int = 10) -> list[GenRelease]:
- releases: list[GenRelease] = []
- for list_url, pattern in _PHP_ROWS:
- try:
- r = httpx.get(list_url, timeout=HTTP_TIMEOUT, headers={"User-Agent": DEFAULT_UA})
- r.raise_for_status()
- seen: set[str] = set()
- for m in pattern.finditer(r.text):
- filename, version = m.group(1), m.group(2)
- if version in seen:
- continue
- seen.add(version)
- download_url = list_url + filename
- releases.append(GenRelease(
- version=version,
- major_minor=_major_minor(version),
- downloads=[GenDownload(download_url, "windows", "x64", "zip", "php.net")],
- ))
- if len(releases) >= limit:
- break
- except Exception as exc: # noqa: BLE001
- log.warning("PHP scrape failed for %s: %s", list_url, exc)
- # Sort descending by semver-ish key
- releases.sort(key=lambda r: tuple(int(x) for x in r.version.split(".")), reverse=True)
- return releases[:limit]
-
-
-# ── Apache (apachelounge.com) ──────────────────────────────────────────
-
-_APACHE_PATTERN = re.compile(
- r'href="(binaries/(httpd-(\d+\.\d+\.\d+)-[\d-]+-win64-VS\d+\.zip))"',
- re.IGNORECASE,
-)
-
-
-def generate_apache(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- try:
- r = httpx.get(
- "https://www.apachelounge.com/download/",
- timeout=HTTP_TIMEOUT,
- headers={"User-Agent": DEFAULT_UA},
- )
- r.raise_for_status()
- for m in _APACHE_PATTERN.finditer(r.text):
- rel_path, filename, version = m.group(1), m.group(2), m.group(3)
- url = "https://www.apachelounge.com/download/" + rel_path
- releases.append(GenRelease(
- version=version,
- major_minor=_major_minor(version),
- downloads=[GenDownload(url, "windows", "x64", "zip", "apachelounge", {"User-Agent": DEFAULT_UA})],
- ))
- if len(releases) >= limit:
- break
- except Exception as exc: # noqa: BLE001
- log.warning("Apache scrape failed: %s", exc)
- return releases
-
-
-# ── MariaDB (archive.mariadb.org) ──────────────────────────────────────
-#
-# The archive host serves an Apache-style open directory listing at /.
-# We scrape release directories matching `mariadb-X.Y.Z/`, filter to the
-# latest `limit` by semver-descending sort, then derive the direct
-# Windows zip URL from the known `winx64-packages/{name}.zip` pattern.
-# HEAD probe before adding so we never register a release whose
-# Windows build is missing upstream.
-
-_MARIADB_RELEASE_PATTERN = re.compile(
- r'href="mariadb-(\d+)\.(\d+)\.(\d+)/"',
- re.IGNORECASE,
-)
-
-
-def generate_mariadb(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- try:
- r = httpx.get(
- "https://archive.mariadb.org/",
- timeout=HTTP_TIMEOUT,
- headers={"User-Agent": DEFAULT_UA},
- )
- r.raise_for_status()
- except Exception as exc: # noqa: BLE001
- log.warning("MariaDB scrape failed: %s", exc)
- return releases
-
- seen: set[tuple[int, int, int]] = set()
- parsed: list[tuple[int, int, int]] = []
- for m in _MARIADB_RELEASE_PATTERN.finditer(r.text):
- triple = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
- if triple in seen:
- continue
- seen.add(triple)
- parsed.append(triple)
-
- # Sort descending so we hand the UI the freshest stable builds first.
- parsed.sort(reverse=True)
-
- for major, minor, patch in parsed:
- if len(releases) >= limit:
- break
- version = f"{major}.{minor}.{patch}"
- url = (
- f"https://archive.mariadb.org/mariadb-{version}/"
- f"winx64-packages/mariadb-{version}-winx64.zip"
- )
- # HEAD probe so we don't register a directory that exists but whose
- # Windows zip wasn't built (pre-10.x alpha betas, arch-only drops).
- try:
- head = httpx.head(url, timeout=httpx.Timeout(5.0, connect=5.0))
- if head.status_code >= 400:
- continue
- except Exception: # noqa: BLE001
- continue
-
- releases.append(GenRelease(
- version=version,
- major_minor=_major_minor(version),
- downloads=[GenDownload(url, "windows", "x64", "zip", "mariadb.org")],
- ))
-
- return releases
-
-
-# ── Nginx (nginx.org) ──────────────────────────────────────────────────
-
-_NGINX_PATTERN = re.compile(r'href="(nginx-(\d+\.\d+\.\d+)\.zip)"')
-
-
-def generate_nginx(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- try:
- r = httpx.get(
- "https://nginx.org/en/download.html",
- timeout=HTTP_TIMEOUT,
- headers={"User-Agent": DEFAULT_UA},
- )
- r.raise_for_status()
- seen: set[str] = set()
- for m in _NGINX_PATTERN.finditer(r.text):
- filename, version = m.group(1), m.group(2)
- if version in seen:
- continue
- seen.add(version)
-            url = f"https://nginx.org/download/{filename}"
- releases.append(GenRelease(
- version=version,
- major_minor=_major_minor(version),
- downloads=[GenDownload(url, "windows", "x64", "zip", "nginx.org")],
- ))
- if len(releases) >= limit:
- break
- except Exception as exc: # noqa: BLE001
- log.warning("Nginx scrape failed: %s", exc)
- return releases
-
-
-# ── MySQL Community Server (dev.mysql.com) ────────────────────────────
-
-_MYSQL_VERSIONS_URL = "https://dev.mysql.com/downloads/mysql/"
-_MYSQL_CDN = "https://dev.mysql.com/get/Downloads/MySQL-{mm}/mysql-{ver}-winx64.zip"
-_MYSQL_VERSION_PATTERN = re.compile(
- r"MySQL Community Server (\d+)\.(\d+)\.(\d+)"
- r"|mysql-(\d+)\.(\d+)\.(\d+)-winx64\.zip"
-)
-
-
-def generate_mysql(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
-
- # Strategy: scrape the downloads page for advertised versions, then
- # construct CDN URLs. MySQL doesn't publish a simple API or GitHub
- # releases, so we parse the human-readable download page.
- try:
- r = httpx.get(
- _MYSQL_VERSIONS_URL,
- timeout=HTTP_TIMEOUT,
- headers={"User-Agent": DEFAULT_UA},
- follow_redirects=True,
- )
- r.raise_for_status()
- except Exception as exc: # noqa: BLE001
- log.warning("MySQL scrape failed: %s", exc)
- # Fall back to well-known recent stable versions.
- return _mysql_fallback(limit)
-
- seen: set[tuple[int, int, int]] = set()
- parsed: list[tuple[int, int, int]] = []
- for m in _MYSQL_VERSION_PATTERN.finditer(r.text):
- groups = m.groups()
- # The regex has two alternatives — pick whichever matched.
- if groups[0] is not None:
- triple = (int(groups[0]), int(groups[1]), int(groups[2]))
- else:
- triple = (int(groups[3]), int(groups[4]), int(groups[5]))
- if triple in seen:
- continue
- seen.add(triple)
- parsed.append(triple)
-
- parsed.sort(reverse=True)
-
- for major, minor, patch in parsed:
- if len(releases) >= limit:
- break
- version = f"{major}.{minor}.{patch}"
- mm = f"{major}.{minor}"
- url = _MYSQL_CDN.format(mm=mm, ver=version)
-
- # HEAD probe to confirm the archive exists (some point releases
- # skip the Windows zip or use a different naming scheme).
- try:
- head = httpx.head(url, timeout=httpx.Timeout(5.0, connect=5.0), follow_redirects=True)
- if head.status_code >= 400:
- continue
- except Exception: # noqa: BLE001
- continue
-
- releases.append(GenRelease(
- version=version,
- major_minor=mm,
- downloads=[GenDownload(url, "windows", "x64", "zip", "dev.mysql.com")],
- ))
-
- if not releases:
- return _mysql_fallback(limit)
- return releases
-
-
-def _mysql_fallback(limit: int) -> list[GenRelease]:
- """Hardcoded recent MySQL versions as a safety net when scraping fails."""
- fallback = [
- ("9.3.0", "9.3"),
- ("9.2.0", "9.2"),
- ("8.4.5", "8.4"),
- ("8.0.42", "8.0"),
- ]
- releases: list[GenRelease] = []
- for ver, mm in fallback[:limit]:
- releases.append(GenRelease(
- version=ver,
- major_minor=mm,
- downloads=[GenDownload(
- _MYSQL_CDN.format(mm=mm, ver=ver),
- "windows", "x64", "zip", "dev.mysql.com (fallback)",
- )],
- ))
- return releases
-
-
-# ── Node.js (nodejs.org) ──────────────────────────────────────────────
-
-_NODE_INDEX_URL = "https://nodejs.org/dist/index.json"
-
-
-def generate_node(limit: int = 5) -> list[GenRelease]:
- releases: list[GenRelease] = []
- try:
- r = httpx.get(
- _NODE_INDEX_URL,
- timeout=HTTP_TIMEOUT,
- headers={"User-Agent": DEFAULT_UA},
- )
- r.raise_for_status()
- data = r.json()
- except Exception as exc: # noqa: BLE001
- log.warning("Node.js fetch failed: %s", exc)
- return releases
-
- # index.json is sorted newest-first. Each entry has:
- # {"version":"v22.15.0","date":"...","files":["win-x64-zip","linux-x64",...], ...}
- for entry in data:
- if len(releases) >= limit:
- break
- version_raw = entry.get("version", "")
- if not version_raw.startswith("v"):
- continue
- version = version_raw[1:] # strip leading 'v'
- files = entry.get("files", [])
-
- downloads: list[GenDownload] = []
- # Windows x64 zip
- if "win-x64-zip" in files:
- downloads.append(GenDownload(
- url=f"https://nodejs.org/dist/{version_raw}/node-{version_raw}-win-x64.zip",
- os="windows", arch="x64", archive_type="zip", source="nodejs.org",
- ))
- # Linux x64 tar.xz
- if "linux-x64" in files:
- downloads.append(GenDownload(
- url=f"https://nodejs.org/dist/{version_raw}/node-{version_raw}-linux-x64.tar.xz",
- os="linux", arch="x64", archive_type="tar.xz", source="nodejs.org",
- ))
- # macOS arm64
- if "osx-arm64-tar" in files:
- downloads.append(GenDownload(
- url=f"https://nodejs.org/dist/{version_raw}/node-{version_raw}-darwin-arm64.tar.gz",
- os="macos", arch="arm64", archive_type="tar.gz", source="nodejs.org",
- ))
- # macOS x64
- if "osx-x64-tar" in files:
- downloads.append(GenDownload(
- url=f"https://nodejs.org/dist/{version_raw}/node-{version_raw}-darwin-x64.tar.gz",
- os="macos", arch="x64", archive_type="tar.gz", source="nodejs.org",
- ))
-
- if not downloads:
- continue
-
- # Determine channel: even major = LTS (once it reaches LTS status)
- lts = entry.get("lts")
- channel = "lts" if lts else "stable"
-
- releases.append(GenRelease(
- version=version,
- major_minor=_major_minor(version),
- channel=channel,
- downloads=downloads,
- ))
-
- return releases
-
-
-# ── Registry ────────────────────────────────────────────────────────────
-
-GENERATORS = {
- "cloudflared": generate_cloudflared,
- "mailpit": generate_mailpit,
- "caddy": generate_caddy,
- "redis": generate_redis,
- "php": generate_php,
- "apache": generate_apache,
- "nginx": generate_nginx,
- "mariadb": generate_mariadb,
- "mysql": generate_mysql,
- "node": generate_node,
-}
-
-
-def available_generators() -> Iterable[str]:
- return GENERATORS.keys()
-
-
-def run_generator(app_id: str, limit: int = 5) -> list[GenRelease]:
- gen = GENERATORS.get(app_id.lower())
- if gen is None:
- return []
- try:
- return gen(limit)
- except Exception as exc: # noqa: BLE001
- log.warning("Generator for %s threw: %s", app_id, exc)
- return []
diff --git a/services/catalog-api/app/main.py b/services/catalog-api/app/main.py
deleted file mode 100644
index 7ad4364e..00000000
--- a/services/catalog-api/app/main.py
+++ /dev/null
@@ -1,579 +0,0 @@
-"""FastAPI entrypoint for the NKS WDC catalog + config sync service.
-
-Two front-ends:
-
-1. Public JSON API consumed by the C# daemon's `CatalogClient`:
- GET /api/v1/catalog full catalog
- GET /api/v1/catalog/{app} single app
- POST /api/v1/sync/config upsert device snapshot
- GET /api/v1/sync/config/{id} fetch device snapshot
-
-2. HTML admin UI behind a bcrypt session login (`/login`, `/admin/*`).
- Backed by SQLite through SQLAlchemy. URL auto-generators scrape
- upstream release pages so admins don't hand-type download URLs.
-
-Environment
------------
-DATABASE_URL — override SQLite default (postgres etc.)
-NKS_WDC_CATALOG_STATE_DIR — dir for `catalog.db` + runtime state
-NKS_WDC_CATALOG_ADMIN_USER — bootstrap admin username (default "admin")
-NKS_WDC_CATALOG_ADMIN_PASS — bootstrap admin password (required unless dev)
-NKS_WDC_CATALOG_DEV — set to "1" to allow admin/admin fallback
-NKS_WDC_CATALOG_SECRET — itsdangerous signer key (set in prod!)
-NKS_WDC_CATALOG_ALLOW_CORS — "1" to enable permissive CORS
-"""
-
-from __future__ import annotations
-
-import logging
-import os
-from contextlib import asynccontextmanager
-from pathlib import Path
-from typing import Annotated, Iterator
-
-from fastapi import Cookie, Depends, FastAPI, Form, HTTPException, Request, status
-from fastapi.middleware.cors import CORSMiddleware
-from fastapi.responses import HTMLResponse, JSONResponse, RedirectResponse
-from fastapi.staticfiles import StaticFiles
-from fastapi.templating import Jinja2Templates
-from sqlalchemy import select
-from sqlalchemy.orm import Session
-
-from . import __version__
-from .auth import (
- SESSION_COOKIE,
- SESSION_MAX_AGE,
- current_user,
- ensure_admin_user,
- hash_password,
- issue_session,
- optional_user,
- verify_password,
-)
-from .db import Account, DeviceConfig, User, create_all, get_session, session_factory
-from .devices import router as devices_router, optional_account
-from .generators import GENERATORS, run_generator
-from .schemas import (
- AppDoc,
- CatalogDocument,
- ConfigSyncEntry,
- ConfigSyncListResponse,
- ConfigSyncUploadRequest,
-)
-from .service import (
- add_download,
- add_release,
- apply_generated_releases,
- build_catalog_document,
- create_app as svc_create_app,
- delete_app as svc_delete_app,
- delete_download,
- delete_release,
- get_app,
- get_app_document,
- list_apps,
- seed_from_json,
- update_app,
-)
-
-logging.basicConfig(
- level=os.environ.get("LOG_LEVEL", "INFO"),
- format="%(asctime)s %(levelname)-7s %(name)s: %(message)s",
-)
-log = logging.getLogger("nks-wdc-catalog")
-
-_APP_DIR = Path(__file__).parent
-_SEED_DIR = _APP_DIR / "data" / "apps"
-
-
-@asynccontextmanager
-async def lifespan(_app: FastAPI) -> Iterator[None]:
- create_all()
- ensure_admin_user()
- with session_factory() as db:
- count = seed_from_json(db, _SEED_DIR)
- if count:
- log.info("Seeded %d apps from %s", count, _SEED_DIR)
- yield
-
-
-app = FastAPI(
- title="NKS WDC Catalog API",
- version=__version__,
- description=(
- "Cloud-hosted binary catalog + per-device config sync for NKS "
- "WebDev Console. Ships an admin UI for managing catalog entries "
- "and auto-generators that scrape upstream release pages so you "
- "never hand-type download URLs."
- ),
- lifespan=lifespan,
-)
-
-if os.environ.get("NKS_WDC_CATALOG_ALLOW_CORS") == "1":
- app.add_middleware(
- CORSMiddleware,
- allow_origins=["*"],
- allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
- allow_headers=["*"],
- )
-
-# Mount the accounts + devices router (JWT-authenticated endpoints)
-app.include_router(devices_router)
-
-app.mount("/static", StaticFiles(directory=_APP_DIR / "static"), name="static")
-templates = Jinja2Templates(directory=_APP_DIR / "templates")
-
-
-def _base_context(request: Request, username: str | None, **extra) -> dict:
- """Shared template context — version always present so base.html renders."""
- ctx = {"request": request, "username": username, "version": __version__, "flash": None}
- ctx.update(extra)
- return ctx
-
-
-# ─────────────────────────────────────────────────────────────────────────
-# Health
-# ─────────────────────────────────────────────────────────────────────────
-
-@app.get("/healthz", tags=["health"])
-def healthz() -> dict:
- return {"ok": True, "service": "nks-wdc-catalog-api", "version": __version__}
-
-
-# ─────────────────────────────────────────────────────────────────────────
-# Public JSON API (consumed by C# CatalogClient)
-# ─────────────────────────────────────────────────────────────────────────
-
-@app.get("/api/v1/catalog", response_model=CatalogDocument, tags=["catalog"])
-def api_get_catalog(db: Session = Depends(get_session)) -> CatalogDocument:
- return build_catalog_document(db)
-
-
-@app.get("/api/v1/catalog/{app_name}", response_model=AppDoc, tags=["catalog"])
-def api_get_app(app_name: str, db: Session = Depends(get_session)) -> AppDoc:
- doc = get_app_document(db, app_name)
- if doc is None:
- raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unknown app '{app_name}'")
- return doc
-
-
-# ─────────────────────────────────────────────────────────────────────────
-# Config sync (public, runs behind reverse-proxy auth in prod)
-# ─────────────────────────────────────────────────────────────────────────
-
-# Device IDs are used as SQL primary keys and echoed back in responses.
-# Restrict to the same shape the Electron client generates (lowercased
-# alphanumerics + dashes, 3–64 chars) so garbage IDs can't pollute the
-# device_configs table. This is defence-in-depth — SQLAlchemy already
-# parameterizes the SQL, so the risk is cosmetic storage pollution, not
-# injection.
-import re as _re
-_DEVICE_ID_RE = _re.compile(r"^[a-z0-9][a-z0-9-]{2,63}$")
-
-
-def _normalize_device_id(raw: str) -> str:
- """Lowercase + strip + validate a client-supplied device id.
-
- Raises HTTP 400 on any format violation so clients see a clear
- error instead of the request silently succeeding with a mangled id.
- """
- normalized = raw.strip().lower()
- if not normalized:
- raise HTTPException(status.HTTP_400_BAD_REQUEST, "device_id is required")
- if not _DEVICE_ID_RE.match(normalized):
- raise HTTPException(
- status.HTTP_400_BAD_REQUEST,
- "device_id must be 3–64 chars, lowercase alphanumeric + dashes",
- )
- return normalized
-
-
-@app.post("/api/v1/sync/config", response_model=ConfigSyncEntry, tags=["sync"])
-def api_upsert_config(
- body: ConfigSyncUploadRequest,
- account: Account | None = Depends(optional_account),
- db: Session = Depends(get_session),
-) -> ConfigSyncEntry:
- from datetime import datetime, timezone
-
- device_id = _normalize_device_id(body.device_id)
-
- row = db.get(DeviceConfig, device_id)
- if row is None:
- row = DeviceConfig(device_id=device_id, payload=body.payload)
- db.add(row)
- else:
- row.payload = body.payload
- row.updated_at = datetime.now(timezone.utc)
-
- # Auto-link device to account on first authenticated push — no
- # explicit "register device" step needed. Also extract metadata
- # from the payload so the device list can show name/OS/arch/sites
- # without opening the full JSON blob.
- if account is not None and row.user_id is None:
- row.user_id = account.id
- elif account is not None and row.user_id == account.id:
- pass # already linked
- row.last_seen_at = datetime.now(timezone.utc)
-
- # Extract device metadata from payload if present
- p = body.payload or {}
- if isinstance(p.get("settings"), dict):
- settings = p["settings"]
- if "sync.deviceName" in settings:
- row.name = settings["sync.deviceName"]
- if isinstance(p.get("sites"), list):
- row.site_count = len(p["sites"])
- if "deviceId" in p:
- pass # already have device_id from URL
-
- # Extract OS info from system snapshot if pushed
- if isinstance(p.get("system"), dict):
- sys_info = p["system"]
- if isinstance(sys_info.get("os"), dict):
- row.os = sys_info["os"].get("tag")
- row.arch = sys_info["os"].get("arch")
-
- db.flush()
- return ConfigSyncEntry(
- device_id=row.device_id,
- updated_at=row.updated_at.isoformat() if row.updated_at else "",
- payload=row.payload,
- )
-
-
-@app.get("/api/v1/sync/config/{device_id}", response_model=ConfigSyncEntry, tags=["sync"])
-def api_get_config(device_id: str, db: Session = Depends(get_session)) -> ConfigSyncEntry:
- normalized = _normalize_device_id(device_id)
- row = db.get(DeviceConfig, normalized)
- if row is None:
- raise HTTPException(status.HTTP_404_NOT_FOUND, f"No snapshot for {normalized}")
- return ConfigSyncEntry(
- device_id=row.device_id,
- updated_at=row.updated_at.isoformat() if row.updated_at else "",
- payload=row.payload,
- )
-
-
-@app.get(
- "/api/v1/sync/config/{device_id}/exists",
- response_model=ConfigSyncListResponse,
- tags=["sync"],
-)
-def api_exists_config(device_id: str, db: Session = Depends(get_session)) -> ConfigSyncListResponse:
- normalized = _normalize_device_id(device_id)
- row = db.get(DeviceConfig, normalized)
- if row is None:
- return ConfigSyncListResponse(device_id=normalized, has_config=False)
- return ConfigSyncListResponse(
- device_id=row.device_id,
- updated_at=row.updated_at.isoformat() if row.updated_at else None,
- has_config=True,
- )
-
-
-@app.delete("/api/v1/sync/config/{device_id}", tags=["sync"])
-def api_delete_config(device_id: str, db: Session = Depends(get_session)) -> JSONResponse:
- normalized = _normalize_device_id(device_id)
- row = db.get(DeviceConfig, normalized)
- if row is None:
- return JSONResponse({"ok": True, "removed": False})
- db.delete(row)
- return JSONResponse({"ok": True, "removed": True})
-
-
-# ─────────────────────────────────────────────────────────────────────────
-# Auth (login / logout / session cookie)
-# ─────────────────────────────────────────────────────────────────────────
-
-@app.get("/", include_in_schema=False)
-def root(user: Annotated[str | None, Depends(optional_user)] = None):
- return RedirectResponse("/admin" if user else "/login")
-
-
-@app.get("/login", response_class=HTMLResponse, include_in_schema=False)
-def login_form(request: Request) -> HTMLResponse:
- return templates.TemplateResponse(request, "login.html", _base_context(request, None))
-
-
-@app.post("/login", include_in_schema=False)
-def login_submit(
- request: Request,
- username: Annotated[str, Form()],
- password: Annotated[str, Form()],
- db: Session = Depends(get_session),
-):
- user = db.scalar(select(User).where(User.username == username.strip()))
- if user is None or not verify_password(password, user.password_hash):
- return templates.TemplateResponse(
- request,
- "login.html",
- _base_context(request, None, error="Invalid username or password"),
- status_code=401,
- )
- token = issue_session(user.username)
- response = RedirectResponse("/admin", status_code=status.HTTP_303_SEE_OTHER)
- response.set_cookie(
- key=SESSION_COOKIE,
- value=token,
- max_age=SESSION_MAX_AGE,
- httponly=True,
- samesite="lax",
- )
- return response
-
-
-@app.post("/logout", include_in_schema=False)
-def logout() -> RedirectResponse:
- response = RedirectResponse("/login", status_code=status.HTTP_303_SEE_OTHER)
- response.delete_cookie(SESSION_COOKIE)
- return response
-
-
-# ─────────────────────────────────────────────────────────────────────────
-# Admin UI (authenticated HTML)
-# ─────────────────────────────────────────────────────────────────────────
-
-def _redirect(url: str, flash_kind: str | None = None, flash_message: str | None = None) -> RedirectResponse:
- response = RedirectResponse(url, status_code=status.HTTP_303_SEE_OTHER)
- if flash_kind and flash_message:
- # Flash via short-lived cookie so the next GET picks it up.
- response.set_cookie("flash", f"{flash_kind}|{flash_message}", max_age=15, samesite="lax")
- return response
-
-
-def _pop_flash(cookie: str | None) -> dict | None:
- if not cookie or "|" not in cookie:
- return None
- kind, _, message = cookie.partition("|")
- return {"kind": kind, "message": message}
-
-
-def _clear_flash(response) -> None:
- response.delete_cookie("flash")
-
-
-@app.get("/admin", response_class=HTMLResponse, include_in_schema=False)
-def admin_index(
- request: Request,
- username: Annotated[str, Depends(current_user)],
- flash: Annotated[str | None, Cookie(alias="flash")] = None,
- db: Session = Depends(get_session),
-) -> HTMLResponse:
- apps = list_apps(db)
- response = templates.TemplateResponse(
- request,
- "apps_list.html",
- _base_context(request, username, apps=apps, flash=_pop_flash(flash)),
- )
- _clear_flash(response)
- return response
-
-
-@app.get("/admin/new", response_class=HTMLResponse, include_in_schema=False)
-def admin_new_app(
- request: Request,
- username: Annotated[str, Depends(current_user)],
-) -> HTMLResponse:
- return templates.TemplateResponse(
- request,
- "app_form.html",
- _base_context(request, username, app=None),
- )
-
-
-@app.post("/admin/new", include_in_schema=False)
-def admin_create_app(
- username: Annotated[str, Depends(current_user)],
- id: Annotated[str, Form()],
- display_name: Annotated[str, Form()] = "",
- category: Annotated[str, Form()] = "other",
- description: Annotated[str, Form()] = "",
- homepage: Annotated[str, Form()] = "",
- license: Annotated[str, Form()] = "",
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- try:
- app_row = svc_create_app(
- db,
- app_id=id,
- display_name=display_name,
- category=category,
- description=description,
- homepage=homepage or None,
- license=license or None,
- )
- except ValueError as exc:
- return _redirect("/admin/new", "error", str(exc))
- return _redirect(f"/admin/apps/{app_row.id}", "success", f"Created {app_row.id}")
-
-
-@app.get("/admin/apps/{app_id}", response_class=HTMLResponse, include_in_schema=False)
-def admin_app_detail(
- request: Request,
- app_id: str,
- username: Annotated[str, Depends(current_user)],
- flash: Annotated[str | None, Cookie(alias="flash")] = None,
- db: Session = Depends(get_session),
-) -> HTMLResponse:
- app_row = get_app(db, app_id)
- if not app_row:
- raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unknown app '{app_id}'")
- response = templates.TemplateResponse(
- request,
- "app_detail.html",
- _base_context(
- request,
- username,
- app=app_row,
- has_generator=app_id.lower() in GENERATORS,
- flash=_pop_flash(flash),
- ),
- )
- _clear_flash(response)
- return response
-
-
-@app.get("/admin/apps/{app_id}/edit", response_class=HTMLResponse, include_in_schema=False)
-def admin_edit_app(
- request: Request,
- app_id: str,
- username: Annotated[str, Depends(current_user)],
- db: Session = Depends(get_session),
-) -> HTMLResponse:
- app_row = get_app(db, app_id)
- if not app_row:
- raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unknown app '{app_id}'")
- return templates.TemplateResponse(
- request,
- "app_form.html",
- _base_context(request, username, app=app_row),
- )
-
-
-@app.post("/admin/apps/{app_id}/edit", include_in_schema=False)
-def admin_save_app(
- app_id: str,
- username: Annotated[str, Depends(current_user)],
- display_name: Annotated[str, Form()] = "",
- category: Annotated[str, Form()] = "other",
- description: Annotated[str, Form()] = "",
- homepage: Annotated[str, Form()] = "",
- license: Annotated[str, Form()] = "",
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- app_row = update_app(
- db, app_id,
- display_name=display_name,
- category=category,
- description=description,
- homepage=homepage,
- license=license,
- )
- if not app_row:
- raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unknown app '{app_id}'")
- return _redirect(f"/admin/apps/{app_row.id}", "success", "Saved")
-
-
-@app.post("/admin/apps/{app_id}/delete", include_in_schema=False)
-def admin_delete_app(
- app_id: str,
- username: Annotated[str, Depends(current_user)],
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- svc_delete_app(db, app_id)
- return _redirect("/admin", "success", f"Deleted {app_id}")
-
-
-@app.post("/admin/apps/{app_id}/releases", include_in_schema=False)
-def admin_add_release(
- app_id: str,
- username: Annotated[str, Depends(current_user)],
- version: Annotated[str, Form()],
- channel: Annotated[str, Form()] = "stable",
- released_at: Annotated[str, Form()] = "",
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- rel = add_release(
- db, app_id, version,
- channel=channel,
- released_at=released_at or None,
- )
- if not rel:
- raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unknown app '{app_id}'")
- return _redirect(f"/admin/apps/{app_id}", "success", f"Added {version}")
-
-
-@app.post("/admin/releases/{release_id}/delete", include_in_schema=False)
-def admin_delete_release(
- release_id: int,
- username: Annotated[str, Depends(current_user)],
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- from .db import Release as ReleaseModel
-
- rel = db.get(ReleaseModel, release_id)
- app_id = rel.app_id if rel else None
- delete_release(db, release_id)
- return _redirect(f"/admin/apps/{app_id}" if app_id else "/admin", "success", "Release removed")
-
-
-@app.post("/admin/releases/{release_id}/downloads", include_in_schema=False)
-def admin_add_download(
- release_id: int,
- username: Annotated[str, Depends(current_user)],
- url: Annotated[str, Form()],
- os: Annotated[str, Form()] = "windows",
- arch: Annotated[str, Form()] = "x64",
- archive_type: Annotated[str, Form()] = "zip",
- source: Annotated[str, Form()] = "manual",
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- from .db import Release as ReleaseModel
-
- rel = db.get(ReleaseModel, release_id)
- if not rel:
- raise HTTPException(status.HTTP_404_NOT_FOUND, "Unknown release")
- add_download(
- db, release_id,
- url=url, os=os, arch=arch, archive_type=archive_type, source=source,
- )
- return _redirect(f"/admin/apps/{rel.app_id}", "success", "Download added")
-
-
-@app.post("/admin/downloads/{download_id}/delete", include_in_schema=False)
-def admin_delete_download(
- download_id: int,
- username: Annotated[str, Depends(current_user)],
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- from .db import Download as DownloadModel, Release as ReleaseModel
-
- dl = db.get(DownloadModel, download_id)
- app_id = None
- if dl:
- rel = db.get(ReleaseModel, dl.release_id)
- app_id = rel.app_id if rel else None
- delete_download(db, download_id)
- return _redirect(f"/admin/apps/{app_id}" if app_id else "/admin", "success", "Download removed")
-
-
-@app.post("/admin/apps/{app_id}/auto-generate", include_in_schema=False)
-def admin_auto_generate(
- app_id: str,
- username: Annotated[str, Depends(current_user)],
- limit: Annotated[int, Form()] = 5,
- db: Session = Depends(get_session),
-) -> RedirectResponse:
- app_row = get_app(db, app_id)
- if not app_row:
- raise HTTPException(status.HTTP_404_NOT_FOUND, f"Unknown app '{app_id}'")
- if app_id.lower() not in GENERATORS:
- return _redirect(f"/admin/apps/{app_id}", "error", f"No generator for '{app_id}'")
- releases = run_generator(app_id, limit=limit)
- inserted = apply_generated_releases(db, app_id, releases)
- return _redirect(
- f"/admin/apps/{app_id}",
- "success" if inserted else "info",
- f"Auto-generated: {inserted} new release(s) from {len(releases)} scraped",
- )
diff --git a/services/catalog-api/app/schemas.py b/services/catalog-api/app/schemas.py
deleted file mode 100644
index ebcaf98b..00000000
--- a/services/catalog-api/app/schemas.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""Pydantic models for the NKS WDC catalog + config sync API.
-
-The catalog shape is consumed by the C# daemon's `CatalogClient` (see
-`src/daemon/NKS.WebDevConsole.Daemon/Binaries/CatalogClient.cs`). DO NOT
-change field names here without updating the `CatalogDocument` DTOs in
-that file — the daemon serializes with `JsonNamingPolicy.SnakeCaseLower`
-so keys must stay snake_case in the wire format. Pydantic handles that
-via `alias_generator=to_snake` below.
-"""
-
-from __future__ import annotations
-
-from typing import Optional
-
-from pydantic import BaseModel, ConfigDict, Field
-from pydantic.alias_generators import to_snake
-
-
-class CamelModel(BaseModel):
- """Base with aliases so both snake_case and camelCase are accepted
- on input, while output is always snake_case (matches CatalogClient)."""
-
- model_config = ConfigDict(
- alias_generator=to_snake,
- populate_by_name=True,
- extra="ignore",
- )
-
-
-class DownloadDoc(CamelModel):
- """A single binary download: an OS/arch-specific URL + metadata."""
-
- url: str
- os: str = "windows"
- arch: str = "x64"
- archive_type: str = "zip"
- source: str = "unknown"
- # Optional HTTP headers the downloader must send. Typically only
- # `User-Agent` when the upstream mirror rejects the .NET default.
- headers: Optional[dict[str, str]] = None
- # SHA-256 of the downloaded archive, lowercase hex. Verifier will
- # compare after download; empty string means "skip verification".
- sha256: Optional[str] = None
- # Human-readable file size for UI progress planning (optional).
- size_bytes: Optional[int] = None
-
-
-class ReleaseDoc(CamelModel):
- """A versioned release with one or more per-platform downloads."""
-
- version: str
- major_minor: str = ""
- channel: str = "stable"
- released_at: Optional[str] = None
- downloads: list[DownloadDoc] = Field(default_factory=list)
-
-
-class AppDoc(CamelModel):
- """One application bucket: name, metadata, list of releases."""
-
- name: str
- display_name: str = ""
- category: str = "other"
- description: str = ""
- homepage: Optional[str] = None
- license: Optional[str] = None
- releases: list[ReleaseDoc] = Field(default_factory=list)
-
-
-class CatalogDocument(CamelModel):
- """Top-level catalog envelope returned by GET /api/v1/catalog."""
-
- schema_version: str = "1"
- generated_at: Optional[str] = None
- apps: dict[str, AppDoc] = Field(default_factory=dict)
-
-
-# ── Config sync ─────────────────────────────────────────────────────────
-# Devices push their local configuration snapshots so a fresh WDC install
-# can hydrate from the last known good state. Keep it deliberately thin:
-# free-form JSON body, server just timestamps + returns it.
-
-
-class ConfigSyncEntry(CamelModel):
- device_id: str
- updated_at: str
- payload: dict
-
-
-class ConfigSyncUploadRequest(CamelModel):
- device_id: str
- payload: dict
-
-
-class ConfigSyncListResponse(CamelModel):
- device_id: str
- updated_at: Optional[str] = None
- has_config: bool = False
diff --git a/services/catalog-api/app/service.py b/services/catalog-api/app/service.py
deleted file mode 100644
index 6f5558a9..00000000
--- a/services/catalog-api/app/service.py
+++ /dev/null
@@ -1,333 +0,0 @@
-"""Service layer that bridges SQLAlchemy models and the API schemas.
-
-Keeps routes thin — every persistence operation goes through a function
-here so tests can exercise the business logic without spinning up the
-HTTP layer.
-"""
-
-from __future__ import annotations
-
-import json
-import logging
-from datetime import datetime, timezone
-from pathlib import Path
-
-from sqlalchemy import delete, select
-from sqlalchemy.orm import Session, selectinload
-
-from .db import App, Download, Release
-from .generators import GenRelease
-from .schemas import AppDoc, CatalogDocument, DownloadDoc, ReleaseDoc
-
-log = logging.getLogger(__name__)
-
-
-# ── Read side — assemble the CatalogDocument for the public API ────────
-
-def build_catalog_document(db: Session) -> CatalogDocument:
- apps = db.scalars(
- select(App).options(selectinload(App.releases).selectinload(Release.downloads))
- ).all()
-
- return CatalogDocument(
- schema_version="1",
- generated_at=datetime.now(timezone.utc).isoformat(),
- apps={a.id: _app_to_schema(a) for a in apps},
- )
-
-
-def get_app_document(db: Session, app_id: str) -> AppDoc | None:
- app = db.scalar(
- select(App)
- .where(App.id == app_id.lower())
- .options(selectinload(App.releases).selectinload(Release.downloads))
- )
- return _app_to_schema(app) if app else None
-
-
-def _app_to_schema(app: App) -> AppDoc:
- return AppDoc(
- name=app.id,
- display_name=app.display_name or app.id,
- category=app.category or "other",
- description=app.description or "",
- homepage=app.homepage,
- license=app.license,
- releases=[
- ReleaseDoc(
- version=r.version,
- major_minor=r.major_minor,
- channel=r.channel,
- released_at=r.released_at,
- downloads=[
- DownloadDoc(
- url=d.url,
- os=d.os,
- arch=d.arch,
- archive_type=d.archive_type,
- source=d.source,
- headers=d.headers,
- sha256=d.sha256,
- size_bytes=d.size_bytes,
- )
- for d in r.downloads
- ],
- )
- for r in app.releases
- ],
- )
-
-
-# ── Write side — CRUD used by admin UI + auto-generators ───────────────
-
-def list_apps(db: Session) -> list[App]:
- return list(db.scalars(select(App).order_by(App.id)).all())
-
-
-def get_app(db: Session, app_id: str) -> App | None:
- return db.scalar(select(App).where(App.id == app_id.lower()))
-
-
-def create_app(
- db: Session,
- *,
- app_id: str,
- display_name: str = "",
- category: str = "other",
- description: str = "",
- homepage: str | None = None,
- license: str | None = None,
-) -> App:
- app_id = app_id.strip().lower()
- if not app_id:
- raise ValueError("app id must be non-empty")
- app = App(
- id=app_id,
- display_name=display_name or app_id,
- category=category,
- description=description,
- homepage=homepage,
- license=license,
- )
- db.add(app)
- db.commit()
- return app
-
-
-def update_app(
- db: Session,
- app_id: str,
- *,
- display_name: str | None = None,
- category: str | None = None,
- description: str | None = None,
- homepage: str | None = None,
- license: str | None = None,
-) -> App | None:
- app = get_app(db, app_id)
- if not app:
- return None
- if display_name is not None:
- app.display_name = display_name
- if category is not None:
- app.category = category
- if description is not None:
- app.description = description
- if homepage is not None:
- app.homepage = homepage or None
- if license is not None:
- app.license = license or None
- db.commit()
- return app
-
-
-def delete_app(db: Session, app_id: str) -> bool:
- app = get_app(db, app_id)
- if not app:
- return False
- db.delete(app)
- db.commit()
- return True
-
-
-def add_release(
- db: Session,
- app_id: str,
- version: str,
- *,
- major_minor: str = "",
- channel: str = "stable",
- released_at: str | None = None,
-) -> Release | None:
- app = get_app(db, app_id)
- if not app:
- return None
- rel = Release(
- app_id=app.id,
- version=version,
- major_minor=major_minor or _major_minor(version),
- channel=channel,
- released_at=released_at,
- )
- db.add(rel)
- db.commit()
- return rel
-
-
-def delete_release(db: Session, release_id: int) -> bool:
- rel = db.get(Release, release_id)
- if not rel:
- return False
- db.delete(rel)
- db.commit()
- return True
-
-
-def add_download(
- db: Session,
- release_id: int,
- *,
- url: str,
- os: str = "windows",
- arch: str = "x64",
- archive_type: str = "zip",
- source: str = "manual",
- headers: dict | None = None,
-) -> Download | None:
- rel = db.get(Release, release_id)
- if not rel:
- return None
- dl = Download(
- release_id=rel.id,
- url=url,
- os=os,
- arch=arch,
- archive_type=archive_type,
- source=source,
- headers=headers,
- )
- db.add(dl)
- db.commit()
- return dl
-
-
-def delete_download(db: Session, download_id: int) -> bool:
- dl = db.get(Download, download_id)
- if not dl:
- return False
- db.delete(dl)
- db.commit()
- return True
-
-
-def _major_minor(version: str) -> str:
- parts = version.split(".")
- return ".".join(parts[:2]) if len(parts) >= 2 else version
-
-
-# ── Auto-generator integration ──────────────────────────────────────────
-
-def apply_generated_releases(
- db: Session,
- app_id: str,
- releases: list[GenRelease],
- *,
- replace: bool = False,
-) -> int:
- """Persist scraped releases. Skips versions that already exist on
- the app unless `replace=True` (which wipes ALL releases first).
-
- Returns the number of NEW releases inserted.
- """
- app = get_app(db, app_id)
- if not app:
- return 0
-
- if replace:
- db.execute(delete(Release).where(Release.app_id == app.id))
- db.commit()
-
- existing = {r.version for r in app.releases}
- inserted = 0
- for gen in releases:
- if gen.version in existing:
- continue
- rel = Release(
- app_id=app.id,
- version=gen.version,
- major_minor=gen.major_minor or _major_minor(gen.version),
- channel=gen.channel,
- released_at=gen.released_at,
- )
- db.add(rel)
- db.flush() # need rel.id for downloads
- for gd in gen.downloads:
- db.add(Download(
- release_id=rel.id,
- url=gd.url,
- os=gd.os,
- arch=gd.arch,
- archive_type=gd.archive_type,
- source=gd.source,
- headers=gd.headers,
- ))
- inserted += 1
- db.commit()
- return inserted
-
-
-# ── Seed from existing JSON files on first run ──────────────────────────
-
-def seed_from_json(db: Session, data_dir: Path) -> int:
- """If the DB has zero apps, import every `*.json` file under
- `data_dir` so the service boots with a sensible catalog without
- requiring the admin to click Auto-generate for every app.
- """
- if db.scalar(select(App).limit(1)) is not None:
- return 0 # already seeded — no-op
-
- if not data_dir.is_dir():
- log.info("Seed dir not found, starting with empty catalog: %s", data_dir)
- return 0
-
- count = 0
- for path in sorted(data_dir.glob("*.json")):
- try:
- raw = json.loads(path.read_text(encoding="utf-8"))
- app_id = (raw.get("name") or path.stem).lower()
- app = App(
- id=app_id,
- display_name=raw.get("display_name", app_id),
- category=raw.get("category", "other"),
- description=raw.get("description", ""),
- homepage=raw.get("homepage"),
- license=raw.get("license"),
- )
- db.add(app)
- db.flush()
- for r in raw.get("releases", []):
- rel = Release(
- app_id=app.id,
- version=r.get("version", "0.0.0"),
- major_minor=r.get("major_minor") or _major_minor(r.get("version", "0.0.0")),
- channel=r.get("channel", "stable"),
- released_at=r.get("released_at"),
- )
- db.add(rel)
- db.flush()
- for d in r.get("downloads", []):
- db.add(Download(
- release_id=rel.id,
- url=d.get("url", ""),
- os=d.get("os", "windows"),
- arch=d.get("arch", "x64"),
- archive_type=d.get("archive_type", "zip"),
- source=d.get("source", "seed"),
- headers=d.get("headers"),
- ))
- count += 1
- except Exception as exc: # noqa: BLE001
- log.error("Seed parse failed for %s: %s", path, exc)
- db.commit()
- log.info("Seeded %d apps from %s", count, data_dir)
- return count
diff --git a/services/catalog-api/app/static/admin.css b/services/catalog-api/app/static/admin.css
deleted file mode 100644
index ed23a944..00000000
--- a/services/catalog-api/app/static/admin.css
+++ /dev/null
@@ -1,301 +0,0 @@
-:root {
- --bg: #0b0d17;
- --surface: #141827;
- --surface-2: #1e2336;
- --border: rgba(255,255,255,0.14);
- --border-strong: rgba(255,255,255,0.3);
- --text: #ffffff;
- --text-2: #cfd4ef;
- --text-3: #8690ad;
- --accent: #56c2ff;
- --accent-2: #7cffa5;
- --danger: #ff6b6b;
- --warning: #f59e0b;
- --success: #22c55e;
- font-family: -apple-system, 'Inter', 'Segoe UI', system-ui, sans-serif;
-}
-
-* { box-sizing: border-box; }
-
-body {
- margin: 0;
- min-height: 100vh;
- background: var(--bg);
- color: var(--text);
- font-size: 14px;
- line-height: 1.5;
-}
-
-code, pre {
- font-family: 'JetBrains Mono', 'Cascadia Code', monospace;
- font-size: 0.88em;
- color: var(--accent);
-}
-
-a { color: var(--accent); text-decoration: none; }
-a:hover { text-decoration: underline; }
-
-.topbar {
- display: flex;
- align-items: center;
- gap: 24px;
- padding: 14px 28px;
- background: var(--surface);
- border-bottom: 1px solid var(--border);
-}
-.brand {
- font-weight: 800;
- font-size: 1.05rem;
- letter-spacing: -0.01em;
- color: var(--text);
-}
-.nav {
- display: flex;
- gap: 18px;
- flex: 1;
-}
-.nav a {
- color: var(--text-2);
- font-weight: 600;
- font-size: 0.88rem;
-}
-.user { display: flex; align-items: center; gap: 10px; }
-.user-name { color: var(--text-3); font-size: 0.82rem; }
-
-.container {
- max-width: 1100px;
- margin: 0 auto;
- padding: 28px;
-}
-
-.footer {
- text-align: center;
- padding: 20px;
- color: var(--text-3);
- font-size: 0.76rem;
-}
-
-.page-head {
- display: flex;
- align-items: flex-start;
- justify-content: space-between;
- gap: 20px;
- margin-bottom: 24px;
-}
-.page-head h1 {
- margin: 0;
- font-size: 1.6rem;
- font-weight: 800;
- letter-spacing: -0.015em;
-}
-.page-head .actions { display: flex; gap: 8px; flex-wrap: wrap; }
-.back-link { font-size: 0.82rem; color: var(--text-3); }
-
-.muted { color: var(--text-3); }
-.description { color: var(--text-2); margin-top: 8px; max-width: 65ch; }
-
-.flash {
- padding: 12px 16px;
- border-radius: 6px;
- margin-bottom: 18px;
- font-weight: 600;
-}
-.flash-ok, .flash-success { background: rgba(34,197,94,0.12); color: var(--success); border-left: 3px solid var(--success); }
-.flash-error { background: rgba(255,107,107,0.12); color: var(--danger); border-left: 3px solid var(--danger); }
-.flash-info { background: rgba(86,194,255,0.12); color: var(--accent); border-left: 3px solid var(--accent); }
-
-.table {
- width: 100%;
- border-collapse: collapse;
- background: var(--surface);
- border: 1px solid var(--border);
- border-radius: 8px;
- overflow: hidden;
-}
-.table th, .table td { padding: 12px 16px; text-align: left; }
-.table th {
- background: var(--surface-2);
- font-size: 0.72rem;
- text-transform: uppercase;
- letter-spacing: 0.08em;
- color: var(--text-3);
- border-bottom: 1px solid var(--border);
-}
-.table tbody tr { border-bottom: 1px solid var(--border); }
-.table tbody tr:last-child { border-bottom: none; }
-.table tbody tr:hover { background: var(--surface-2); }
-.row-actions { text-align: right; }
-.empty { text-align: center; color: var(--text-3); padding: 30px; }
-
-.badge {
- display: inline-block;
- padding: 2px 8px;
- border-radius: 10px;
- background: var(--surface-2);
- font-size: 0.7rem;
- font-weight: 700;
- text-transform: uppercase;
- letter-spacing: 0.05em;
- color: var(--text-2);
-}
-.badge-webserver { background: #3182ce; color: #fff; }
-.badge-language { background: #4f5b93; color: #fff; }
-.badge-database { background: #047481; color: #fff; }
-.badge-cache { background: #dc2626; color: #fff; }
-.badge-mail { background: #047857; color: #fff; }
-.badge-tools { background: #f38020; color: #fff; }
-
-.btn {
- display: inline-flex;
- align-items: center;
- justify-content: center;
- gap: 6px;
- padding: 8px 16px;
- background: var(--surface-2);
- border: 1px solid var(--border);
- border-radius: 6px;
- color: var(--text);
- font-family: inherit;
- font-size: 0.82rem;
- font-weight: 600;
- cursor: pointer;
- text-decoration: none;
- transition: background 0.12s, border-color 0.12s;
-}
-.btn:hover { background: var(--surface); border-color: var(--border-strong); text-decoration: none; }
-.btn-primary { background: var(--accent); border-color: var(--accent); color: var(--bg); }
-.btn-primary:hover { background: var(--accent-2); border-color: var(--accent-2); color: var(--bg); }
-.btn-danger { background: var(--danger); border-color: var(--danger); color: var(--bg); }
-.btn-danger:hover { background: #e34949; border-color: #e34949; }
-.btn-ghost { background: transparent; border-color: var(--border); }
-.btn-sm { padding: 4px 10px; font-size: 0.76rem; }
-.btn-xs { padding: 2px 8px; font-size: 0.7rem; }
-.inline { display: inline; }
-
-.card {
- background: var(--surface);
- border: 1px solid var(--border);
- border-radius: 8px;
- margin-bottom: 20px;
- overflow: hidden;
-}
-.card-header {
- display: flex;
- align-items: center;
- justify-content: space-between;
- padding: 14px 18px;
- background: var(--surface-2);
- border-bottom: 1px solid var(--border);
-}
-.card-header h2 {
- margin: 0;
- font-size: 0.82rem;
- font-weight: 700;
- text-transform: uppercase;
- letter-spacing: 0.08em;
-}
-.card-body { padding: 18px; }
-.card-footer {
- padding: 14px 18px;
- border-top: 1px solid var(--border);
- display: flex;
- gap: 8px;
-}
-
-.release {
- padding: 16px 0;
- border-bottom: 1px dashed var(--border);
-}
-.release:last-child { border-bottom: none; }
-.release-head {
- display: flex;
- align-items: center;
- justify-content: space-between;
-}
-.release-head h3 { margin: 0; font-size: 0.92rem; font-weight: 700; }
-
-.sub-table {
- width: 100%;
- margin-top: 10px;
- border-collapse: collapse;
-}
-.sub-table th, .sub-table td {
- padding: 6px 10px;
- font-size: 0.78rem;
- border-bottom: 1px solid rgba(255,255,255,0.06);
-}
-.sub-table th { color: var(--text-3); font-weight: 600; text-align: left; }
-.url-cell { max-width: 420px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
-
-.add-download {
- margin-top: 8px;
- border-top: 1px solid var(--border);
- padding-top: 8px;
-}
-.add-download summary {
- cursor: pointer;
- font-size: 0.78rem;
- color: var(--text-3);
- font-weight: 600;
-}
-
-.form {
- display: flex;
- flex-direction: column;
- gap: 14px;
-}
-.form label {
- display: flex;
- flex-direction: column;
- gap: 4px;
- font-size: 0.78rem;
- font-weight: 600;
- color: var(--text-3);
- text-transform: uppercase;
- letter-spacing: 0.06em;
-}
-.form input, .form textarea, .form select {
- padding: 10px 12px;
- background: var(--surface-2);
- border: 1px solid var(--border);
- border-radius: 5px;
- color: var(--text);
- font-family: inherit;
- font-size: 0.88rem;
- text-transform: none;
- letter-spacing: normal;
-}
-.form input:focus, .form textarea:focus, .form select:focus {
- outline: none;
- border-color: var(--accent);
-}
-
-.form-inline {
- display: flex;
- flex-wrap: wrap;
- gap: 8px;
- align-items: center;
-}
-.form-inline input, .form-inline select {
- padding: 6px 10px;
- background: var(--surface-2);
- border: 1px solid var(--border);
- border-radius: 5px;
- color: var(--text);
- font-size: 0.82rem;
-}
-.form-inline input[type="url"] { min-width: 320px; flex: 1; }
-
-.login-card {
- max-width: 400px;
- margin: 60px auto 0;
- padding: 30px;
- background: var(--surface);
- border: 1px solid var(--border);
- border-radius: 10px;
-}
-.login-card h1 {
- margin: 0 0 6px;
- font-size: 1.4rem;
- font-weight: 800;
-}
diff --git a/services/catalog-api/app/templates/app_detail.html b/services/catalog-api/app/templates/app_detail.html
deleted file mode 100644
index 339e9d35..00000000
--- a/services/catalog-api/app/templates/app_detail.html
+++ /dev/null
@@ -1,134 +0,0 @@
-{% extends "base.html" %}
-{% block title %}{{ app.display_name or app.id }} — NKS WDC Catalog{% endblock %}
-{% block content %}
-
-
-
← Apps
-
{{ app.display_name or app.id }}
-
- {{ app.id }}
- · {{ app.category }}
- {% if app.license %}· {{ app.license }}{% endif %}
-
- {% if app.homepage %}
-
{{ app.homepage }}
- {% endif %}
- {% if app.description %}
-
{{ app.description }}
- {% endif %}
-
-
-
Edit
- {% if has_generator %}
-
- {% endif %}
-
-
-
-
-
-
-
- {% for release in app.releases %}
-
-
-
- v{{ release.version }}
- {{ release.channel }}
- {% if release.released_at %}
- · {{ release.released_at }}
- {% endif %}
-
-
-
-
-
-
- | OS |
- Arch |
- Archive |
- URL |
- Source |
- |
-
-
-
- {% for dl in release.downloads %}
-
- | {{ dl.os }} |
- {{ dl.arch }} |
- {{ dl.archive_type }} |
-
- {{ dl.url }}
- |
- {{ dl.source }} |
-
-
- |
-
- {% endfor %}
-
-
-
-
- + Add download
-
-
-
- {% else %}
-
No releases yet.
- {% endfor %}
-
-
-
-
-{% endblock %}
diff --git a/services/catalog-api/app/templates/app_form.html b/services/catalog-api/app/templates/app_form.html
deleted file mode 100644
index e643839b..00000000
--- a/services/catalog-api/app/templates/app_form.html
+++ /dev/null
@@ -1,49 +0,0 @@
-{% extends "base.html" %}
-{% block title %}{{ 'Edit ' + app.id if app else 'New app' }} — NKS WDC Catalog{% endblock %}
-{% block content %}
-
-
-
← Back
-
{{ 'Edit ' + (app.display_name or app.id) if app else 'New app' }}
-
-
-
-
-{% endblock %}
diff --git a/services/catalog-api/app/templates/apps_list.html b/services/catalog-api/app/templates/apps_list.html
deleted file mode 100644
index 36d53d1e..00000000
--- a/services/catalog-api/app/templates/apps_list.html
+++ /dev/null
@@ -1,49 +0,0 @@
-{% extends "base.html" %}
-{% block title %}Apps — NKS WDC Catalog{% endblock %}
-{% block content %}
-
-
-
Apps
-
{{ apps|length }} app{{ 's' if apps|length != 1 else '' }} in the catalog
-
-
-
-
-
-
-
- | ID |
- Name |
- Category |
- Releases |
- Last updated |
- |
-
-
-
- {% for app in apps %}
-
- {{ app.id }} |
- {{ app.display_name or app.id }} |
- {{ app.category }} |
- {{ app.releases|length }} |
- {{ app.updated_at.strftime('%Y-%m-%d %H:%M') if app.updated_at else '—' }} |
-
- Manage
- |
-
- {% else %}
-
-
- No apps yet.
- Create one
- or import from the seed JSON with
- POST /admin/seed.
- |
-
- {% endfor %}
-
-
-{% endblock %}
diff --git a/services/catalog-api/app/templates/base.html b/services/catalog-api/app/templates/base.html
deleted file mode 100644
index 25bfb262..00000000
--- a/services/catalog-api/app/templates/base.html
+++ /dev/null
@@ -1,38 +0,0 @@
-
-
-
-
-
- {% block title %}NKS WDC Catalog{% endblock %}
-
-
-
-
-
-
- {% if flash %}
- {{ flash.message }}
- {% endif %}
- {% block content %}{% endblock %}
-
-
-
-
-
diff --git a/services/catalog-api/app/templates/login.html b/services/catalog-api/app/templates/login.html
deleted file mode 100644
index 770fe004..00000000
--- a/services/catalog-api/app/templates/login.html
+++ /dev/null
@@ -1,22 +0,0 @@
-{% extends "base.html" %}
-{% block title %}Log in — NKS WDC Catalog{% endblock %}
-{% block content %}
-
-{% endblock %}
diff --git a/services/catalog-api/docker-compose.yml b/services/catalog-api/docker-compose.yml
deleted file mode 100644
index ba0eafc1..00000000
--- a/services/catalog-api/docker-compose.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-services:
- catalog-api:
- build: .
- image: nks-wdc-catalog-api:latest
- container_name: nks-wdc-catalog-api
- restart: unless-stopped
- ports:
- - "8765:8765"
- environment:
- - NKS_WDC_CATALOG_STATE_DIR=/state
- - LOG_LEVEL=INFO
- volumes:
- # Persist config-sync uploads outside the container image
- - catalog-state:/state
- # Hot-reload the catalog JSONs without rebuilding the image — drop
- # an updated file into this folder and POST /api/v1/catalog/reload
- - ./app/data/apps:/srv/app/app/data/apps:ro
- healthcheck:
- test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8765/healthz').read()"]
- interval: 30s
- timeout: 5s
- retries: 3
-
-volumes:
- catalog-state:
diff --git a/services/catalog-api/pyproject.toml b/services/catalog-api/pyproject.toml
deleted file mode 100644
index b5f137a0..00000000
--- a/services/catalog-api/pyproject.toml
+++ /dev/null
@@ -1,27 +0,0 @@
-[project]
-name = "nks-wdc-catalog-api"
-version = "0.1.0"
-description = "NKS WebDev Console — cloud catalog + config sync FastAPI service"
-authors = [{ name = "NKS" }]
-license = { text = "Apache-2.0" }
-requires-python = ">=3.11"
-dependencies = [
- "fastapi>=0.115",
- "uvicorn[standard]>=0.32",
- "pydantic>=2.9",
-]
-
-[project.optional-dependencies]
-dev = [
- "httpx>=0.27",
- "pytest>=8.3",
- "pytest-asyncio>=0.24",
-]
-
-[build-system]
-requires = ["setuptools>=68"]
-build-backend = "setuptools.build_meta"
-
-[tool.setuptools.packages.find]
-where = ["."]
-include = ["app*"]
diff --git a/services/catalog-api/requirements.txt b/services/catalog-api/requirements.txt
deleted file mode 100644
index 11077571..00000000
--- a/services/catalog-api/requirements.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-fastapi>=0.115
-uvicorn[standard]>=0.32
-pydantic>=2.9
-sqlalchemy>=2.0
-jinja2>=3.1
-python-multipart>=0.0.17
-bcrypt>=4.2
-itsdangerous>=2.2
-httpx>=0.27
-beautifulsoup4>=4.12
-python-jose[cryptography]>=3.3
diff --git a/services/catalog-api/run.cmd b/services/catalog-api/run.cmd
deleted file mode 100644
index 0b203e16..00000000
--- a/services/catalog-api/run.cmd
+++ /dev/null
@@ -1,25 +0,0 @@
-@echo off
-REM NKS WDC Catalog API — local dev launcher (Windows)
-REM Creates a venv in .venv, installs deps, and runs uvicorn on :8765.
-
-setlocal
-cd /d "%~dp0"
-
-if not exist ".venv\Scripts\python.exe" (
- echo [catalog-api] Creating virtualenv...
- py -3 -m venv .venv
- if errorlevel 1 (
- echo [catalog-api] Failed to create venv. Is Python 3.11+ installed?
- exit /b 1
- )
-)
-
-call ".venv\Scripts\activate.bat"
-
-echo [catalog-api] Installing / updating dependencies...
-python -m pip install --quiet --upgrade pip
-python -m pip install --quiet -r requirements.txt
-
-echo [catalog-api] Starting uvicorn on http://127.0.0.1:8765
-python -m uvicorn app.main:app --host 127.0.0.1 --port 8765 --reload
-endlocal
diff --git a/services/catalog-api/tests/conftest.py b/services/catalog-api/tests/conftest.py
deleted file mode 100644
index d03b6afc..00000000
--- a/services/catalog-api/tests/conftest.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""Shared test configuration — ensures isolated DB state."""
-
-import os
-import tempfile
-
-# Set ONCE before any app module imports. All test files share this
-# same temp directory so the SQLAlchemy engine singleton connects to
-# the same SQLite file across the entire test session.
-if "NKS_WDC_CATALOG_STATE_DIR" not in os.environ:
- os.environ["NKS_WDC_CATALOG_STATE_DIR"] = tempfile.mkdtemp(prefix="nks-wdc-test-")
- os.environ["NKS_WDC_CATALOG_DEV"] = "1"
diff --git a/services/catalog-api/tests/test_auth.py b/services/catalog-api/tests/test_auth.py
deleted file mode 100644
index 4e2d2570..00000000
--- a/services/catalog-api/tests/test_auth.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""Tests for auth utilities — password hashing + verification + JWT."""
-
-from app.auth import hash_password, verify_password
-from app.devices import create_token, decode_token
-
-
-class TestPasswordHashing:
- def test_hash_returns_bcrypt_string(self):
- h = hash_password("testpassword123")
- assert h.startswith("$2b$") or h.startswith("$2a$")
- assert len(h) == 60
-
- def test_verify_correct_password(self):
- h = hash_password("mySecret!")
- assert verify_password("mySecret!", h) is True
-
- def test_verify_wrong_password(self):
- h = hash_password("correct")
- assert verify_password("wrong", h) is False
-
- def test_different_hashes_for_same_password(self):
- h1 = hash_password("same")
- h2 = hash_password("same")
- assert h1 != h2 # bcrypt uses random salt
-
- def test_empty_password_hashes(self):
- h = hash_password("")
- assert verify_password("", h) is True
- assert verify_password("notempty", h) is False
-
-
-class TestJWT:
- def test_create_and_decode_roundtrip(self):
- token = create_token(42, "user@test.com")
- payload = decode_token(token)
- assert payload["sub"] == "42"
- assert payload["email"] == "user@test.com"
- assert "exp" in payload
-
- def test_token_is_string(self):
- token = create_token(1, "a@b.com")
- assert isinstance(token, str)
- assert len(token) > 20
-
- def test_decode_invalid_token_raises(self):
- import pytest
- with pytest.raises(Exception):
- decode_token("not.a.valid.token")
-
-
diff --git a/services/catalog-api/tests/test_devices.py b/services/catalog-api/tests/test_devices.py
deleted file mode 100644
index 6dff4190..00000000
--- a/services/catalog-api/tests/test_devices.py
+++ /dev/null
@@ -1,178 +0,0 @@
-"""Tests for account registration, JWT auth, and device management."""
-
-from __future__ import annotations
-
-import pytest
-from fastapi.testclient import TestClient
-from app.main import app
-
-
-@pytest.fixture(scope="module")
-def client() -> TestClient:
- with TestClient(app) as c:
- yield c
-
-
-_test_email = f"test-{__import__('uuid').uuid4().hex[:8]}@nks-wdc.dev"
-_test_password = "testpass123"
-
-
-@pytest.fixture(scope="module")
-def auth_token(client: TestClient) -> str:
- r = client.post("/api/v1/auth/register", json={
- "email": _test_email,
- "password": _test_password,
- })
- assert r.status_code == 200, f"Register failed ({r.status_code}): {r.text}"
- return r.json()["token"]
-
-
-def test_register_creates_account(client: TestClient) -> None:
- r = client.post("/api/v1/auth/register", json={
- "email": "another@nks-wdc.dev",
- "password": "pass4567long",
- })
- assert r.status_code == 200
- body = r.json()
- assert body["email"] == "another@nks-wdc.dev"
- assert "token" in body
-
-
-def test_register_duplicate_email_rejects(client: TestClient, auth_token: str) -> None:
- # auth_token fixture registers test@nks-wdc.dev first, so this is a dup
- r = client.post("/api/v1/auth/register", json={
- "email": _test_email,
- "password": "anything",
- })
- assert r.status_code == 409
-
-
-def test_login_valid_credentials(client: TestClient) -> None:
- r = client.post("/api/v1/auth/login", json={
- "email": _test_email,
- "password": "testpass123",
- })
- assert r.status_code == 200
- assert r.json()["email"] == _test_email
-
-
-def test_login_bad_password(client: TestClient) -> None:
- r = client.post("/api/v1/auth/login", json={
- "email": _test_email,
- "password": "wrong",
- })
- assert r.status_code == 401
-
-
-def test_me_returns_account(client: TestClient, auth_token: str) -> None:
- r = client.get("/api/v1/auth/me", headers={"Authorization": f"Bearer {auth_token}"})
- assert r.status_code == 200
- assert r.json()["email"] == _test_email
-
-
-def test_me_rejects_no_token(client: TestClient) -> None:
- r = client.get("/api/v1/auth/me")
- assert r.status_code == 401
-
-
-def test_devices_empty_initially(client: TestClient, auth_token: str) -> None:
- r = client.get("/api/v1/devices", headers={"Authorization": f"Bearer {auth_token}"})
- assert r.status_code == 200
- assert r.json() == []
-
-
-def test_sync_push_auto_links_device(client: TestClient, auth_token: str) -> None:
- r = client.post("/api/v1/sync/config", json={
- "device_id": "test-device-001",
- "payload": {
- "settings": {"sync.deviceName": "Test PC"},
- "sites": [{"domain": "a.loc"}, {"domain": "b.loc"}],
- "system": {"os": {"tag": "windows", "arch": "x64"}},
- },
- }, headers={"Authorization": f"Bearer {auth_token}"})
- assert r.status_code == 200
-
- # Device should now appear in the fleet
- r2 = client.get("/api/v1/devices", headers={"Authorization": f"Bearer {auth_token}"})
- devices = r2.json()
- assert len(devices) == 1
- d = devices[0]
- assert d["device_id"] == "test-device-001"
- assert d["name"] == "Test PC"
- assert d["os"] == "windows"
- assert d["arch"] == "x64"
- assert d["site_count"] == 2
-
-
-def test_device_config_readable(client: TestClient, auth_token: str) -> None:
- r = client.get(
- "/api/v1/devices/test-device-001/config",
- headers={"Authorization": f"Bearer {auth_token}"},
- )
- assert r.status_code == 200
- assert r.json()["payload"]["settings"]["sync.deviceName"] == "Test PC"
-
-
-def test_list_devices_is_current_flag(client: TestClient, auth_token: str) -> None:
- """The caller can pass ?current_device_id to flag its own row with
- is_current=true. Without the param all rows stay is_current=false
- (back-compat with pre-flag clients)."""
- # No param → all False
- r = client.get("/api/v1/devices", headers={"Authorization": f"Bearer {auth_token}"})
- assert r.status_code == 200
- assert all(d["is_current"] is False for d in r.json())
-
- # With matching param → exactly one True
- r = client.get(
- "/api/v1/devices?current_device_id=test-device-001",
- headers={"Authorization": f"Bearer {auth_token}"},
- )
- assert r.status_code == 200
- flagged = [d for d in r.json() if d["is_current"]]
- assert len(flagged) == 1
- assert flagged[0]["device_id"] == "test-device-001"
-
- # With non-matching param → all False (no crash)
- r = client.get(
- "/api/v1/devices?current_device_id=nonexistent-device",
- headers={"Authorization": f"Bearer {auth_token}"},
- )
- assert r.status_code == 200
- assert all(d["is_current"] is False for d in r.json())
-
-
-def test_push_config_between_devices(client: TestClient, auth_token: str) -> None:
- # Create a second device
- client.post("/api/v1/sync/config", json={
- "device_id": "test-device-002",
- "payload": {"settings": {}, "sites": []},
- }, headers={"Authorization": f"Bearer {auth_token}"})
-
- # Push from device-001 to device-002
- r = client.post(
- "/api/v1/devices/test-device-002/push-config",
- json={"source_device_id": "test-device-001"},
- headers={"Authorization": f"Bearer {auth_token}"},
- )
- assert r.status_code == 200
- assert r.json()["pushed_from"] == "test-device-001"
-
- # Verify target now has the source's payload
- r2 = client.get(
- "/api/v1/devices/test-device-002/config",
- headers={"Authorization": f"Bearer {auth_token}"},
- )
- assert r2.json()["payload"]["settings"]["sync.deviceName"] == "Test PC"
-
-
-def test_delete_device(client: TestClient, auth_token: str) -> None:
- r = client.delete(
- "/api/v1/devices/test-device-002",
- headers={"Authorization": f"Bearer {auth_token}"},
- )
- assert r.status_code == 200
- assert r.json()["removed"] == "test-device-002"
-
- # Should be gone from fleet
- r2 = client.get("/api/v1/devices", headers={"Authorization": f"Bearer {auth_token}"})
- assert len(r2.json()) == 1
diff --git a/services/catalog-api/tests/test_generators.py b/services/catalog-api/tests/test_generators.py
deleted file mode 100644
index b07bdf02..00000000
--- a/services/catalog-api/tests/test_generators.py
+++ /dev/null
@@ -1,218 +0,0 @@
-"""Tests for the catalog-api auto-generators.
-
-Verifies that every registered generator is callable and returns the
-expected GenRelease structure. The MySQL generator specifically tests
-the fallback path since the live scrape depends on dev.mysql.com being
-reachable (unreliable in CI).
-"""
-
-from __future__ import annotations
-
-import pytest
-from app.generators import (
- GENERATORS,
- GenRelease,
- available_generators,
- run_generator,
- generate_mysql,
- _mysql_fallback,
-)
-
-
-class TestGeneratorRegistry:
- def test_all_generators_registered(self):
- expected = {
- "cloudflared", "mailpit", "caddy", "redis",
- "php", "apache", "nginx", "mariadb", "mysql", "node",
- }
- assert set(GENERATORS.keys()) == expected
-
- def test_available_generators_matches_registry(self):
- assert set(available_generators()) == set(GENERATORS.keys())
-
- def test_run_generator_unknown_returns_empty(self):
- assert run_generator("nonexistent-app") == []
-
-
-class TestMySQLGenerator:
- def test_fallback_returns_releases(self):
- releases = _mysql_fallback(limit=5)
- assert len(releases) > 0
- assert len(releases) <= 5
- for rel in releases:
- assert isinstance(rel, GenRelease)
- assert rel.version
- assert rel.major_minor
- assert len(rel.downloads) > 0
- assert "mysql" in rel.downloads[0].url.lower()
- assert rel.downloads[0].os == "windows"
- assert rel.downloads[0].arch == "x64"
-
- def test_fallback_versions_are_semver(self):
- releases = _mysql_fallback(limit=10)
- for rel in releases:
- parts = rel.version.split(".")
- assert len(parts) == 3, f"Version {rel.version} is not semver"
- for part in parts:
- assert part.isdigit(), f"Version segment '{part}' in {rel.version} is not numeric"
-
- def test_fallback_limit_respected(self):
- assert len(_mysql_fallback(limit=2)) == 2
- assert len(_mysql_fallback(limit=1)) == 1
-
- def test_generate_mysql_returns_list(self):
- # This may hit the network or fall back — either way must return a list.
- result = generate_mysql(limit=3)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
-
- def test_fallback_urls_contain_version(self):
- releases = _mysql_fallback(limit=4)
- for rel in releases:
- for dl in rel.downloads:
- assert rel.version in dl.url, f"URL {dl.url} should contain version {rel.version}"
- assert "dev.mysql.com" in dl.url
-
- def test_fallback_major_minor_matches_version(self):
- releases = _mysql_fallback(limit=4)
- for rel in releases:
- expected_mm = ".".join(rel.version.split(".")[:2])
- assert rel.major_minor == expected_mm
-
-
-class TestNodeGenerator:
- def test_generate_node_returns_list(self):
- from app.generators import generate_node
- result = generate_node(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert not rel.version.startswith("v")
-
- def test_generate_node_has_multi_platform(self):
- from app.generators import generate_node
- result = generate_node(limit=1)
- if result:
- downloads = result[0].downloads
- os_set = {d.os for d in downloads}
- assert "windows" in os_set or len(downloads) > 0
-
- def test_generate_node_channel_detection(self):
- from app.generators import generate_node
- result = generate_node(limit=5)
- channels = {r.channel for r in result}
- assert channels <= {"stable", "lts"}
-
-
-class TestPHPGenerator:
- def test_generate_php_returns_list(self):
- from app.generators import generate_php
- result = generate_php(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert rel.version
- assert rel.major_minor
-
- def test_generate_php_major_minor_format(self):
- from app.generators import generate_php
- result = generate_php(limit=3)
- for rel in result:
- parts = rel.major_minor.split(".")
- assert len(parts) == 2, f"major_minor {rel.major_minor} should be X.Y"
-
-
-class TestMariaDBGenerator:
- def test_generate_mariadb_returns_list(self):
- from app.generators import generate_mariadb
- result = generate_mariadb(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert "mariadb.org" in rel.downloads[0].source if rel.downloads else True
-
- def test_generate_mariadb_downloads_are_zip(self):
- from app.generators import generate_mariadb
- result = generate_mariadb(limit=1)
- for rel in result:
- for dl in rel.downloads:
- assert dl.archive_type == "zip"
- assert dl.os == "windows"
-
-
-class TestMailpitGenerator:
- def test_generate_mailpit_returns_list(self):
- from app.generators import generate_mailpit
- result = generate_mailpit(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert not rel.version.startswith("v")
-
-
-class TestCaddyGenerator:
- def test_generate_caddy_returns_list(self):
- from app.generators import generate_caddy
- result = generate_caddy(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert not rel.version.startswith("v")
-
-
-class TestRedisGenerator:
- def test_generate_redis_returns_list(self):
- from app.generators import generate_redis
- result = generate_redis(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert not rel.version.startswith("v")
-
-
-class TestNginxGenerator:
- def test_generate_nginx_returns_list(self):
- from app.generators import generate_nginx
- result = generate_nginx(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert "." in rel.version
- for dl in rel.downloads:
- assert "nginx" in dl.url
-
-
-class TestCloudflaredGenerator:
- def test_generate_cloudflared_has_exe(self):
- from app.generators import generate_cloudflared
- result = generate_cloudflared(limit=1)
- if result and result[0].downloads:
- exts = {dl.archive_type for dl in result[0].downloads}
- assert len(exts) > 0
-
-
-class TestApacheGenerator:
- def test_generate_apache_returns_list(self):
- from app.generators import generate_apache
- result = generate_apache(limit=2)
- assert isinstance(result, list)
- for rel in result:
- assert isinstance(rel, GenRelease)
- assert rel.version
- assert "." in rel.version
-
-
-class TestGenReleaseStructure:
- """Spot-check that every generator's output conforms to GenRelease."""
-
- @pytest.mark.parametrize("app_id", list(GENERATORS.keys()))
- def test_generator_returns_valid_releases(self, app_id: str):
- # Run with limit=1 to minimize network calls in CI.
- # Some generators may return 0 if the upstream is unreachable.
- releases = run_generator(app_id, limit=1)
- assert isinstance(releases, list)
- for rel in releases:
- assert isinstance(rel, GenRelease)
- assert rel.version
- assert len(rel.downloads) >= 0
diff --git a/services/catalog-api/tests/test_service.py b/services/catalog-api/tests/test_service.py
deleted file mode 100644
index bf6cc2ee..00000000
--- a/services/catalog-api/tests/test_service.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""Tests for catalog service CRUD operations.
-
-Re-uses the state dir from test_devices.py (set at import-time in conftest
-or the first test module). The SQLAlchemy engine is a module-level singleton
-so setting NKS_WDC_CATALOG_STATE_DIR again would be ignored anyway.
-"""
-
-from __future__ import annotations
-
-from app.db import create_all, session_factory, App
-from app.service import create_app, list_apps
-from sqlalchemy import select
-
-
-class TestServiceCRUD:
- def test_create_app_returns_app(self):
- import uuid
- create_all()
- app_id = f"test-crud-{uuid.uuid4().hex[:6]}"
- with session_factory() as db:
- app = create_app(db, app_id=app_id, display_name="Test CRUD")
- assert app.id == app_id
-
- def test_list_apps_includes_created(self):
- import uuid
- create_all()
- app_id = f"list-{uuid.uuid4().hex[:6]}"
- with session_factory() as db:
- create_app(db, app_id=app_id, display_name="List Test")
- db.commit()
- with session_factory() as db:
- apps = list_apps(db)
- ids = [a.id for a in apps]
- assert app_id in ids
-
- def test_create_duplicate_raises(self):
- import uuid, pytest
- create_all()
- app_id = f"dup-{uuid.uuid4().hex[:6]}"
- with session_factory() as db:
- create_app(db, app_id=app_id, display_name="V1")
- db.commit()
- with session_factory() as db:
- with pytest.raises(Exception):
- create_app(db, app_id=app_id, display_name="V2")
- db.flush()
-
- def test_create_app_normalizes_id_to_lowercase(self):
- import uuid
- create_all()
- app_id = f"UPPER-{uuid.uuid4().hex[:6]}"
- with session_factory() as db:
- app = create_app(db, app_id=app_id, display_name="Test")
- assert app.id == app_id.strip().lower()
-
- def test_get_app_returns_none_for_unknown(self):
- from app.service import get_app
- create_all()
- with session_factory() as db:
- assert get_app(db, "totally-nonexistent-app") is None
-
- def test_create_app_empty_id_raises(self):
- import pytest
- create_all()
- with session_factory() as db:
- with pytest.raises(ValueError):
- create_app(db, app_id="", display_name="Bad")
diff --git a/services/catalog-api/tests/test_smoke.py b/services/catalog-api/tests/test_smoke.py
deleted file mode 100644
index 860fa9cc..00000000
--- a/services/catalog-api/tests/test_smoke.py
+++ /dev/null
@@ -1,212 +0,0 @@
-"""Smoke tests for the catalog-api FastAPI service.
-
-Uses FastAPI's TestClient to run the app in-process — no separate uvicorn
-server, no port collisions. Covers the public JSON contract the C# daemon
-depends on (schema, app keys, snake_case fields) so a schema drift breaks
-CI before it breaks the daemon.
-"""
-
-from __future__ import annotations
-
-import os
-from pathlib import Path
-
-import pytest
-
-# State dir set by conftest.py — no need to set again here.
-os.environ["NKS_WDC_CATALOG_DEV"] = "1" # bootstrap admin/admin
-
-from fastapi.testclient import TestClient # noqa: E402
-
-from app.main import app # noqa: E402
-
-
-@pytest.fixture(scope="module")
-def client() -> TestClient:
- # TestClient's __enter__ runs the FastAPI lifespan so the SQLite
- # schema is created and seed JSONs imported.
- with TestClient(app) as c:
- yield c
-
-
-def test_healthz_returns_ok(client: TestClient) -> None:
- r = client.get("/healthz")
- assert r.status_code == 200
- body = r.json()
- assert body["ok"] is True
- assert body["service"] == "nks-wdc-catalog-api"
-
-
-def test_catalog_contains_seeded_apps(client: TestClient) -> None:
- r = client.get("/api/v1/catalog")
- assert r.status_code == 200
- body = r.json()
- assert body["schema_version"] == "1"
- # Every seed JSON under app/data/apps/ should round-trip into the db.
- expected = {
- "apache", "caddy", "cloudflared", "mailpit", "mariadb",
- "mkcert", "mysql", "nginx", "php", "redis",
- }
- assert expected.issubset(set(body["apps"].keys())), \
- f"missing apps: {expected - set(body['apps'].keys())}"
-
-
-def test_catalog_app_shape_matches_csharp_contract(client: TestClient) -> None:
- """Every AppDoc must serialize as snake_case fields the CatalogClient
- C# DTOs expect — catching Pydantic alias regressions early."""
- r = client.get("/api/v1/catalog/cloudflared")
- assert r.status_code == 200
- doc = r.json()
- assert doc["name"] == "cloudflared"
- assert "display_name" in doc
- assert "category" in doc
- assert "releases" in doc and isinstance(doc["releases"], list)
- if doc["releases"]:
- rel = doc["releases"][0]
- assert "version" in rel
- assert "major_minor" in rel
- assert "downloads" in rel
- for dl in rel["downloads"]:
- # Fields CatalogClient.cs reads
- assert "url" in dl
- assert "os" in dl
- assert "arch" in dl
- assert "archive_type" in dl
- assert "source" in dl
-
-
-def test_unknown_app_returns_404(client: TestClient) -> None:
- r = client.get("/api/v1/catalog/definitely-not-an-app")
- assert r.status_code == 404
-
-
-def test_login_rejects_bad_credentials(client: TestClient) -> None:
- r = client.post(
- "/login",
- data={"username": "admin", "password": "wrong"},
- follow_redirects=False,
- )
- assert r.status_code == 401
-
-
-def test_admin_requires_auth(client: TestClient) -> None:
- r = client.get("/admin", follow_redirects=False)
- assert r.status_code in (302, 401)
-
-
-def test_login_accepts_dev_admin(client: TestClient) -> None:
- r = client.post(
- "/login",
- data={"username": "admin", "password": "admin"},
- follow_redirects=False,
- )
- assert r.status_code == 303
- assert "nks_wdc_catalog_session" in r.cookies
-
-
-def test_config_sync_round_trip(client: TestClient) -> None:
- device_id = "test-device-12345"
- payload = {"sites": [{"domain": "blog.loc"}], "version": 1}
-
- # Upsert
- r = client.post(
- "/api/v1/sync/config",
- json={"device_id": device_id, "payload": payload},
- )
- assert r.status_code == 200
- body = r.json()
- assert body["device_id"] == device_id
- assert body["payload"] == payload
-
- # Fetch back
- r = client.get(f"/api/v1/sync/config/{device_id}")
- assert r.status_code == 200
- assert r.json()["payload"] == payload
-
- # Exists probe
- r = client.get(f"/api/v1/sync/config/{device_id}/exists")
- assert r.status_code == 200
- assert r.json()["has_config"] is True
-
- # Delete
- r = client.delete(f"/api/v1/sync/config/{device_id}")
- assert r.status_code == 200
- assert r.json()["removed"] is True
-
- # Post-delete fetch returns 404
- r = client.get(f"/api/v1/sync/config/{device_id}")
- assert r.status_code == 404
-
-
-def test_config_sync_rejects_invalid_device_id_on_upsert(client: TestClient) -> None:
- """Device IDs that don't match the strict shape must fail with 400,
- not silently store garbage rows the admin UI can't interpret."""
- # Empty
- r = client.post("/api/v1/sync/config", json={"device_id": "", "payload": {}})
- assert r.status_code == 400
-
- # Uppercase (we lowercase, but "A!" has a shell metachar)
- r = client.post("/api/v1/sync/config", json={"device_id": "A!", "payload": {}})
- assert r.status_code == 400
-
- # Path traversal attempt
- r = client.post(
- "/api/v1/sync/config",
- json={"device_id": "../etc/passwd", "payload": {}},
- )
- assert r.status_code == 400
-
- # Too short (< 3 chars)
- r = client.post("/api/v1/sync/config", json={"device_id": "ab", "payload": {}})
- assert r.status_code == 400
-
- # Too long (> 64 chars)
- r = client.post(
- "/api/v1/sync/config",
- json={"device_id": "a" * 65, "payload": {}},
- )
- assert r.status_code == 400
-
- # Whitespace-only
- r = client.post("/api/v1/sync/config", json={"device_id": " ", "payload": {}})
- assert r.status_code == 400
-
-
-def test_config_sync_rejects_invalid_device_id_on_get(client: TestClient) -> None:
- """GET endpoints also validate so a malformed URL returns 400, not 404
- (which would confuse clients into retrying a lookup that will never
- succeed). Note: path-segment '..' would be URL-normalized into the
- parent route by the test client, so we use characters that fail the
- validation regex without altering the matched route."""
- # 'A!' includes an upper-case letter (which we lowercase) but also '!'
- # which isn't in the [a-z0-9-] charset — so after normalization it's
- # still rejected.
- r = client.get("/api/v1/sync/config/A!")
- assert r.status_code == 400
-
- # Two characters total — below the 3-char minimum
- r = client.get("/api/v1/sync/config/ab")
- assert r.status_code == 400
-
- # Leading dash is also invalid per the [a-z0-9] start requirement
- r = client.get("/api/v1/sync/config/-foo")
- assert r.status_code == 400
-
-
-def test_config_sync_device_id_normalized_to_lowercase(client: TestClient) -> None:
- """Clients can upload with mixed case — it gets stored lowercased.
- Verifies the normalization actually happens both on write and echo."""
- r = client.post(
- "/api/v1/sync/config",
- json={"device_id": "MixedCase-DEVICE", "payload": {"marker": True}},
- )
- assert r.status_code == 200
- assert r.json()["device_id"] == "mixedcase-device"
-
- # Fetch with original casing — should still find it
- r = client.get("/api/v1/sync/config/MixedCase-DEVICE")
- assert r.status_code == 200
- assert r.json()["payload"]["marker"] is True
-
- # Cleanup
- client.delete("/api/v1/sync/config/mixedcase-device")