From 180131e7ed7d1612cc49b7dfb746d73621328d4d Mon Sep 17 00:00:00 2001 From: CrazyVulcan Date: Wed, 25 Feb 2026 13:05:51 -0500 Subject: [PATCH] Wire Utopia app shell to normalized schema with API and drag-drop --- Frontiers/Utopia/.gitignore | 3 + Frontiers/Utopia/NOTES.md | 27 ++ Frontiers/Utopia/README.md | 33 ++ .../migrations/001_create_utopia_v2_down.sql | 12 + .../db/migrations/001_create_utopia_v2_up.sql | 124 +++++++ .../Utopia/db/seeds/seed_sample_data.sql | 19 + Frontiers/Utopia/main/run_utopia | 4 + Frontiers/Utopia/main/server.py | 349 ++++++++++++++++++ Frontiers/Utopia/main/static/app.js | 244 ++++++++++++ .../Utopia/main/static/domain/adapters.js | 53 +++ Frontiers/Utopia/main/static/index.html | 45 +++ Frontiers/Utopia/main/static/styles.css | 15 + Frontiers/Utopia/migrate_utopia_data | 4 + Frontiers/Utopia/schema.md | 102 +++++ .../Utopia/scripts/migrate_utopia_data.py | 138 +++++++ .../Utopia/scripts/verify_utopia_data.py | 51 +++ Frontiers/Utopia/tests/smoke_phase2.py | 61 +++ Frontiers/Utopia/utopia_data/__init__.py | 15 + Frontiers/Utopia/utopia_data/db.py | 27 ++ Frontiers/Utopia/utopia_data/models.py | 46 +++ Frontiers/Utopia/utopia_data/repositories.py | 99 +++++ Frontiers/Utopia/utopia_data/services.py | 60 +++ 22 files changed, 1531 insertions(+) create mode 100644 Frontiers/Utopia/.gitignore create mode 100644 Frontiers/Utopia/NOTES.md create mode 100644 Frontiers/Utopia/README.md create mode 100644 Frontiers/Utopia/db/migrations/001_create_utopia_v2_down.sql create mode 100644 Frontiers/Utopia/db/migrations/001_create_utopia_v2_up.sql create mode 100644 Frontiers/Utopia/db/seeds/seed_sample_data.sql create mode 100755 Frontiers/Utopia/main/run_utopia create mode 100644 Frontiers/Utopia/main/server.py create mode 100644 Frontiers/Utopia/main/static/app.js create mode 100644 Frontiers/Utopia/main/static/domain/adapters.js create mode 100644 Frontiers/Utopia/main/static/index.html create mode 100644 Frontiers/Utopia/main/static/styles.css 
create mode 100755 Frontiers/Utopia/migrate_utopia_data create mode 100644 Frontiers/Utopia/schema.md create mode 100755 Frontiers/Utopia/scripts/migrate_utopia_data.py create mode 100755 Frontiers/Utopia/scripts/verify_utopia_data.py create mode 100755 Frontiers/Utopia/tests/smoke_phase2.py create mode 100644 Frontiers/Utopia/utopia_data/__init__.py create mode 100644 Frontiers/Utopia/utopia_data/db.py create mode 100644 Frontiers/Utopia/utopia_data/models.py create mode 100644 Frontiers/Utopia/utopia_data/repositories.py create mode 100644 Frontiers/Utopia/utopia_data/services.py diff --git a/Frontiers/Utopia/.gitignore b/Frontiers/Utopia/.gitignore new file mode 100644 index 000000000..f94195ffc --- /dev/null +++ b/Frontiers/Utopia/.gitignore @@ -0,0 +1,3 @@ +db/*.sqlite +db/*.sqlite.bak.* +**/__pycache__/ diff --git a/Frontiers/Utopia/NOTES.md b/Frontiers/Utopia/NOTES.md new file mode 100644 index 000000000..4c5f1a1d6 --- /dev/null +++ b/Frontiers/Utopia/NOTES.md @@ -0,0 +1,27 @@ +# Phase 2 Wiring Notes + +## Recon + Diff map +- Current app entry path is now `main/static/index.html` + `main/static/app.js`, served by `main/server.py`. +- UI read/write now flows through REST bindings in `main/server.py` (bootstrap, board state, card/list mutations, views). +- Removed old schema assumptions from UI wiring: no category-bucket reads (`ships/captains/...`) and no denormalized single-array access in components. +- Replaced with relational assumptions: `board -> columns -> cards`, tags through `card_tags`, stable ordering by `position` with `id` tie-break. + + +## Updated entry points +- Added browser app entry at `main/static/index.html` and JS entry `main/static/app.js`. +- Added runtime launcher `main/run_utopia` (wraps `main/server.py`). + +## Updated state/store/data hooks +- Added canonical board-state adapter `main/static/domain/adapters.js`. +- UI now boots via `/api/bootstrap` and loads board state from `/api/boards/:id/state`. 
+- Added API writes for create board/list/card, edit card, move card, and save views. + +## Updated components/data contracts +- UI now uses canonical schema concepts: `boards`, `columns`, `cards`, `tags` (`card_tags` join), and `saved_views`. +- Legacy bucket assumptions are removed from UI wiring; rendering is column/card relational. +- Drag/drop uses numeric gap indexing (`position` midpoint strategy) and stable sort by `(position, id)`. + +## Remaining known follow-ups +- Server is stdlib HTTP (no auth/multi-user session isolation yet). +- Rebalancing logic for dense `position` values is not yet implemented (midpoint strategy in place). +- Activity stream exists in schema but is not surfaced in UI yet. diff --git a/Frontiers/Utopia/README.md b/Frontiers/Utopia/README.md new file mode 100644 index 000000000..5b1341db2 --- /dev/null +++ b/Frontiers/Utopia/README.md @@ -0,0 +1,33 @@ +# Utopia Phase 1+2: Data Layer + App Wiring + +This folder now contains: +- normalized schema/migrations/backfill tooling, +- repository/service data access layer, +- and a wired app shell that runs end-to-end against the new schema. + +## Run app + +```bash +Frontiers/Utopia/main/run_utopia +``` + +Open `http://127.0.0.1:8787`. + +## Data setup + +```bash +# Build schema + backfill from legacy data +Frontiers/Utopia/migrate_utopia_data --rebuild + +# Verify migrated integrity and counts +python3 Frontiers/Utopia/scripts/verify_utopia_data.py --db-path Frontiers/Utopia/db/utopia.sqlite +``` + +## Smoke test + +```bash +python3 Frontiers/Utopia/tests/smoke_phase2.py +``` + +See `schema.md` for ERD, normalization rationale, indexing strategy, and migration notes. +See `NOTES.md` for Phase 2 wiring details and known follow-ups. 
diff --git a/Frontiers/Utopia/db/migrations/001_create_utopia_v2_down.sql b/Frontiers/Utopia/db/migrations/001_create_utopia_v2_down.sql new file mode 100644 index 000000000..0033622c7 --- /dev/null +++ b/Frontiers/Utopia/db/migrations/001_create_utopia_v2_down.sql @@ -0,0 +1,12 @@ +PRAGMA foreign_keys = OFF; +DROP TABLE IF EXISTS activities; +DROP TABLE IF EXISTS saved_views; +DROP TABLE IF EXISTS card_links; +DROP TABLE IF EXISTS card_tags; +DROP TABLE IF EXISTS tags; +DROP TABLE IF EXISTS cards; +DROP TABLE IF EXISTS columns; +DROP TABLE IF EXISTS boards; +DROP TABLE IF EXISTS workspaces; +DROP TABLE IF EXISTS users; +PRAGMA foreign_keys = ON; diff --git a/Frontiers/Utopia/db/migrations/001_create_utopia_v2_up.sql b/Frontiers/Utopia/db/migrations/001_create_utopia_v2_up.sql new file mode 100644 index 000000000..cfb1141c5 --- /dev/null +++ b/Frontiers/Utopia/db/migrations/001_create_utopia_v2_up.sql @@ -0,0 +1,124 @@ +PRAGMA foreign_keys = ON; + +CREATE TABLE IF NOT EXISTS users ( + id TEXT PRIMARY KEY, + email TEXT UNIQUE, + display_name TEXT NOT NULL, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +CREATE TABLE IF NOT EXISTS workspaces ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + owner_user_id TEXT, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (owner_user_id) REFERENCES users(id) ON DELETE SET NULL +); + +CREATE TABLE IF NOT EXISTS boards ( + id TEXT PRIMARY KEY, + workspace_id TEXT NOT NULL, + name TEXT NOT NULL, + description TEXT, + archived_at TEXT, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE, + UNIQUE(workspace_id, name) +); + +CREATE TABLE IF NOT EXISTS columns ( + id TEXT PRIMARY KEY, + board_id TEXT NOT NULL, + name TEXT NOT NULL, + position REAL NOT NULL, + archived_at 
TEXT, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (board_id) REFERENCES boards(id) ON DELETE CASCADE, + UNIQUE(board_id, name) +); + +CREATE TABLE IF NOT EXISTS cards ( + id TEXT PRIMARY KEY, + board_id TEXT NOT NULL, + column_id TEXT NOT NULL, + title TEXT NOT NULL, + description TEXT, + card_type TEXT, + external_id TEXT, + position REAL NOT NULL, + metadata_json TEXT NOT NULL DEFAULT '{}', + archived_at TEXT, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (board_id) REFERENCES boards(id) ON DELETE CASCADE, + FOREIGN KEY (column_id) REFERENCES columns(id) ON DELETE CASCADE, + UNIQUE(board_id, card_type, external_id) +); + +CREATE TABLE IF NOT EXISTS tags ( + id TEXT PRIMARY KEY, + workspace_id TEXT NOT NULL, + name TEXT NOT NULL, + color TEXT, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE, + UNIQUE(workspace_id, name) +); + +CREATE TABLE IF NOT EXISTS card_tags ( + card_id TEXT NOT NULL, + tag_id TEXT NOT NULL, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (card_id, tag_id), + FOREIGN KEY (card_id) REFERENCES cards(id) ON DELETE CASCADE, + FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS card_links ( + id TEXT PRIMARY KEY, + card_id TEXT NOT NULL, + label TEXT NOT NULL, + url TEXT NOT NULL, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (card_id) REFERENCES cards(id) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS saved_views ( + id TEXT PRIMARY KEY, + board_id TEXT NOT NULL, + name TEXT NOT NULL, + filter_json TEXT NOT NULL DEFAULT '{}', + sort_json TEXT NOT NULL DEFAULT '{}', + created_by_user_id TEXT, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT NOT NULL DEFAULT 
CURRENT_TIMESTAMP, + FOREIGN KEY (board_id) REFERENCES boards(id) ON DELETE CASCADE, + FOREIGN KEY (created_by_user_id) REFERENCES users(id) ON DELETE SET NULL, + UNIQUE(board_id, name) +); + +CREATE TABLE IF NOT EXISTS activities ( + id TEXT PRIMARY KEY, + board_id TEXT NOT NULL, + card_id TEXT, + actor_user_id TEXT, + activity_type TEXT NOT NULL, + payload_json TEXT NOT NULL DEFAULT '{}', + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (board_id) REFERENCES boards(id) ON DELETE CASCADE, + FOREIGN KEY (card_id) REFERENCES cards(id) ON DELETE SET NULL, + FOREIGN KEY (actor_user_id) REFERENCES users(id) ON DELETE SET NULL +); + +CREATE INDEX IF NOT EXISTS idx_boards_workspace_id ON boards(workspace_id); +CREATE INDEX IF NOT EXISTS idx_columns_board_position ON columns(board_id, position); +CREATE INDEX IF NOT EXISTS idx_cards_board_column_position ON cards(board_id, column_id, position); +CREATE INDEX IF NOT EXISTS idx_cards_updated_at ON cards(updated_at); +CREATE INDEX IF NOT EXISTS idx_cards_type_external ON cards(card_type, external_id); +CREATE INDEX IF NOT EXISTS idx_tags_workspace ON tags(workspace_id); +CREATE INDEX IF NOT EXISTS idx_activities_board_created ON activities(board_id, created_at DESC); diff --git a/Frontiers/Utopia/db/seeds/seed_sample_data.sql b/Frontiers/Utopia/db/seeds/seed_sample_data.sql new file mode 100644 index 000000000..350970bd8 --- /dev/null +++ b/Frontiers/Utopia/db/seeds/seed_sample_data.sql @@ -0,0 +1,19 @@ +INSERT INTO users (id, email, display_name) +VALUES ('usr_demo', 'demo@utopia.local', 'Demo User'); + +INSERT INTO workspaces (id, name, owner_user_id) +VALUES ('wsp_demo', 'Demo Workspace', 'usr_demo'); + +INSERT INTO boards (id, workspace_id, name, description) +VALUES ('brd_demo', 'wsp_demo', 'Getting Started', 'Sample board for local development'); + +INSERT INTO columns (id, board_id, name, position) +VALUES + ('col_demo_backlog', 'brd_demo', 'Backlog', 1000), + ('col_demo_doing', 'brd_demo', 'Doing', 
2000), + ('col_demo_done', 'brd_demo', 'Done', 3000); + +INSERT INTO cards (id, board_id, column_id, title, description, card_type, external_id, position, metadata_json) +VALUES + ('crd_demo_1', 'brd_demo', 'col_demo_backlog', 'Import legacy fleet data', 'Run migrate_utopia_data.', 'task', 'demo-1', 1000, '{"priority":"high"}'), + ('crd_demo_2', 'brd_demo', 'col_demo_doing', 'Refactor repository layer', 'Adopt repositories for all reads/writes.', 'task', 'demo-2', 1000, '{"priority":"medium"}'); diff --git a/Frontiers/Utopia/main/run_utopia b/Frontiers/Utopia/main/run_utopia new file mode 100755 index 000000000..89c99b0af --- /dev/null +++ b/Frontiers/Utopia/main/run_utopia @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -euo pipefail +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +python3 "$SCRIPT_DIR/server.py" "$@" diff --git a/Frontiers/Utopia/main/server.py b/Frontiers/Utopia/main/server.py new file mode 100644 index 000000000..06eb9bb75 --- /dev/null +++ b/Frontiers/Utopia/main/server.py @@ -0,0 +1,349 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import json +import sqlite3 +import sys +import uuid +from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer +from pathlib import Path +from urllib.parse import parse_qs, urlparse + +ROOT = Path(__file__).resolve().parents[1] +if str(ROOT) not in sys.path: + sys.path.insert(0, str(ROOT)) + +from utopia_data.db import connect, migrate_up + +DB_PATH = ROOT / "db" / "utopia.sqlite" +STATIC_DIR = ROOT / "main" / "static" + + +def _row_to_dict(row: sqlite3.Row) -> dict: + return {k: row[k] for k in row.keys()} + + +def ensure_schema(conn: sqlite3.Connection) -> None: + migrate_up(conn) + + +def ensure_default_board(conn: sqlite3.Connection) -> None: + board_count = conn.execute("SELECT COUNT(*) FROM boards").fetchone()[0] + if board_count: + return + + workspace_id = f"wsp_{uuid.uuid4().hex[:10]}" + board_id = f"brd_{uuid.uuid4().hex[:10]}" + conn.execute("INSERT INTO workspaces 
(id, name) VALUES (?, ?)", (workspace_id, "Default Workspace")) + conn.execute( + "INSERT INTO boards (id, workspace_id, name, description) VALUES (?, ?, ?, ?)", + (board_id, workspace_id, "Utopia Board", "Default board"), + ) + for i, name in enumerate(["Backlog", "Doing", "Done"], start=1): + conn.execute( + "INSERT INTO columns (id, board_id, name, position) VALUES (?, ?, ?, ?)", + (f"col_{uuid.uuid4().hex[:10]}", board_id, name, float(i * 1000)), + ) + conn.commit() + + +def list_board_payload(conn: sqlite3.Connection, board_id: str, tag: str | None = None) -> dict: + board = conn.execute("SELECT * FROM boards WHERE id = ?", (board_id,)).fetchone() + if not board: + raise KeyError("Board not found") + + columns = [ + _row_to_dict(r) + for r in conn.execute( + "SELECT * FROM columns WHERE board_id = ? AND archived_at IS NULL ORDER BY position, id", (board_id,) + ).fetchall() + ] + + where = "WHERE c.board_id = ? AND c.archived_at IS NULL" + params: list[object] = [board_id] + if tag: + where += " AND EXISTS (SELECT 1 FROM card_tags ct JOIN tags t ON t.id = ct.tag_id WHERE ct.card_id = c.id AND t.name = ?)" + params.append(tag) + + cards = [ + _row_to_dict(r) + for r in conn.execute( + f""" + SELECT c.* FROM cards c + {where} + ORDER BY c.position, c.id + """, + params, + ).fetchall() + ] + + tag_rows = conn.execute( + """ + SELECT ct.card_id, t.id AS tag_id, t.name, t.color + FROM card_tags ct + JOIN tags t ON t.id = ct.tag_id + JOIN cards c ON c.id = ct.card_id + WHERE c.board_id = ? + """, + (board_id,), + ).fetchall() + + tags_by_card: dict[str, list[dict]] = {} + for row in tag_rows: + tags_by_card.setdefault(row["card_id"], []).append( + {"id": row["tag_id"], "name": row["name"], "color": row["color"]} + ) + + for card in cards: + card["metadata"] = json.loads(card.get("metadata_json") or "{}") + card["tags"] = tags_by_card.get(card["id"], []) + + views = [ + _row_to_dict(r) + for r in conn.execute("SELECT * FROM saved_views WHERE board_id = ? 
ORDER BY name", (board_id,)).fetchall() + ] + + return {"board": _row_to_dict(board), "columns": columns, "cards": cards, "views": views} + + +def create_tag(conn: sqlite3.Connection, workspace_id: str, name: str, color: str | None = None) -> str: + row = conn.execute("SELECT id FROM tags WHERE workspace_id = ? AND name = ?", (workspace_id, name)).fetchone() + if row: + return row["id"] + tag_id = f"tag_{uuid.uuid4().hex[:10]}" + conn.execute("INSERT INTO tags (id, workspace_id, name, color) VALUES (?, ?, ?, ?)", (tag_id, workspace_id, name, color)) + return tag_id + + +def compute_midpoint(prev_pos: float | None, next_pos: float | None) -> float: + if prev_pos is None and next_pos is None: + return 1000.0 + if prev_pos is None: + return next_pos / 2.0 + if next_pos is None: + return prev_pos + 1000.0 + return (prev_pos + next_pos) / 2.0 + + +class Handler(BaseHTTPRequestHandler): + def _conn(self) -> sqlite3.Connection: + conn = connect(DB_PATH) + conn.row_factory = sqlite3.Row + return conn + + def _json(self, code: int, payload: dict | list): + body = json.dumps(payload).encode("utf-8") + self.send_response(code) + self.send_header("Content-Type", "application/json") + self.send_header("Content-Length", str(len(body))) + self.end_headers() + self.wfile.write(body) + + def _read_json(self) -> dict: + length = int(self.headers.get("Content-Length", "0")) + raw = self.rfile.read(length) if length else b"{}" + return json.loads(raw.decode("utf-8")) + + def do_GET(self): + parsed = urlparse(self.path) + if parsed.path == "/": + html = (STATIC_DIR / "index.html").read_bytes() + self.send_response(200) + self.send_header("Content-Type", "text/html; charset=utf-8") + self.send_header("Content-Length", str(len(html))) + self.end_headers() + self.wfile.write(html) + return + + if parsed.path.startswith("/static/"): + path = STATIC_DIR / parsed.path.removeprefix("/static/") + if path.exists() and path.is_file(): + data = path.read_bytes() + ctype = "application/javascript" 
if path.suffix == ".js" else "text/css" + self.send_response(200) + self.send_header("Content-Type", ctype) + self.send_header("Content-Length", str(len(data))) + self.end_headers() + self.wfile.write(data) + return + self.send_error(404) + return + + if parsed.path == "/api/bootstrap": + conn = self._conn() + try: + ensure_schema(conn) + ensure_default_board(conn) + boards = [_row_to_dict(r) for r in conn.execute("SELECT * FROM boards ORDER BY created_at, id").fetchall()] + self._json(200, {"boards": boards}) + finally: + conn.close() + return + + if parsed.path.startswith("/api/boards/") and parsed.path.endswith("/state"): + board_id = parsed.path.split("/")[3] + tag = parse_qs(parsed.query).get("tag", [None])[0] + conn = self._conn() + try: + self._json(200, list_board_payload(conn, board_id, tag=tag)) + except KeyError: + self._json(404, {"error": "board_not_found"}) + finally: + conn.close() + return + + self.send_error(404) + + def do_POST(self): + parsed = urlparse(self.path) + payload = self._read_json() + conn = self._conn() + try: + ensure_schema(conn) + if parsed.path == "/api/boards": + name = payload.get("name", "New Board").strip() or "New Board" + workspace = conn.execute("SELECT id FROM workspaces ORDER BY created_at LIMIT 1").fetchone() + if not workspace: + workspace_id = f"wsp_{uuid.uuid4().hex[:10]}" + conn.execute("INSERT INTO workspaces (id, name) VALUES (?, ?)", (workspace_id, "Default Workspace")) + else: + workspace_id = workspace["id"] + board_id = f"brd_{uuid.uuid4().hex[:10]}" + conn.execute("INSERT INTO boards (id, workspace_id, name) VALUES (?, ?, ?)", (board_id, workspace_id, name)) + for i, col in enumerate(["Backlog", "Doing", "Done"], start=1): + conn.execute( + "INSERT INTO columns (id, board_id, name, position) VALUES (?, ?, ?, ?)", + (f"col_{uuid.uuid4().hex[:10]}", board_id, col, i * 1000.0), + ) + conn.commit() + self._json(201, {"id": board_id}) + return + + if parsed.path.startswith("/api/boards/") and 
parsed.path.endswith("/columns"): + board_id = parsed.path.split("/")[3] + row = conn.execute("SELECT COALESCE(MAX(position),0) AS p FROM columns WHERE board_id = ?", (board_id,)).fetchone() + col_id = f"col_{uuid.uuid4().hex[:10]}" + conn.execute( + "INSERT INTO columns (id, board_id, name, position) VALUES (?, ?, ?, ?)", + (col_id, board_id, payload.get("name", "New List"), float(row["p"]) + 1000.0), + ) + conn.commit() + self._json(201, {"id": col_id}) + return + + if parsed.path == "/api/cards": + card_id = f"crd_{uuid.uuid4().hex[:12]}" + board_id = payload["board_id"] + col_id = payload["column_id"] + row = conn.execute( + "SELECT COALESCE(MAX(position),0) AS p FROM cards WHERE board_id = ? AND column_id = ?", + (board_id, col_id), + ).fetchone() + conn.execute( + """ + INSERT INTO cards (id, board_id, column_id, title, description, card_type, external_id, position, metadata_json) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + """, + ( + card_id, + board_id, + col_id, + payload.get("title", "Untitled"), + payload.get("description"), + payload.get("card_type", "card"), + payload.get("external_id"), + float(row["p"]) + 1000.0, + json.dumps(payload.get("metadata", {}), separators=(",", ":")), + ), + ) + for tag in payload.get("tags", []): + board = conn.execute("SELECT workspace_id FROM boards WHERE id = ?", (board_id,)).fetchone() + tag_id = create_tag(conn, board["workspace_id"], tag) + conn.execute("INSERT INTO card_tags (card_id, tag_id) VALUES (?, ?)", (card_id, tag_id)) + conn.commit() + self._json(201, {"id": card_id}) + return + + if parsed.path.startswith("/api/cards/") and parsed.path.endswith("/move"): + card_id = parsed.path.split("/")[3] + to_column = payload["to_column_id"] + prev_pos = payload.get("prev_position") + next_pos = payload.get("next_position") + new_pos = compute_midpoint(prev_pos, next_pos) + conn.execute("UPDATE cards SET column_id = ?, position = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?", (to_column, new_pos, card_id)) + conn.commit() 
+ self._json(200, {"id": card_id, "position": new_pos, "column_id": to_column}) + return + + if parsed.path.startswith("/api/boards/") and parsed.path.endswith("/views"): + board_id = parsed.path.split("/")[3] + view_id = f"view_{uuid.uuid4().hex[:10]}" + conn.execute( + "INSERT INTO saved_views (id, board_id, name, filter_json, sort_json) VALUES (?, ?, ?, ?, ?)", + ( + view_id, + board_id, + payload.get("name", "View"), + json.dumps(payload.get("filter", {}), separators=(",", ":")), + json.dumps(payload.get("sort", {"field": "position", "direction": "asc"}), separators=(",", ":")), + ), + ) + conn.commit() + self._json(201, {"id": view_id}) + return + + self._json(404, {"error": "not_found"}) + except Exception as exc: + conn.rollback() + self._json(500, {"error": "server_error", "detail": str(exc)}) + finally: + conn.close() + + def do_PATCH(self): + parsed = urlparse(self.path) + if not parsed.path.startswith("/api/cards/"): + return self.send_error(404) + card_id = parsed.path.split("/")[3] + payload = self._read_json() + conn = self._conn() + try: + card = conn.execute("SELECT board_id FROM cards WHERE id = ?", (card_id,)).fetchone() + if not card: + self._json(404, {"error": "card_not_found"}) + return + + conn.execute( + "UPDATE cards SET title = ?, description = ?, metadata_json = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?", + ( + payload.get("title", "Untitled"), + payload.get("description"), + json.dumps(payload.get("metadata", {}), separators=(",", ":")), + card_id, + ), + ) + + if "tags" in payload: + conn.execute("DELETE FROM card_tags WHERE card_id = ?", (card_id,)) + workspace_id = conn.execute("SELECT workspace_id FROM boards WHERE id = ?", (card["board_id"],)).fetchone()[0] + for tag in payload["tags"]: + tag_id = create_tag(conn, workspace_id, tag) + conn.execute("INSERT INTO card_tags (card_id, tag_id) VALUES (?, ?)", (card_id, tag_id)) + + conn.commit() + self._json(200, {"id": card_id}) + except Exception as exc: + conn.rollback() + 
self._json(500, {"error": "server_error", "detail": str(exc)}) + finally: + conn.close() + + +def run(port: int = 8787) -> None: + DB_PATH.parent.mkdir(parents=True, exist_ok=True) + httpd = ThreadingHTTPServer(("0.0.0.0", port), Handler) + print(f"Utopia server running on http://0.0.0.0:{port}") + httpd.serve_forever() + + +if __name__ == "__main__": + run() diff --git a/Frontiers/Utopia/main/static/app.js b/Frontiers/Utopia/main/static/app.js new file mode 100644 index 000000000..49c4f40ab --- /dev/null +++ b/Frontiers/Utopia/main/static/app.js @@ -0,0 +1,244 @@ +import { computeMidpoint, mapBoardState, sortByPosition } from "/static/domain/adapters.js"; + +const state = { + boards: [], + boardId: null, + tagFilter: "", + boardState: null, + draggingCardId: null, +}; + +const els = { + status: document.getElementById("global-status"), + app: document.getElementById("app"), + boardSelect: document.getElementById("board-select"), + createBoard: document.getElementById("create-board"), + createColumn: document.getElementById("create-column"), + tagFilter: document.getElementById("tag-filter"), + applyFilter: document.getElementById("apply-filter"), + saveView: document.getElementById("save-view"), + viewSelect: document.getElementById("saved-view-select"), +}; + +async function api(path, options = {}) { + const res = await fetch(path, { + headers: { "Content-Type": "application/json" }, + ...options, + }); + const data = await res.json().catch(() => ({})); + if (!res.ok) throw new Error(data.detail || data.error || `HTTP ${res.status}`); + return data; +} + +async function bootstrap() { + setStatus("Loading data…"); + try { + const payload = await api("/api/bootstrap"); + state.boards = payload.boards; + state.boardId = state.boards[0]?.id || null; + renderBoardSelect(); + await loadBoardState(); + bindToolbar(); + setStatus("Ready"); + } catch (err) { + setStatus(`Bootstrap failed: ${err.message}`, true); + } +} + +function bindToolbar() { + 
els.boardSelect.onchange = async () => { + state.boardId = els.boardSelect.value; + await loadBoardState(); + }; + + els.createBoard.onclick = async () => { + const name = prompt("Board name:", "New Board"); + if (!name) return; + await api("/api/boards", { method: "POST", body: JSON.stringify({ name }) }); + await bootstrap(); + }; + + els.createColumn.onclick = async () => { + if (!state.boardId) return; + const name = prompt("List name:", "New List"); + if (!name) return; + await api(`/api/boards/${state.boardId}/columns`, { method: "POST", body: JSON.stringify({ name }) }); + await loadBoardState(); + }; + + els.applyFilter.onclick = async () => { + state.tagFilter = els.tagFilter.value.trim(); + await loadBoardState(); + }; + + els.saveView.onclick = async () => { + if (!state.boardId) return; + const name = prompt("Save view as:", "Filtered View"); + if (!name) return; + await api(`/api/boards/${state.boardId}/views`, { + method: "POST", + body: JSON.stringify({ name, filter: { tag: state.tagFilter } }), + }); + await loadBoardState(); + }; + + els.viewSelect.onchange = async () => { + const selected = state.boardState?.views.find((v) => v.id === els.viewSelect.value); + if (!selected) return; + state.tagFilter = selected.filter.tag || ""; + els.tagFilter.value = state.tagFilter; + await loadBoardState(); + }; +} + +async function loadBoardState() { + if (!state.boardId) { + els.app.innerHTML = "

No boards yet.

"; + return; + } + const query = state.tagFilter ? `?tag=${encodeURIComponent(state.tagFilter)}` : ""; + const payload = await api(`/api/boards/${state.boardId}/state${query}`); + state.boardState = mapBoardState(payload); + renderViews(); + renderBoard(); +} + +function renderBoardSelect() { + els.boardSelect.innerHTML = ""; + for (const board of state.boards) { + const opt = document.createElement("option"); + opt.value = board.id; + opt.textContent = board.name; + if (board.id === state.boardId) opt.selected = true; + els.boardSelect.appendChild(opt); + } +} + +function renderViews() { + const current = els.viewSelect.value; + els.viewSelect.innerHTML = ``; + for (const view of state.boardState.views || []) { + const opt = document.createElement("option"); + opt.value = view.id; + opt.textContent = view.name; + if (view.id === current) opt.selected = true; + els.viewSelect.appendChild(opt); + } +} + +function renderBoard() { + const columnTpl = document.getElementById("column-template"); + const cardTpl = document.getElementById("card-template"); + els.app.innerHTML = ""; + + for (const column of state.boardState.columns) { + const node = columnTpl.content.firstElementChild.cloneNode(true); + node.dataset.columnId = column.id; + node.querySelector(".column-header").textContent = column.name; + + const cardsBox = node.querySelector(".cards"); + const cards = [...(state.boardState.cardsByColumn[column.id] || [])].sort(sortByPosition); + for (const card of cards) { + const cardNode = cardTpl.content.firstElementChild.cloneNode(true); + cardNode.dataset.cardId = card.id; + cardNode.dataset.columnId = column.id; + cardNode.querySelector(".title").textContent = card.title; + cardNode.querySelector(".desc").textContent = card.description || ""; + const tags = cardNode.querySelector(".tags"); + for (const tag of card.tags) { + const span = document.createElement("span"); + span.className = "tag"; + span.textContent = tag.name; + tags.appendChild(span); + } + + 
cardNode.addEventListener("dblclick", () => editCard(card)); + wireCardDnD(cardNode); + cardsBox.appendChild(cardNode); + } + + cardsBox.addEventListener("dragover", (ev) => ev.preventDefault()); + cardsBox.addEventListener("drop", async (ev) => { + ev.preventDefault(); + if (!state.draggingCardId) return; + const afterEl = getDropAfterElement(cardsBox, ev.clientY); + const currentCards = (state.boardState.cardsByColumn[column.id] || []).sort(sortByPosition); + let prev = null; + let next = null; + + if (!afterEl) { + prev = currentCards[currentCards.length - 1] || null; + } else { + const idx = currentCards.findIndex((c) => c.id === afterEl.dataset.cardId); + next = idx >= 0 ? currentCards[idx] : null; + prev = idx > 0 ? currentCards[idx - 1] : null; + } + + const position = computeMidpoint(prev, next); + await api(`/api/cards/${state.draggingCardId}/move`, { + method: "POST", + body: JSON.stringify({ to_column_id: column.id, prev_position: prev?.position ?? null, next_position: next?.position ?? 
null, position }), + }); + await loadBoardState(); + }); + + node.querySelector(".add-card").onclick = async () => { + const title = prompt("Card title:", "New Card"); + if (!title) return; + await api("/api/cards", { + method: "POST", + body: JSON.stringify({ board_id: state.boardId, column_id: column.id, title, card_type: "task", tags: [] }), + }); + await loadBoardState(); + }; + + els.app.appendChild(node); + } +} + +function wireCardDnD(node) { + node.addEventListener("dragstart", () => { + state.draggingCardId = node.dataset.cardId; + node.classList.add("dragging"); + }); + node.addEventListener("dragend", () => { + node.classList.remove("dragging"); + state.draggingCardId = null; + }); +} + +function getDropAfterElement(container, y) { + const draggableElements = [...container.querySelectorAll(".card:not(.dragging)")]; + return draggableElements.reduce( + (closest, child) => { + const box = child.getBoundingClientRect(); + const offset = y - box.top - box.height / 2; + if (offset < 0 && offset > closest.offset) { + return { offset, element: child }; + } + return closest; + }, + { offset: Number.NEGATIVE_INFINITY, element: null }, + ).element; +} + +async function editCard(card) { + const title = prompt("Title", card.title); + if (title == null) return; + const description = prompt("Description", card.description || "") ?? ""; + const tagsText = prompt("Tags (comma separated)", (card.tags || []).map((t) => t.name).join(", ")) ?? ""; + const tags = tagsText.split(",").map((s) => s.trim()).filter(Boolean); + + await api(`/api/cards/${card.id}`, { + method: "PATCH", + body: JSON.stringify({ title, description, metadata: card.metadata || {}, tags }), + }); + await loadBoardState(); +} + +function setStatus(text, isError = false) { + els.status.textContent = text; + els.status.className = isError ? 
"error" : "muted"; +} + +bootstrap(); diff --git a/Frontiers/Utopia/main/static/domain/adapters.js b/Frontiers/Utopia/main/static/domain/adapters.js new file mode 100644 index 000000000..b5589ce67 --- /dev/null +++ b/Frontiers/Utopia/main/static/domain/adapters.js @@ -0,0 +1,53 @@ +export function mapBoardState(payload) { + const board = payload.board; + const columns = [...payload.columns].sort((a, b) => sortByPosition(a, b)); + const cards = [...payload.cards].sort((a, b) => sortByPosition(a, b)); + + const cardsByColumn = {}; + for (const column of columns) cardsByColumn[column.id] = []; + for (const card of cards) { + if (!cardsByColumn[card.column_id]) cardsByColumn[card.column_id] = []; + cardsByColumn[card.column_id].push({ + ...card, + tags: card.tags || [], + metadata: card.metadata || {}, + }); + } + + return { + board, + columns, + cardsByColumn, + views: (payload.views || []).map((v) => ({ + ...v, + filter: safeJson(v.filter_json), + sort: safeJson(v.sort_json), + })), + }; +} + +export function sortByPosition(a, b) { + const ap = Number(a.position ?? 0); + const bp = Number(b.position ?? 0); + if (ap !== bp) return ap - bp; + return String(a.id).localeCompare(String(b.id)); +} + +export function computeMidpoint(prev, next) { + const prevPos = prev?.position != null ? Number(prev.position) : null; + const nextPos = next?.position != null ? Number(next.position) : null; + + if (prevPos == null && nextPos == null) return 1000; + if (prevPos == null) return nextPos / 2; + if (nextPos == null) return prevPos + 1000; + return (prevPos + nextPos) / 2; +} + +function safeJson(raw) { + if (!raw) return {}; + try { + return JSON.parse(raw); + } catch { + return {}; + } +} diff --git a/Frontiers/Utopia/main/static/index.html b/Frontiers/Utopia/main/static/index.html new file mode 100644 index 000000000..e21e37218 --- /dev/null +++ b/Frontiers/Utopia/main/static/index.html @@ -0,0 +1,45 @@ + + + + + + Utopia + + + +
+

Utopia

+
Loading…
+
+ +
+ + + + + + + +
+ +
+ + + + + + + + diff --git a/Frontiers/Utopia/main/static/styles.css b/Frontiers/Utopia/main/static/styles.css new file mode 100644 index 000000000..7096a26b9 --- /dev/null +++ b/Frontiers/Utopia/main/static/styles.css @@ -0,0 +1,15 @@ +body { font-family: Arial, sans-serif; margin: 0; background: #f7f7f9; } +header { background: #222; color: #fff; padding: 10px 16px; display:flex; justify-content:space-between; align-items:center; } +#toolbar { display:flex; gap:8px; align-items:center; padding:10px 16px; flex-wrap:wrap; background:#fff; border-bottom:1px solid #ddd; } +#app { display:flex; gap:12px; padding:12px; overflow-x:auto; min-height:70vh; } +.column { background:#fff; border:1px solid #ddd; border-radius:8px; padding:10px; width:320px; flex:0 0 320px; } +.column-header { font-weight:700; margin-bottom:8px; } +.cards { min-height:60px; display:flex; flex-direction:column; gap:8px; } +.card { background:#fefefe; border:1px solid #ccc; border-radius:6px; padding:8px; cursor:grab; } +.card.dragging { opacity:0.5; } +.card .title { margin:0 0 4px; font-size:14px; } +.card .desc { margin:0 0 8px; font-size:12px; color:#555; white-space:pre-wrap; } +.tags { display:flex; flex-wrap:wrap; gap:4px; } +.tag { background:#e8eefc; border-radius:12px; padding:2px 6px; font-size:11px; } +.muted { opacity:0.8; font-size:12px; } +.error { color:#b00020; } diff --git a/Frontiers/Utopia/migrate_utopia_data b/Frontiers/Utopia/migrate_utopia_data new file mode 100755 index 000000000..39db2a495 --- /dev/null +++ b/Frontiers/Utopia/migrate_utopia_data @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -euo pipefail +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +python3 "$SCRIPT_DIR/scripts/migrate_utopia_data.py" "$@" diff --git a/Frontiers/Utopia/schema.md b/Frontiers/Utopia/schema.md new file mode 100644 index 000000000..0bdb640d9 --- /dev/null +++ b/Frontiers/Utopia/schema.md @@ -0,0 +1,102 @@ +# Utopia Data Layer (Phase 1) + +## Repo Recon (Current State) + +### Where 
schema/data lives today +- Utopia currently loads a single denormalized JSON payload from `data/data.json` via Angular `$http` in `utopia-card-loader`. There is no relational DB/ORM layer yet. +- Source records are authored in category-specific JS modules (`ships`, `captains`, `upgrades`, etc.) and merged into one payload by `src/data/index.js`. +- User state is mostly browser-local (`localStorage.defaults`) and fleet serialization in URL hash. + +### Where data is accessed +- `src/js/common/utopia-card-loader.js` performs direct category iteration, duplicate checks, enrichment, and merges all cards into one in-memory array. +- `src/js/utopia-fleet-builder.js` mutates nested fleet objects directly in controllers/directives. + +### Current pain points +1. **No persistence boundary**: UI code and data-transform logic are tightly coupled. +2. **Denormalized payload**: all card categories are loaded as large arrays; query/index constraints are not enforceable. +3. **Inconsistent ownership/lifecycle**: rules, data loading, and mutation are interwoven. +4. **Stringly-typed identifiers and category buckets**: category keys drive behavior implicitly. +5. **No referential integrity or migration safety**: duplicates are runtime-checked only. 
+ +--- + +## Proposed Schema (ERD style) + +- **users** (optional actor identity) +- **workspaces** (root ownership boundary) +- **boards** (list-building spaces inside a workspace) +- **columns** (ordered lanes/list buckets per board) +- **cards** (ordered items in a column; typed; extensible metadata) +- **tags** + **card_tags** (many-to-many labels) +- **card_links** (external reference URLs) +- **saved_views** (persisted filters/sorts) +- **activities** (audit/event stream) + +Relationships: +- `workspace 1..n boards` +- `board 1..n columns` +- `board 1..n cards` +- `column 1..n cards` +- `cards n..m tags` via `card_tags` +- `card 1..n card_links` +- `board 1..n saved_views` +- `board 1..n activities` and optionally `card 0..n activities` + +### Normalized vs embedded JSON +- **Normalized**: workspace/board/column/card/tag/link/view/activity relations, because these are queried/joined frequently and need integrity guarantees. +- **Embedded JSON**: `cards.metadata_json`, `saved_views.filter_json`, `saved_views.sort_json`, `activities.payload_json` for flexible, evolving payloads without schema churn. + +### Ordering strategy +- Uses **fractional/numeric positions with gaps** (`REAL`, seeded with `1000, 2000, ...`). +- Rationale: simple drag/drop insertions without full reindex in common operations; easy to rebalance later. + +### Integrity and indexing +- FK cascades on board/column/card ownership. +- Unique constraints for: + - `boards(workspace_id, name)` + - `columns(board_id, name)` + - `tags(workspace_id, name)` + - `cards(board_id, card_type, external_id)` +- Indexes for board/column ordering, tags, card lookup, and activity feed recency. + +--- + +## Migration + Backfill Plan + +1. Apply `001_create_utopia_v2_up.sql`. +2. Back up existing sqlite DB file (if any). +3. Import legacy `src/data/backup/data.json` into: + - one workspace (`Legacy Utopia Import`) + - one board (`Imported Card Catalog`) + - columns by legacy bucket (`Ships`, `Captains`, ...) 
+ - one card row per legacy record with raw data in `metadata_json` + - tags for source bucket and factions +4. Run verification script to confirm row counts + no orphan join rows. + +Rollback: +- `--rebuild` option runs down migration before rebuilding. +- DB file backup is written before mutation for recovery. + +--- + +## Developer Ergonomics + +### Migration command +```bash +Frontiers/Utopia/migrate_utopia_data --rebuild +``` + +### Validation command +```bash +python3 Frontiers/Utopia/scripts/verify_utopia_data.py --db-path Frontiers/Utopia/db/utopia.sqlite +``` + +### Data access boundary +Use `utopia_data/repositories.py` + `utopia_data/services.py` for read/write paths instead of direct SQL from app/business logic. + +--- + +## Assumptions +- This repo currently has no running server-side DB integration for Utopia; Phase 1 therefore introduces a DB layer in parallel without forcing immediate UI rewrites. +- Legacy source-of-truth for migration is `src/data/backup/data.json` (JSON-safe and deterministic). +- Soft-delete behavior is retained as optional fields (`archived_at`) only where list/card archival semantics are expected. 
# diff --git a/Frontiers/Utopia/scripts/migrate_utopia_data.py b/Frontiers/Utopia/scripts/migrate_utopia_data.py
# new file mode 100755
# index 000000000..6d0219b02
# --- /dev/null
# +++ b/Frontiers/Utopia/scripts/migrate_utopia_data.py
# @@ -0,0 +1,138 @@
#!/usr/bin/env python3
"""Import the legacy denormalized data.json into the normalized Utopia v2 schema.

Creates one workspace and one board, one column per legacy category bucket,
and one card per legacy record (raw payload retained in metadata_json).
"""
from __future__ import annotations

import argparse
import json
import shutil
import sys
from datetime import datetime, timezone
from pathlib import Path

ROOT = Path(__file__).resolve().parents[1]
if str(ROOT) not in sys.path:
    sys.path.insert(0, str(ROOT))

DEFAULT_DB_PATH = ROOT / "db" / "utopia.sqlite"
DEFAULT_LEGACY_PATH = ROOT.parent / "src" / "data" / "backup" / "data.json"


def backup_db(db_path: Path) -> Path | None:
    """Copy an existing DB aside and return the backup path (None if no DB yet)."""
    if not db_path.exists():
        return None
    # datetime.utcnow() is deprecated since Python 3.12; use an aware UTC stamp.
    ts = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S")
    backup_path = db_path.with_suffix(f".sqlite.bak.{ts}")
    shutil.copy2(db_path, backup_path)
    return backup_path


def load_legacy_data(path: Path) -> dict:
    """Parse the legacy data.json payload."""
    with path.open("r", encoding="utf-8") as fp:
        return json.load(fp)


def source_buckets() -> list[str]:
    """Legacy category buckets, in import (column) order."""
    return ["ships", "captains", "admirals", "ambassadors", "upgrades", "resources", "others"]


def dedupe_external_id(base_external_id: str | None, dedupe_count: int) -> str | None:
    """Suffix repeated (card_type, external_id) pairs so the unique index holds.

    Records without an id stay None — SQLite UNIQUE treats NULLs as distinct —
    instead of collapsing into the bogus literal "None::dup1".
    """
    if base_external_id is None or dedupe_count == 0:
        return base_external_id
    return f"{base_external_id}::dup{dedupe_count}"


def migrate(args: argparse.Namespace) -> None:
    """Run the import described by *args*; commits on success, rolls back on error."""
    # Imported lazily so the pure helpers above stay importable (and testable)
    # even when the utopia_data package is not on sys.path.
    from utopia_data import (
        BoardRepository,
        CardRepository,
        ColumnRepository,
        TagRepository,
        UtopiaBoardService,
        WorkspaceRepository,
        connect,
        migrate_down,
        migrate_up,
    )

    db_path = Path(args.db_path)
    legacy_path = Path(args.legacy_path)

    if not legacy_path.exists():
        raise FileNotFoundError(f"Legacy source does not exist: {legacy_path}")

    backup_path = backup_db(db_path)
    db_path.parent.mkdir(parents=True, exist_ok=True)

    conn = connect(db_path)
    try:
        if args.rebuild:
            migrate_down(conn)
        migrate_up(conn)

        service = UtopiaBoardService(
            WorkspaceRepository(conn),
            BoardRepository(conn),
            ColumnRepository(conn),
            CardRepository(conn),
            TagRepository(conn),
        )
        card_repo = CardRepository(conn)
        legacy = load_legacy_data(legacy_path)

        workspace_id, board_id, columns = service.create_workspace_with_board(
            workspace_name="Legacy Utopia Import",
            board_name="Imported Card Catalog",
            column_names=[bucket.capitalize() for bucket in source_buckets()],
        )

        # How many times each (card_type, external_id) pair has been seen,
        # so duplicates can be suffixed instead of violating the unique index.
        seen_keys: dict[tuple[str, str | None], int] = {}

        for bucket in source_buckets():
            position = 1000.0  # fractional-position seed; gaps of 1000 per card
            for item in legacy.get(bucket, []):
                metadata = {
                    "legacy_source": bucket,
                    "set": item.get("set"),
                    "factions": item.get("factions", []),
                    "cost": item.get("cost"),
                    "raw": item,
                }
                # One tag for the (singularized) source bucket plus one per faction.
                tags = [(bucket[:-1] if bucket.endswith("s") else bucket, "#64748b")]
                for faction in item.get("factions", []) or []:
                    tags.append((f"faction:{faction}", None))

                card_type = item.get("type", bucket[:-1])
                base_external_id = str(item.get("id")) if item.get("id") is not None else None
                key = (card_type, base_external_id)
                dedupe_count = seen_keys.get(key, 0)
                seen_keys[key] = dedupe_count + 1
                external_id = dedupe_external_id(base_external_id, dedupe_count)

                service.create_card(
                    board_id=board_id,
                    column_id=columns[bucket.capitalize()],
                    title=item.get("name") or f"{bucket}:{item.get('id', 'unknown')}",
                    card_type=card_type,
                    external_id=external_id,
                    metadata=metadata,
                    tags=tags,
                    workspace_id=workspace_id,
                    position=position,
                )
                position += 1000.0

        conn.commit()
        print(f"Migration completed: {db_path}")
        print(f"Imported {card_repo.count()} cards from {legacy_path}")
        if backup_path:
            print(f"Backup: {backup_path}")
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()


def parse_args() -> argparse.Namespace:
    """CLI options for the migration script."""
    parser = argparse.ArgumentParser(description="Migrate legacy Utopia data to normalized schema.")
    parser.add_argument("--db-path", default=str(DEFAULT_DB_PATH), help="Target sqlite database path")
    parser.add_argument("--legacy-path", default=str(DEFAULT_LEGACY_PATH), help="Legacy data.json path")
    parser.add_argument("--rebuild", action="store_true", help="Drop v2 tables before migrating")
    return parser.parse_args()


if __name__ == "__main__":
    migrate(parse_args())


# diff --git a/Frontiers/Utopia/scripts/verify_utopia_data.py b/Frontiers/Utopia/scripts/verify_utopia_data.py
# new file mode 100755
# index 000000000..372a96c5d
# --- /dev/null
# +++ b/Frontiers/Utopia/scripts/verify_utopia_data.py
# @@ -0,0 +1,51 @@
#!/usr/bin/env python3
# Post-migration sanity checks: core entities exist and no orphan card_tags rows.
# (from __future__ import annotations hoisted; not needed by this section)

import sqlite3


def fetch_int(conn: sqlite3.Connection, sql: str) -> int:
    """Run *sql* (expected to yield a single scalar row) and return it as int."""
    return int(conn.execute(sql).fetchone()[0])


def verify(db_path: Path) -> None:
    """Raise AssertionError when the migrated DB fails basic invariants."""
    conn = sqlite3.connect(db_path)
    try:
        conn.execute("PRAGMA foreign_keys = ON")

        cards = fetch_int(conn, "SELECT COUNT(*) FROM cards")
        columns = fetch_int(conn, "SELECT COUNT(*) FROM columns")
        boards = fetch_int(conn, "SELECT COUNT(*) FROM boards")
        # Join rows whose card or tag no longer exists.
        orphan_tags = fetch_int(
            conn,
            """
            SELECT COUNT(*) FROM card_tags ct
            LEFT JOIN cards c ON c.id = ct.card_id
            LEFT JOIN tags t ON t.id = ct.tag_id
            WHERE c.id IS NULL OR t.id IS NULL
            """,
        )

        if cards <= 0:
            raise AssertionError("No cards found after migration")
        if columns <= 0 or boards <= 0:
            raise AssertionError("Missing core entities (boards/columns)")
        if orphan_tags != 0:
            raise AssertionError(f"Found {orphan_tags} orphan card_tags")

        print(f"OK: boards={boards}, columns={columns}, cards={cards}, orphan_card_tags={orphan_tags}")
    finally:
        conn.close()


def parse_args() -> argparse.Namespace:  # noqa: F811 - separate script in this patch
    """CLI options for the verification script."""
    parser = argparse.ArgumentParser(description="Validate migrated Utopia data")
    parser.add_argument("--db-path", default="Frontiers/Utopia/db/utopia.sqlite")
    return parser.parse_args()


if __name__ == "__main__":
    args = parse_args()
    verify(Path(args.db_path))
# diff --git a/Frontiers/Utopia/tests/smoke_phase2.py b/Frontiers/Utopia/tests/smoke_phase2.py
# new file mode 100755
# index
000000000..f752e0f93 --- /dev/null +++ b/Frontiers/Utopia/tests/smoke_phase2.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import json +import subprocess +import time +import urllib.request + +BASE = "http://127.0.0.1:8787" + + +def req(path: str, method: str = "GET", payload: dict | None = None) -> dict: + data = None + headers = {"Content-Type": "application/json"} + if payload is not None: + data = json.dumps(payload).encode("utf-8") + r = urllib.request.Request(f"{BASE}{path}", method=method, data=data, headers=headers) + with urllib.request.urlopen(r, timeout=10) as resp: + return json.loads(resp.read().decode("utf-8")) + + +def main() -> None: + proc = subprocess.Popen(["python3", "Frontiers/Utopia/main/server.py"]) + try: + time.sleep(1.2) + boot = req("/api/bootstrap") + assert boot["boards"], "Expected at least one board" + board_id = boot["boards"][0]["id"] + + col_resp = req(f"/api/boards/{board_id}/columns", "POST", {"name": "QA"}) + assert col_resp["id"] + + state = req(f"/api/boards/{board_id}/state") + columns = state["columns"] + first_col, second_col = columns[0]["id"], columns[1]["id"] + + card = req( + "/api/cards", + "POST", + {"board_id": board_id, "column_id": first_col, "title": "Smoke Card", "description": "hello", "tags": ["test"]}, + ) + card_id = card["id"] + + req(f"/api/cards/{card_id}", "PATCH", {"title": "Smoke Card Edited", "description": "edited", "tags": ["test", "smoke"]}) + req(f"/api/cards/{card_id}/move", "POST", {"to_column_id": second_col, "prev_position": None, "next_position": None}) + + filtered = req(f"/api/boards/{board_id}/state?tag=smoke") + assert any(c["id"] == card_id for c in filtered["cards"]), "Tag filtering failed" + + req(f"/api/boards/{board_id}/views", "POST", {"name": "Smoke View", "filter": {"tag": "smoke"}}) + persisted = req(f"/api/boards/{board_id}/state") + assert any(v["name"] == "Smoke View" for v in persisted["views"]), "Saved view not persisted" + + print("Smoke 
OK") + finally: + proc.terminate() + proc.wait(timeout=5) + + +if __name__ == "__main__": + main() diff --git a/Frontiers/Utopia/utopia_data/__init__.py b/Frontiers/Utopia/utopia_data/__init__.py new file mode 100644 index 000000000..9852537a6 --- /dev/null +++ b/Frontiers/Utopia/utopia_data/__init__.py @@ -0,0 +1,15 @@ +from .db import connect, migrate_down, migrate_up +from .repositories import BoardRepository, CardRepository, ColumnRepository, TagRepository, WorkspaceRepository +from .services import UtopiaBoardService + +__all__ = [ + "connect", + "migrate_up", + "migrate_down", + "WorkspaceRepository", + "BoardRepository", + "ColumnRepository", + "CardRepository", + "TagRepository", + "UtopiaBoardService", +] diff --git a/Frontiers/Utopia/utopia_data/db.py b/Frontiers/Utopia/utopia_data/db.py new file mode 100644 index 000000000..f48310cec --- /dev/null +++ b/Frontiers/Utopia/utopia_data/db.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +import sqlite3 +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +MIGRATIONS_DIR = ROOT / "db" / "migrations" + + +def connect(db_path: Path) -> sqlite3.Connection: + conn = sqlite3.connect(db_path) + conn.row_factory = sqlite3.Row + conn.execute("PRAGMA foreign_keys = ON") + return conn + + +def apply_sql_file(conn: sqlite3.Connection, sql_path: Path) -> None: + conn.executescript(sql_path.read_text(encoding="utf-8")) + + +def migrate_up(conn: sqlite3.Connection) -> None: + apply_sql_file(conn, MIGRATIONS_DIR / "001_create_utopia_v2_up.sql") + + +def migrate_down(conn: sqlite3.Connection) -> None: + apply_sql_file(conn, MIGRATIONS_DIR / "001_create_utopia_v2_down.sql") diff --git a/Frontiers/Utopia/utopia_data/models.py b/Frontiers/Utopia/utopia_data/models.py new file mode 100644 index 000000000..4174b25d5 --- /dev/null +++ b/Frontiers/Utopia/utopia_data/models.py @@ -0,0 +1,46 @@ +from dataclasses import dataclass +from typing import Optional + + +@dataclass(frozen=True) +class Workspace: 
+ id: str + name: str + owner_user_id: Optional[str] = None + + +@dataclass(frozen=True) +class Board: + id: str + workspace_id: str + name: str + description: Optional[str] = None + + +@dataclass(frozen=True) +class Column: + id: str + board_id: str + name: str + position: float + + +@dataclass(frozen=True) +class Card: + id: str + board_id: str + column_id: str + title: str + description: Optional[str] + card_type: Optional[str] + external_id: Optional[str] + position: float + metadata_json: str + + +@dataclass(frozen=True) +class Tag: + id: str + workspace_id: str + name: str + color: Optional[str] = None diff --git a/Frontiers/Utopia/utopia_data/repositories.py b/Frontiers/Utopia/utopia_data/repositories.py new file mode 100644 index 000000000..ba7b266c1 --- /dev/null +++ b/Frontiers/Utopia/utopia_data/repositories.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +import json +import sqlite3 +from dataclasses import asdict +from typing import Iterable + +from .models import Board, Card, Column, Tag, Workspace + + +class WorkspaceRepository: + def __init__(self, conn: sqlite3.Connection): + self.conn = conn + + def create(self, workspace: Workspace) -> None: + self.conn.execute( + "INSERT INTO workspaces (id, name, owner_user_id) VALUES (:id, :name, :owner_user_id)", + asdict(workspace), + ) + + +class BoardRepository: + def __init__(self, conn: sqlite3.Connection): + self.conn = conn + + def create(self, board: Board) -> None: + self.conn.execute( + "INSERT INTO boards (id, workspace_id, name, description) VALUES (:id, :workspace_id, :name, :description)", + asdict(board), + ) + + +class ColumnRepository: + def __init__(self, conn: sqlite3.Connection): + self.conn = conn + + def create_many(self, columns: Iterable[Column]) -> None: + self.conn.executemany( + "INSERT INTO columns (id, board_id, name, position) VALUES (:id, :board_id, :name, :position)", + [asdict(col) for col in columns], + ) + + +class TagRepository: + def __init__(self, conn: 
sqlite3.Connection): + self.conn = conn + + def get_or_create(self, tag: Tag) -> Tag: + row = self.conn.execute( + "SELECT id, workspace_id, name, color FROM tags WHERE workspace_id = ? AND name = ?", + (tag.workspace_id, tag.name), + ).fetchone() + if row: + return Tag(**dict(row)) + self.conn.execute( + "INSERT INTO tags (id, workspace_id, name, color) VALUES (:id, :workspace_id, :name, :color)", + asdict(tag), + ) + return tag + + +class CardRepository: + def __init__(self, conn: sqlite3.Connection): + self.conn = conn + + def create(self, card: Card, tag_ids: list[str] | None = None) -> None: + payload = asdict(card) + payload["metadata_json"] = card.metadata_json if isinstance(card.metadata_json, str) else json.dumps(card.metadata_json) + self.conn.execute( + """ + INSERT INTO cards + (id, board_id, column_id, title, description, card_type, external_id, position, metadata_json) + VALUES + (:id, :board_id, :column_id, :title, :description, :card_type, :external_id, :position, :metadata_json) + """, + payload, + ) + if tag_ids: + self.conn.executemany( + "INSERT INTO card_tags (card_id, tag_id) VALUES (?, ?)", + [(card.id, tag_id) for tag_id in tag_ids], + ) + + def count(self) -> int: + return int(self.conn.execute("SELECT COUNT(*) FROM cards").fetchone()[0]) + + def list_for_board(self, board_id: str) -> list[sqlite3.Row]: + return list( + self.conn.execute( + """ + SELECT c.*, col.name AS column_name + FROM cards c + JOIN columns col ON col.id = c.column_id + WHERE c.board_id = ? 
AND c.archived_at IS NULL + ORDER BY col.position, c.position + """, + (board_id,), + ).fetchall() + ) diff --git a/Frontiers/Utopia/utopia_data/services.py b/Frontiers/Utopia/utopia_data/services.py new file mode 100644 index 000000000..1722c5517 --- /dev/null +++ b/Frontiers/Utopia/utopia_data/services.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import json +import uuid +from typing import Any + +from .models import Board, Card, Column, Tag, Workspace +from .repositories import BoardRepository, CardRepository, ColumnRepository, TagRepository, WorkspaceRepository + + +class UtopiaBoardService: + """Service boundary so UI/business code doesn't query raw tables directly.""" + + def __init__(self, workspace_repo: WorkspaceRepository, board_repo: BoardRepository, column_repo: ColumnRepository, card_repo: CardRepository, tag_repo: TagRepository): + self.workspace_repo = workspace_repo + self.board_repo = board_repo + self.column_repo = column_repo + self.card_repo = card_repo + self.tag_repo = tag_repo + + def create_workspace_with_board(self, workspace_name: str, board_name: str, column_names: list[str]) -> tuple[str, str, dict[str, str]]: + workspace_id = f"wsp_{uuid.uuid4().hex[:10]}" + board_id = f"brd_{uuid.uuid4().hex[:10]}" + self.workspace_repo.create(Workspace(id=workspace_id, name=workspace_name)) + self.board_repo.create(Board(id=board_id, workspace_id=workspace_id, name=board_name)) + + columns = [] + column_ids: dict[str, str] = {} + position = 1000.0 + for name in column_names: + col_id = f"col_{uuid.uuid4().hex[:10]}" + column_ids[name] = col_id + columns.append(Column(id=col_id, board_id=board_id, name=name, position=position)) + position += 1000.0 + self.column_repo.create_many(columns) + return workspace_id, board_id, column_ids + + def create_card(self, board_id: str, column_id: str, title: str, card_type: str, external_id: str | None, metadata: dict[str, Any], tags: list[tuple[str, str | None]], workspace_id: str, position: float) -> 
str: + card_id = f"crd_{uuid.uuid4().hex[:12]}" + tag_ids: list[str] = [] + for tag_name, color in tags: + tag_id = f"tag_{uuid.uuid4().hex[:10]}" + tag = self.tag_repo.get_or_create(Tag(id=tag_id, workspace_id=workspace_id, name=tag_name, color=color)) + tag_ids.append(tag.id) + + self.card_repo.create( + Card( + id=card_id, + board_id=board_id, + column_id=column_id, + title=title, + description=None, + card_type=card_type, + external_id=external_id, + position=position, + metadata_json=json.dumps(metadata, separators=(",", ":")), + ), + tag_ids=tag_ids, + ) + return card_id