1 change: 1 addition & 0 deletions .gitignore
@@ -9,3 +9,4 @@ video_cache
yolo_cache
ftp_incoming
config/config.yaml
.coverage
29 changes: 26 additions & 3 deletions AGENTS.md
@@ -13,6 +13,7 @@
- **Preserve stack traces**: Custom errors must set `self.__cause__ = cause` to preserve original exception.
- **Tests must use Given/When/Then comments**: Every test must include these comments and follow behavioral testing principles (see `TESTING.md`).
- **Postgres for state**: Use `clip_states` table with `clip_id` (primary key) + `data` (jsonb) for evolvable schema.
- **DB migrations**: If you change schema/models, follow `docs/migrations.md` (PostgreSQL-first autogenerate, add SQLite compatibility, run migration smoke test).
- **Pydantic everywhere**: Validate config, DB payloads, VLM outputs, and MQTT payloads with Pydantic models.
- **Clarify before complexity**: Ask user for clarification when simpler design may exist. Don't proceed with complex workarounds.
- **Product priorities**: Recording + uploading (P0) must work even if Postgres is down. Analysis/notifications are best-effort (P1).
@@ -349,12 +350,34 @@ async def test_filter_stage_failure():
```bash
uv sync # Install dependencies
make typecheck # Run mypy --strict (mandatory before commit)
make test # Run pytest
make check # Run both typecheck + test
make db-up # Start Postgres for development
make test # Run pytest (both SQLite and PostgreSQL)
make test-sqlite # Run pytest with SQLite only (fast, no PG required)
make check # Run lint + typecheck + test
make db # Start Postgres for development
make db-migrate # Run Alembic migrations
```

### Database Testing

Tests run against **both SQLite and PostgreSQL** by default using parametrized fixtures. This ensures compatibility with both backends.

**If PostgreSQL is unavailable or causing issues**, you can skip PostgreSQL tests:

```bash
# Skip PostgreSQL tests, run only SQLite
SKIP_POSTGRES_TESTS=1 make test

# Or use the dedicated target
make test-sqlite
```

| Variable | Default | Description |
|----------|---------|-------------|
| `SKIP_POSTGRES_TESTS` | `0` (off) | Set to `1` to skip PostgreSQL tests |
| `TEST_DB_DSN` | (from `.env`) | PostgreSQL connection string for tests |

**Note:** CI always runs both backends. Only use `SKIP_POSTGRES_TESTS=1` for local development when PostgreSQL is unavailable.
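
As a rough sketch of how the parametrized backend fixtures could be wired up (the fixture name, DSN strings, and helper below are hypothetical, not the project's actual `conftest.py`), a parametrization that honors `SKIP_POSTGRES_TESTS` might look like:

```python
# Hypothetical conftest.py sketch: fixture and helper names are illustrative only.
import os

import pytest


def _enabled_backends() -> list[str]:
    """SQLite always runs; PostgreSQL is skipped when SKIP_POSTGRES_TESTS=1."""
    backends = ["sqlite"]
    if os.getenv("SKIP_POSTGRES_TESTS", "0") != "1":
        backends.append("postgres")
    return backends


@pytest.fixture(params=_enabled_backends())
def db_dsn(request: pytest.FixtureRequest) -> str:
    """Yield a DSN for each enabled backend; PostgreSQL comes from TEST_DB_DSN."""
    if request.param == "sqlite":
        return "sqlite+aiosqlite:///:memory:"
    dsn = os.getenv("TEST_DB_DSN")
    if not dsn:
        pytest.skip("TEST_DB_DSN not set; cannot run PostgreSQL tests")
    return dsn
```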

### Pattern Usage Examples

**Error-as-value:** `src/homesec/pipeline/core.py::_filter_stage()`, `_upload_stage()`, `_vlm_stage()`
14 changes: 10 additions & 4 deletions Makefile
@@ -1,7 +1,7 @@
SHELL := /bin/bash
.SHELLFLAGS := -eu -o pipefail -c

.PHONY: help up down docker-build docker-push run db test coverage typecheck lint check db-migrate db-migration publish
.PHONY: help up down docker-build docker-push run db test test-sqlite coverage typecheck lint check db-migrate db-migration publish

help:
@echo "Targets:"
@@ -15,7 +15,8 @@ help:
@echo " Local dev:"
@echo " make run Run HomeSec locally (requires Postgres)"
@echo " make db Start just Postgres"
@echo " make test Run tests with coverage"
@echo " make test Run tests with coverage (SQLite + PostgreSQL)"
@echo " make test-sqlite Run tests with SQLite only (fast, no PG required)"
@echo " make coverage Run tests and generate HTML coverage report"
@echo " make typecheck Run mypy"
@echo " make lint Run ruff linter"
@@ -64,9 +65,14 @@ run:
db:
docker compose up -d postgres

# Run all tests (both SQLite and PostgreSQL backends)
test:
uv run pytest tests/homesec/ -v --cov=homesec --cov-report=term-missing

# Run tests with SQLite only (fast, no PostgreSQL required)
test-sqlite:
SKIP_POSTGRES_TESTS=1 uv run pytest tests/homesec/ -v --cov=homesec --cov-report=term-missing

coverage:
uv run pytest tests/homesec/ -v --cov=homesec --cov-report=html --cov-report=xml
@echo "Coverage report: htmlcov/index.html"
@@ -86,14 +92,14 @@ check: lint typecheck test

# Database
db-migrate:
uv run --with alembic --with sqlalchemy --with asyncpg --with python-dotenv alembic -c alembic.ini upgrade head
uv run --with alembic --with sqlalchemy --with asyncpg --with aiosqlite --with python-dotenv alembic -c alembic.ini upgrade head

db-migration:
@if [ -z "$(m)" ]; then \
echo "Error: message required. Run: make db-migration m=\"your description\""; \
exit 1; \
fi
uv run --with alembic --with sqlalchemy --with asyncpg --with python-dotenv alembic -c alembic.ini revision --autogenerate -m "$(m)"
uv run --with alembic --with sqlalchemy --with asyncpg --with aiosqlite --with python-dotenv alembic -c alembic.ini revision --autogenerate -m "$(m)"

# Release
publish: check
54 changes: 46 additions & 8 deletions alembic/env.py
@@ -1,3 +1,8 @@
"""Alembic migration environment configuration.

Supports both PostgreSQL and SQLite through the database abstraction layer.
"""

from __future__ import annotations

import asyncio
@@ -11,17 +16,17 @@
load_dotenv()

from alembic import context
from sqlalchemy import pool
from sqlalchemy import MetaData, pool
from sqlalchemy.ext.asyncio import async_engine_from_config

PROJECT_ROOT = Path(__file__).resolve().parents[1]
SRC_DIR = PROJECT_ROOT / "src"
if str(SRC_DIR) not in sys.path:
sys.path.insert(0, str(SRC_DIR))

from sqlalchemy import MetaData # noqa: E402
from homesec.telemetry.db.log_table import metadata as telemetry_metadata # noqa: E402
from homesec.db import DialectHelper # noqa: E402
from homesec.state.postgres import Base as StateBase # noqa: E402
from homesec.telemetry.db.log_table import metadata as telemetry_metadata # noqa: E402

# Combine all metadata into one for alembic
target_metadata = MetaData()
@@ -33,41 +38,74 @@
config = context.config

if config.config_file_name is not None:
fileConfig(config.config_file_name)
fileConfig(config.config_file_name, disable_existing_loggers=False)


def _get_url() -> str:
"""Get database URL from environment or config."""
url = os.getenv("DB_DSN") or os.getenv("DATABASE_URL") or config.get_main_option("sqlalchemy.url")
if not url:
raise RuntimeError("Missing DB_DSN (or DATABASE_URL) for alembic migration.")
return url


def _normalize_url(url: str) -> str:
"""Normalize URL to use appropriate async driver."""
return DialectHelper.normalize_dsn(url)


def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.

This configures the context with just a URL and not an Engine,
though an Engine is acceptable here as well. By skipping the Engine
creation we don't even need a DBAPI to be available.
"""
url = _normalize_url(_get_url())

context.configure(
url=_get_url(),
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
# Enable batch mode for SQLite ALTER TABLE support
render_as_batch=True,
)

with context.begin_transaction():
context.run_migrations()


def do_run_migrations(connection) -> None:
context.configure(connection=connection, target_metadata=target_metadata, compare_type=True)
"""Execute migrations with the given connection."""
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
# Enable batch mode for SQLite ALTER TABLE support
render_as_batch=True,
)

with context.begin_transaction():
context.run_migrations()


async def run_migrations_online() -> None:
"""Run migrations in 'online' mode.

In this scenario we need to create an Engine and associate
a connection with the context.
"""
url = _normalize_url(_get_url())
configuration = config.get_section(config.config_ini_section, {})
configuration["sqlalchemy.url"] = _get_url()
configuration["sqlalchemy.url"] = url

connectable = async_engine_from_config(configuration, prefix="sqlalchemy.", poolclass=pool.NullPool)
connectable = async_engine_from_config(
configuration,
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)

async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
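For reference, `DialectHelper.normalize_dsn` (imported from `homesec.db` above) is expected to rewrite plain DSNs onto the async drivers the migrations use. A minimal sketch of that kind of normalization, assuming the real helper in `homesec.db` may differ:

```python
# Illustrative sketch only; the actual DialectHelper lives in homesec.db.
def normalize_dsn(url: str) -> str:
    """Map plain DSNs to async drivers (asyncpg for PostgreSQL, aiosqlite for SQLite)."""
    if url.startswith("postgresql://"):
        return url.replace("postgresql://", "postgresql+asyncpg://", 1)
    if url.startswith("sqlite://"):
        return url.replace("sqlite://", "sqlite+aiosqlite://", 1)
    return url
```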
55 changes: 44 additions & 11 deletions alembic/versions/e6f25df0df90_initial.py
@@ -7,9 +7,10 @@
"""
from typing import Sequence, Union

from alembic import op
from alembic import context, op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from homesec.db import JSONType

# revision identifiers, used by Alembic.
revision: str = 'e6f25df0df90'
@@ -18,30 +19,62 @@
depends_on: Union[str, Sequence[str], None] = None


def _dialect_name() -> str:
return context.get_context().dialect.name


def _is_postgres() -> bool:
return _dialect_name() == "postgresql"


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('clip_states',
sa.Column('clip_id', sa.Text(), nullable=False),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('data', JSONType(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.PrimaryKeyConstraint('clip_id')
)
op.create_index('idx_clip_states_camera', 'clip_states', [sa.literal_column("jsonb_extract_path_text(data, 'camera_name')")], unique=False)
op.create_index('idx_clip_states_status', 'clip_states', [sa.literal_column("jsonb_extract_path_text(data, 'status')")], unique=False)
if _is_postgres():
op.create_index(
'idx_clip_states_camera',
'clip_states',
[sa.literal_column("jsonb_extract_path_text(data, 'camera_name')")],
unique=False,
)
op.create_index(
'idx_clip_states_status',
'clip_states',
[sa.literal_column("jsonb_extract_path_text(data, 'status')")],
unique=False,
)
else:
op.create_index(
'idx_clip_states_camera',
'clip_states',
[sa.literal_column("json_extract(data, '$.camera_name')")],
unique=False,
)
op.create_index(
'idx_clip_states_status',
'clip_states',
[sa.literal_column("json_extract(data, '$.status')")],
unique=False,
)
op.create_table('logs',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('ts', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('ts', sa.DateTime(timezone=True), server_default=sa.func.now(), nullable=False),
sa.Column('payload', JSONType(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('logs_ts_idx', 'logs', [sa.literal_column('ts DESC')], unique=False)
op.create_table('clip_events',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('id', sa.Integer(), sa.Identity(), nullable=False),
sa.Column('clip_id', sa.Text(), nullable=False),
sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
sa.Column('event_type', sa.Text(), nullable=False),
sa.Column('event_data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
sa.Column('event_data', JSONType(), nullable=False),
sa.ForeignKeyConstraint(['clip_id'], ['clip_states.clip_id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)