Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions backend/app/schemas/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ class ErrorResponse(BaseModel):
description="Error information containing code, message, http_status, details, and request_id",
)

class Config:
json_schema_extra = {
model_config = {
"json_schema_extra": {
"example": {
"error": {
"code": "VALIDATION_FAILED",
Expand All @@ -65,3 +65,4 @@ class Config:
}
}
}
}
14 changes: 14 additions & 0 deletions backend/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,12 @@ dependencies = [
[dependency-groups]
dev = [
"pytest<8.0.0,>=7.4.3",
"pytest-asyncio>=0.23.0",
"testcontainers[postgres]>=4.0.0",
"ruff<1.0.0,>=0.2.2",
"prek>=0.2.24,<1.0.0",
"coverage<8.0.0,>=7.4.3",
"pytest-xdist>=3.5.0,<4.0.0", # tracked in ISSUE-1 (see backend/ISSUE-1.md)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There is no such issue. Also, we don't need a TODO scheduling its removal — this dependency should always be present so tests can run in parallel.

]

[build-system]
Expand All @@ -29,3 +32,14 @@ build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["app"]

[tool.pytest.ini_options]
testpaths = ["tests"]
asyncio_mode = "auto"
markers = [
"unit: Pure unit tests (no external dependencies)",
"integration: Integration tests (require database / containers)",
]
filterwarnings = [
"ignore::DeprecationWarning:pytest_asyncio",
]
Empty file added backend/tests/__init__.py
Empty file.
159 changes: 159 additions & 0 deletions backend/tests/conftest.py
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should be overriding the get_db dependency on fastapi.testclient in the fixtures, since the endpoints use dependency injection. From the endpoint's perspective there is only a cursor object; there is no pool, because that is managed by the get_db dependency. The following example snippet might simplify this entire file.

import pytest
from testcontainers.postgres import PostgresContainer
from fastapi.testclient import TestClient
from psycopg import connect
from app.main import app
from app.api.deps import get_db

@pytest.fixture(scope="module")
def override_get_db():
    def _get_db():
        # use appropriate exception handling as necessary
        with PostgresContainer(
            "postgres:18.2-alpine3.23"
        ).with_volume_mapping(
            "path/to/migration/dir",
            "/docker-entrypoint-initdb.d"
        ) as container:
            with connect(container.get_connection_url()) as conn:
                with conn.cursor() as cursor:
                    yield cursor
    app.dependency_overrides[get_db] = _get_db
    yield
    app.dependency_overrides.pop(get_db, None)

Original file line number Diff line number Diff line change
@@ -0,0 +1,159 @@
"""
Root conftest - shared fixtures for unit and integration tests.

Key fixtures:
postgres_container: Session-scoped Testcontainers PostgreSQL instance.
app: FastAPI application wired to the test database.
client: httpx.AsyncClient bound to the app.
db_session: Per-test psycopg cursor inside a transaction that is
rolled back after each test (full isolation).
"""

from __future__ import annotations

import typing
from psycopg import Cursor

import logging
import os
from collections.abc import Generator


import psycopg
import pytest
from dotenv import load_dotenv
from fastapi.testclient import TestClient


from testcontainers.postgres import PostgresContainer
from psycopg import connect

# Load .env before importing app.main to ensure env vars are set
dotenv_path = os.path.join(os.path.dirname(__file__), "..", ".env")
load_dotenv(dotenv_path)

Comment on lines +30 to +34
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we should not load the full env here as well using the dotenv for the obvious reasons. this might work in the local env but .env files are not present in CI so this would be not working.

# noqa: E402 to allow imports after env setup
from app.main import app # noqa: E402
from app.api.deps import get_db # noqa: E402

logger = logging.getLogger(__name__)


def _make_dsn(container: PostgresContainer) -> str:
dsn = container.get_connection_url()
# Patch SQLAlchemy/Testcontainers DSN to psycopg3-compatible
if dsn.startswith("postgresql+psycopg2://"):
dsn = dsn.replace("postgresql+psycopg2://", "postgresql://", 1)
return dsn


Comment on lines +42 to +49
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this functionality can be done by passing the optional driver to None in get_connection_url or when creating the testcontainer so this should be removed completely.

# ---------------------------------------------------------------------------
# 1. Testcontainers - session-scoped Postgres
# ---------------------------------------------------------------------------
@pytest.fixture(scope="session")
def postgres_container() -> Generator[PostgresContainer, None, None]:
    """Provide one PostgreSQL Testcontainer for the whole test session.

    The container is started eagerly and always stopped at session
    teardown, even if a test or fixture raised.

    Yields:
        The running PostgresContainer instance.
    """
    pg = PostgresContainer("postgres:16-alpine")
    pg.start()
    try:
        yield pg
    finally:
        pg.stop()


# ---------------------------------------------------------------------------
# 2. Environment variables for the test session
# ---------------------------------------------------------------------------
@pytest.fixture(scope="session", autouse=True)
def _set_test_env(postgres_container: PostgresContainer) -> None:
    """Point application settings at the Testcontainers database.

    Writes the container's connection details (plus test-only secrets)
    into ``os.environ`` so ``app.core.config.Settings`` reads them
    instead of a real database configuration.

    Args:
        postgres_container: The running Testcontainers Postgres instance.
    """
    env = {
        "POSTGRES_SERVER": postgres_container.get_container_host_ip(),
        "POSTGRES_PORT": str(postgres_container.get_exposed_port(5432)),
        "POSTGRES_USER": postgres_container.username,
        "POSTGRES_PASSWORD": postgres_container.password,
        "POSTGRES_DB": postgres_container.dbname,
        "SECRET_KEY": "test-secret-key-for-testing-only",
        "PROJECT_NAME": "permit-test",
    }
    os.environ.update(env)
Comment on lines +73 to +89
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we should not bother with setting env vars since dev environment should already have those. with that being said, the testcontainers should be providing the database connection so these env vars should never be directly used by any code.



# ---------------------------------------------------------------------------
# 3. Run migrations against the container
# ---------------------------------------------------------------------------
@pytest.fixture(scope="session", autouse=True)
def _run_migrations(
    postgres_container: PostgresContainer,
    _set_test_env: None,  # ensure env is set first
) -> None:
    """Apply SQL migration files from the ``migrations/`` directory.

    Reads every ``.sql`` file in alphabetical order and executes it
    against the Testcontainers Postgres instance.  If a migration fails,
    the test session fails fast with the offending file's name instead
    of an anonymous database error.

    Args:
        postgres_container: The running Testcontainers Postgres instance.
        _set_test_env: Ensures environment variables are configured first.
    """
    import pathlib

    migrations_dir = (
        pathlib.Path(__file__).resolve().parent.parent.parent / "migrations"
    )
    if not migrations_dir.exists():
        pytest.fail(
            f"Migrations directory not found: {migrations_dir}. "
            "Tests require migration SQL files to set up the database schema."
        )

    dsn = _make_dsn(postgres_container)
    with psycopg.connect(dsn) as conn:
        # autocommit so each file's DDL takes effect immediately.
        conn.autocommit = True
        for sql_file in sorted(migrations_dir.glob("*.sql")):
            try:
                conn.execute(sql_file.read_text())
            except Exception as exc:
                # Name the failing file - crucial for debugging migration order.
                pytest.fail(f"Migration {sql_file.name} failed: {exc}")
Comment on lines +95 to +125
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this fixture is redundant since we are already mounting the migrations to /docker-entrypoint-initdb.d in postgres container so migrations should be applied automatically.


# Reuse the session-scoped postgres_container for override_get_db


@pytest.fixture(scope="module")
def override_get_db(postgres_container):
    """Install a ``get_db`` override that yields cursors to the test DB.

    Each request served while the override is active gets a fresh
    connection and cursor; the override is removed at module teardown.
    """
    dsn = _make_dsn(postgres_container)

    def _get_db() -> typing.Generator[Cursor, None, None]:
        # One connection/cursor pair per request, closed when it ends.
        with connect(dsn) as conn, conn.cursor() as cursor:
            yield cursor

    app.dependency_overrides[get_db] = _get_db
    yield
    app.dependency_overrides.pop(get_db, None)
Comment on lines +131 to +141
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we should not be using the single container for the entire testing session since it might have unintended side effects when tests are run in the parallel. this override should start the testcontainer for each override on the module level.



@pytest.fixture
def client(override_get_db):
    """Yield a ``TestClient`` for the app with ``get_db`` overridden."""
    with TestClient(app) as test_client:
        yield test_client

Comment on lines +144 to +148
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This wrapper does nothing beyond returning a fastapi.testclient, so instead of importing this fixture, callers should import TestClient directly, imho.


# Per-test db_session fixture for psycopg cursor with rollback
@pytest.fixture
def db_session(postgres_container) -> typing.Generator[Cursor, None, None]:
    """Yield a cursor whose changes are rolled back after each test."""
    dsn = _make_dsn(postgres_container)
    with connect(dsn, autocommit=False) as conn, conn.cursor() as cur:
        try:
            yield cur
        finally:
            # Discard everything the test wrote before the connection closes.
            conn.rollback()
Empty file.
66 changes: 66 additions & 0 deletions backend/tests/integration/test_smoke.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
"""Integration smoke tests - verify Testcontainers and fixtures work end-to-end."""

import pytest
import psycopg
from fastapi.testclient import TestClient


@pytest.mark.integration
def test_db_session_provides_cursor(db_session: psycopg.Cursor) -> None:
    """Check that ``db_session`` yields a live cursor to the test database.

    NOTE: This is a trivial smoke check - a single ``SELECT 1`` proving
    the cursor is connected.  It does not demonstrate isolation across
    modules or classes; stronger checks belong elsewhere if the fixture's
    isolation strategy changes.
    """
    db_session.execute("SELECT 1 AS ok")
    result = db_session.fetchone()
    assert result is not None and result[0] == 1


@pytest.mark.integration
def test_db_session_rolls_back_and_isolation(db_session: psycopg.Cursor) -> None:
    """Verify transactional rollback provides full test isolation.

    NOTE: This test only demonstrates isolation when run repeatedly; a
    single run always passes.  Do not rely on it as the sole indicator
    that the fixture rolls back correctly.

    First asserts that ``public._test_isolation`` does not exist (proving
    the fixture rolled back any prior transaction), then creates the
    table, inserts a row, and confirms it is visible inside the current
    transaction.  Fixture teardown rolls the transaction back so later
    tests never see these changes.

    All statements qualify the ``public`` schema explicitly so the test
    does not depend on the session's ``search_path`` (which made the
    original version flaky).
    """
    # The table must not exist at the start (fixture rolled back prior txn).
    db_session.execute(
        "SELECT EXISTS ("
        " SELECT FROM information_schema.tables "
        " WHERE table_schema = 'public' AND table_name = '_test_isolation'"
        ")"
    )
    assert not db_session.fetchone()[0]

    # Write inside the transactional db_session, schema-qualified.
    db_session.execute(
        "CREATE TABLE public._test_isolation (id serial PRIMARY KEY, val text)"
    )
    db_session.execute(
        "INSERT INTO public._test_isolation (val) VALUES ('should_be_gone')"
    )
    db_session.execute("SELECT count(*) FROM public._test_isolation")
    assert db_session.fetchone()[0] == 1  # visible inside this txn
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should not work in theory: db_session is not running anything inside a real transaction — turning autocommit off and always rolling back does not by itself make it one.
A second problem: the table is created without specifying a schema, which makes the test depend on whatever schema is first in search_path, and therefore flaky.



@pytest.mark.integration
def test_client_health(client: TestClient):
    """Check the in-process TestClient can reach the health endpoint.

    A GET to ``/api/v1/health`` must return HTTP 200 with a body of
    ``{"data": {"status": "ok"}}``.
    """
    response = client.get("/api/v1/health")
    assert response.status_code == 200
    assert response.json()["data"]["status"] == "ok"
Empty file added backend/tests/unit/__init__.py
Empty file.
13 changes: 13 additions & 0 deletions backend/tests/unit/test_smoke.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
"""Unit smoke tests - verify the test infrastructure works."""

import pytest


@pytest.mark.unit
def test_marker_registered() -> None:
    """Placeholder that exercises the custom ``unit`` marker.

    The test body is intentionally empty: its only job is to confirm
    that pytest accepts the ``unit`` marker without raising
    unknown-marker warnings.
    """
5 changes: 5 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,2 +1,7 @@
[tool.uv.workspace]
members = ["backend"]

[dependency-groups]
dev = [
"python-dotenv>=1.2.1",
]
Loading