Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,16 @@ FROM python:3.12-slim

WORKDIR /app

# Install system deps for ODBC/SQL Server (unixODBC, msodbcsql18) and Python requirements.
# curl uses -fsSL so an HTTP error response fails the build instead of being piped
# into gpg --dearmor / the apt sources list as garbage.
COPY requirements.txt .
RUN apt-get update && apt-get install -y --no-install-recommends curl gnupg ca-certificates unixodbc unixodbc-dev && \
    mkdir -p /etc/apt/keyrings && \
    curl -fsSL https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /etc/apt/keyrings/microsoft.gpg && \
    chmod 644 /etc/apt/keyrings/microsoft.gpg && \
    curl -fsSL https://packages.microsoft.com/config/debian/12/prod.list | sed 's|^deb |deb [signed-by=/etc/apt/keyrings/microsoft.gpg] |' > /etc/apt/sources.list.d/mssql-release.list && \
    apt-get update && ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18 && \
    rm -rf /var/lib/apt/lists/* && \
    pip install --no-cache-dir -r requirements.txt

# Application
COPY main.py .
Expand Down
103 changes: 103 additions & 0 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,10 @@
# Lazy singleton caches for optional external sinks (initialized on first use).
# _blob_container_client: container client if connected, False if init failed, None if not tried
_blob_container_client: Any = None

# Optional Azure SQL connection cache.
# None = not yet attempted (or stale/retryable), False = disabled (missing pyodbc
# or misconfiguration), object = live pyodbc connection
_sql_connection: Any = None


def _get_blob_container():
"""Lazy-init Azure Blob container client from env. Returns None if not configured or init failed."""
Expand Down Expand Up @@ -87,6 +91,8 @@ def _append_to_blob(blob_name: str, line: str) -> None:
# Configuration — read once at import time from environment variables.
DATA_DIR = Path(os.environ.get("THERMAL_DATA_DIR", "thermal_data"))  # local JSONL output directory
SAVE_DATA = os.environ.get("SAVE_THERMAL_DATA", "true").lower() in ("1", "true", "yes")  # local file sink on/off
SQL_CONNECTION_STRING = os.environ.get("SQL_CONNECTION_STRING", "").strip()  # empty string = SQL sink unconfigured
SAVE_TO_SQL = os.environ.get("SAVE_TO_SQL", "true").lower() in ("1", "true", "yes")  # SQL sink on/off (needs conn string too)

# Occupancy detection parameters
MIN_HUMAN_TEMP = 30.0
Expand Down Expand Up @@ -316,6 +322,102 @@ def convert_numpy_types(obj: Any) -> Any:
return tuple(convert_numpy_types(x) for x in obj)
return obj

def _get_sql_connection():
    """Return a cached Azure SQL (pyodbc) connection, or None if unavailable.

    Caching protocol (module global ``_sql_connection``):
      * ``None``  -- no live connection; a (re)connect may be attempted.
      * ``False`` -- permanently disabled (pyodbc not importable).
      * object    -- live pyodbc connection, liveness-checked before reuse.

    Connect failures are treated as *transient*: instead of disabling SQL
    saving for the process lifetime (the previous behavior, which turned a
    momentary outage into a permanent one), a short cooldown suppresses
    reconnect attempts so a persistent outage does not add a full connect
    timeout to every request.
    """
    global _sql_connection
    if _sql_connection is False:
        return None  # permanently disabled: pyodbc could not be imported

    if _sql_connection is not None:
        try:
            # Lightweight connectivity check at the ODBC driver level
            # (no query is sent to the server).
            _sql_connection.getinfo(2)  # SQL_DATA_SOURCE_NAME
            return _sql_connection
        except Exception:
            _sql_connection = None  # stale; fall through and reconnect

    if not SQL_CONNECTION_STRING:
        return None  # SQL sink not configured

    import time  # deferred: only needed for the reconnect cooldown

    # Honor the cooldown recorded after a previous failed connect attempt.
    if time.monotonic() < getattr(_get_sql_connection, "_retry_at", 0.0):
        return None

    try:
        import pyodbc  # noqa: PLC0415 – intentionally deferred for optional dependency
    except ImportError:
        print("pyodbc is not installed; Azure SQL saving disabled.")
        _sql_connection = False  # can never succeed in this process
        return None

    try:
        _sql_connection = pyodbc.connect(SQL_CONNECTION_STRING, timeout=10)
        return _sql_connection
    except Exception as e:
        # Log only the exception class: pyodbc error text can embed server /
        # credential details taken from the connection string.
        print(f"Azure SQL connection failed ({type(e).__name__}); will retry after cooldown.")
        # Transient failure: back off for 60 s rather than disabling forever.
        _get_sql_connection._retry_at = time.monotonic() + 60.0
        return None
Comment on lines +348 to +355
Copy link

Copilot AI Mar 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Avoid printing raw SQL connection exceptions here. pyodbc error strings can include server/user details (and in some cases parts of the connection string), which can leak sensitive info into logs. Prefer structured logging and a sanitized message (e.g., log an error code/class, or gate detailed errors behind a debug flag).

Copilot uses AI. Check for mistakes.


def save_occupancy_data_sql(occupancy_result: dict, timestamp_iso: Optional[str] = None) -> None:
    """Persist one occupancy estimation row to Azure SQL (best-effort).

    No-ops silently when the SQL sink is disabled or unconfigured. On any
    insert failure the error is logged (class name only, to avoid leaking
    connection details), the transaction is rolled back, and the cached
    connection is closed and discarded so the next call reconnects cleanly.

    Args:
        occupancy_result: Estimator output; must contain "occupancy" and may
            contain "sensor_id", "room_temperature", "people_clusters",
            "fever_count", "any_fever".
        timestamp_iso: Optional ISO-8601 timestamp; defaults to local now().
    """
    global _sql_connection
    if not SAVE_TO_SQL:
        return

    conn = _get_sql_connection()
    if conn is None:
        return

    cursor = None
    try:
        sid = occupancy_result.get("sensor_id") or "unknown"
        ts = timestamp_iso or datetime.now().isoformat()

        # Normalize to plain Python/JSON-safe types before binding parameters.
        entry = {
            "timestamp": ts,
            "sensor_id": sid,
            "occupancy": int(occupancy_result["occupancy"]),
            "room_temperature": (
                float(occupancy_result["room_temperature"])
                if occupancy_result.get("room_temperature") is not None
                else None
            ),
            "people_clusters": json.dumps(
                convert_numpy_types(occupancy_result.get("people_clusters", []))
            ),
            "fever_count": int(occupancy_result.get("fever_count", 0)),
            "any_fever": bool(occupancy_result.get("any_fever", False)),
        }

        cursor = conn.cursor()
        cursor.execute(
            """
            INSERT INTO occupancy_data
                ([timestamp], sensor_id, occupancy, room_temperature,
                 people_clusters, fever_count, any_fever)
            VALUES (?, ?, ?, ?, ?, ?, ?)
            """,
            entry["timestamp"],
            entry["sensor_id"],
            entry["occupancy"],
            entry["room_temperature"],
            entry["people_clusters"],
            entry["fever_count"],
            1 if entry["any_fever"] else 0,  # BIT column expects 0/1
        )
        conn.commit()
    except Exception as e:
        print(f"Error saving occupancy data to Azure SQL ({type(e).__name__}); will retry on next call.")
        try:
            conn.rollback()
        except Exception:
            pass
        # Fix: close the broken connection before discarding it so the
        # underlying ODBC handle/socket is released immediately (previously
        # only the reference was dropped, leaking the handle until GC).
        try:
            conn.close()
        except Exception:
            pass
        _sql_connection = None  # force a reconnect on the next call
    finally:
        # Close the cursor on both success and failure paths.
        if cursor is not None:
            try:
                cursor.close()
            except Exception:
                pass


def save_thermal_data(
compact_data: dict, expanded_data: dict, sensor_id: Optional[str] = None
Expand Down Expand Up @@ -544,6 +646,7 @@ def receive_thermal_data(data: dict) -> dict:
last_update_time_by_sensor[sensor_id] = now_iso
save_thermal_data(compact_data, latest_thermal_data, sensor_id)
save_occupancy_data(occupancy_result)
save_occupancy_data_sql(occupancy_result, timestamp_iso=now_iso)
Comment on lines 647 to +649
Copy link

Copilot AI Mar 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

PR description mentions "switching" between local JSONL storage and Azure SQL, but the API still always writes local occupancy history when SAVE_THERMAL_DATA is true, and all history/stats endpoints still read only from local files. If Azure SQL is intended to be the backend, either add SQL-backed read paths (or a clear fallback), or update the description/env flags to reflect that SQL is an additional sink rather than a full backend.

Copilot uses AI. Check for mistakes.
pixel_count = len(latest_thermal_data.get("pixels", []))
return {
"status": "success",
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,4 @@ numpy>=1.20.0
scipy>=1.7.0
azure-storage-blob>=12.19.0
requests>=2.25.0
pyodbc>=5.1.0