+
diff --git a/app/utils/excel_export.py b/app/utils/excel_export.py
index ec820233..e6ebd860 100644
--- a/app/utils/excel_export.py
+++ b/app/utils/excel_export.py
@@ -384,14 +384,7 @@ def create_invoice_excel(invoice, items):
wb.save(output)
output.seek(0)
- # Get invoice prefix from settings, default to "INV"
- from app.models import Settings
-
- settings = Settings.get_settings()
- prefix = getattr(settings, "invoice_prefix", "INV") if settings else "INV"
- if not prefix:
- prefix = "INV"
- filename = f"{prefix}_{invoice.invoice_number}.xlsx"
+ filename = f"{invoice.invoice_number}.xlsx"
return output, filename
diff --git a/app/utils/invoice_numbering.py b/app/utils/invoice_numbering.py
new file mode 100644
index 00000000..f6f51bca
--- /dev/null
+++ b/app/utils/invoice_numbering.py
@@ -0,0 +1,129 @@
+import re
+from datetime import datetime
+
+
+DEFAULT_INVOICE_PATTERN = "{PREFIX}-{YYYY}{MM}{DD}-{SEQ}"
+_ALLOWED_TOKENS = {"SEQ", "YYYY", "YY", "MM", "DD", "PREFIX"}
+
+
+def sanitize_invoice_prefix(prefix_value):
+ """Normalize legacy prefix input while allowing empty values."""
+ if prefix_value is None:
+ return ""
+ return str(prefix_value).strip()
+
+
+def sanitize_invoice_pattern(pattern_value):
+ """Normalize pattern input while allowing empty values."""
+ if pattern_value is None:
+ return ""
+ return str(pattern_value).strip()
+
+
+def validate_invoice_pattern(pattern_value):
+ """Validate invoice number pattern and return (ok, error_message)."""
+ pattern = sanitize_invoice_pattern(pattern_value)
+ if not pattern:
+ return True, ""
+
+ tokens = re.findall(r"\{([A-Z]+)\}", pattern)
+ if not tokens:
+ return False, "Pattern must include at least one token such as {SEQ}."
+
+ invalid_tokens = sorted({token for token in tokens if token not in _ALLOWED_TOKENS})
+ if invalid_tokens:
+ return False, f"Unsupported token(s): {', '.join(invalid_tokens)}"
+
+ if "SEQ" not in tokens:
+ return False, "Pattern must include {SEQ}."
+
+ return True, ""
+
+
+def resolve_invoice_pattern(settings):
+ """Resolve the effective pattern from settings with compatibility fallback."""
+ raw_pattern = sanitize_invoice_pattern(getattr(settings, "invoice_number_pattern", ""))
+ if raw_pattern:
+ return raw_pattern
+ return "{SEQ}"
+
+
+def _normalize_start_number(start_number):
+ try:
+ normalized = int(start_number)
+ return max(1, normalized)
+ except (TypeError, ValueError):
+ return 1
+
+
+def _build_token_values(now, prefix):
+ return {
+ "YYYY": now.strftime("%Y"),
+ "YY": now.strftime("%y"),
+ "MM": now.strftime("%m"),
+ "DD": now.strftime("%d"),
+ "PREFIX": prefix,
+ }
+
+
+def _materialize_pattern_without_seq(pattern, token_values):
+ rendered = pattern
+ for key, value in token_values.items():
+ rendered = rendered.replace(f"{{{key}}}", value)
+ return rendered
+
+
+def _extract_seq_width(pattern):
+ return max(3, len(re.findall(r"\{SEQ\}", pattern)))
+
+
+def generate_next_invoice_number(invoice_model, invoice_query=None, settings=None, now=None):
+ """Generate next invoice number for the current pattern and settings."""
+ if settings is None:
+ from app.models import Settings
+
+ settings = Settings.get_settings()
+
+ now = now or datetime.utcnow()
+ prefix = sanitize_invoice_prefix(getattr(settings, "invoice_prefix", ""))
+ start_number = _normalize_start_number(getattr(settings, "invoice_start_number", 1))
+ pattern = resolve_invoice_pattern(settings)
+
+ token_values = _build_token_values(now, prefix)
+ materialized = _materialize_pattern_without_seq(pattern, token_values)
+ seq_placeholder = "{SEQ}"
+
+ if seq_placeholder not in materialized:
+ materialized = f"{materialized}{seq_placeholder}"
+
+ seq_width = _extract_seq_width(materialized)
+    regex_pattern = "^" + re.escape(materialized).replace(re.escape(seq_placeholder), r"(?P<seq>\d+)") + "$"
+ seq_regex = re.compile(regex_pattern)
+
+ first_seq_idx = materialized.index(seq_placeholder)
+ prefix_probe = materialized[:first_seq_idx]
+
+ # Use a lightweight pre-filter when possible.
+ query = invoice_query or invoice_model.query
+ if prefix_probe:
+ query = query.filter(invoice_model.invoice_number.startswith(prefix_probe))
+
+ max_seq = None
+ for (invoice_number,) in query.with_entities(invoice_model.invoice_number).all():
+ if not invoice_number:
+ continue
+ match = seq_regex.match(invoice_number)
+ if not match:
+ continue
+ try:
+ seq_value = int(match.group("seq"))
+ except (TypeError, ValueError):
+ continue
+ max_seq = seq_value if max_seq is None else max(max_seq, seq_value)
+
+ if max_seq is None:
+ next_seq = start_number
+ else:
+ next_seq = max(max_seq + 1, start_number)
+
+ return materialized.replace(seq_placeholder, f"{next_seq:0{seq_width}d}", 1)
diff --git a/app/utils/posthog_features.py b/app/utils/posthog_features.py
index ba535f59..f52d4742 100644
--- a/app/utils/posthog_features.py
+++ b/app/utils/posthog_features.py
@@ -12,13 +12,12 @@
from functools import wraps
from typing import Any, Dict, Optional
-import posthog
from flask import request
def is_posthog_enabled() -> bool:
- """Check if PostHog is enabled and configured"""
- return bool(os.getenv("POSTHOG_API_KEY", ""))
+ """Legacy feature-flag hook; disabled after Grafana cutover."""
+ return False
def get_feature_flag(user_id: Any, flag_key: str, default: bool = False) -> bool:
@@ -36,10 +35,7 @@ def get_feature_flag(user_id: Any, flag_key: str, default: bool = False) -> bool
if not is_posthog_enabled():
return default
- try:
- return posthog.feature_enabled(flag_key, str(user_id)) or default
- except Exception:
- return default
+ return default
def get_feature_flag_payload(user_id: Any, flag_key: str) -> Optional[Dict[str, Any]]:
@@ -62,10 +58,7 @@ def get_feature_flag_payload(user_id: Any, flag_key: str) -> Optional[Dict[str,
if not is_posthog_enabled():
return None
- try:
- return posthog.get_feature_flag_payload(flag_key, str(user_id))
- except Exception:
- return None
+ return None
def get_all_feature_flags(user_id: Any) -> Dict[str, Any]:
@@ -83,10 +76,7 @@ def get_all_feature_flags(user_id: Any) -> Dict[str, Any]:
if not is_posthog_enabled():
return {}
- try:
- return posthog.get_all_flags(str(user_id)) or {}
- except Exception:
- return {}
+ return {}
def feature_flag_required(flag_key: str, redirect_to: Optional[str] = None):
diff --git a/app/utils/posthog_funnels.py b/app/utils/posthog_funnels.py
index bec170ac..51fa1037 100644
--- a/app/utils/posthog_funnels.py
+++ b/app/utils/posthog_funnels.py
@@ -11,10 +11,12 @@
def is_funnel_tracking_enabled() -> bool:
- """Check if funnel tracking is enabled (PostHog configured and user opted in)."""
+ """Check if funnel tracking is enabled (Grafana configured and user opted in)."""
from app.utils.telemetry import is_telemetry_enabled
- return bool(os.getenv("POSTHOG_API_KEY", "")) and is_telemetry_enabled()
+ return bool(os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "")) and bool(
+ os.getenv("OTEL_EXPORTER_OTLP_TOKEN", "")
+ ) and is_telemetry_enabled()
def track_funnel_step(user_id: Any, funnel_name: str, step: str, properties: Optional[Dict[str, Any]] = None) -> None:
diff --git a/app/utils/posthog_monitoring.py b/app/utils/posthog_monitoring.py
index ed873041..ca6f34dc 100644
--- a/app/utils/posthog_monitoring.py
+++ b/app/utils/posthog_monitoring.py
@@ -12,10 +12,12 @@
def is_monitoring_enabled() -> bool:
- """Check if PostHog monitoring is enabled (PostHog configured and user opted in)."""
+ """Check if monitoring telemetry is enabled and user opted in."""
from app.utils.telemetry import is_telemetry_enabled
- return bool(os.getenv("POSTHOG_API_KEY", "")) and is_telemetry_enabled()
+ return bool(os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "")) and bool(
+ os.getenv("OTEL_EXPORTER_OTLP_TOKEN", "")
+ ) and is_telemetry_enabled()
# ============================================================================
diff --git a/app/utils/posthog_segmentation.py b/app/utils/posthog_segmentation.py
index 4bb778a9..cd8a8f9b 100644
--- a/app/utils/posthog_segmentation.py
+++ b/app/utils/posthog_segmentation.py
@@ -10,8 +10,8 @@
def is_segmentation_enabled() -> bool:
- """Check if PostHog segmentation is enabled."""
- return bool(os.getenv("POSTHOG_API_KEY", ""))
+ """Check if segmentation telemetry is enabled."""
+ return bool(os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "")) and bool(os.getenv("OTEL_EXPORTER_OTLP_TOKEN", ""))
def identify_user_with_segments(user_id: Any, user) -> None:
diff --git a/app/utils/telemetry.py b/app/utils/telemetry.py
index 55a2fc65..ccdaf5b9 100644
--- a/app/utils/telemetry.py
+++ b/app/utils/telemetry.py
@@ -1,11 +1,8 @@
"""
-Telemetry utilities for anonymous usage tracking
+Telemetry utility wrappers.
-This module provides opt-in telemetry functionality that sends anonymized
-installation information via PostHog. All telemetry is:
-- Opt-in (disabled by default)
-- Anonymous (no PII)
-- Transparent (see docs/privacy.md)
+Legacy helper names are preserved for compatibility and delegated to the
+consent-aware telemetry service backed by Grafana OTLP.
"""
import hashlib
@@ -15,8 +12,6 @@
import time
from typing import Optional
-import posthog
-
def get_telemetry_fingerprint() -> str:
"""
@@ -80,103 +75,6 @@ def get_installation_config(): # type: ignore
raise RuntimeError("installation config unavailable")
-def _ensure_posthog_initialized() -> bool:
- """
- Ensure PostHog is initialized with API key and host.
-
- Returns:
- True if PostHog is ready to use, False otherwise
- """
- posthog_api_key = os.getenv("POSTHOG_API_KEY", "")
- if not posthog_api_key:
- return False
-
- try:
- # Initialize PostHog if not already done
- if not hasattr(posthog, "project_api_key") or not posthog.project_api_key:
- posthog.project_api_key = posthog_api_key
- posthog.host = os.getenv("POSTHOG_HOST", "https://app.posthog.com")
- return True
- except Exception:
- return False
-
-
-def _get_installation_properties() -> dict:
- """
- Get installation properties for PostHog person/group properties.
-
- Returns:
- Dictionary of installation characteristics (no PII)
- """
- import sys
-
- # Get app version from analytics config (which reads from setup.py)
- from app.config.analytics_defaults import get_analytics_config
-
- analytics_config = get_analytics_config()
- app_version = analytics_config.get("app_version")
- flask_env = os.getenv("FLASK_ENV", "production")
-
- properties = {
- # Version info
- "app_version": app_version,
- "python_version": platform.python_version(),
- "python_major_version": f"{sys.version_info.major}.{sys.version_info.minor}",
- # Platform info
- "platform": platform.system(),
- "platform_release": platform.release(),
- "platform_version": platform.version(),
- "machine": platform.machine(),
- # Environment
- "environment": flask_env,
- "timezone": os.getenv("TZ", "Unknown"),
- # Deployment info
- "deployment_method": "docker" if os.path.exists("/.dockerenv") else "native",
- "auth_method": os.getenv("AUTH_METHOD", "local"),
- }
-
- return properties
-
-
-def _identify_installation(fingerprint: str) -> None:
- """
- Identify the installation in PostHog with person properties.
-
- This sets/updates properties on the installation fingerprint for better
- segmentation and cohort analysis in PostHog.
-
- Args:
- fingerprint: The installation fingerprint (distinct_id)
- """
- try:
- properties = _get_installation_properties()
-
- # Use $set_once for properties that shouldn't change (first install data)
- set_once_properties = {
- "first_seen_platform": properties["platform"],
- "first_seen_python_version": properties["python_version"],
- "first_seen_version": properties["app_version"],
- }
-
- # Regular $set properties that can update
- set_properties = {
- "current_version": properties["app_version"],
- "current_platform": properties["platform"],
- "current_python_version": properties["python_version"],
- "environment": properties["environment"],
- "deployment_method": properties["deployment_method"],
- "auth_method": properties["auth_method"],
- "timezone": properties["timezone"],
- "last_seen": time.strftime("%Y-%m-%d %H:%M:%S"),
- }
-
- # Identify the installation
- posthog.identify(distinct_id=fingerprint, properties={"$set": set_properties, "$set_once": set_once_properties})
- except Exception:
- # Don't let identification errors break telemetry
- pass
-
-
def send_telemetry_ping(event_type: str = "install", extra_data: Optional[dict] = None) -> bool:
"""
Send a telemetry ping via PostHog with person properties and groups.
@@ -192,86 +90,28 @@ def send_telemetry_ping(event_type: str = "install", extra_data: Optional[dict]
if not is_telemetry_enabled():
return False
- # Ensure PostHog is initialized and ready
- if not _ensure_posthog_initialized():
- return False
-
- # Get fingerprint for distinct_id
- fingerprint = get_telemetry_fingerprint()
-
- # Identify the installation with person properties (for better segmentation)
- _identify_installation(fingerprint)
-
- # Get installation properties
- install_props = _get_installation_properties()
-
- # Build event properties
- properties = {
- "app_version": install_props["app_version"],
- "platform": install_props["platform"],
- "python_version": install_props["python_version"],
- "environment": install_props["environment"],
- "deployment_method": install_props["deployment_method"],
- }
-
- # Add extra data if provided
- if extra_data:
- properties.update(extra_data)
-
- # Send telemetry via PostHog
try:
- posthog.capture(
- distinct_id=fingerprint,
- event=f"telemetry.{event_type}",
- properties=properties,
- groups={
- "version": install_props["app_version"],
- "platform": install_props["platform"],
- },
- )
-
- # Also update group properties for cohort analysis
- _update_group_properties(install_props)
+ from app.config.analytics_defaults import get_analytics_config
- return True
+ cfg = get_analytics_config()
+ endpoint = cfg.get("otel_exporter_otlp_endpoint") or os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT", "")
+ token = cfg.get("otel_exporter_otlp_token") or os.getenv("OTEL_EXPORTER_OTLP_TOKEN", "")
+ if not endpoint or not token:
+ return False
except Exception:
- # Silently fail - telemetry should never break the application
return False
-
-def _update_group_properties(install_props: dict) -> None:
- """
- Update PostHog group properties for version and platform cohorts.
-
- This enables analysis like "all installations on version X" or
- "all Linux installations".
-
- Args:
- install_props: Installation properties dictionary
- """
try:
- # Group by version
- posthog.group_identify(
- group_type="version",
- group_key=install_props["app_version"],
- properties={
- "version_number": install_props["app_version"],
- "python_versions": [install_props["python_version"]], # Will aggregate
- },
- )
+ from app.telemetry.service import send_analytics_event
- # Group by platform
- posthog.group_identify(
- group_type="platform",
- group_key=install_props["platform"],
- properties={
- "platform_name": install_props["platform"],
- "platform_release": install_props.get("platform_release", "Unknown"),
- },
+ send_analytics_event(
+ user_id=get_telemetry_fingerprint(),
+ event_name=f"telemetry.{event_type}",
+ properties=extra_data or {},
)
+ return True
except Exception:
- # Don't let group errors break telemetry
- pass
+ return False
def send_install_ping() -> bool:
diff --git a/docker-compose.example.yml b/docker-compose.example.yml
index 617e82d4..e46f6371 100644
--- a/docker-compose.example.yml
+++ b/docker-compose.example.yml
@@ -31,8 +31,9 @@ services:
# Analytics (optional)
- SENTRY_DSN=${SENTRY_DSN:-}
- SENTRY_TRACES_RATE=${SENTRY_TRACES_RATE:-0.0}
- - POSTHOG_API_KEY=${POSTHOG_API_KEY:-}
- - POSTHOG_HOST=${POSTHOG_HOST:-https://app.posthog.com}
+ - OTEL_EXPORTER_OTLP_ENDPOINT=${OTEL_EXPORTER_OTLP_ENDPOINT:-}
+ - OTEL_EXPORTER_OTLP_TOKEN=${OTEL_EXPORTER_OTLP_TOKEN:-}
+ - OTEL_DEBUG_LOGGING=${OTEL_DEBUG_LOGGING:-false}
- ENABLE_TELEMETRY=${ENABLE_TELEMETRY:-false}
- TELE_SALT=${TELE_SALT:-}
ports:
diff --git a/docker-compose.yml b/docker-compose.yml
index 65c60def..33508f8e 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -79,10 +79,10 @@ services:
# See docs/analytics.md for configuration details
- SENTRY_DSN=${SENTRY_DSN:-}
- SENTRY_TRACES_RATE=${SENTRY_TRACES_RATE:-0.0}
- - POSTHOG_API_KEY=${POSTHOG_API_KEY:-}
- - POSTHOG_HOST=${POSTHOG_HOST:-https://app.posthog.com}
+ - OTEL_EXPORTER_OTLP_ENDPOINT=${OTEL_EXPORTER_OTLP_ENDPOINT:-}
+ - OTEL_EXPORTER_OTLP_TOKEN=${OTEL_EXPORTER_OTLP_TOKEN:-}
+ - OTEL_DEBUG_LOGGING=${OTEL_DEBUG_LOGGING:-false}
- ENABLE_TELEMETRY=${ENABLE_TELEMETRY:-false}
- - TELE_URL=${TELE_URL:-}
- TELE_SALT=${TELE_SALT:-8f4a7b2e9c1d6f3a5e8b4c7d2a9f6e3b1c8d5a7f2e9b4c6d3a8f5e1b7c4d9a2f}
# Expose only internally; nginx publishes ports
diff --git a/docker/docker-compose.analytics.yml b/docker/docker-compose.analytics.yml
index 302fcaf7..c3cc4664 100644
--- a/docker/docker-compose.analytics.yml
+++ b/docker/docker-compose.analytics.yml
@@ -10,11 +10,11 @@ services:
- SENTRY_DSN=${SENTRY_DSN:-}
- SENTRY_TRACES_RATE=${SENTRY_TRACES_RATE:-0.0}
- # PostHog Product Analytics
- - POSTHOG_API_KEY=${POSTHOG_API_KEY:-}
- - POSTHOG_HOST=${POSTHOG_HOST:-https://app.posthog.com}
+ # OTEL OTLP export
+ - OTEL_EXPORTER_OTLP_ENDPOINT=${OTEL_EXPORTER_OTLP_ENDPOINT:-}
+ - OTEL_EXPORTER_OTLP_TOKEN=${OTEL_EXPORTER_OTLP_TOKEN:-}
- # Telemetry (opt-in, uses PostHog)
+ # Telemetry (opt-in detailed analytics; base telemetry is always-on)
- ENABLE_TELEMETRY=${ENABLE_TELEMETRY:-false}
- TELE_SALT=${TELE_SALT:-change-me}
- APP_VERSION=${APP_VERSION:-1.0.0}
@@ -102,7 +102,7 @@ volumes:
# Usage:
#
-# 1. Base setup with analytics enabled (Sentry, PostHog):
+# 1. Base setup with analytics enabled (Sentry, Grafana OTLP):
# docker-compose -f docker-compose.yml -f docker/docker-compose.analytics.yml up -d
#
# 2. With self-hosted monitoring (Prometheus + Grafana):
diff --git a/docker/fix-all-column-issues.py b/docker/fix-all-column-issues.py
index 5e8b7066..5dbf5d1c 100644
--- a/docker/fix-all-column-issues.py
+++ b/docker/fix-all-column-issues.py
@@ -62,7 +62,8 @@ def main():
'company_website': 'VARCHAR(200) DEFAULT \'www.yourcompany.com\' NOT NULL',
'company_tax_id': 'VARCHAR(100) DEFAULT \'\' NOT NULL',
'company_bank_info': 'TEXT DEFAULT \'\' NOT NULL',
- 'invoice_prefix': 'VARCHAR(10) DEFAULT \'INV\' NOT NULL',
+ 'invoice_prefix': 'VARCHAR(50) DEFAULT \'INV\' NOT NULL',
+ 'invoice_number_pattern': 'VARCHAR(120) DEFAULT \'{PREFIX}-{YYYY}{MM}{DD}-{SEQ}\' NOT NULL',
'invoice_start_number': 'INTEGER DEFAULT 1000 NOT NULL',
'invoice_terms': 'TEXT DEFAULT \'Payment is due within 30 days of invoice date.\' NOT NULL',
'invoice_notes': 'TEXT DEFAULT \'Thank you for your business!\' NOT NULL'
@@ -91,6 +92,7 @@ def main():
company_tax_id = COALESCE(company_tax_id, ''),
company_bank_info = COALESCE(company_bank_info, ''),
invoice_prefix = COALESCE(invoice_prefix, 'INV'),
+ invoice_number_pattern = COALESCE(invoice_number_pattern, '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}'),
invoice_start_number = COALESCE(invoice_start_number, 1000),
invoice_terms = COALESCE(invoice_terms, 'Payment is due within 30 days of invoice date.'),
invoice_notes = COALESCE(invoice_notes, 'Thank you for your business!')
diff --git a/docker/fix-all-issues.py b/docker/fix-all-issues.py
index 7abce29f..a1939c03 100644
--- a/docker/fix-all-issues.py
+++ b/docker/fix-all-issues.py
@@ -53,7 +53,8 @@ def fix_database_schema(engine):
'company_website': 'VARCHAR(200) DEFAULT \'www.yourcompany.com\' NOT NULL',
'company_tax_id': 'VARCHAR(100) DEFAULT \'\' NOT NULL',
'company_bank_info': 'TEXT DEFAULT \'\' NOT NULL',
- 'invoice_prefix': 'VARCHAR(10) DEFAULT \'INV\' NOT NULL',
+ 'invoice_prefix': 'VARCHAR(50) DEFAULT \'INV\' NOT NULL',
+ 'invoice_number_pattern': 'VARCHAR(120) DEFAULT \'{PREFIX}-{YYYY}{MM}{DD}-{SEQ}\' NOT NULL',
'invoice_start_number': 'INTEGER DEFAULT 1000 NOT NULL',
'invoice_terms': 'TEXT DEFAULT \'Payment is due within 30 days of invoice date.\' NOT NULL',
'invoice_notes': 'TEXT DEFAULT \'Thank you for your business!\' NOT NULL'
@@ -81,6 +82,7 @@ def fix_database_schema(engine):
company_tax_id = COALESCE(company_tax_id, ''),
company_bank_info = COALESCE(company_bank_info, ''),
invoice_prefix = COALESCE(invoice_prefix, 'INV'),
+ invoice_number_pattern = COALESCE(invoice_number_pattern, '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}'),
invoice_start_number = COALESCE(invoice_start_number, 1000),
invoice_terms = COALESCE(invoice_terms, 'Payment is due within 30 days of invoice date.'),
invoice_notes = COALESCE(invoice_notes, 'Thank you for your business!')
diff --git a/docker/fix-settings-table.py b/docker/fix-settings-table.py
index ef8d8ff9..fdd54403 100644
--- a/docker/fix-settings-table.py
+++ b/docker/fix-settings-table.py
@@ -30,7 +30,8 @@ def main():
"ALTER TABLE settings ADD COLUMN IF NOT EXISTS company_logo_filename VARCHAR(255) DEFAULT '' NOT NULL;",
"ALTER TABLE settings ADD COLUMN IF NOT EXISTS company_tax_id VARCHAR(100) DEFAULT '' NOT NULL;",
"ALTER TABLE settings ADD COLUMN IF NOT EXISTS company_bank_info TEXT DEFAULT '' NOT NULL;",
- "ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_prefix VARCHAR(10) DEFAULT 'INV' NOT NULL;",
+ "ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_prefix VARCHAR(50) DEFAULT 'INV' NOT NULL;",
+ "ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_number_pattern VARCHAR(120) DEFAULT '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}' NOT NULL;",
"ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_start_number INTEGER DEFAULT 1000 NOT NULL;",
"ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_terms TEXT DEFAULT 'Payment is due within 30 days of invoice date.' NOT NULL;",
"ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_notes TEXT DEFAULT 'Thank you for your business!' NOT NULL;"
diff --git a/docker/fix-settings-table.sql b/docker/fix-settings-table.sql
index 8117b088..905bcde1 100644
--- a/docker/fix-settings-table.sql
+++ b/docker/fix-settings-table.sql
@@ -38,7 +38,11 @@ BEGIN
-- Invoice default columns
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'settings' AND column_name = 'invoice_prefix') THEN
- ALTER TABLE settings ADD COLUMN invoice_prefix VARCHAR(10) DEFAULT 'INV' NOT NULL;
+ ALTER TABLE settings ADD COLUMN invoice_prefix VARCHAR(50) DEFAULT 'INV' NOT NULL;
+ END IF;
+
+ IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'settings' AND column_name = 'invoice_number_pattern') THEN
+ ALTER TABLE settings ADD COLUMN invoice_number_pattern VARCHAR(120) DEFAULT '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}' NOT NULL;
END IF;
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'settings' AND column_name = 'invoice_start_number') THEN
@@ -65,6 +69,7 @@ UPDATE settings SET
company_tax_id = COALESCE(company_tax_id, ''),
company_bank_info = COALESCE(company_bank_info, ''),
invoice_prefix = COALESCE(invoice_prefix, 'INV'),
+ invoice_number_pattern = COALESCE(invoice_number_pattern, '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}'),
invoice_start_number = COALESCE(invoice_start_number, 1000),
invoice_terms = COALESCE(invoice_terms, 'Payment is due within 30 days of invoice date.'),
invoice_notes = COALESCE(invoice_notes, 'Thank you for your business!')
diff --git a/docker/init-database-enhanced.py b/docker/init-database-enhanced.py
index cff32017..23ef0e8e 100644
--- a/docker/init-database-enhanced.py
+++ b/docker/init-database-enhanced.py
@@ -172,7 +172,8 @@ def get_required_schema():
'company_bank_info TEXT DEFAULT \'\' NOT NULL',
# Invoice defaults
- 'invoice_prefix VARCHAR(10) DEFAULT \'INV\' NOT NULL',
+ 'invoice_prefix VARCHAR(50) DEFAULT \'INV\' NOT NULL',
+ 'invoice_number_pattern VARCHAR(120) DEFAULT \'{PREFIX}-{YYYY}{MM}{DD}-{SEQ}\' NOT NULL',
'invoice_start_number INTEGER DEFAULT 1000 NOT NULL',
'invoice_terms TEXT DEFAULT \'Payment is due within 30 days of invoice date.\' NOT NULL',
'invoice_notes TEXT DEFAULT \'Thank you for your business!\' NOT NULL',
diff --git a/docker/init-database-sql.py b/docker/init-database-sql.py
index 4296aa67..d2bb20d1 100644
--- a/docker/init-database-sql.py
+++ b/docker/init-database-sql.py
@@ -156,7 +156,8 @@ def create_tables_sql(engine):
company_bank_info TEXT DEFAULT '' NOT NULL,
-- Invoice defaults
- invoice_prefix VARCHAR(10) DEFAULT 'INV' NOT NULL,
+ invoice_prefix VARCHAR(50) DEFAULT 'INV' NOT NULL,
+ invoice_number_pattern VARCHAR(120) DEFAULT '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}' NOT NULL,
invoice_start_number INTEGER DEFAULT 1000 NOT NULL,
invoice_terms TEXT DEFAULT 'Payment is due within 30 days of invoice date.' NOT NULL,
invoice_notes TEXT DEFAULT 'Thank you for your business!' NOT NULL,
diff --git a/docker/migrate-add-company-branding.py b/docker/migrate-add-company-branding.py
index 982aa724..1460aa7e 100644
--- a/docker/migrate-add-company-branding.py
+++ b/docker/migrate-add-company-branding.py
@@ -95,7 +95,8 @@ def add_company_branding_fields(engine, existing_columns):
def add_invoice_default_fields(engine, existing_columns):
"""Add invoice default fields to settings table"""
fields_to_add = [
- ('invoice_prefix', 'VARCHAR(10) DEFAULT \'INV\' NOT NULL'),
+ ('invoice_prefix', 'VARCHAR(50) DEFAULT \'INV\' NOT NULL'),
+ ('invoice_number_pattern', 'VARCHAR(120) DEFAULT \'{PREFIX}-{YYYY}{MM}{DD}-{SEQ}\' NOT NULL'),
('invoice_start_number', 'INTEGER DEFAULT 1000 NOT NULL'),
('invoice_terms', 'TEXT DEFAULT \'Payment is due within 30 days of invoice date.\' NOT NULL'),
('invoice_notes', 'TEXT DEFAULT \'Thank you for your business!\' NOT NULL')
diff --git a/docker/migrate-add-missing-settings-columns.py b/docker/migrate-add-missing-settings-columns.py
index 92e1ba05..40167b77 100644
--- a/docker/migrate-add-missing-settings-columns.py
+++ b/docker/migrate-add-missing-settings-columns.py
@@ -47,7 +47,8 @@ def add_missing_columns(engine):
ALTER TABLE settings ADD COLUMN IF NOT EXISTS company_bank_info TEXT DEFAULT '' NOT NULL;
-- Add invoice default columns
- ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_prefix VARCHAR(10) DEFAULT 'INV' NOT NULL;
+ ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_prefix VARCHAR(50) DEFAULT 'INV' NOT NULL;
+ ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_number_pattern VARCHAR(120) DEFAULT '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}' NOT NULL;
ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_start_number INTEGER DEFAULT 1000 NOT NULL;
ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_terms TEXT DEFAULT 'Payment is due within 30 days of invoice date.' NOT NULL;
ALTER TABLE settings ADD COLUMN IF NOT EXISTS invoice_notes TEXT DEFAULT 'Thank you for your business!' NOT NULL;
@@ -79,7 +80,7 @@ def verify_columns(engine):
'allow_self_register', 'idle_timeout_minutes', 'backup_retention_days',
'backup_time', 'export_delimiter', 'company_name', 'company_address',
'company_email', 'company_phone', 'company_website', 'company_logo_filename',
- 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_start_number',
+ 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_number_pattern', 'invoice_start_number',
'invoice_terms', 'invoice_notes', 'created_at', 'updated_at'
]
@@ -111,6 +112,7 @@ def update_existing_settings(engine):
company_tax_id = COALESCE(company_tax_id, ''),
company_bank_info = COALESCE(company_bank_info, ''),
invoice_prefix = COALESCE(invoice_prefix, 'INV'),
+ invoice_number_pattern = COALESCE(invoice_number_pattern, '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}'),
invoice_start_number = COALESCE(invoice_start_number, 1000),
invoice_terms = COALESCE(invoice_terms, 'Payment is due within 30 days of invoice date.'),
invoice_notes = COALESCE(invoice_notes, 'Thank you for your business!')
diff --git a/docker/migrate-logo-upload.py b/docker/migrate-logo-upload.py
index 0083cf06..74b110a4 100644
--- a/docker/migrate-logo-upload.py
+++ b/docker/migrate-logo-upload.py
@@ -74,7 +74,8 @@ def migrate_logo_system():
company_logo_filename VARCHAR(255) DEFAULT '',
company_tax_id VARCHAR(100) DEFAULT '',
company_bank_info TEXT DEFAULT '',
- invoice_prefix VARCHAR(10) NOT NULL DEFAULT 'INV',
+ invoice_prefix VARCHAR(50) NOT NULL DEFAULT 'INV',
+ invoice_number_pattern VARCHAR(120) NOT NULL DEFAULT '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}',
invoice_start_number INTEGER NOT NULL DEFAULT 1000,
invoice_terms TEXT NOT NULL DEFAULT 'Payment is due within 30 days of invoice date.',
invoice_notes TEXT NOT NULL DEFAULT 'Thank you for your business!',
@@ -105,14 +106,14 @@ def migrate_logo_system():
allow_self_register, idle_timeout_minutes, backup_retention_days,
backup_time, export_delimiter, company_name, company_address,
company_email, company_phone, company_website, company_logo_filename,
- company_tax_id, company_bank_info, invoice_prefix, invoice_start_number,
+ company_tax_id, company_bank_info, invoice_prefix, invoice_number_pattern, invoice_start_number,
invoice_terms, invoice_notes, created_at, updated_at
) VALUES (
:id, :timezone, :currency, :rounding_minutes, :single_active_timer,
:allow_self_register, :idle_timeout_minutes, :backup_retention_days,
:backup_time, :export_delimiter, :company_name, :company_address,
:company_email, :company_phone, :company_website, :company_logo_filename,
- :company_tax_id, :company_bank_info, :invoice_prefix, :invoice_start_number,
+ :company_tax_id, :company_bank_info, :invoice_prefix, :invoice_number_pattern, :invoice_start_number,
:invoice_terms, :invoice_notes, :created_at, :updated_at
)
"""), {
@@ -135,6 +136,9 @@ def migrate_logo_system():
'company_tax_id': row_dict.get('company_tax_id', ''),
'company_bank_info': row_dict.get('company_bank_info', ''),
'invoice_prefix': row_dict.get('invoice_prefix', 'INV'),
+ 'invoice_number_pattern': row_dict.get(
+ 'invoice_number_pattern', '{PREFIX}-{YYYY}{MM}{DD}-{SEQ}'
+ ),
'invoice_start_number': row_dict.get('invoice_start_number', 1000),
'invoice_terms': row_dict.get('invoice_terms', 'Payment is due within 30 days of invoice date.'),
'invoice_notes': row_dict.get('invoice_notes', 'Thank you for your business!'),
diff --git a/docker/test-database-complete.py b/docker/test-database-complete.py
index 8bcea197..d1d7e11d 100644
--- a/docker/test-database-complete.py
+++ b/docker/test-database-complete.py
@@ -89,7 +89,7 @@ def main():
'projects': ['id', 'name', 'client', 'description', 'billable', 'hourly_rate', 'billing_ref', 'status', 'created_at', 'updated_at'],
'time_entries': ['id', 'user_id', 'project_id', 'task_id', 'start_time', 'end_time', 'duration_seconds', 'notes', 'tags', 'source', 'billable', 'created_at', 'updated_at'],
'tasks': ['id', 'project_id', 'name', 'description', 'status', 'priority', 'assigned_to', 'created_by', 'due_date', 'estimated_hours', 'actual_hours', 'started_at', 'completed_at', 'created_at', 'updated_at'],
- 'settings': ['id', 'timezone', 'currency', 'rounding_minutes', 'single_active_timer', 'allow_self_register', 'idle_timeout_minutes', 'backup_retention_days', 'backup_time', 'export_delimiter', 'company_name', 'company_address', 'company_email', 'company_phone', 'company_website', 'company_logo_filename', 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_start_number', 'invoice_terms', 'invoice_notes', 'created_at', 'updated_at'],
+ 'settings': ['id', 'timezone', 'currency', 'rounding_minutes', 'single_active_timer', 'allow_self_register', 'idle_timeout_minutes', 'backup_retention_days', 'backup_time', 'export_delimiter', 'company_name', 'company_address', 'company_email', 'company_phone', 'company_website', 'company_logo_filename', 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_number_pattern', 'invoice_start_number', 'invoice_terms', 'invoice_notes', 'created_at', 'updated_at'],
'invoices': ['id', 'invoice_number', 'project_id', 'client_name', 'client_email', 'client_address', 'issue_date', 'due_date', 'status', 'subtotal', 'tax_rate', 'tax_amount', 'total_amount', 'notes', 'terms', 'created_by', 'created_at', 'updated_at'],
'invoice_items': ['id', 'invoice_id', 'description', 'quantity', 'unit_price', 'total_amount', 'time_entry_ids', 'created_at']
}
diff --git a/docker/verify-database.py b/docker/verify-database.py
index 6fbf9a0a..edb2e4fc 100644
--- a/docker/verify-database.py
+++ b/docker/verify-database.py
@@ -37,8 +37,8 @@ def get_expected_schema():
'foreign_keys': ['project_id', 'assigned_to', 'created_by']
},
'settings': {
- 'columns': ['id', 'timezone', 'currency', 'rounding_minutes', 'single_active_timer', 'allow_self_register', 'idle_timeout_minutes', 'backup_retention_days', 'backup_time', 'export_delimiter', 'allow_analytics', 'company_name', 'company_address', 'company_email', 'company_phone', 'company_website', 'company_logo_filename', 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_start_number', 'invoice_terms', 'invoice_notes', 'created_at', 'updated_at'],
- 'required_columns': ['id', 'timezone', 'currency', 'rounding_minutes', 'single_active_timer', 'allow_self_register', 'idle_timeout_minutes', 'backup_retention_days', 'backup_time', 'export_delimiter', 'allow_analytics', 'company_name', 'company_address', 'company_email', 'company_phone', 'company_website', 'company_logo_filename', 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_start_number', 'invoice_terms', 'invoice_notes', 'created_at', 'updated_at'],
+ 'columns': ['id', 'timezone', 'currency', 'rounding_minutes', 'single_active_timer', 'allow_self_register', 'idle_timeout_minutes', 'backup_retention_days', 'backup_time', 'export_delimiter', 'allow_analytics', 'company_name', 'company_address', 'company_email', 'company_phone', 'company_website', 'company_logo_filename', 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_number_pattern', 'invoice_start_number', 'invoice_terms', 'invoice_notes', 'created_at', 'updated_at'],
+ 'required_columns': ['id', 'timezone', 'currency', 'rounding_minutes', 'single_active_timer', 'allow_self_register', 'idle_timeout_minutes', 'backup_retention_days', 'backup_time', 'export_delimiter', 'allow_analytics', 'company_name', 'company_address', 'company_email', 'company_phone', 'company_website', 'company_logo_filename', 'company_tax_id', 'company_bank_info', 'invoice_prefix', 'invoice_number_pattern', 'invoice_start_number', 'invoice_terms', 'invoice_notes', 'created_at', 'updated_at'],
'indexes': [],
'foreign_keys': []
},
diff --git a/docs/TELEMETRY_QUICK_START.md b/docs/TELEMETRY_QUICK_START.md
index 343e12b6..bd362cdb 100644
--- a/docs/TELEMETRY_QUICK_START.md
+++ b/docs/TELEMETRY_QUICK_START.md
@@ -11,9 +11,9 @@ When you first access TimeTracker, you'll see a **guided setup wizard** (6 steps
3. **Company** – Company name, address, email; optional phone and website (for invoices and branding).
4. **System** – Allow self-registration, time rounding, single active timer per user, idle timeout (minutes).
5. **Integrations (optional)** – Google Calendar OAuth (Client ID / Secret). You can skip this and configure later in Admin → Settings.
-6. **Privacy & finish** – Choose whether to enable anonymous telemetry:
- - ✅ **Enable Telemetry** – Help improve TimeTracker with anonymous usage data
- - ⬜ **Disable Telemetry** – No data will be sent (default)
+6. **Privacy & finish** – Choose whether to enable detailed analytics:
+ - ✅ **Enable detailed analytics** – Send richer product usage diagnostics
+ - ⬜ **Disable detailed analytics** – Only anonymous base telemetry will be sent (default)
- Click **Complete Setup & Continue** to finish.
You can change telemetry and all other options anytime in **Admin → Settings**.
@@ -25,7 +25,7 @@ You can change telemetry and all other options anytime in **Admin → Settings**
3. View:
- Current telemetry status (enabled/disabled)
- Installation ID and fingerprint
- - PostHog configuration status
+ - Grafana OTLP configuration status
- Sentry configuration status
- What data is being collected
@@ -39,19 +39,17 @@ You can change telemetry and all other options anytime in **Admin → Settings**
### Setting Up Analytics Services
-#### PostHog (Product Analytics)
+#### Grafana Cloud OTLP (Telemetry Sink)
-To enable PostHog tracking:
+To enable telemetry export:
-1. Sign up for PostHog at https://posthog.com (or self-host)
-2. Get your API key from PostHog dashboard
-3. Set environment variable:
+1. Set your OTLP endpoint and token:
```bash
- export POSTHOG_API_KEY="your-api-key-here"
- export POSTHOG_HOST="https://app.posthog.com" # Default, change if self-hosting
+ export GRAFANA_OTLP_ENDPOINT="https://otlp-gateway-.../otlp/v1/logs"
+ export GRAFANA_OTLP_TOKEN="your-token-here"
```
-4. Restart the application
-5. Enable telemetry in admin dashboard (if not already enabled)
+2. Restart the application
+3. In Admin, choose whether detailed analytics should be enabled
#### Sentry (Error Monitoring)
@@ -141,7 +139,7 @@ TimeTracker's telemetry system is designed with GDPR principles in mind:
### Data Retention
- **JSON Logs:** Rotate daily, keep 30 days (configurable)
-- **PostHog:** Follow PostHog's retention policy
+- **Grafana OTLP sink:** Follow your Grafana Cloud retention policy
- **Sentry:** Follow Sentry's retention policy
- **Prometheus:** 15 days default (configurable in `prometheus/prometheus.yml`)
@@ -152,7 +150,8 @@ To completely disable all telemetry and analytics:
1. **In Application:** Disable in `/admin/telemetry`
2. **Remove API Keys:**
```bash
- unset POSTHOG_API_KEY
+ unset GRAFANA_OTLP_ENDPOINT
+ unset GRAFANA_OTLP_TOKEN
unset SENTRY_DSN
unset ENABLE_TELEMETRY
```
@@ -168,12 +167,12 @@ If the setup page keeps appearing after completion:
2. Check file permissions (application must be able to write to `data/` directory)
3. Check logs for errors: `tail -f logs/app.jsonl`
-### Events Not Appearing in PostHog
+### Events Not Appearing in Grafana
-1. **Check API Key:** Verify `POSTHOG_API_KEY` is set
+1. **Check OTLP config:** Verify `GRAFANA_OTLP_ENDPOINT` and `GRAFANA_OTLP_TOKEN` are set
2. **Check Telemetry Status:** Go to `/admin/telemetry` and verify it's enabled
-3. **Check Logs:** `tail -f logs/app.jsonl | grep PostHog`
-4. **Check Network:** Ensure server can reach PostHog host
+3. **Check Logs:** `tail -f logs/app.jsonl | grep telemetry`
+4. **Check Network:** Ensure server can reach OTLP endpoint
### Admin Dashboard Not Accessible
@@ -199,8 +198,8 @@ A: No. We only collect anonymous event types and numeric IDs. No usernames, emai
**Q: How do I know what's being sent?**
A: Check the `/admin/telemetry` dashboard and review `docs/all_tracked_events.md` for a complete list.
-**Q: Can I use my own PostHog/Sentry instance?**
-A: Yes! Set `POSTHOG_HOST` and `SENTRY_DSN` to your self-hosted instances.
+**Q: Can I use my own Grafana/Sentry instance?**
+A: Yes. Configure your own OTLP endpoint/token and Sentry DSN.
**Q: What happens to my data if I disable telemetry?**
A: Nothing is sent to external services. Events are still logged locally in `logs/app.jsonl` for debugging.
diff --git a/docs/TELEMETRY_TRANSPARENCY.md b/docs/TELEMETRY_TRANSPARENCY.md
index d47c6ce7..74b5878c 100644
--- a/docs/TELEMETRY_TRANSPARENCY.md
+++ b/docs/TELEMETRY_TRANSPARENCY.md
@@ -1,204 +1,39 @@
# Telemetry Transparency Notice
-## Overview
+TimeTracker uses a two-layer model:
-TimeTracker includes embedded analytics configuration to help us understand how the software is used and improve it for everyone. **However, telemetry is completely opt-in and disabled by default.**
+- **Anonymous base telemetry (default behavior):** installation registration + heartbeat
+- **Detailed analytics (opt-in):** richer usage/error/performance context
-## Your Control
+## What is always sent
-### Default State: Disabled
-When you first access TimeTracker, you'll see a setup page where you can:
-- ✅ **Enable telemetry** - Help us improve TimeTracker
-- ⬜ **Keep it disabled** - Complete privacy (default choice)
+Base telemetry includes installation-level, non-PII metadata:
+- install UUID
+- app version
+- platform/OS/architecture
+- locale/timezone
+- first_seen + heartbeat timestamps
-### Change Anytime
-You can toggle telemetry on/off at any time:
-1. Login as administrator
-2. Go to **Admin → Telemetry Dashboard**
-3. Click **Enable** or **Disable** button
+## What is sent only when opted in
-## What We Collect (Only If You Enable It)
+Detailed telemetry events such as feature usage and error context, with direct PII fields filtered out.
-### ✅ What We Track
-- **Event types**: e.g., "timer.started", "project.created"
-- **Internal numeric IDs**: e.g., user_id=5, project_id=42
-- **Timestamps**: When events occurred
-- **Platform info**: OS type, Python version, app version
-- **Anonymous fingerprint**: Hashed installation ID (cannot identify you)
+## What is never sent
-### ❌ What We NEVER Collect
-- Email addresses or usernames
-- Project names or descriptions
-- Time entry notes or descriptions
-- Client names or business information
-- IP addresses
-- Any personally identifiable information (PII)
+- emails
+- usernames
+- project/client names and content
+- time entry notes/content
+- raw password/token fields
-## Complete Event List
+## Sink
-All tracked events are documented in [`docs/all_tracked_events.md`](./all_tracked_events.md).
+Telemetry is sent to Grafana Cloud OTLP when configured:
+- `GRAFANA_OTLP_ENDPOINT`
+- `GRAFANA_OTLP_TOKEN`
-Examples:
-- `auth.login` - User logged in (only user_id, no username)
-- `timer.started` - Timer started (entry_id, project_id)
-- `project.created` - Project created (project_id, no project name)
-- `task.status_changed` - Task status changed (task_id, old_status, new_status)
+## Control
-## Why Can't I Override the Keys?
-
-Analytics keys are embedded at build time and cannot be overridden for consistency:
-
-### Reasons
-1. **Unified insights**: Helps us understand usage across all installations
-2. **Feature prioritization**: Shows which features are most used
-3. **Bug detection**: Helps identify issues affecting users
-4. **Community improvement**: Better product for everyone
-
-### Your Protection
-Even with embedded keys:
-- ✅ Telemetry is **disabled by default**
-- ✅ You must **explicitly opt-in**
-- ✅ You can **disable anytime**
-- ✅ **No PII** is ever collected
-- ✅ **Open source** - you can audit the code
-
-## Technical Details
-
-### How Keys Are Embedded
-
-During the build process, GitHub Actions replaces placeholders:
-```python
-# Before build (in source code)
-POSTHOG_API_KEY_DEFAULT = "%%POSTHOG_API_KEY_PLACEHOLDER%%"
-
-# After build (in Docker image)
-POSTHOG_API_KEY_DEFAULT = "phc_abc123..." # Real key
-```
-
-### No Environment Override
-
-Unlike typical configurations, these keys cannot be overridden via environment variables:
-```bash
-# This will NOT work (intentionally)
-export POSTHOG_API_KEY="my-key"
-
-# Telemetry control is via the admin dashboard toggle only
-```
-
-### Code Location
-
-All analytics code is open source:
-- Configuration: [`app/config/analytics_defaults.py`](../app/config/analytics_defaults.py)
-- Telemetry logic: [`app/utils/telemetry.py`](../app/utils/telemetry.py)
-- Event tracking: Search for `log_event` and `track_event` in route files
-- Build process: [`.github/workflows/build-and-publish.yml`](../.github/workflows/build-and-publish.yml)
-
-## Data Flow
-
-### When Telemetry is Enabled
-
-```
-User Action (e.g., start timer)
- ↓
-Application code calls track_event()
- ↓
-Check: Is telemetry enabled?
- ├─ No → Stop (do nothing)
- └─ Yes → Continue
- ↓
- Add context (no PII)
- ↓
- Send to PostHog
- ↓
- Also log locally (logs/app.jsonl)
-```
-
-### When Telemetry is Disabled
-
-```
-User Action (e.g., start timer)
- ↓
-Application code calls track_event()
- ↓
-Check: Is telemetry enabled?
- └─ No → Stop immediately
-
-No data sent anywhere.
-Only local logging (for debugging).
-```
-
-## Privacy Compliance
-
-### GDPR Compliance
-- ✅ **Consent-based**: Explicit opt-in required
-- ✅ **Right to withdraw**: Can disable anytime
-- ✅ **Data minimization**: Only collect what's necessary
-- ✅ **No PII**: Cannot identify individuals
-- ✅ **Transparency**: Fully documented
-
-### Your Rights
-1. **Right to disable**: Toggle off anytime
-2. **Right to know**: All events documented
-3. **Right to audit**: Open source code
-4. **Right to verify**: Check logs locally
-
-## Frequently Asked Questions
-
-### Q: Why embed keys instead of making them configurable?
-**A:** To ensure consistent telemetry across all installations, helping us improve the product for everyone. However, you maintain full control via the opt-in toggle.
-
-### Q: Can you track me personally?
-**A:** No. We only collect event types and numeric IDs. We cannot identify users, see project names, or access any business data.
-
-### Q: What if I want complete privacy?
-**A:** Simply keep telemetry disabled (the default). No data will be sent to our servers.
-
-### Q: Can I audit what's being sent?
-**A:** Yes! Check `logs/app.jsonl` to see all events logged locally. The code is also open source for full transparency.
-
-### Q: What happens to my data?
-**A:** Data is stored in PostHog (privacy-focused analytics) and Sentry (error monitoring). Both are GDPR-compliant services.
-
-### Q: Can I self-host analytics?
-**A:** The keys are embedded, so you cannot use your own PostHog/Sentry instances. However, you can disable telemetry entirely for complete privacy.
-
-### Q: How long is data retained?
-**A:** PostHog: 7 years (configurable). Sentry: 90 days. Both follow data retention best practices.
-
-### Q: Can I see what data you have about me?
-**A:** Since we only collect anonymous numeric IDs, we cannot associate data with specific users. All data is anonymized by design.
-
-## Trust & Transparency
-
-### Our Commitment
-- 🔒 **Privacy-first**: Opt-in, no PII, user control
-- 📖 **Transparent**: Open source, documented events
-- 🎯 **Purpose-driven**: Only collect what helps improve the product
-- ⚖️ **Ethical**: Respect user choices and privacy
-
-### Verification
-You can verify our claims:
-1. **Read the code**: All analytics code is in the repository
-2. **Check the logs**: Events logged locally in `logs/app.jsonl`
-3. **Inspect network**: Use browser dev tools to see what's sent
-4. **Review events**: Complete list in `docs/all_tracked_events.md`
-
-## Contact
-
-If you have privacy concerns or questions:
-- Open an issue on GitHub
-- Review the privacy policy: [`docs/privacy.md`](./privacy.md)
-- Check all tracked events: [`docs/all_tracked_events.md`](./all_tracked_events.md)
-
----
-
-## Summary
-
-✅ **Telemetry is OPT-IN** (disabled by default)
-✅ **You control it** (enable/disable anytime)
-✅ **No PII collected** (ever)
-✅ **Fully transparent** (open source, documented)
-✅ **GDPR compliant** (consent, minimization, rights)
-
-**Your privacy is respected. Your choice is honored.**
+Admins can enable/disable detailed analytics in the app at any time.
+Disabling detailed analytics does **not** stop base anonymous telemetry.
diff --git a/docs/analytics.md b/docs/analytics.md
index 109d1d77..f8349252 100644
--- a/docs/analytics.md
+++ b/docs/analytics.md
@@ -1,211 +1,44 @@
# Analytics and Monitoring
-TimeTracker includes comprehensive analytics and monitoring capabilities to help understand application usage, performance, and errors.
+TimeTracker provides privacy-aware analytics and monitoring with Grafana Cloud OTLP as the telemetry sink.
## Overview
-The analytics system consists of several components:
+1. **Structured JSON Logging** - Application event logs in `logs/app.jsonl`
+2. **Sentry Integration** - Error monitoring and tracing (optional)
+3. **Prometheus Metrics** - Runtime metrics at `/metrics`
+4. **Grafana OTLP Telemetry** - Installation + product analytics telemetry
-1. **Structured JSON Logging** - Application-wide event logging in JSON format
-2. **Sentry Integration** - Error monitoring and performance tracking
-3. **Prometheus Metrics** - Performance metrics and monitoring
-4. **PostHog Analytics** - Product analytics and user behavior tracking
-5. **Telemetry** - Opt-in installation and version tracking
+## Telemetry Model
-## Features
+### Base telemetry (anonymous, default behavior)
+- Installation-level telemetry (`base_telemetry.first_seen`, `base_telemetry.heartbeat`)
+- Includes install UUID, app version, platform, OS, architecture, locale, timezone
+- No direct PII fields
-### Structured Logging
-
-All application events are logged in structured JSON format to `logs/app.jsonl`. Each log entry includes:
-
-- Timestamp
-- Log level
-- Event name
-- Request ID (for tracing requests)
-- Additional context (user ID, project ID, etc.)
-
-Example log entry:
-```json
-{
- "asctime": "2025-10-20T10:30:45.123Z",
- "levelname": "INFO",
- "name": "timetracker",
- "message": "project.created",
- "request_id": "abc123-def456",
- "user_id": 42,
- "project_id": 15
-}
-```
-
-### Error Monitoring (Sentry)
-
-When enabled, Sentry captures:
-- Uncaught exceptions
-- Performance traces
-- Request context
-- User context
-
-### Performance Metrics (Prometheus)
-
-Exposed at `/metrics` endpoint:
-- Total request count by method, endpoint, and status code
-- Request latency histogram by endpoint
-- Custom business metrics
-
-### Product Analytics (PostHog)
-
-Tracks user behavior and feature usage with advanced features:
-- **Event Tracking**: Timer operations, project management, reports, exports
-- **Person Properties**: User role, auth method, login history
-- **Feature Flags**: Gradual rollouts, A/B testing, kill switches
-- **Group Analytics**: Segment by platform, version, deployment method
-- **Cohort Analysis**: Target specific user segments
-- **Rich Context**: Browser, device, URL, environment on every event
-
-See [POSTHOG_ADVANCED_FEATURES.md](../POSTHOG_ADVANCED_FEATURES.md) for complete guide.
-
-### Two-Layer Telemetry
-
-**Base telemetry (always on when PostHog is configured):** Minimal install footprint—version, platform, first/last seen, heartbeat. No PII. See [Telemetry Architecture](telemetry-architecture.md).
-
-**Detailed analytics (opt-in):** Feature usage, screens, errors, retention. Enabled in Admin → Privacy & Analytics or Admin → Telemetry. Only when opted in are product events sent to PostHog.
-
-**Privacy:** Base layer has fixed minimal schema; detailed layer is off by default and can be turned off anytime. No PII in either layer.
+### Detailed analytics (explicit opt-in only)
+- Product events such as `timer.started`, `project.created`, `auth.login`
+- Sent only when admins enable detailed analytics in the app
+- PII-filtered before export
## Configuration
-All analytics features are controlled via environment variables. See `env.example` for configuration options.
-
-### Enabling Analytics
-
```bash
-# Enable Sentry
-SENTRY_DSN=https://your-sentry-dsn@sentry.io/project-id
-SENTRY_TRACES_RATE=0.1 # 10% sampling for performance traces
+# Grafana OTLP sink
+GRAFANA_OTLP_ENDPOINT=https://otlp-gateway-.../otlp/v1/logs
+GRAFANA_OTLP_TOKEN=your-token
-# Enable PostHog
-POSTHOG_API_KEY=your-posthog-api-key
-POSTHOG_HOST=https://app.posthog.com
-
-# Enable Telemetry (opt-in, uses PostHog)
+# Detailed analytics consent switch (app-controlled per installation)
ENABLE_TELEMETRY=true
-TELE_SALT=your-unique-salt
-APP_VERSION=1.0.0
-```
-
-## Disabling Analytics
-By default, most analytics features are disabled. To ensure they remain disabled:
-
-```bash
-# Disable all optional analytics
+# Optional error monitoring
SENTRY_DSN=
-POSTHOG_API_KEY=
-ENABLE_TELEMETRY=false
+SENTRY_TRACES_RATE=0.1
```
-Structured logging to files is always enabled as it's essential for troubleshooting.
-
-## Log Management
-
-Logs are written to `logs/app.jsonl` and should be rotated using:
-- Docker volume mounts + host logrotate
-- Grafana Loki + Promtail
-- Elasticsearch + Filebeat
-- Or similar log aggregation solutions
-
-## Dashboards
-
-Recommended dashboards:
-
-### Sentry
-- Error rate alerts
-- New issue notifications
-- Performance regression alerts
-
-### Grafana + Prometheus
-- Request rate and latency (P50, P95, P99)
-- Error rates by endpoint
-- Active timers gauge
-- Database connection pool metrics
-
-### PostHog
-- User engagement funnels
-- Feature adoption rates
-- Session recordings (if enabled)
-
-## Data Retention
-
-- **Logs**: Retained locally based on your logrotate configuration
-- **Sentry**: Based on your Sentry plan (typically 90 days)
-- **Prometheus**: Based on your Prometheus configuration (typically 15-30 days)
-- **PostHog**: Based on your PostHog plan
-- **Telemetry**: 12 months
-
-## Privacy & Compliance
-
-See [privacy.md](privacy.md) for detailed information about data collection, retention, and GDPR compliance.
-
-## Event Schema
-
-See [events.md](events.md) for a complete list of tracked events and their properties.
-
-## Maintenance
-
-### Adding New Events
-
-1. Define the event in `docs/events.md`
-2. Instrument the code using `log_event()` or `track_event()`
-3. Update this documentation
-4. Test in development environment
-5. Monitor in production dashboards
-
-### Event Naming Convention
-
-- Use dot notation: `resource.action`
-- Examples: `project.created`, `timer.started`, `export.csv`
-- Be consistent with existing event names
-
-### Who Can Add Events
-
-Changes to analytics require approval from:
-- Product owner (for PostHog events)
-- DevOps/SRE (for infrastructure metrics)
-- Privacy officer (for any data collection changes)
-
## Troubleshooting
-### Logs Not Appearing
-
-1. Check `logs/` directory permissions
-2. Verify LOG_LEVEL is set correctly
-3. Check disk space
-
-### Sentry Not Receiving Errors
-
-1. Verify SENTRY_DSN is set correctly
-2. Check network connectivity
-3. Verify Sentry project is active
-4. Check Sentry rate limits
-
-### Prometheus Metrics Not Available
-
-1. Verify `/metrics` endpoint is accessible
-2. Check Prometheus scrape configuration
-3. Verify network connectivity
-
-### PostHog Events Not Appearing
-
-1. Verify POSTHOG_API_KEY is set correctly
-2. Check PostHog project settings
-3. Verify network connectivity
-4. Check PostHog rate limits
-
-## Support
-
-For analytics-related issues:
-1. Check this documentation
-2. Review logs in `logs/app.jsonl`
-3. Check service-specific dashboards (Sentry, Grafana, PostHog)
-4. Contact support with relevant log excerpts
+- If no telemetry arrives, verify `GRAFANA_OTLP_ENDPOINT` and `GRAFANA_OTLP_TOKEN`
+- If detailed events are missing, confirm detailed analytics is enabled in admin settings
+- If only base events appear, consent is likely disabled (expected behavior)
diff --git a/docs/privacy.md b/docs/privacy.md
index 5e4752e3..b829cad9 100644
--- a/docs/privacy.md
+++ b/docs/privacy.md
@@ -7,7 +7,7 @@ This document describes how TimeTracker collects, uses, and protects data throug
TimeTracker is designed with privacy as a core principle. All analytics features are either:
1. **Local-only** (structured logging)
2. **Self-hosted** (Prometheus metrics)
-3. **Optional and opt-in** (PostHog, Sentry, Telemetry)
+3. **Optional and opt-in** (Grafana OTLP detailed analytics, Sentry)
## Data Collection
@@ -74,7 +74,7 @@ When enabled, sends error reports to Sentry.
**Retention:** Based on your Sentry plan (typically 90 days)
**Access:** Team members with Sentry access
-#### 4. Base Telemetry (Minimal) - Always On When PostHog Configured
+#### 4. Base Telemetry (Minimal) - Anonymous Installation Telemetry
**Purpose:** Install footprint and distribution (version, platform, active installs).
**Data collected (no PII):**
@@ -83,13 +83,13 @@ When enabled, sends error reports to Sentry.
**Not collected:** Raw IP (stored), email, usernames, feature usage, paths, business data
-**Storage:** PostHog (or custom sink if configured)
-**Retention:** Recommend 12 months; configure in PostHog
+**Storage:** Grafana OTLP sink (if configured)
+**Retention:** Recommend 12 months
**Access:** Product/ops for install analytics
-#### 5. Detailed Analytics (PostHog) - Optional & Opt-In
+#### 5. Detailed Analytics (Grafana OTLP) - Optional & Opt-In
**Default:** Disabled (user must opt in via Admin → Privacy & Analytics)
-**Requires:** `POSTHOG_API_KEY` set and user enabling "detailed analytics"
+**Requires:** OTLP endpoint/token configured and user enabling "detailed analytics"
When opted in, tracks product usage and feature adoption.
@@ -99,9 +99,9 @@ When opted in, tracks product usage and feature adoption.
**Not collected:** Email, usernames, time entry content, client/project names, stored IP
-**Storage:** PostHog servers (or self-hosted PostHog)
-**Retention:** Per PostHog plan (e.g. 24 months)
-**Access:** Team members with PostHog access
+**Storage:** Grafana Cloud OTLP backend (or self-hosted OTLP receiver)
+**Retention:** Per your Grafana retention policy
+**Access:** Team members with Grafana access
**Consent:** You can turn detailed analytics off anytime in Admin → Settings or Admin → Telemetry. Base telemetry (minimal) continues; no product events are sent when opted out.
@@ -136,14 +136,13 @@ When you enable optional services, data is sent to:
| Service | Data Sent | Purpose | Control |
|---------|-----------|---------|---------|
| Sentry | Errors, request context | Error monitoring | Set `SENTRY_DSN` |
-| PostHog | Product events, user IDs | Product analytics | Set `POSTHOG_API_KEY` |
-| Telemetry Server | Anonymized fingerprint, version | Version tracking | Set `ENABLE_TELEMETRY=true` |
+| Grafana OTLP | Base telemetry + product events | Product analytics | Set `GRAFANA_OTLP_ENDPOINT` and `GRAFANA_OTLP_TOKEN`; product events additionally require the in-app detailed-analytics opt-in |
### Self-Hosting
You can self-host all optional services:
- **Sentry**: https://develop.sentry.dev/self-hosted/
-- **PostHog**: https://posthog.com/docs/self-host
+- **Grafana/OTLP receiver**: Use Grafana Cloud or self-host an OTLP-compatible receiver
- **Prometheus**: Already self-hosted by default
## Your Rights (GDPR Compliance)
@@ -154,7 +153,7 @@ TimeTracker is designed to be GDPR-compliant. You have the right to:
- **Logs**: Access files in `logs/` directory
- **Metrics**: Query your Prometheus instance
- **Sentry**: Export data from Sentry UI
-- **PostHog**: Export data from PostHog UI
+- **Grafana telemetry**: Export/query from Grafana stack
### 2. Rectify Your Data
Contact your TimeTracker administrator to correct inaccurate data.
@@ -174,15 +173,8 @@ Data automatically expires based on retention settings.
#### Sentry
Use Sentry's data deletion features or contact support.
-#### PostHog
-Use PostHog's GDPR deletion features:
-```python
-posthog.capture(
- distinct_id='user_id',
- event='$delete',
- properties={}
-)
-```
+#### Grafana telemetry
+Use your Grafana data retention/deletion policy and tooling.
#### Telemetry
Set `ENABLE_TELEMETRY=false` to stop sending data. To delete existing telemetry data, contact the telemetry service operator with your fingerprint hash.
@@ -192,7 +184,7 @@ All data can be exported:
- **Logs**: Copy files from `logs/` directory
- **Metrics**: Query and export from Prometheus
- **Sentry**: Use Sentry export features
-- **PostHog**: Use PostHog export features
+- **Grafana telemetry**: Use Grafana export/query features
### 5. Opt-Out
To opt out of all optional analytics:
@@ -200,7 +192,8 @@ To opt out of all optional analytics:
```bash
# .env file
SENTRY_DSN=
-POSTHOG_API_KEY=
+GRAFANA_OTLP_ENDPOINT=
+GRAFANA_OTLP_TOKEN=
ENABLE_TELEMETRY=false
```
@@ -210,20 +203,20 @@ ENABLE_TELEMETRY=false
- Logs: Local filesystem only (no transit)
- Metrics: Scraped via HTTP/HTTPS (configure TLS in Prometheus)
- Sentry: HTTPS only
-- PostHog: HTTPS only
+- Grafana OTLP: HTTPS only
- Telemetry: HTTPS only
### At Rest
- **Logs**: Protected by filesystem permissions (use encryption at rest if required)
- **Metrics**: Protected by Prometheus access controls
- **Sentry**: Protected by Sentry (encrypted at rest)
-- **PostHog**: Protected by PostHog (encrypted at rest)
+- **Grafana telemetry**: Protected by your Grafana backend
### Access Controls
- Logs: Require server filesystem access
- Metrics: Require Prometheus/Grafana access
- Sentry: Require Sentry account with appropriate permissions
-- PostHog: Require PostHog account with appropriate permissions
+- Grafana telemetry: Require Grafana account with appropriate permissions
## Data Minimization
@@ -239,7 +232,7 @@ TimeTracker follows data minimization principles:
### Explicit Consent Required
- Installation telemetry (`ENABLE_TELEMETRY`)
-- Product analytics (`POSTHOG_API_KEY`)
+- Detailed product analytics (in-app opt-in; sink configured via `GRAFANA_OTLP_ENDPOINT` + `GRAFANA_OTLP_TOKEN`)
- Error monitoring (`SENTRY_DSN`)
### Implicit Consent
@@ -259,8 +252,7 @@ TimeTracker is not intended for use by children under 16. We do not knowingly co
If you enable optional services hosted outside your region:
- **Sentry**: Data may be transferred to US/EU Sentry servers
-- **PostHog**: Data may be transferred to US/EU PostHog servers
-- **Telemetry**: Data location depends on your `TELE_URL` configuration
+- **Grafana telemetry**: Data location depends on your Grafana region/stack
Use self-hosted instances to keep data in your region.
@@ -292,7 +284,7 @@ For privacy-related questions:
## Frequently Asked Questions
### Can I disable all analytics?
-You can disable optional analytics (Sentry, PostHog, Telemetry). Local logs and Prometheus metrics are essential for operation but stay on your infrastructure.
+You can disable optional analytics (Sentry and detailed telemetry). Local logs and Prometheus metrics are essential for operation but stay on your infrastructure.
### Where is my data stored?
- **Logs**: Your server's filesystem
@@ -301,7 +293,7 @@ You can disable optional analytics (Sentry, PostHog, Telemetry). Local logs and
### Can someone else see my data?
Only if you:
-1. Enable optional cloud services (Sentry, PostHog)
+1. Enable optional cloud services (Sentry, Grafana OTLP)
2. Grant them access to your infrastructure
Self-hosted deployments are completely private.
@@ -317,7 +309,8 @@ rm -rf logs/*.jsonl*
# Remove optional service configurations
# Edit .env and remove:
# - SENTRY_DSN
-# - POSTHOG_API_KEY
+# - GRAFANA_OTLP_ENDPOINT
+# - GRAFANA_OTLP_TOKEN
# - ENABLE_TELEMETRY
# Restart application
diff --git a/docs/telemetry-architecture.md b/docs/telemetry-architecture.md
index e40bfa26..f2f307f4 100644
--- a/docs/telemetry-architecture.md
+++ b/docs/telemetry-architecture.md
@@ -6,27 +6,27 @@ This document describes the privacy-aware, two-layer telemetry system: **base te
| Layer | When | Purpose | Events / Data |
|-------|------|---------|----------------|
-| **Base telemetry** | Always (when PostHog is configured) | Install footprint, version/platform distribution, active installs | `base_telemetry.first_seen`, `base_telemetry.heartbeat` |
+| **Base telemetry** | Always (when OTLP sink is configured) | Install footprint, version/platform distribution, active installs | `base_telemetry.first_seen`, `base_telemetry.heartbeat` |
| **Detailed analytics** | Only when user opts in | Feature usage, funnels, errors, retention | All product events (e.g. `auth.login`, `timer.started`) |
- **Consent:** Stored in `installation.json` (`telemetry_enabled`) and synced to `settings.allow_analytics`. Source of truth: `installation_config.get_telemetry_preference()` / `is_telemetry_enabled()`.
-- **Identifiers:** One **install_id** (random UUID in installation config) used for base telemetry and, when opt-in, sent with product events. Product events use internal `user_id` as distinct_id in PostHog.
+- **Identifiers:** One **install_id** (random UUID in installation config) used for base telemetry and, when opt-in, sent with product events. Product events use internal `user_id` identity.
## Base Telemetry (Always-On)
- **Schema (no PII):** `install_id`, `app_version`, `platform`, `os_version`, `architecture`, `locale`, `timezone`, `first_seen_at`, `last_seen_at`, `heartbeat_at`, `release_channel`, `deployment_type`.
- **Events:** `base_telemetry.first_seen` (once per install), `base_telemetry.heartbeat` (e.g. daily via scheduler).
-- **Sink:** PostHog with `distinct_id = install_id`. No user-level linkage.
+- **Sink:** Grafana Cloud OTLP with `identity = install_id`. No user-level linkage.
- **Trigger:** First-seen sent at app startup (idempotent). Heartbeat via scheduled task (e.g. 03:00 daily).
-- **Retention:** Configure in PostHog (e.g. 12 months for base). No raw IP storage.
+- **Retention:** Configure in Grafana backend (e.g. 12 months for base). No raw IP storage.
## Detailed Analytics (Opt-In Only)
- **Gated by:** `is_telemetry_enabled()` / `allow_analytics`. No product events sent without opt-in.
- **Events:** Existing names (e.g. `auth.login`, `timer.started`, `project.created`). Optional prefix `analytics.*` in future.
- **Properties:** Include `install_id`, app_version, deployment, request context (path, browser, device) only when opted in.
-- **Sink:** PostHog (`distinct_id = user_id` for events).
-- **Retention:** Per PostHog plan (e.g. 24 months). Document in privacy policy.
+- **Sink:** Grafana Cloud OTLP (`identity = user_id` for events).
+- **Retention:** Per Grafana retention policy. Document in privacy policy.
## Consent Behavior
@@ -46,46 +46,9 @@ This document describes the privacy-aware, two-layer telemetry system: **base te
- **Scheduler:** `app/utils/scheduled_tasks.py` — job `send_base_telemetry_heartbeat` (daily).
- **Startup:** In `create_app`, after scheduler start, call `send_base_first_seen()` once per install.
-## Self-Hosting / Replacing Vendors
+## Sink Configuration
-- **Base telemetry:** Currently sent to PostHog. To use a custom backend, add an env var (e.g. `BASE_TELEMETRY_URL`) and in `send_base_telemetry()` POST the same schema to that URL; do not store raw IP; derive country server-side if needed and discard IP.
-- **Detailed analytics:** PostHog can be replaced by implementing an analytics sink in `app/telemetry/service.py` (e.g. `send_analytics_event` writing to another provider or your own API).
-
-## PostHog Dashboard Setup (Base Telemetry)
-
-Base telemetry sends two events to PostHog (when `POSTHOG_API_KEY` is set):
-
-- **`base_telemetry.first_seen`** — emitted once per install at first startup.
-- **`base_telemetry.heartbeat`** — emitted daily (e.g. 03:00 UTC) per install.
-
-Both use **`distinct_id` = install_id** (UUID). Event properties: `install_id`, `app_version`, `platform`, `os_version`, `architecture`, `locale`, `timezone`, `first_seen_at`, `last_seen_at`, `heartbeat_at`, `release_channel`, `deployment_type`. **Note:** `country` is not sent in the payload; add server-side geo later if needed.
-
-### How to update your PostHog dashboard
-
-1. **Open PostHog** → **Product Analytics** → **Insights** (or **Dashboards**).
-
-2. **Create a new dashboard** (e.g. “TimeTracker installs”) or add tiles to an existing one.
-
-3. **Add these insights:**
-
-| Insight | Type | Event(s) | What to set |
-|--------|------|----------|-------------|
-| **New installs per day** | Trends | `base_telemetry.first_seen` | Series: Total count. Breakdown: none. Interval: Day. |
-| **Active installs over time** | Trends | `base_telemetry.heartbeat` | Series: **Unique users** (this is unique install_id). Interval: Day or Week. |
-| **Installs by app version** | Trends or Bar | `base_telemetry.heartbeat` | Series: Unique users. **Breakdown by** → property → `app_version`. |
-| **Installs by platform** | Bar or Pie | `base_telemetry.heartbeat` | Series: Unique users. **Breakdown by** → `platform`. |
-| **Installs by OS version** | Bar | `base_telemetry.heartbeat` | Breakdown by `os_version`. |
-| **Installs by deployment type** | Bar | `base_telemetry.heartbeat` | Breakdown by `deployment_type` (docker vs native). |
-
-4. **Unique users = unique installs:** In PostHog, “Unique users” for these events is “unique distinct_id”, which is **install_id**, so it equals unique installs.
-
-5. **Churned / inactive installs:** Build a **Lifecycle** or custom insight: e.g. “Unique distinct_ids that had `base_telemetry.heartbeat` in the previous 30 days but not in the last 7 days”. Or use a **Stickiness** insight on `base_telemetry.heartbeat` and invert (install_ids that didn’t stick in last N days).
-
-6. **Country (if you add it later):** If you add a `country` property to the base payload (e.g. from server-side IP lookup), add an insight: **Breakdown by** `country` on `base_telemetry.heartbeat` (Unique users).
-
-7. **Retention (optional):** For “install_ids that sent a heartbeat again after 7 days”, use PostHog **Retention** with first event = `base_telemetry.first_seen` and return event = `base_telemetry.heartbeat`.
-
-### Filters
-
-- Restrict to base telemetry only: **Event name** is one of `base_telemetry.first_seen`, `base_telemetry.heartbeat`.
-- Exclude test: filter out `app_version` containing `dev` or `test` if you use that convention.
+- Base and detailed telemetry are emitted through the same OTLP sender in `app/telemetry/service.py`.
+- Required configuration:
+  - `OTEL_EXPORTER_OTLP_ENDPOINT`
+  - `OTEL_EXPORTER_OTLP_TOKEN`
diff --git a/env.example b/env.example
index 2a8145f2..a6bc5743 100644
--- a/env.example
+++ b/env.example
@@ -149,14 +149,14 @@ LOG_FILE=/data/logs/timetracker.log
# SENTRY_DSN=
# SENTRY_TRACES_RATE=0.0
-# PostHog Product Analytics (optional)
-# Get your API key from https://app.posthog.com/project/settings
-# POSTHOG_API_KEY=phc_DDrseL1KJhVn4wKj12fVc7ryhHiaxJ4CAbgUpzC1354
-# POSTHOG_HOST=https://us.i.posthog.com
+# OTEL OTLP export (optional sink)
+# Example endpoint: https://otlp-gateway-prod-eu-west-2.grafana.net/otlp
+# OTEL_EXPORTER_OTLP_ENDPOINT=
+# OTEL_EXPORTER_OTLP_TOKEN=
# Telemetry (optional, opt-in, anonymous)
-# Sends anonymous installation data via PostHog (version, hashed fingerprint)
-# Requires POSTHOG_API_KEY to be set
+# Sends anonymous installation data to Grafana OTLP (version/platform/install heartbeat)
+# Requires OTEL_EXPORTER_OTLP_ENDPOINT and OTEL_EXPORTER_OTLP_TOKEN
# Default: false (disabled)
# See docs/privacy.md for details
# ENABLE_TELEMETRY=true
diff --git a/migrations/versions/141_add_invoice_number_pattern.py b/migrations/versions/141_add_invoice_number_pattern.py
new file mode 100644
index 00000000..baa97400
--- /dev/null
+++ b/migrations/versions/141_add_invoice_number_pattern.py
@@ -0,0 +1,39 @@
+"""Add invoice_number_pattern setting
+
+Revision ID: 141_add_invoice_number_pattern
+Revises: 140_client_portal_dashboard_prefs
+Create Date: 2026-03-26
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+revision = "141_add_invoice_number_pattern"
+down_revision = "140_client_portal_dashboard_prefs"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ bind = op.get_bind()
+ inspector = sa.inspect(bind)
+ columns = {col["name"] for col in inspector.get_columns("settings")} if "settings" in inspector.get_table_names() else set()
+ if "invoice_number_pattern" not in columns:
+ op.add_column(
+ "settings",
+ sa.Column(
+ "invoice_number_pattern",
+ sa.String(length=120),
+ nullable=False,
+ server_default="{PREFIX}-{YYYY}{MM}{DD}-{SEQ}",
+ ),
+ )
+
+
+def downgrade():
+ bind = op.get_bind()
+ inspector = sa.inspect(bind)
+ columns = {col["name"] for col in inspector.get_columns("settings")} if "settings" in inspector.get_table_names() else set()
+ if "invoice_number_pattern" in columns:
+ op.drop_column("settings", "invoice_number_pattern")
diff --git a/requirements.txt b/requirements.txt
index ddfd0f0f..ad8226a4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -58,7 +58,6 @@ bleach==6.1.0
python-json-logger==2.0.7
sentry-sdk==1.40.0
prometheus-client==0.19.0
-posthog==3.1.0
# API Documentation
flask-swagger-ui==5.21.0
diff --git a/scripts/setup-dev-analytics.bat b/scripts/setup-dev-analytics.bat
index 95ddc761..26006004 100644
--- a/scripts/setup-dev-analytics.bat
+++ b/scripts/setup-dev-analytics.bat
@@ -27,9 +27,8 @@ echo 📝 Enter your development analytics keys:
echo (Leave empty to skip)
echo.
-set /p POSTHOG_KEY="PostHog API Key (starts with phc_): "
-set /p POSTHOG_HOST="PostHog Host [https://app.posthog.com]: "
-if "%POSTHOG_HOST%"=="" set POSTHOG_HOST=https://app.posthog.com
+set /p GRAFANA_OTLP_ENDPOINT="Grafana OTLP Endpoint: "
+set /p GRAFANA_OTLP_TOKEN="Grafana OTLP Token: "
set /p SENTRY_DSN="Sentry DSN (optional): "
set /p SENTRY_RATE="Sentry Traces Rate [1.0]: "
@@ -45,9 +44,9 @@ echo.
echo This file is gitignored and contains your development API keys.
echo """
echo.
-echo # PostHog Configuration ^(Development^)
-echo POSTHOG_API_KEY_DEFAULT = "%POSTHOG_KEY%"
-echo POSTHOG_HOST_DEFAULT = "%POSTHOG_HOST%"
+echo # Grafana OTLP Configuration ^(Development^)
+echo GRAFANA_OTLP_ENDPOINT_DEFAULT = "%GRAFANA_OTLP_ENDPOINT%"
+echo GRAFANA_OTLP_TOKEN_DEFAULT = "%GRAFANA_OTLP_TOKEN%"
echo.
echo # Sentry Configuration ^(Development^)
echo SENTRY_DSN_DEFAULT = "%SENTRY_DSN%"
@@ -73,8 +72,8 @@ echo def get_analytics_config^(^):
echo """Get analytics configuration for local development."""
echo app_version = _get_version_from_setup^(^)
echo return {
-echo "posthog_api_key": POSTHOG_API_KEY_DEFAULT,
-echo "posthog_host": POSTHOG_HOST_DEFAULT,
+echo "grafana_otlp_endpoint": GRAFANA_OTLP_ENDPOINT_DEFAULT,
+echo "grafana_otlp_token": GRAFANA_OTLP_TOKEN_DEFAULT,
echo "sentry_dsn": SENTRY_DSN_DEFAULT,
echo "sentry_traces_rate": float^(SENTRY_TRACES_RATE_DEFAULT^),
echo "app_version": app_version,
@@ -84,7 +83,7 @@ echo.
echo.
echo def has_analytics_configured^(^):
echo """Check if analytics keys are configured."""
-echo return bool^(POSTHOG_API_KEY_DEFAULT^)
+echo return bool^(GRAFANA_OTLP_ENDPOINT_DEFAULT^) and bool^(GRAFANA_OTLP_TOKEN_DEFAULT^)
) > app\config\analytics_defaults_local.py
echo.
@@ -97,11 +96,11 @@ echo Next steps:
echo 1. Start the application: docker-compose up -d
echo 2. Access: http://localhost:5000
echo 3. Complete setup and enable telemetry
-echo 4. Check PostHog dashboard for events
+echo 4. Check Grafana Cloud OTLP ingestion for events
echo.
echo ⚠️ Remember:
echo - This config is gitignored and won't be committed
-echo - Use a separate PostHog project for development
+echo - Use a separate Grafana Cloud stack for development
echo - Before committing, ensure no keys in analytics_defaults.py
echo.
echo To remove:
diff --git a/scripts/setup-dev-analytics.sh b/scripts/setup-dev-analytics.sh
index 89c7b431..1e16390a 100644
--- a/scripts/setup-dev-analytics.sh
+++ b/scripts/setup-dev-analytics.sh
@@ -29,9 +29,8 @@ echo "📝 Enter your development analytics keys:"
echo "(Leave empty to skip)"
echo ""
-read -p "PostHog API Key (starts with phc_): " POSTHOG_KEY
-read -p "PostHog Host [https://app.posthog.com]: " POSTHOG_HOST
-POSTHOG_HOST=${POSTHOG_HOST:-https://app.posthog.com}
+read -p "Grafana OTLP Endpoint: " GRAFANA_OTLP_ENDPOINT
+read -p "Grafana OTLP Token: " GRAFANA_OTLP_TOKEN
read -p "Sentry DSN (optional): " SENTRY_DSN
read -p "Sentry Traces Rate [1.0]: " SENTRY_RATE
@@ -47,9 +46,9 @@ Local development analytics configuration.
This file is gitignored and contains your development API keys.
"""
-# PostHog Configuration (Development)
-POSTHOG_API_KEY_DEFAULT = "${POSTHOG_KEY}"
-POSTHOG_HOST_DEFAULT = "${POSTHOG_HOST}"
+# Grafana OTLP Configuration (Development)
+GRAFANA_OTLP_ENDPOINT_DEFAULT = "${GRAFANA_OTLP_ENDPOINT}"
+GRAFANA_OTLP_TOKEN_DEFAULT = "${GRAFANA_OTLP_TOKEN}"
# Sentry Configuration (Development)
SENTRY_DSN_DEFAULT = "${SENTRY_DSN}"
@@ -96,8 +95,8 @@ def get_analytics_config():
app_version = _get_version_from_setup()
return {
- "posthog_api_key": POSTHOG_API_KEY_DEFAULT,
- "posthog_host": POSTHOG_HOST_DEFAULT,
+ "grafana_otlp_endpoint": GRAFANA_OTLP_ENDPOINT_DEFAULT,
+ "grafana_otlp_token": GRAFANA_OTLP_TOKEN_DEFAULT,
"sentry_dsn": SENTRY_DSN_DEFAULT,
"sentry_traces_rate": float(SENTRY_TRACES_RATE_DEFAULT),
"app_version": app_version,
@@ -107,7 +106,7 @@ def get_analytics_config():
def has_analytics_configured():
"""Check if analytics keys are configured."""
- return bool(POSTHOG_API_KEY_DEFAULT)
+ return bool(GRAFANA_OTLP_ENDPOINT_DEFAULT) and bool(GRAFANA_OTLP_TOKEN_DEFAULT)
EOF
echo ""
@@ -153,11 +152,11 @@ echo "Next steps:"
echo "1. Start the application: docker-compose up -d"
echo "2. Access: http://localhost:5000"
echo "3. Complete setup and enable telemetry"
-echo "4. Check PostHog dashboard for events"
+echo "4. Check Grafana Cloud OTLP ingestion for events"
echo ""
echo "⚠️ Remember:"
echo "- This config is gitignored and won't be committed"
-echo "- Use a separate PostHog project for development"
+echo "- Use a separate Grafana Cloud stack for development"
echo "- Before committing, ensure no keys in analytics_defaults.py"
echo ""
echo "To revert changes:"
diff --git a/setup.py b/setup.py
index 368ed05e..a1052ee5 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@
setup(
name='timetracker',
- version='5.0.0',
+ version='5.1.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
diff --git a/tests/test_analytics.py b/tests/test_analytics.py
index 706f5223..634f68b5 100644
--- a/tests/test_analytics.py
+++ b/tests/test_analytics.py
@@ -1,5 +1,5 @@
"""
-Tests for analytics functionality (logging, Prometheus, PostHog)
+Tests for analytics functionality (logging, Prometheus, OTLP telemetry)
"""
import pytest
@@ -60,47 +60,46 @@ def test_log_event_with_extra_data(self, app):
class TestTrackEvent:
- """Tests for PostHog event tracking"""
+ """Tests for event tracking"""
- @patch("app.posthog.capture")
- def test_track_event_when_enabled(self, mock_capture, app):
- """Test that PostHog events are tracked when API key is set"""
- with patch.dict(os.environ, {"POSTHOG_API_KEY": "test-key"}):
+ @patch("app.telemetry.service.is_detailed_analytics_enabled", return_value=True)
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_track_event_when_enabled(self, mock_send, _mock_enabled, app):
+ """Test that events are sent when OTLP is configured"""
+ with patch.dict(os.environ, {"OTEL_EXPORTER_OTLP_ENDPOINT": "https://otlp.example.com", "OTEL_EXPORTER_OTLP_TOKEN": "x"}):
track_event(123, "test.event", {"property": "value"})
- # Verify the event was tracked
- assert mock_capture.called
- call_args = mock_capture.call_args
- assert call_args[1]["distinct_id"] == "123"
- assert call_args[1]["event"] == "test.event"
- # Verify our property is included (along with context properties)
+ assert mock_send.called
+ call_args = mock_send.call_args
+ assert call_args[1]["identity"] == "123"
+ assert call_args[1]["event_name"] == "test.event"
assert call_args[1]["properties"]["property"] == "value"
- @patch("app.posthog.capture")
- def test_track_event_when_disabled(self, mock_capture, app):
- """Test that PostHog events are not tracked when API key is not set"""
- with patch.dict(os.environ, {"POSTHOG_API_KEY": ""}):
+ @patch("app.telemetry.service.is_detailed_analytics_enabled", return_value=False)
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_track_event_when_disabled(self, mock_send, _mock_enabled, app):
+ """Test that events are not sent when sink is not configured"""
+ with patch.dict(os.environ, {"OTEL_EXPORTER_OTLP_ENDPOINT": "", "OTEL_EXPORTER_OTLP_TOKEN": ""}):
track_event(123, "test.event", {"property": "value"})
- mock_capture.assert_not_called()
+ mock_send.assert_not_called()
- @patch("app.posthog.capture")
- def test_track_event_handles_errors_gracefully(self, mock_capture, app):
+ @patch("app.telemetry.service.is_detailed_analytics_enabled", return_value=True)
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_track_event_handles_errors_gracefully(self, mock_send, _mock_enabled, app):
"""Test that tracking errors don't crash the application"""
- mock_capture.side_effect = Exception("PostHog error")
- with patch.dict(os.environ, {"POSTHOG_API_KEY": "test-key"}):
+ mock_send.side_effect = Exception("Telemetry error")
+ with patch.dict(os.environ, {"OTEL_EXPORTER_OTLP_ENDPOINT": "https://otlp.example.com", "OTEL_EXPORTER_OTLP_TOKEN": "x"}):
# Should not raise an exception
track_event(123, "test.event", {})
def test_track_event_with_none_properties(self, app):
"""Test that track_event handles None properties"""
- with patch.dict(os.environ, {"POSTHOG_API_KEY": "test-key"}):
- with patch("app.posthog.capture") as mock_capture:
- track_event(123, "test.event", None)
- # Should have context properties even when None is passed
- call_args = mock_capture.call_args
- # Properties should be a dict (not None) with at least context properties
- assert isinstance(call_args[1]["properties"], dict)
- # Context properties should be present
- assert "environment" in call_args[1]["properties"]
+ with patch.dict(os.environ, {"OTEL_EXPORTER_OTLP_ENDPOINT": "https://otlp.example.com", "OTEL_EXPORTER_OTLP_TOKEN": "x"}):
+ with patch("app.telemetry.service.is_detailed_analytics_enabled", return_value=True):
+ with patch("app.telemetry.service._send_otlp_event") as mock_send:
+ track_event(123, "test.event", None)
+ call_args = mock_send.call_args
+ assert isinstance(call_args[1]["properties"], dict)
+ assert "environment" in call_args[1]["properties"]
class TestPrometheusMetrics:
@@ -241,16 +240,16 @@ def test_no_pii_in_standard_events(self, app):
# - IP addresses (unless explicitly needed)
# - passwords or tokens
- @patch("app.posthog.capture")
- def test_posthog_uses_internal_ids(self, mock_capture, app):
- """Test that PostHog events use internal IDs, not PII"""
- with patch.dict(os.environ, {"POSTHOG_API_KEY": "test-key"}):
+ @patch("app.telemetry.service.is_detailed_analytics_enabled", return_value=True)
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_telemetry_uses_internal_ids(self, mock_send, _mock_enabled, app):
+ """Test that telemetry events use internal IDs, not PII"""
+ with patch.dict(os.environ, {"OTEL_EXPORTER_OTLP_ENDPOINT": "https://otlp.example.com", "OTEL_EXPORTER_OTLP_TOKEN": "x"}):
# Should use numeric ID, not email
track_event(123, "test.event", {"project_id": 456})
- call_args = mock_capture.call_args
- # distinct_id should be the internal user ID (converted to string)
- assert call_args[1]["distinct_id"] == "123"
+ call_args = mock_send.call_args
+ assert call_args[1]["identity"] == "123"
class TestAnalyticsPerformance:
@@ -268,10 +267,10 @@ def test_analytics_dont_block_requests(self, client):
assert duration < 1.0 # Should complete in less than 1 second
assert response.status_code == 200
- @patch("app.posthog.capture")
- def test_analytics_errors_dont_break_app(self, mock_capture, app, client):
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_analytics_errors_dont_break_app(self, mock_send, app, client):
"""Test that analytics failures don't break the application"""
- mock_capture.side_effect = Exception("Analytics service down")
+ mock_send.side_effect = Exception("Analytics service down")
# Application should still work
response = client.get("/metrics")
diff --git a/tests/test_api_purchase_orders_v1.py b/tests/test_api_purchase_orders_v1.py
new file mode 100644
index 00000000..34be9b53
--- /dev/null
+++ b/tests/test_api_purchase_orders_v1.py
@@ -0,0 +1,85 @@
+"""API tests for purchase order create edge cases."""
+
+import json
+from datetime import date
+from unittest.mock import patch
+
+import pytest
+from sqlalchemy.exc import IntegrityError
+
+pytestmark = [pytest.mark.api, pytest.mark.integration]
+
+from app import db
+from app.models import ApiToken, Supplier
+
+
+@pytest.fixture
+def api_token(db_session, test_user):
+ token, plain_token = ApiToken.create_token(
+ user_id=test_user.id,
+ name="Purchase Order API Test Token",
+ description="For purchase order API tests",
+ scopes="read:projects,write:projects",
+ )
+ db.session.add(token)
+ db.session.commit()
+ return plain_token
+
+
+def _auth_headers(token):
+ return {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
+
+
+@pytest.fixture
+def test_supplier(db_session, test_user):
+ supplier = Supplier(code="SUP-API-001", name="API Supplier", created_by=test_user.id)
+ db_session.add(supplier)
+ db_session.commit()
+ return supplier
+
+
+class TestPurchaseOrderCreateAPI:
+ def test_create_purchase_order_first_record(self, client, api_token, test_supplier):
+ payload = {
+ "supplier_id": test_supplier.id,
+ "order_date": date.today().isoformat(),
+ "currency_code": "EUR",
+ "items": [{"description": "Cable", "quantity_ordered": "2", "unit_cost": "3.50"}],
+ }
+ response = client.post(
+ "/api/v1/inventory/purchase-orders",
+ data=json.dumps(payload),
+ headers=_auth_headers(api_token),
+ )
+ assert response.status_code == 201
+ data = response.get_json()
+ assert data["purchase_order"]["po_number"].startswith("PO-")
+
+ def test_create_purchase_order_rejects_invalid_item(self, client, api_token, test_supplier):
+ payload = {
+ "supplier_id": test_supplier.id,
+ "items": [{"description": "", "quantity_ordered": "0", "unit_cost": "-1"}],
+ }
+ response = client.post(
+ "/api/v1/inventory/purchase-orders",
+ data=json.dumps(payload),
+ headers=_auth_headers(api_token),
+ )
+ assert response.status_code == 400
+
+ def test_create_purchase_order_conflict_maps_to_409(self, client, api_token, test_supplier):
+ payload = {
+ "supplier_id": test_supplier.id,
+ "order_date": date.today().isoformat(),
+ "items": [{"description": "Item", "quantity_ordered": "1", "unit_cost": "1.00"}],
+ }
+ with patch(
+ "app.routes.api_v1.db.session.commit",
+ side_effect=IntegrityError("INSERT", {"po_number": "PO-CONFLICT"}, Exception("duplicate key")),
+ ):
+ response = client.post(
+ "/api/v1/inventory/purchase-orders",
+ data=json.dumps(payload),
+ headers=_auth_headers(api_token),
+ )
+ assert response.status_code == 409
diff --git a/tests/test_integration/test_inventory_integration.py b/tests/test_integration/test_inventory_integration.py
index 84bb2885..38dcce6e 100644
--- a/tests/test_integration/test_inventory_integration.py
+++ b/tests/test_integration/test_inventory_integration.py
@@ -4,6 +4,7 @@
pytestmark = [pytest.mark.integration]
+from datetime import datetime, timedelta
from decimal import Decimal
from flask import url_for
from app import db
@@ -264,6 +265,63 @@ def test_invoice_sent_reduces_stock(
# This test verifies the integration point exists
assert invoice.status == "sent" or response.status_code in [200, 302]
+ def test_invoice_sent_twice_does_not_double_reduce_stock(
+ self, client, test_user, test_client, test_stock_item, test_warehouse, test_stock_with_quantity
+ ):
+ """Sending an already-sent invoice should not create extra sale movement."""
+ import os
+
+ os.environ["INVENTORY_REDUCE_ON_INVOICE_SENT"] = "true"
+
+ project = Project(name="Idempotency Project", client_id=test_client.id, billable=True)
+ db.session.add(project)
+ db.session.commit()
+
+ invoice = Invoice(
+ invoice_number="INV-TEST-IDEMPOTENT",
+ project_id=project.id,
+ client_name=test_client.name,
+ client_id=test_client.id,
+ due_date=datetime.utcnow().date() + timedelta(days=30),
+ created_by=test_user.id,
+ status="draft",
+ )
+ db.session.add(invoice)
+ db.session.flush()
+ db.session.add(
+ InvoiceItem(
+ invoice_id=invoice.id,
+ description="Test Product",
+ quantity=Decimal("2.00"),
+ unit_price=Decimal("25.00"),
+ stock_item_id=test_stock_item.id,
+ warehouse_id=test_warehouse.id,
+ )
+ )
+ db.session.commit()
+
+ with client.session_transaction() as sess:
+ sess["_user_id"] = str(test_user.id)
+
+ response_first = client.post(
+ url_for("invoices.update_invoice_status", invoice_id=invoice.id),
+ data={"new_status": "sent"},
+ follow_redirects=False,
+ )
+ assert response_first.status_code == 200
+
+ first_count = StockMovement.query.filter_by(reference_type="invoice", reference_id=invoice.id).count()
+
+ response_second = client.post(
+ url_for("invoices.update_invoice_status", invoice_id=invoice.id),
+ data={"new_status": "sent"},
+ follow_redirects=False,
+ )
+ assert response_second.status_code == 200
+
+ second_count = StockMovement.query.filter_by(reference_type="invoice", reference_id=invoice.id).count()
+ assert second_count == first_count
+
class TestStockReservationLifecycle:
"""Test stock reservation lifecycle"""
diff --git a/tests/test_invoice_numbering.py b/tests/test_invoice_numbering.py
new file mode 100644
index 00000000..89b3c418
--- /dev/null
+++ b/tests/test_invoice_numbering.py
@@ -0,0 +1,47 @@
+from datetime import date
+
+import pytest
+
+from app import db
+from app.models import Invoice, Settings
+from app.utils.invoice_numbering import validate_invoice_pattern
+
+
+@pytest.mark.unit
+def test_validate_invoice_pattern_rejects_missing_seq():
+ ok, reason = validate_invoice_pattern("{YYYY}-{MM}")
+ assert ok is False
+ assert "{SEQ}" in reason
+
+
+@pytest.mark.unit
+def test_validate_invoice_pattern_rejects_unknown_token():
+ ok, reason = validate_invoice_pattern("{YYYY}-{RANDOM}-{SEQ}")
+ assert ok is False
+ assert "RANDOM" in reason
+
+
+@pytest.mark.unit
+def test_invoice_sequence_increments_for_same_pattern(app, user, project, test_client):
+ settings = Settings.get_settings()
+ settings.invoice_prefix = "RE"
+ settings.invoice_number_pattern = "{PREFIX}-{YYYY}-{SEQ}"
+ settings.invoice_start_number = 5
+ db.session.commit()
+
+ invoice_number_1 = Invoice.generate_invoice_number()
+ db.session.add(
+ Invoice(
+ invoice_number=invoice_number_1,
+ project_id=project.id,
+ client_name=test_client.name,
+ due_date=date.today(),
+ created_by=user.id,
+ client_id=test_client.id,
+ )
+ )
+ db.session.commit()
+
+ invoice_number_2 = Invoice.generate_invoice_number()
+ assert invoice_number_1.endswith("-005")
+ assert invoice_number_2.endswith("-006")
diff --git a/tests/test_invoices.py b/tests/test_invoices.py
index 9ab441ab..71082df4 100644
--- a/tests/test_invoices.py
+++ b/tests/test_invoices.py
@@ -180,6 +180,31 @@ def test_invoice_number_generation(app):
assert len(invoice_number.split("-")) == 3
+def test_invoice_number_generation_with_custom_pattern(app):
+ """Invoice number follows custom settings pattern."""
+ settings = Settings.get_settings()
+ settings.invoice_prefix = "RE"
+ settings.invoice_number_pattern = "{PREFIX}-{YYYY}-{SEQ}"
+ settings.invoice_start_number = 12
+ db.session.commit()
+
+ invoice_number = Invoice.generate_invoice_number()
+ assert invoice_number.startswith("RE-")
+ assert invoice_number.endswith("-012")
+
+
+def test_invoice_number_generation_with_empty_pattern_uses_sequence(app):
+ """Empty pattern generates sequence-only invoice numbers."""
+ settings = Settings.get_settings()
+ settings.invoice_prefix = ""
+ settings.invoice_number_pattern = ""
+ settings.invoice_start_number = 7
+ db.session.commit()
+
+ invoice_number = Invoice.generate_invoice_number()
+ assert invoice_number == "007"
+
+
def test_invoice_overdue_status(app, sample_user, sample_project):
"""Test that invoices are marked as overdue correctly."""
# Create a client first
diff --git a/tests/test_models/test_purchase_order.py b/tests/test_models/test_purchase_order.py
index 37d549a8..35096ec7 100644
--- a/tests/test_models/test_purchase_order.py
+++ b/tests/test_models/test_purchase_order.py
@@ -95,7 +95,7 @@ def test_purchase_order_with_items(self, db_session, test_purchase_order, test_s
test_purchase_order.calculate_totals()
db_session.commit()
- assert len(test_purchase_order.items) == 1
+ assert test_purchase_order.items.count() == 1
assert test_purchase_order.total_amount == Decimal("50.00")
def test_purchase_order_receive(self, db_session, test_purchase_order, test_stock_item, test_warehouse):
@@ -134,3 +134,23 @@ def test_purchase_order_to_dict(self, db_session, test_purchase_order):
assert data["status"] == "draft"
assert "created_at" in data
assert "items" in data
+
+ def test_purchase_order_none_safe_normalization(self, db_session, test_supplier, test_user):
+ """Model constructor should reject missing required string fields cleanly."""
+ with pytest.raises(ValueError):
+ PurchaseOrder(
+ po_number=None,
+ supplier_id=test_supplier.id,
+ order_date=date.today(),
+ created_by=test_user.id,
+ currency_code=None,
+ )
+
+ def test_purchase_order_item_description_required(self, db_session, test_purchase_order):
+ with pytest.raises(ValueError):
+ PurchaseOrderItem(
+ purchase_order_id=test_purchase_order.id,
+ description=" ",
+ quantity_ordered=Decimal("1.00"),
+ unit_cost=Decimal("1.00"),
+ )
diff --git a/tests/test_routes/test_purchase_order_routes.py b/tests/test_routes/test_purchase_order_routes.py
index 9a267765..e625959b 100644
--- a/tests/test_routes/test_purchase_order_routes.py
+++ b/tests/test_routes/test_purchase_order_routes.py
@@ -1,6 +1,7 @@
"""Tests for purchase order routes"""
import pytest
+from unittest.mock import patch
pytestmark = [pytest.mark.unit, pytest.mark.routes]
@@ -189,3 +190,22 @@ def test_delete_purchase_order(self, client, test_user, test_purchase_order):
# Check if PO was deleted
po = PurchaseOrder.query.get(test_purchase_order.id)
assert po is None
+
+ def test_create_purchase_order_safe_commit_failure(self, client, test_user, test_supplier):
+ """Create route should handle safe_commit failure without success redirect."""
+ with client.session_transaction() as sess:
+ sess["_user_id"] = str(test_user.id)
+
+ with patch("app.routes.inventory.safe_commit", return_value=False):
+ response = client.post(
+ url_for("inventory.new_purchase_order"),
+ data={
+ "supplier_id": test_supplier.id,
+ "order_date": date.today().isoformat(),
+ "currency_code": "EUR",
+ },
+ follow_redirects=True,
+ )
+
+ assert response.status_code == 200
+ assert b"database error" in response.data.lower()
diff --git a/tests/test_telemetry.py b/tests/test_telemetry.py
index 9cb1e469..637b4ef2 100644
--- a/tests/test_telemetry.py
+++ b/tests/test_telemetry.py
@@ -91,77 +91,78 @@ def test_telemetry_disabled_by_default(self):
class TestSendTelemetryPing:
"""Tests for sending telemetry pings"""
- @patch("app.utils.telemetry.posthog.capture")
- def test_send_ping_when_enabled(self, mock_capture):
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_send_ping_when_enabled(self, mock_send):
"""Test sending telemetry ping when enabled"""
with patch.dict(
os.environ,
{
"ENABLE_TELEMETRY": "true",
- "POSTHOG_API_KEY": "test-api-key",
+ "OTEL_EXPORTER_OTLP_ENDPOINT": "https://otlp.example.com",
+ "OTEL_EXPORTER_OTLP_TOKEN": "test-token",
"APP_VERSION": "1.0.0",
"TELE_SALT": "test-salt",
},
):
result = send_telemetry_ping("install")
assert result is True
- assert mock_capture.called
+ assert mock_send.called
- # Verify the call
- call_args = mock_capture.call_args
- assert call_args[1]["event"] == "telemetry.install"
- assert "distinct_id" in call_args[1]
+ call_args = mock_send.call_args
+ assert call_args[1]["event_name"] == "telemetry.install"
+ assert "identity" in call_args[1]
assert "properties" in call_args[1]
- @patch("app.utils.telemetry.posthog.capture")
- def test_no_ping_when_disabled(self, mock_capture):
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_no_ping_when_disabled(self, mock_send):
"""Test that no ping is sent when telemetry is disabled"""
with patch.dict(os.environ, {"ENABLE_TELEMETRY": "false"}):
result = send_telemetry_ping("install")
assert result is False
- assert not mock_capture.called
+ assert not mock_send.called
- @patch("app.utils.telemetry.posthog.capture")
- def test_no_ping_when_no_api_key(self, mock_capture):
- """Test that no ping is sent when POSTHOG_API_KEY is not set"""
- with patch.dict(os.environ, {"ENABLE_TELEMETRY": "true", "POSTHOG_API_KEY": ""}):
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_no_ping_when_no_sink_config(self, mock_send):
+ """Test that no ping is sent when OTLP sink is not set."""
+ with patch.dict(
+ os.environ, {"ENABLE_TELEMETRY": "true", "OTEL_EXPORTER_OTLP_ENDPOINT": "", "OTEL_EXPORTER_OTLP_TOKEN": ""}
+ ):
result = send_telemetry_ping("install")
assert result is False
- assert not mock_capture.called
+ assert not mock_send.called
- @patch("app.utils.telemetry.posthog.capture")
- def test_ping_includes_required_fields(self, mock_capture):
- """Test that telemetry ping includes required fields"""
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_ping_forwards_extra_data(self, mock_send):
+ """Test that telemetry ping forwards custom event data."""
with patch.dict(
os.environ,
{
"ENABLE_TELEMETRY": "true",
- "POSTHOG_API_KEY": "test-api-key",
+ "OTEL_EXPORTER_OTLP_ENDPOINT": "https://otlp.example.com",
+ "OTEL_EXPORTER_OTLP_TOKEN": "test-token",
"APP_VERSION": "1.0.0",
"TELE_SALT": "test-salt",
},
):
send_telemetry_ping("install", extra_data={"test": "value"})
- # Get the call arguments
- call_args = mock_capture.call_args
- event = call_args[1]["event"]
+ call_args = mock_send.call_args
properties = call_args[1]["properties"]
-
- assert event == "telemetry.install"
- assert "app_version" in properties
- assert "platform" in properties
- assert "python_version" in properties
- assert "environment" in properties
- assert "deployment_method" in properties
assert properties["test"] == "value"
- @patch("app.utils.telemetry.posthog.capture")
- def test_ping_handles_network_errors_gracefully(self, mock_capture):
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_ping_handles_network_errors_gracefully(self, mock_send):
"""Test that network errors don't crash the application"""
- mock_capture.side_effect = Exception("Network error")
+ mock_send.side_effect = Exception("Network error")
- with patch.dict(os.environ, {"ENABLE_TELEMETRY": "true", "POSTHOG_API_KEY": "test-api-key"}):
+ with patch.dict(
+ os.environ,
+ {
+ "ENABLE_TELEMETRY": "true",
+ "OTEL_EXPORTER_OTLP_ENDPOINT": "https://otlp.example.com",
+ "OTEL_EXPORTER_OTLP_TOKEN": "test-token",
+ },
+ ):
result = send_telemetry_ping("install")
assert result is False
diff --git a/tests/test_telemetry_consent_and_base.py b/tests/test_telemetry_consent_and_base.py
index 9d78c617..0934af9d 100644
--- a/tests/test_telemetry_consent_and_base.py
+++ b/tests/test_telemetry_consent_and_base.py
@@ -11,34 +11,34 @@
class TestConsentGate:
"""Product analytics only sent when opt-in is enabled."""
- @patch("posthog.capture")
- def test_send_analytics_event_no_capture_when_opt_out(self, mock_capture):
- """When detailed analytics is disabled, send_analytics_event must not call posthog.capture."""
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_send_analytics_event_no_capture_when_opt_out(self, mock_send):
+ """When detailed analytics is disabled, send_analytics_event must not call OTLP sender."""
from app.telemetry.service import send_analytics_event
with patch("app.telemetry.service.is_detailed_analytics_enabled", return_value=False):
send_analytics_event(1, "test.event", {"k": "v"})
- mock_capture.assert_not_called()
+ mock_send.assert_not_called()
- @patch("posthog.capture")
- def test_send_analytics_event_capture_when_opt_in(self, mock_capture):
- """When detailed analytics is enabled and PostHog configured, capture is called."""
+ @patch("app.telemetry.service._send_otlp_event")
+ def test_send_analytics_event_capture_when_opt_in(self, mock_send):
+ """When detailed analytics is enabled and OTLP configured, sender is called."""
from app.telemetry.service import send_analytics_event
with patch("app.telemetry.service.is_detailed_analytics_enabled", return_value=True):
with patch("app.config.analytics_defaults.get_analytics_config") as mock_config:
mock_config.return_value = {
- "posthog_api_key": "phc_test",
- "posthog_host": "https://test.posthog.com",
+ "otel_exporter_otlp_endpoint": "https://otlp.example.com",
+ "otel_exporter_otlp_token": "test-token",
"app_version": "1.0.0",
}
with patch("app.utils.installation.get_installation_config") as mock_inst:
mock_inst.return_value.get_install_id.return_value = "install-uuid-123"
send_analytics_event(1, "test.event", {"k": "v"})
- mock_capture.assert_called_once()
- call_kw = mock_capture.call_args[1]
- assert call_kw["distinct_id"] == "1"
- assert call_kw["event"] == "test.event"
+ mock_send.assert_called_once()
+ call_kw = mock_send.call_args[1]
+ assert call_kw["identity"] == "1"
+ assert call_kw["event_name"] == "test.event"
assert call_kw["properties"].get("install_id") == "install-uuid-123"