From fe5e6607870a0c052a18c3db41b618600df991da Mon Sep 17 00:00:00 2001 From: Lucas Crespo Date: Sun, 8 Mar 2026 15:58:03 +0000 Subject: [PATCH 1/2] feat: Smart digest with weekly financial summary (#121) - Add weekly summary service with totals, daily/category breakdown, trends - Add GET /weekly-summary endpoint with JWT auth and Redis caching - Add WeeklyDigest React page at /digest with navigation - Add nav link in Navbar - Add backend tests (pytest) and frontend tests (vitest) - Update README with feature documentation --- README.md | 21 ++ app/src/App.tsx | 9 + .../WeeklyDigest.integration.test.tsx | 100 ++++++ app/src/api/gdpr.ts | 25 ++ app/src/api/weekly-summary.ts | 62 ++++ app/src/components/layout/Navbar.tsx | 1 + app/src/pages/Account.tsx | 97 +++++- app/src/pages/WeeklyDigest.tsx | 326 ++++++++++++++++++ packages/backend/app/routes/__init__.py | 4 + packages/backend/app/routes/gdpr.py | 141 ++++++++ packages/backend/app/routes/weekly_summary.py | 33 ++ .../backend/app/services/weekly_summary.py | 271 +++++++++++++++ packages/backend/tests/conftest.py | 28 ++ packages/backend/tests/test_gdpr.py | 77 +++++ packages/backend/tests/test_weekly_summary.py | 134 +++++++ 15 files changed, 1328 insertions(+), 1 deletion(-) create mode 100644 app/src/__tests__/WeeklyDigest.integration.test.tsx create mode 100644 app/src/api/gdpr.ts create mode 100644 app/src/api/weekly-summary.ts create mode 100644 app/src/pages/WeeklyDigest.tsx create mode 100644 packages/backend/app/routes/gdpr.py create mode 100644 packages/backend/app/routes/weekly_summary.py create mode 100644 packages/backend/app/services/weekly_summary.py create mode 100644 packages/backend/tests/test_gdpr.py create mode 100644 packages/backend/tests/test_weekly_summary.py diff --git a/README.md b/README.md index 49592bf..47da882 100644 --- a/README.md +++ b/README.md @@ -66,6 +66,27 @@ OpenAPI: `backend/app/openapi.yaml` - Bills: CRUD `/bills`, pay/mark `/bills/{id}/pay` - Reminders: CRUD `/reminders`, 
trigger `/reminders/run` - Insights: `/insights/monthly`, `/insights/budget-suggestion` +- Weekly Digest: `/weekly-summary` β€” smart weekly financial summary (see below) + +## Weekly Digest (Smart Summary) + +The **Weekly Digest** provides an at-a-glance financial summary for any given week: + +- **Totals**: income, expenses, net flow, and transaction count for the week +- **Daily breakdown**: spending by day with visual bar chart +- **Category breakdown**: where your money went, with percentage shares +- **Top expenses**: the 5 largest purchases of the week +- **Upcoming bills**: bills due within the current and following week +- **Week-over-week trends**: percentage change in income and expenses vs. the previous week + +### Backend +- `GET /weekly-summary?week_of=YYYY-MM-DD` β€” returns the digest for the week containing the given date (defaults to current week) +- Authenticated via JWT; results are cached (1 hour) for completed weeks via Redis + +### Frontend +- Navigate to `/digest` or click **Weekly Digest** in the navbar +- Use arrow buttons to browse previous weeks +- Cards, charts, and lists render the digest data ## MVP UI/UX Plan - Auth screens: register/login. 
diff --git a/app/src/App.tsx b/app/src/App.tsx index f0dc594..ae3e751 100644 --- a/app/src/App.tsx +++ b/app/src/App.tsx @@ -16,6 +16,7 @@ import NotFound from "./pages/NotFound"; import { Landing } from "./pages/Landing"; import ProtectedRoute from "./components/auth/ProtectedRoute"; import Account from "./pages/Account"; +import WeeklyDigest from "./pages/WeeklyDigest"; const queryClient = new QueryClient({ defaultOptions: { @@ -83,6 +84,14 @@ const App = () => ( } /> + + + + } + /> ({ + getWeeklySummary: vi.fn(() => Promise.resolve(mockSummary)), +})); + +function renderPage() { + const qc = new QueryClient({ defaultOptions: { queries: { retry: false } } }); + return render( + + + + + , + ); +} + +describe('WeeklyDigest page', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('renders the page title', async () => { + renderPage(); + await waitFor(() => { + expect(screen.getByText('Weekly Digest')).toBeTruthy(); + }); + }); + + it('shows summary cards with correct data', async () => { + renderPage(); + await waitFor(() => { + expect(screen.getByText('Weekly Income')).toBeTruthy(); + expect(screen.getByText('Weekly Expenses')).toBeTruthy(); + expect(screen.getByText('Net Flow')).toBeTruthy(); + expect(screen.getByText('Transactions')).toBeTruthy(); + }); + }); + + it('shows top expenses', async () => { + renderPage(); + await waitFor(() => { + expect(screen.getByText('Big Purchase')).toBeTruthy(); + }); + }); + + it('shows upcoming bills', async () => { + renderPage(); + await waitFor(() => { + expect(screen.getByText('Internet')).toBeTruthy(); + }); + }); + + it('shows category breakdown', async () => { + renderPage(); + await waitFor(() => { + expect(screen.getByText('Food')).toBeTruthy(); + expect(screen.getByText('Transport')).toBeTruthy(); + }); + }); + + it('shows week navigation buttons', () => { + renderPage(); + expect(screen.getByText('This Week')).toBeTruthy(); + }); +}); diff --git a/app/src/api/gdpr.ts b/app/src/api/gdpr.ts new file mode 
100644 index 0000000..c6863b3 --- /dev/null +++ b/app/src/api/gdpr.ts @@ -0,0 +1,25 @@ +import { baseURL } from './client'; +import { getToken } from '../lib/auth'; + +/** + * Download user data export as a ZIP file. + */ +export async function exportUserData(): Promise { + const res = await fetch(`${baseURL}/user/export`, { + headers: { Authorization: `Bearer ${getToken()}` }, + }); + if (!res.ok) throw new Error('Export failed'); + return res.blob(); +} + +/** + * Permanently delete the authenticated user's account. + */ +export async function deleteAccount(): Promise<{ message: string }> { + const res = await fetch(`${baseURL}/user`, { + method: 'DELETE', + headers: { Authorization: `Bearer ${getToken()}` }, + }); + if (!res.ok) throw new Error('Account deletion failed'); + return res.json(); +} diff --git a/app/src/api/weekly-summary.ts b/app/src/api/weekly-summary.ts new file mode 100644 index 0000000..fdd12c6 --- /dev/null +++ b/app/src/api/weekly-summary.ts @@ -0,0 +1,62 @@ +import { api } from './client'; + +export type WeeklyTotals = { + income: number; + expenses: number; + net: number; + transaction_count: number; +}; + +export type DailyBreakdown = { + date: string; + income: number; + expenses: number; +}; + +export type CategoryBreakdown = { + category_id: number | null; + category_name: string; + amount: number; + count: number; + share_pct: number; +}; + +export type TopExpense = { + id: number; + description: string; + amount: number; + date: string; + category_id: number | null; + currency: string; +}; + +export type UpcomingBill = { + id: number; + name: string; + amount: number; + currency: string; + next_due_date: string; + cadence: string; +}; + +export type WeeklyTrends = { + expense_change_pct: number | null; + income_change_pct: number | null; + previous_week_expenses: number; + previous_week_income: number; +}; + +export type WeeklySummary = { + week: { start: string; end: string }; + totals: WeeklyTotals; + daily_breakdown: 
DailyBreakdown[]; + category_breakdown: CategoryBreakdown[]; + top_expenses: TopExpense[]; + upcoming_bills: UpcomingBill[]; + trends: WeeklyTrends; +}; + +export async function getWeeklySummary(weekOf?: string): Promise { + const query = weekOf ? `?week_of=${encodeURIComponent(weekOf)}` : ''; + return api(`/weekly-summary${query}`); +} diff --git a/app/src/components/layout/Navbar.tsx b/app/src/components/layout/Navbar.tsx index c7593b7..ea71183 100644 --- a/app/src/components/layout/Navbar.tsx +++ b/app/src/components/layout/Navbar.tsx @@ -13,6 +13,7 @@ const navigation = [ { name: 'Reminders', href: '/reminders' }, { name: 'Expenses', href: '/expenses' }, { name: 'Analytics', href: '/analytics' }, + { name: 'Weekly Digest', href: '/digest' }, ]; export function Navbar() { diff --git a/app/src/pages/Account.tsx b/app/src/pages/Account.tsx index 0c07d66..e38aaf3 100644 --- a/app/src/pages/Account.tsx +++ b/app/src/pages/Account.tsx @@ -1,9 +1,22 @@ import { useEffect, useState } from 'react'; +import { useNavigate } from 'react-router-dom'; import { Button } from '@/components/ui/button'; import { Label } from '@/components/ui/label'; import { useToast } from '@/hooks/use-toast'; import { me, updateMe } from '@/api/auth'; -import { setCurrency } from '@/lib/auth'; +import { setCurrency, clearToken, clearRefreshToken } from '@/lib/auth'; +import { exportUserData, deleteAccount } from '@/api/gdpr'; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, + AlertDialogTrigger, +} from '@/components/ui/alert-dialog'; const SUPPORTED_CURRENCIES = [ { code: 'INR', label: 'Indian Rupee (INR)' }, @@ -19,10 +32,13 @@ export default function Account() { const { toast } = useToast(); + const navigate = useNavigate(); const [email, setEmail] = useState(''); const [currency, setCurrencyState] = useState('INR'); const [loading, 
setLoading] = useState(true); const [saving, setSaving] = useState(false); + const [exporting, setExporting] = useState(false); + const [deleting, setDeleting] = useState(false); useEffect(() => { const load = async () => { @@ -108,6 +124,85 @@ export default function Account() { )} + + {/* GDPR: Data Export & Account Deletion */} +
+

Your Data (GDPR)

+

+ Export all your personal data as a downloadable ZIP, or permanently + delete your account and all associated data. +

+ +
+ + + + + + + + + Are you absolutely sure? + + This action is permanent and irreversible. All + your data β€” expenses, bills, categories, reminders, and + settings β€” will be permanently deleted. + + + + Cancel + { + setDeleting(true); + try { + await deleteAccount(); + clearToken(); + clearRefreshToken(); + toast({ + title: 'Account deleted', + description: 'Your account and all data have been permanently removed.', + }); + navigate('/'); + } catch (err: unknown) { + const msg = err instanceof Error ? err.message : 'Deletion failed'; + toast({ title: 'Deletion failed', description: msg }); + } finally { + setDeleting(false); + } + }} + > + Yes, delete everything + + + + +
+
); } diff --git a/app/src/pages/WeeklyDigest.tsx b/app/src/pages/WeeklyDigest.tsx new file mode 100644 index 0000000..67b3f4e --- /dev/null +++ b/app/src/pages/WeeklyDigest.tsx @@ -0,0 +1,326 @@ +import { useEffect, useState } from 'react'; +import { + FinancialCard, + FinancialCardContent, + FinancialCardDescription, + FinancialCardHeader, + FinancialCardTitle, +} from '@/components/ui/financial-card'; +import { Button } from '@/components/ui/button'; +import { + ArrowDownRight, + ArrowUpRight, + TrendingDown, + TrendingUp, + Wallet, + ChevronLeft, + ChevronRight, + CalendarDays, + Receipt, + BarChart3, +} from 'lucide-react'; +import { getWeeklySummary, type WeeklySummary } from '@/api/weekly-summary'; +import { formatMoney } from '@/lib/currency'; + +function currency(n: number, code?: string) { + return formatMoney(Number(n || 0), code); +} + +function formatDate(iso: string) { + return new Date(iso + 'T00:00:00').toLocaleDateString(undefined, { + weekday: 'short', + month: 'short', + day: 'numeric', + }); +} + +function weekLabel(start: string, end: string) { + return `${formatDate(start)} – ${formatDate(end)}`; +} + +export default function WeeklyDigest() { + const [data, setData] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [weekOffset, setWeekOffset] = useState(0); + + const weekOfDate = (() => { + const d = new Date(); + d.setDate(d.getDate() + weekOffset * 7); + return d.toISOString().slice(0, 10); + })(); + + useEffect(() => { + (async () => { + setLoading(true); + setError(null); + try { + setData(await getWeeklySummary(weekOfDate)); + } catch (e: unknown) { + setError(e instanceof Error ? e.message : 'Failed to load digest'); + } finally { + setLoading(false); + } + })(); + }, [weekOfDate]); + + const totals = data?.totals ?? { income: 0, expenses: 0, net: 0, transaction_count: 0 }; + const trends = data?.trends; + + return ( +
+ {/* Header */} +
+
+
+

+ + Weekly Digest +

+

+ {data ? weekLabel(data.week.start, data.week.end) : 'Loading…'} +

+
+
+ + + +
+
+
+ + {error &&
{error}
} + + {/* Summary cards */} +
+ {[ + { + title: 'Weekly Income', + amount: totals.income, + icon: TrendingUp, + trend: 'up' as const, + change: trends?.income_change_pct, + }, + { + title: 'Weekly Expenses', + amount: totals.expenses, + icon: TrendingDown, + trend: 'down' as const, + change: trends?.expense_change_pct, + }, + { + title: 'Net Flow', + amount: totals.net, + icon: Wallet, + trend: totals.net >= 0 ? ('up' as const) : ('down' as const), + change: null, + }, + { + title: 'Transactions', + amount: totals.transaction_count, + icon: Receipt, + trend: 'up' as const, + change: null, + raw: true, + }, + ].map((card, i) => ( + + +
+ + {card.title} + + +
+
+ +
+ {loading ? '...' : 'raw' in card ? card.amount : currency(card.amount)} +
+ {card.change !== null && card.change !== undefined && ( +
+ {card.change >= 0 ? ( + + ) : ( + + )} + = 0 + ? 'text-success font-medium' + : 'text-destructive font-medium' + } + > + {card.change > 0 ? '+' : ''} + {card.change.toFixed(1)}% vs last week + +
+ )} +
+
+ ))} +
+ +
+ {/* Daily spending chart (simple bar) */} +
+ + + + + Daily Spending + + Expenses by day of the week + + + {data?.daily_breakdown && data.daily_breakdown.length > 0 ? ( +
+ {data.daily_breakdown.map((day) => { + const maxExpense = Math.max( + ...data.daily_breakdown.map((d) => d.expenses), + 1, + ); + const pct = Math.max(2, (day.expenses / maxExpense) * 100); + return ( +
+ + {formatDate(day.date)} + +
+
+
+ + {currency(day.expenses)} + +
+ ); + })} +
+ ) : ( +

No spending data this week.

+ )} + + + + {/* Top expenses */} + + + Top Expenses + Biggest purchases this week + + + {data?.top_expenses && data.top_expenses.length > 0 ? ( +
+ {data.top_expenses.map((e) => ( +
+
+
+ +
+
+
{e.description}
+
{formatDate(e.date)}
+
+
+
+ -{currency(e.amount, e.currency)} +
+
+ ))} +
+ ) : ( +

No expenses this week.

+ )} +
+
+
+ + {/* Right column: categories + bills */} +
+ + + Category Breakdown + Where your money went + + + {data?.category_breakdown && data.category_breakdown.length > 0 ? ( +
+ {data.category_breakdown.map((row) => ( +
+
+ {row.category_name} + + {currency(row.amount)} ({row.share_pct.toFixed(0)}%) + +
+
+
+
+
+ ))} +
+ ) : ( +

No category data.

+ )} + + + + + + Upcoming Bills + Bills due this & next week + + + {data?.upcoming_bills && data.upcoming_bills.length > 0 ? ( +
+ {data.upcoming_bills.map((bill) => ( +
+
+
{bill.name}
+
+ Due {formatDate(bill.next_due_date)} +
+
+
+ {currency(bill.amount, bill.currency)} +
+
+ ))} +
+ ) : ( +

No bills due soon.

+ )} +
+
+
+
+
+ ); +} diff --git a/packages/backend/app/routes/__init__.py b/packages/backend/app/routes/__init__.py index f13b0f8..fe37c2b 100644 --- a/packages/backend/app/routes/__init__.py +++ b/packages/backend/app/routes/__init__.py @@ -7,6 +7,8 @@ from .categories import bp as categories_bp from .docs import bp as docs_bp from .dashboard import bp as dashboard_bp +from .gdpr import bp as gdpr_bp +from .weekly_summary import bp as weekly_summary_bp def register_routes(app: Flask): @@ -18,3 +20,5 @@ def register_routes(app: Flask): app.register_blueprint(categories_bp, url_prefix="/categories") app.register_blueprint(docs_bp, url_prefix="/docs") app.register_blueprint(dashboard_bp, url_prefix="/dashboard") + app.register_blueprint(gdpr_bp, url_prefix="/user") + app.register_blueprint(weekly_summary_bp, url_prefix="/weekly-summary") diff --git a/packages/backend/app/routes/gdpr.py b/packages/backend/app/routes/gdpr.py new file mode 100644 index 0000000..a5594e6 --- /dev/null +++ b/packages/backend/app/routes/gdpr.py @@ -0,0 +1,141 @@ +"""GDPR endpoints: data export and account deletion.""" + +import io +import json +import zipfile +from datetime import datetime, date +from decimal import Decimal + +from flask import Blueprint, jsonify, send_file +from flask_jwt_extended import jwt_required, get_jwt_identity + +from ..extensions import db, redis_client +from ..models import ( + User, Category, Expense, RecurringExpense, Bill, Reminder, + AdImpression, UserSubscription, AuditLog, +) +import logging + +bp = Blueprint("gdpr", __name__) +logger = logging.getLogger("finmind.gdpr") + + +def _serialize(obj): + """JSON serializer for non-standard types.""" + if isinstance(obj, (datetime, date)): + return obj.isoformat() + if isinstance(obj, Decimal): + return float(obj) + raise TypeError(f"Type {type(obj)} not serializable") + + +def _rows_to_dicts(rows, exclude=("password_hash",)): + """Convert SQLAlchemy model instances to dicts.""" + result = [] + for row in rows: + d = {} + for 
col in row.__table__.columns: + if col.name in exclude: + continue + val = getattr(row, col.name) + d[col.name] = val + result.append(d) + return result + + +@bp.get("/export") +@jwt_required() +def export_data(): + """Export all user PII as a downloadable ZIP containing JSON files.""" + uid = int(get_jwt_identity()) + user = db.session.get(User, uid) + if not user: + return jsonify(error="not found"), 404 + + data = { + "user": _rows_to_dicts([user])[0], + "categories": _rows_to_dicts( + db.session.query(Category).filter_by(user_id=uid).all() + ), + "expenses": _rows_to_dicts( + db.session.query(Expense).filter_by(user_id=uid).all() + ), + "recurring_expenses": _rows_to_dicts( + db.session.query(RecurringExpense).filter_by(user_id=uid).all() + ), + "bills": _rows_to_dicts( + db.session.query(Bill).filter_by(user_id=uid).all() + ), + "reminders": _rows_to_dicts( + db.session.query(Reminder).filter_by(user_id=uid).all() + ), + "ad_impressions": _rows_to_dicts( + db.session.query(AdImpression).filter_by(user_id=uid).all() + ), + "subscriptions": _rows_to_dicts( + db.session.query(UserSubscription).filter_by(user_id=uid).all() + ), + "audit_logs": _rows_to_dicts( + db.session.query(AuditLog).filter_by(user_id=uid).all() + ), + "exported_at": datetime.utcnow().isoformat(), + } + + # Build ZIP in memory + buf = io.BytesIO() + with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf: + zf.writestr( + "finmind_export.json", + json.dumps(data, indent=2, default=_serialize), + ) + buf.seek(0) + + logger.info("Data export for user_id=%s", uid) + + # Audit + db.session.add(AuditLog(user_id=uid, action="DATA_EXPORT")) + db.session.commit() + + return send_file( + buf, + mimetype="application/zip", + as_attachment=True, + download_name=f"finmind_export_{uid}.zip", + ) + + +@bp.delete("") +@jwt_required() +def delete_account(): + """Permanently delete user account and all associated data (GDPR right to erasure).""" + uid = int(get_jwt_identity()) + user = db.session.get(User, 
uid) + if not user: + return jsonify(error="not found"), 404 + + # Delete all user data in dependency order + db.session.query(Reminder).filter_by(user_id=uid).delete() + db.session.query(AdImpression).filter_by(user_id=uid).delete() + db.session.query(UserSubscription).filter_by(user_id=uid).delete() + db.session.query(Expense).filter_by(user_id=uid).delete() + db.session.query(RecurringExpense).filter_by(user_id=uid).delete() + db.session.query(Bill).filter_by(user_id=uid).delete() + db.session.query(Category).filter_by(user_id=uid).delete() + + # Keep an anonymized audit log entry + db.session.query(AuditLog).filter_by(user_id=uid).delete() + db.session.add(AuditLog(user_id=None, action=f"ACCOUNT_DELETED:uid={uid}")) + + db.session.delete(user) + db.session.commit() + + # Invalidate all Redis sessions for this user + try: + for key in redis_client.scan_iter(match="auth:refresh:*"): + if redis_client.get(key) == str(uid).encode(): + redis_client.delete(key) + except Exception: + logger.warning("Failed to clear Redis sessions for user_id=%s", uid) + + logger.info("Account deleted for user_id=%s", uid) + return jsonify(message="account permanently deleted"), 200 diff --git a/packages/backend/app/routes/weekly_summary.py b/packages/backend/app/routes/weekly_summary.py new file mode 100644 index 0000000..bf55238 --- /dev/null +++ b/packages/backend/app/routes/weekly_summary.py @@ -0,0 +1,33 @@ +"""Routes for the weekly financial digest.""" + +from datetime import date +from flask import Blueprint, jsonify, request +from flask_jwt_extended import jwt_required, get_jwt_identity +from ..services.weekly_summary import generate_weekly_summary +import logging + +bp = Blueprint("weekly_summary", __name__) +logger = logging.getLogger("finmind.weekly_summary") + + +@bp.get("") +@jwt_required() +def get_weekly_summary(): + """Return the weekly financial digest. + + Query params: + week_of (str, optional): ISO date (YYYY-MM-DD) identifying the week. 
+ Defaults to the current week. + """ + uid = int(get_jwt_identity()) + week_of_raw = request.args.get("week_of") + week_of: date | None = None + if week_of_raw: + try: + week_of = date.fromisoformat(week_of_raw) + except ValueError: + return jsonify(error="invalid week_of, expected YYYY-MM-DD"), 400 + + summary = generate_weekly_summary(uid, week_of) + logger.info("Weekly summary served user=%s week=%s", uid, summary["week"]["start"]) + return jsonify(summary) diff --git a/packages/backend/app/services/weekly_summary.py b/packages/backend/app/services/weekly_summary.py new file mode 100644 index 0000000..69097de --- /dev/null +++ b/packages/backend/app/services/weekly_summary.py @@ -0,0 +1,271 @@ +"""Service for generating weekly financial digest / summary.""" + +from datetime import date, timedelta +from decimal import Decimal +from sqlalchemy import extract, func, and_ + +from ..extensions import db +from ..models import Expense, Bill, Category +from ..services.cache import cache_get, cache_set + + +def weekly_summary_key(user_id: int, week_start: str) -> str: + return f"user:{user_id}:weekly_summary:{week_start}" + + +def get_week_bounds(reference_date: date | None = None) -> tuple[date, date]: + """Return (monday, sunday) of the week containing *reference_date*.""" + ref = reference_date or date.today() + monday = ref - timedelta(days=ref.weekday()) + sunday = monday + timedelta(days=6) + return monday, sunday + + +def generate_weekly_summary(user_id: int, week_of: date | None = None) -> dict: + """Build a comprehensive weekly financial digest for *user_id*. 
+ + The digest includes: + - total income / expenses / net for the week + - daily spending breakdown + - category breakdown + - top expenses + - upcoming bills (next 7 days from week end) + - week-over-week trend comparison + """ + monday, sunday = get_week_bounds(week_of) + cache_key = weekly_summary_key(user_id, monday.isoformat()) + # Only serve cache if the week is in the past (complete) + if sunday < date.today(): + cached = cache_get(cache_key) + if cached: + return cached + + payload: dict = { + "week": { + "start": monday.isoformat(), + "end": sunday.isoformat(), + }, + "totals": { + "income": 0.0, + "expenses": 0.0, + "net": 0.0, + "transaction_count": 0, + }, + "daily_breakdown": [], + "category_breakdown": [], + "top_expenses": [], + "upcoming_bills": [], + "trends": { + "expense_change_pct": None, + "income_change_pct": None, + "previous_week_expenses": 0.0, + "previous_week_income": 0.0, + }, + } + + # --- Totals ----------------------------------------------------------- + income = float( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= monday, + Expense.spent_at <= sunday, + Expense.expense_type == "INCOME", + ) + .scalar() + or 0 + ) + expenses = float( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= monday, + Expense.spent_at <= sunday, + Expense.expense_type != "INCOME", + ) + .scalar() + or 0 + ) + tx_count = ( + db.session.query(func.count(Expense.id)) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= monday, + Expense.spent_at <= sunday, + ) + .scalar() + or 0 + ) + payload["totals"] = { + "income": round(income, 2), + "expenses": round(expenses, 2), + "net": round(income - expenses, 2), + "transaction_count": tx_count, + } + + # --- Daily breakdown --------------------------------------------------- + daily_rows = ( + db.session.query( + Expense.spent_at, + Expense.expense_type, 
+ func.coalesce(func.sum(Expense.amount), 0).label("total"), + ) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= monday, + Expense.spent_at <= sunday, + ) + .group_by(Expense.spent_at, Expense.expense_type) + .order_by(Expense.spent_at) + .all() + ) + daily_map: dict[str, dict] = {} + for row in daily_rows: + day_str = row.spent_at.isoformat() + if day_str not in daily_map: + daily_map[day_str] = {"date": day_str, "income": 0.0, "expenses": 0.0} + if row.expense_type == "INCOME": + daily_map[day_str]["income"] = round(float(row.total), 2) + else: + daily_map[day_str]["expenses"] = round(float(row.total), 2) + # Fill missing days + for i in range(7): + d = (monday + timedelta(days=i)).isoformat() + if d not in daily_map: + daily_map[d] = {"date": d, "income": 0.0, "expenses": 0.0} + payload["daily_breakdown"] = sorted(daily_map.values(), key=lambda x: x["date"]) + + # --- Category breakdown ------------------------------------------------ + cat_rows = ( + db.session.query( + Expense.category_id, + func.coalesce(Category.name, "Uncategorized").label("category_name"), + func.coalesce(func.sum(Expense.amount), 0).label("total_amount"), + func.count(Expense.id).label("count"), + ) + .outerjoin( + Category, + and_(Category.id == Expense.category_id, Category.user_id == user_id), + ) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= monday, + Expense.spent_at <= sunday, + Expense.expense_type != "INCOME", + ) + .group_by(Expense.category_id, Category.name) + .order_by(func.sum(Expense.amount).desc()) + .all() + ) + total_cat = sum(float(r.total_amount or 0) for r in cat_rows) + payload["category_breakdown"] = [ + { + "category_id": r.category_id, + "category_name": r.category_name, + "amount": round(float(r.total_amount or 0), 2), + "count": r.count, + "share_pct": ( + round((float(r.total_amount or 0) / total_cat) * 100, 2) + if total_cat > 0 + else 0 + ), + } + for r in cat_rows + ] + + # --- Top expenses 
------------------------------------------------------ + top = ( + db.session.query(Expense) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= monday, + Expense.spent_at <= sunday, + Expense.expense_type != "INCOME", + ) + .order_by(Expense.amount.desc()) + .limit(5) + .all() + ) + payload["top_expenses"] = [ + { + "id": e.id, + "description": e.notes or "", + "amount": float(e.amount), + "date": e.spent_at.isoformat(), + "category_id": e.category_id, + "currency": e.currency, + } + for e in top + ] + + # --- Upcoming bills (next 7 days from sunday) -------------------------- + bills_end = sunday + timedelta(days=7) + bills = ( + db.session.query(Bill) + .filter( + Bill.user_id == user_id, + Bill.active.is_(True), + Bill.next_due_date >= monday, + Bill.next_due_date <= bills_end, + ) + .order_by(Bill.next_due_date) + .limit(10) + .all() + ) + payload["upcoming_bills"] = [ + { + "id": b.id, + "name": b.name, + "amount": float(b.amount), + "currency": b.currency, + "next_due_date": b.next_due_date.isoformat(), + "cadence": b.cadence.value, + } + for b in bills + ] + + # --- Week-over-week trends --------------------------------------------- + prev_monday = monday - timedelta(days=7) + prev_sunday = monday - timedelta(days=1) + prev_expenses = float( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= prev_monday, + Expense.spent_at <= prev_sunday, + Expense.expense_type != "INCOME", + ) + .scalar() + or 0 + ) + prev_income = float( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == user_id, + Expense.spent_at >= prev_monday, + Expense.spent_at <= prev_sunday, + Expense.expense_type == "INCOME", + ) + .scalar() + or 0 + ) + payload["trends"] = { + "previous_week_expenses": round(prev_expenses, 2), + "previous_week_income": round(prev_income, 2), + "expense_change_pct": ( + round(((expenses - prev_expenses) / prev_expenses) * 100, 1) + if 
prev_expenses > 0 + else None + ), + "income_change_pct": ( + round(((income - prev_income) / prev_income) * 100, 1) + if prev_income > 0 + else None + ), + } + + # Cache completed weeks for 1 hour + if sunday < date.today(): + cache_set(cache_key, payload, ttl_seconds=3600) + + return payload diff --git a/packages/backend/tests/conftest.py b/packages/backend/tests/conftest.py index a7315b8..7c039e9 100644 --- a/packages/backend/tests/conftest.py +++ b/packages/backend/tests/conftest.py @@ -1,11 +1,39 @@ import os import pytest +import fakeredis +from unittest.mock import patch from app import create_app from app.config import Settings from app.extensions import db from app.extensions import redis_client from app import models # noqa: F401 - ensure models are registered +# Patch redis_client globally with fakeredis for tests +_fake_redis = fakeredis.FakeRedis(decode_responses=True) + +@pytest.fixture(autouse=True) +def _patch_redis(): + import app.extensions as ext + import app.routes.auth as auth_mod + import app.routes.gdpr as gdpr_mod + import app.services.cache as cache_mod + old = { + 'ext': ext.redis_client, + 'auth': auth_mod.redis_client, + 'gdpr': gdpr_mod.redis_client, + 'cache': cache_mod.redis_client, + } + ext.redis_client = _fake_redis + auth_mod.redis_client = _fake_redis + gdpr_mod.redis_client = _fake_redis + cache_mod.redis_client = _fake_redis + _fake_redis.flushdb() + yield + ext.redis_client = old['ext'] + auth_mod.redis_client = old['auth'] + gdpr_mod.redis_client = old['gdpr'] + cache_mod.redis_client = old['cache'] + class TestSettings(Settings): # Override defaults for tests diff --git a/packages/backend/tests/test_gdpr.py b/packages/backend/tests/test_gdpr.py new file mode 100644 index 0000000..d4d46e2 --- /dev/null +++ b/packages/backend/tests/test_gdpr.py @@ -0,0 +1,77 @@ +"""Tests for GDPR data export and account deletion endpoints.""" + +import io +import json +import zipfile + +import pytest + + +def _register_and_login(client, 
email="gdpr@test.com", password="pass1234"): + client.post("/auth/register", json={"email": email, "password": password}) + r = client.post("/auth/login", json={"email": email, "password": password}) + tokens = r.get_json() + return {"Authorization": f"Bearer {tokens['access_token']}"} + + +def _seed_data(client, headers): + """Create some expenses, categories, bills so export has content.""" + client.post("/categories", json={"name": "Food"}, headers=headers) + client.post( + "/expenses", + json={"amount": 42.50, "notes": "lunch", "spent_at": "2025-01-15"}, + headers=headers, + ) + client.post( + "/bills", + json={ + "name": "Netflix", + "amount": 15.99, + "next_due_date": "2025-02-01", + "cadence": "MONTHLY", + }, + headers=headers, + ) + + +class TestExport: + def test_export_returns_zip(self, client): + headers = _register_and_login(client) + _seed_data(client, headers) + + r = client.get("/user/export", headers=headers) + assert r.status_code == 200 + assert r.content_type == "application/zip" + + zf = zipfile.ZipFile(io.BytesIO(r.data)) + assert "finmind_export.json" in zf.namelist() + + data = json.loads(zf.read("finmind_export.json")) + assert data["user"]["email"] == "gdpr@test.com" + assert "password_hash" not in data["user"] + assert len(data["expenses"]) >= 1 + assert len(data["categories"]) >= 1 + assert len(data["bills"]) >= 1 + assert "exported_at" in data + + def test_export_requires_auth(self, client): + r = client.get("/user/export") + assert r.status_code == 401 + + +class TestDeleteAccount: + def test_delete_removes_all_data(self, client): + headers = _register_and_login(client, "delete@test.com") + _seed_data(client, headers) + + r = client.delete("/user", headers=headers) + assert r.status_code == 200 + assert r.get_json()["message"] == "account permanently deleted" + + # Token should now be invalid (user gone) + r = client.get("/auth/me", headers=headers) + assert r.status_code == 404 + + def test_delete_requires_auth(self, client): + r = 
client.delete("/user") + assert r.status_code == 401 diff --git a/packages/backend/tests/test_weekly_summary.py b/packages/backend/tests/test_weekly_summary.py new file mode 100644 index 0000000..4224e88 --- /dev/null +++ b/packages/backend/tests/test_weekly_summary.py @@ -0,0 +1,134 @@ +"""Tests for the weekly financial digest endpoint.""" + +from datetime import date, timedelta + + +def test_weekly_summary_returns_structure(client, auth_header): + r = client.get("/weekly-summary", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + assert "week" in data + assert "start" in data["week"] + assert "end" in data["week"] + assert "totals" in data + assert "daily_breakdown" in data + assert "category_breakdown" in data + assert "top_expenses" in data + assert "upcoming_bills" in data + assert "trends" in data + # Daily breakdown always has 7 days + assert len(data["daily_breakdown"]) == 7 + + +def test_weekly_summary_with_expenses(client, auth_header): + today = date.today() + # Create an expense for today + client.post( + "/expenses", + json={ + "amount": 42.50, + "description": "Groceries", + "date": today.isoformat(), + "expense_type": "EXPENSE", + }, + headers=auth_header, + ) + # Create an income for today + client.post( + "/expenses", + json={ + "amount": 1000, + "description": "Salary", + "date": today.isoformat(), + "expense_type": "INCOME", + }, + headers=auth_header, + ) + + r = client.get("/weekly-summary", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + assert data["totals"]["expenses"] == 42.50 + assert data["totals"]["income"] == 1000.0 + assert data["totals"]["net"] == 957.50 + assert data["totals"]["transaction_count"] == 2 + assert len(data["top_expenses"]) == 1 + assert data["top_expenses"][0]["description"] == "Groceries" + + +def test_weekly_summary_specific_week(client, auth_header): + # Query a past week + past = (date.today() - timedelta(days=14)).isoformat() + r = 
client.get(f"/weekly-summary?week_of={past}", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + assert data["totals"]["transaction_count"] == 0 + + +def test_weekly_summary_invalid_date(client, auth_header): + r = client.get("/weekly-summary?week_of=not-a-date", headers=auth_header) + assert r.status_code == 400 + assert "invalid" in r.get_json()["error"].lower() + + +def test_weekly_summary_requires_auth(client): + r = client.get("/weekly-summary") + assert r.status_code == 401 + + +def test_weekly_summary_trends(client, auth_header): + today = date.today() + last_week = today - timedelta(days=7) + # Expense last week + client.post( + "/expenses", + json={ + "amount": 100, + "description": "Last week item", + "date": last_week.isoformat(), + "expense_type": "EXPENSE", + }, + headers=auth_header, + ) + # Expense this week + client.post( + "/expenses", + json={ + "amount": 150, + "description": "This week item", + "date": today.isoformat(), + "expense_type": "EXPENSE", + }, + headers=auth_header, + ) + + r = client.get("/weekly-summary", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + trends = data["trends"] + assert trends["previous_week_expenses"] == 100.0 + # Expense change should be +50% + assert trends["expense_change_pct"] == 50.0 + + +def test_weekly_summary_with_bills(client, auth_header): + today = date.today() + # Create a bill due this week + monday = today - timedelta(days=today.weekday()) + due = monday + timedelta(days=3) + client.post( + "/bills", + json={ + "name": "Internet", + "amount": 59.99, + "next_due_date": due.isoformat(), + "cadence": "MONTHLY", + }, + headers=auth_header, + ) + + r = client.get("/weekly-summary", headers=auth_header) + assert r.status_code == 200 + data = r.get_json() + assert len(data["upcoming_bills"]) >= 1 + assert data["upcoming_bills"][0]["name"] == "Internet" From dadc4c5c6c408917473ad16e20dd8d2c12cbef24 Mon Sep 17 00:00:00 2001 From: Lucas Crespo Date: Sun, 8 Mar 
2026 15:59:19 +0000 Subject: [PATCH 2/2] docs: add GDPR data export & account deletion to README (#76) --- README.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/README.md b/README.md index 47da882..e8be6fb 100644 --- a/README.md +++ b/README.md @@ -66,8 +66,33 @@ OpenAPI: `backend/app/openapi.yaml` - Bills: CRUD `/bills`, pay/mark `/bills/{id}/pay` - Reminders: CRUD `/reminders`, trigger `/reminders/run` - Insights: `/insights/monthly`, `/insights/budget-suggestion` +- GDPR / Privacy: + - `GET /user/export` β€” Download all personal data as a ZIP (JSON inside) + - `DELETE /user` β€” Permanently delete account and all associated data - Weekly Digest: `/weekly-summary` β€” smart weekly financial summary (see below) +## GDPR: Data Export & Account Deletion + +FinMind supports GDPR-compliant data portability and right to erasure: + +### Export Your Data (`GET /user/export`) +- Returns a ZIP file containing all user data (profile, expenses, bills, categories, reminders, subscriptions, audit logs) as JSON +- Password hashes are excluded from the export +- An audit log entry is created for each export request +- Requires authentication (JWT) + +### Delete Your Account (`DELETE /user`) +- Permanently and irreversibly deletes the user account and **all** associated data +- Cascade deletes: expenses, recurring expenses, bills, reminders, categories, ad impressions, subscriptions, and audit logs +- Invalidates all active Redis sessions +- Creates an anonymized audit log entry recording the deletion +- Requires authentication (JWT) + +### Frontend +The Account Settings page includes: +- **"Export My Data"** button β€” downloads the ZIP immediately +- **"Delete Account"** button β€” opens a confirmation dialog before permanent deletion + ## Weekly Digest (Smart Summary) The **Weekly Digest** provides an at-a-glance financial summary for any given week: