diff --git a/.project.md b/.project.md index 2b932ec..345325c 100644 --- a/.project.md +++ b/.project.md @@ -29,15 +29,39 @@ const activities = await getActivities({ limit: 50 }) Always use auto-generated functions from `@/lib/client/apiClient` for type safety and automatic camelCase↔snake_case conversion. -### 2. Always Define Structured Request/Response Models +### 2. Always Use Relative Import for `api_handler` + +**CRITICAL: Handler modules MUST use relative import to avoid circular import issues.** + +```python +# ✅ CORRECT: Use relative import +from . import api_handler + +# ❌ WRONG: Causes circular import +from handlers import api_handler +from backend.handlers import api_handler +``` + +**Why:** The `handlers/__init__.py` imports all handler modules at the bottom. Using absolute import causes Python to reload the package while it's still initializing, resulting in `api_handler` being undefined or import failures. + +### 3. Always Define Structured Request/Response Models **CRITICAL RULE:** All handlers must use Pydantic models inheriting from `backend.models.base.BaseModel`. **NEVER use `Dict[str, Any]` as return type** - this prevents TypeScript type generation for the frontend. ```python -from backend.handlers import api_handler -from backend.models.base import BaseModel, TimedOperationResponse +from datetime import datetime +from typing import Optional + +from core.logger import get_logger +from models.base import BaseModel +from models.responses import TimedOperationResponse + +# ✅ CORRECT: Use relative import +from . 
import api_handler + +logger = get_logger(__name__) # ❌ WRONG - Dict prevents TypeScript type generation @api_handler() @@ -154,6 +178,15 @@ When frontend needs data: **Data Flow:** RawRecords (60s memory) → Events (LLM) → Activities (10min aggregation) → Tasks (AI-generated) +**Pomodoro Mode:** + +- **Core Principle**: Pomodoro mode ONLY controls whether perception layer (keyboard/mouse/screenshots) is running +- **Idle Mode (Default)**: Perception layer is stopped, no data captured +- **Active Mode (Pomodoro)**: Perception layer is running, captures user activity +- **No Configuration**: Pomodoro has NO system configuration parameters (e.g., default duration, behavior settings) +- **User-Controlled**: Duration is specified per session when starting, not a global config +- **Behavior Unchanged**: Capture behavior (smart capture, deduplication, etc.) follows normal settings and is NOT modified by Pomodoro mode + ## Development Commands ```bash @@ -182,23 +215,48 @@ pnpm sign-macos # Code signing (after bundle) ### Adding API Handler +**CRITICAL: Always use relative import `from . import api_handler` in handler modules to avoid circular import issues.** + ```python # 1. backend/handlers/my_feature.py -from backend.handlers import api_handler -from backend.models.base import BaseModel +from datetime import datetime + +from core.coordinator import get_coordinator +from core.logger import get_logger +from models.base import BaseModel +from models.responses import TimedOperationResponse + +# ✅ CORRECT: Use relative import to avoid circular imports +from . 
import api_handler + +logger = get_logger(__name__) + class MyRequest(BaseModel): user_input: str + @api_handler(body=MyRequest, method="POST", path="/endpoint", tags=["module"]) -async def my_handler(body: MyRequest) -> dict: - return {"data": body.user_input} +async def my_handler(body: MyRequest) -> TimedOperationResponse: + return TimedOperationResponse( + success=True, + message="Operation completed", + timestamp=datetime.now().isoformat() + ) # 2. Import in backend/handlers/__init__.py -# 3. Run: pnpm setup-backend +# 3. Run: pnpm tauri:dev:gen-ts (to regenerate TypeScript bindings) # 4. Use: import { myHandler } from '@/lib/client/apiClient' ``` +**Why relative import is required:** + +- `from . import api_handler` ✅ Correct - imports from current package (`handlers`) +- `from handlers import api_handler` ❌ Wrong - causes circular import because `handlers/__init__.py` is importing your module +- `from backend.handlers import api_handler` ❌ Wrong - same circular import issue + +The `handlers/__init__.py` file imports all handler modules at the bottom (line 207-218). If your handler uses absolute import, Python will try to reload the `handlers` package while it's still being initialized, causing import failures. 
+ ### Adding i18n ```typescript diff --git a/backend/config/config.toml b/backend/config/config.toml index e5b078c..2f33b91 100644 --- a/backend/config/config.toml +++ b/backend/config/config.toml @@ -90,7 +90,7 @@ min_sampling_interval = 1.5 # Optimized: balanced for ~12 second span # - 3-5: Minimal mode (significant cost savings, suitable for simple tasks) # - 6-8: Standard mode (recommended, balances information and cost) ⭐ Recommended # - 10-15: Rich mode (more context, suitable for complex tasks) -max_images_per_event = 8 +# Note: This is controlled by processing.max_screenshots_per_extraction instead # ========== Content Analysis Configuration ========== # Enable content analysis @@ -165,8 +165,7 @@ memory_cache_size = 500 # Processing configuration [processing] -# Perception layer configuration -screenshot_interval = 1 # Screenshot interval (seconds) +# Perception layer configuration (screenshot_interval controlled by monitoring.capture_interval) enable_screenshot_deduplication = true # Enable screenshot deduplication # ========== Session Agent Merging Configuration ========== diff --git a/backend/core/coordinator.py b/backend/core/coordinator.py index a929c6c..bfa8a0d 100644 --- a/backend/core/coordinator.py +++ b/backend/core/coordinator.py @@ -30,7 +30,7 @@ def __init__(self, config: Dict[str, Any]): self.config = config self.processing_interval = config.get("monitoring.processing_interval", 30) self.window_size = config.get("monitoring.window_size", 60) - self.capture_interval = config.get("monitoring.capture_interval", 0.2) + self.capture_interval = config.get("monitoring.capture_interval", 1.0) # Initialize managers (lazy import to avoid circular dependencies) self.perception_manager = None @@ -43,6 +43,11 @@ def __init__(self, config: Dict[str, Any]): self.knowledge_agent = None self.diary_agent = None self.cleanup_agent = None + self.pomodoro_manager = None + + # Pomodoro mode state + self.pomodoro_mode = False + self.current_pomodoro_session_id: 
Optional[str] = None # Running state self.is_running = False @@ -315,6 +320,11 @@ def _init_managers(self): ), ) + if self.pomodoro_manager is None: + from core.pomodoro_manager import PomodoroManager + + self.pomodoro_manager = PomodoroManager(self) + # Link agents if self.processing_pipeline: # Link action_agent to pipeline for action extraction @@ -401,13 +411,15 @@ async def start(self) -> None: raise Exception("Cleanup agent initialization failed") # Start all components in parallel (they are independent) + # NOTE: Perception manager is NOT started by default - it will be started + # when a Pomodoro session begins (Active mode strategy) logger.debug( - "Starting perception manager, processing pipeline, agents in parallel..." + "Starting processing pipeline and agents (perception will start with Pomodoro)..." ) start_time = datetime.now() await asyncio.gather( - self.perception_manager.start(), + # self.perception_manager.start(), # Disabled: starts with Pomodoro self.processing_pipeline.start(), self.event_agent.start(), self.session_agent.start(), @@ -420,6 +432,12 @@ async def start(self) -> None: f"All components started (took {elapsed:.2f}s)" ) + # Check for orphaned Pomodoro sessions from previous run + if self.pomodoro_manager: + orphaned_count = await self.pomodoro_manager.check_orphaned_sessions() + if orphaned_count > 0: + logger.info(f"✓ Recovered {orphaned_count} orphaned Pomodoro session(s)") + # Start scheduled processing loop self.is_running = True self._set_state(mode="running", error=None) @@ -663,6 +681,106 @@ def get_stats(self) -> Dict[str, Any]: return {"error": str(e)} + async def enter_pomodoro_mode(self, session_id: str) -> None: + """ + Enter Pomodoro mode - start perception and disable continuous processing + + Changes: + 1. Start perception manager (if not already running) + 2. Stop processing_loop (cancel task) + 3. Set pomodoro_mode = True + 4. Set current_pomodoro_session_id + 5. Perception captures and tags records + 6. 
Records are saved to DB instead of processed + + Args: + session_id: Pomodoro session identifier + """ + logger.info(f"→ Entering Pomodoro mode: {session_id}") + + self.pomodoro_mode = True + self.current_pomodoro_session_id = session_id + + # Start perception manager if not already running + if self.perception_manager and not self.perception_manager.is_running: + try: + logger.info("Starting perception manager for Pomodoro mode...") + await self.perception_manager.start() + logger.info("✓ Perception manager started") + except Exception as e: + logger.error(f"Failed to start perception manager: {e}", exc_info=True) + raise + elif not self.perception_manager: + logger.error("Perception manager is None, cannot start") + else: + logger.debug("Perception manager already running") + + # Keep processing loop running - do NOT cancel it + # This allows Actions (30s) and Events (10min) to continue normally + + # Pause only SessionAgent (activity generation deferred) + try: + if self.session_agent: + self.session_agent.pause() + logger.debug("✓ SessionAgent paused (activity generation deferred)") + except Exception as e: + logger.error(f"Failed to pause SessionAgent: {e}") + + # Notify perception manager of Pomodoro mode (for tagging records) + if self.perception_manager: + self.perception_manager.set_pomodoro_session(session_id) + + logger.info( + "✓ Pomodoro mode active - normal processing continues, " + "activity generation paused until session ends" + ) + + async def exit_pomodoro_mode(self) -> None: + """ + Exit Pomodoro mode - stop perception and trigger activity generation + + When Pomodoro ends: + - Stop perception manager + - Resume SessionAgent + - Trigger immediate activity aggregation for accumulated Events + """ + logger.info("→ Exiting Pomodoro mode") + + self.pomodoro_mode = False + session_id = self.current_pomodoro_session_id + self.current_pomodoro_session_id = None + + # Stop perception manager + if self.perception_manager and 
self.perception_manager.is_running: + try: + logger.debug("Stopping perception manager...") + await self.perception_manager.stop() + logger.debug("✓ Perception manager stopped") + except Exception as e: + logger.error(f"Failed to stop perception manager: {e}") + + # Processing loop is still running - no need to resume + + # Resume SessionAgent and trigger immediate activity aggregation + try: + if self.session_agent: + self.session_agent.resume() + logger.debug("✓ SessionAgent resumed") + + # Trigger immediate activity aggregation for Pomodoro session + logger.info("→ Triggering activity aggregation for Pomodoro session...") + await self.session_agent._aggregate_sessions() + logger.info("✓ Activity aggregation complete for Pomodoro session") + except Exception as e: + logger.error(f"Failed to resume SessionAgent or aggregate activities: {e}") + + # Notify perception manager to exit Pomodoro mode + if self.perception_manager: + self.perception_manager.clear_pomodoro_session() + + logger.info(f"✓ Idle mode resumed - perception stopped (exited session: {session_id})") + + def get_coordinator() -> PipelineCoordinator: """Get global coordinator singleton""" global _coordinator diff --git a/backend/core/db/__init__.py b/backend/core/db/__init__.py index 9f3c36f..56f85a4 100644 --- a/backend/core/db/__init__.py +++ b/backend/core/db/__init__.py @@ -22,6 +22,8 @@ from .events import EventsRepository from .knowledge import KnowledgeRepository from .models import LLMModelsRepository +from .pomodoro_sessions import PomodoroSessionsRepository +from .raw_records import RawRecordsRepository from .session_preferences import SessionPreferencesRepository from .settings import SettingsRepository from .todos import TodosRepository @@ -69,76 +71,37 @@ def __init__(self, db_path: Path): self.actions = ActionsRepository(db_path) self.session_preferences = SessionPreferencesRepository(db_path) + # Pomodoro feature repositories + self.pomodoro_sessions = 
PomodoroSessionsRepository(db_path) + self.raw_records = RawRecordsRepository(db_path) + logger.debug(f"✓ DatabaseManager initialized with path: {db_path}") def _initialize_database(self): """ - Initialize database schema - create all tables and indexes + Initialize database schema using version-based migrations This is called automatically when DatabaseManager is instantiated. - It ensures all required tables and indexes exist. + It runs all pending migrations to ensure database is up to date. """ - import sqlite3 - - from core.sqls import migrations, schema + from migrations import MigrationRunner try: - conn = sqlite3.connect(str(self.db_path)) - cursor = conn.cursor() - - # Create all tables - for table_sql in schema.ALL_TABLES: - cursor.execute(table_sql) + # Create migration runner + runner = MigrationRunner(self.db_path) - # Create all indexes - for index_sql in schema.ALL_INDEXES: - cursor.execute(index_sql) + # Run all pending migrations + executed_count = runner.run_migrations() - # Run migrations for new columns - self._run_migrations(cursor) - - conn.commit() - conn.close() - - logger.debug(f"✓ Database schema initialized: {len(schema.ALL_TABLES)} tables, {len(schema.ALL_INDEXES)} indexes") + if executed_count > 0: + logger.info(f"✓ Database schema initialized: {executed_count} migration(s) executed") + else: + logger.debug("✓ Database schema up to date") except Exception as e: logger.error(f"Failed to initialize database schema: {e}", exc_info=True) raise - def _run_migrations(self, cursor): - """ - Run database migrations to add new columns to existing tables - - Args: - cursor: Database cursor - """ - import sqlite3 - - from core.sqls import migrations - - # List of migrations to run (column name, migration SQL) - migration_list = [ - ("actions.extract_knowledge", migrations.ADD_ACTIONS_EXTRACT_KNOWLEDGE_COLUMN), - ("actions.knowledge_extracted", migrations.ADD_ACTIONS_KNOWLEDGE_EXTRACTED_COLUMN), - ("knowledge.source_action_id", 
migrations.ADD_KNOWLEDGE_SOURCE_ACTION_ID_COLUMN), - ] - - for column_desc, migration_sql in migration_list: - try: - cursor.execute(migration_sql) - logger.info(f"✓ Migration applied: {column_desc}") - except sqlite3.OperationalError as e: - error_msg = str(e).lower() - # Column might already exist, which is fine - if "duplicate column" in error_msg or "already exists" in error_msg: - logger.debug(f"Column {column_desc} already exists, skipping") - else: - # Real error, log as warning but continue - logger.warning(f"Migration failed for {column_desc}: {e}") - except Exception as e: - # Unexpected error - logger.error(f"Unexpected error in migration for {column_desc}: {e}", exc_info=True) def get_connection(self): """ @@ -380,6 +343,8 @@ def switch_database(new_db_path: str) -> bool: "LLMModelsRepository", "ActionsRepository", "SessionPreferencesRepository", + "PomodoroSessionsRepository", + "RawRecordsRepository", # Unified manager "DatabaseManager", # Global access functions diff --git a/backend/core/db/pomodoro_sessions.py b/backend/core/db/pomodoro_sessions.py new file mode 100644 index 0000000..cc3a92a --- /dev/null +++ b/backend/core/db/pomodoro_sessions.py @@ -0,0 +1,320 @@ +""" +PomodoroSessions Repository - Handles Pomodoro session lifecycle +Manages session metadata, status tracking, and processing state +""" + +import json +from pathlib import Path +from typing import Any, Dict, List, Optional + +from core.logger import get_logger + +from .base import BaseRepository + +logger = get_logger(__name__) + + +class PomodoroSessionsRepository(BaseRepository): + """Repository for managing Pomodoro sessions in the database""" + + def __init__(self, db_path: Path): + super().__init__(db_path) + + async def create( + self, + session_id: str, + user_intent: str, + planned_duration_minutes: int, + start_time: str, + status: str = "active", + ) -> None: + """ + Create a new Pomodoro session + + Args: + session_id: Unique session identifier + user_intent: User's 
description of what they plan to work on + planned_duration_minutes: Planned session duration + start_time: ISO format start timestamp + status: Session status (default: 'active') + """ + try: + with self._get_conn() as conn: + conn.execute( + """ + INSERT INTO pomodoro_sessions ( + id, user_intent, planned_duration_minutes, + start_time, status, created_at, updated_at + ) VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) + """, + (session_id, user_intent, planned_duration_minutes, start_time, status), + ) + conn.commit() + logger.debug(f"Created Pomodoro session: {session_id}") + except Exception as e: + logger.error(f"Failed to create Pomodoro session {session_id}: {e}", exc_info=True) + raise + + async def update(self, session_id: str, **kwargs) -> None: + """ + Update Pomodoro session fields + + Args: + session_id: Session ID to update + **kwargs: Fields to update (e.g., end_time, status, processing_status) + """ + try: + if not kwargs: + return + + set_clauses = [] + params = [] + + for key, value in kwargs.items(): + set_clauses.append(f"{key} = ?") + params.append(value) + + set_clauses.append("updated_at = CURRENT_TIMESTAMP") + params.append(session_id) + + query = f""" + UPDATE pomodoro_sessions + SET {', '.join(set_clauses)} + WHERE id = ? + """ + + with self._get_conn() as conn: + conn.execute(query, params) + conn.commit() + logger.debug(f"Updated Pomodoro session {session_id}: {list(kwargs.keys())}") + except Exception as e: + logger.error(f"Failed to update Pomodoro session {session_id}: {e}", exc_info=True) + raise + + async def get_by_id(self, session_id: str) -> Optional[Dict[str, Any]]: + """ + Get session by ID + + Args: + session_id: Session ID + + Returns: + Session dictionary or None if not found + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + SELECT * FROM pomodoro_sessions + WHERE id = ? 
AND deleted = 0 + """, + (session_id,), + ) + row = cursor.fetchone() + return self._row_to_dict(row) + except Exception as e: + logger.error(f"Failed to get Pomodoro session {session_id}: {e}", exc_info=True) + raise + + async def get_by_status( + self, + status: str, + limit: int = 100, + offset: int = 0, + ) -> List[Dict[str, Any]]: + """ + Get sessions by status + + Args: + status: Session status ('active', 'completed', 'abandoned', etc.) + limit: Maximum number of results + offset: Number of results to skip + + Returns: + List of session dictionaries + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + SELECT * FROM pomodoro_sessions + WHERE status = ? AND deleted = 0 + ORDER BY start_time DESC + LIMIT ? OFFSET ? + """, + (status, limit, offset), + ) + rows = cursor.fetchall() + return self._rows_to_dicts(rows) + except Exception as e: + logger.error(f"Failed to get sessions by status {status}: {e}", exc_info=True) + raise + + async def get_by_processing_status( + self, + processing_status: str, + limit: int = 100, + offset: int = 0, + ) -> List[Dict[str, Any]]: + """ + Get sessions by processing status + + Args: + processing_status: Processing status ('pending', 'processing', 'completed', 'failed') + limit: Maximum number of results + offset: Number of results to skip + + Returns: + List of session dictionaries + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + SELECT * FROM pomodoro_sessions + WHERE processing_status = ? AND deleted = 0 + ORDER BY start_time DESC + LIMIT ? OFFSET ? 
+ """, + (processing_status, limit, offset), + ) + rows = cursor.fetchall() + return self._rows_to_dicts(rows) + except Exception as e: + logger.error( + f"Failed to get sessions by processing status {processing_status}: {e}", + exc_info=True, + ) + raise + + async def get_recent( + self, + limit: int = 10, + offset: int = 0, + ) -> List[Dict[str, Any]]: + """ + Get recent Pomodoro sessions + + Args: + limit: Maximum number of results + offset: Number of results to skip + + Returns: + List of session dictionaries + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + SELECT * FROM pomodoro_sessions + WHERE deleted = 0 + ORDER BY start_time DESC + LIMIT ? OFFSET ? + """, + (limit, offset), + ) + rows = cursor.fetchall() + return self._rows_to_dicts(rows) + except Exception as e: + logger.error(f"Failed to get recent Pomodoro sessions: {e}", exc_info=True) + raise + + async def get_stats( + self, + start_date: Optional[str] = None, + end_date: Optional[str] = None, + ) -> Optional[Dict[str, Any]]: + """ + Get Pomodoro session statistics + + Args: + start_date: Optional start date (ISO format) + end_date: Optional end date (ISO format) + + Returns: + Dictionary with statistics (total, completed, abandoned, avg_duration, etc.) 
+ """ + try: + with self._get_conn() as conn: + where_clauses = ["deleted = 0"] + params = [] + + if start_date: + where_clauses.append("start_time >= ?") + params.append(start_date) + if end_date: + where_clauses.append("start_time <= ?") + params.append(end_date) + + where_sql = " AND ".join(where_clauses) + + cursor = conn.execute( + f""" + SELECT + COUNT(*) as total, + SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed, + SUM(CASE WHEN status = 'abandoned' THEN 1 ELSE 0 END) as abandoned, + SUM(CASE WHEN status = 'interrupted' THEN 1 ELSE 0 END) as interrupted, + AVG(actual_duration_minutes) as avg_duration, + SUM(actual_duration_minutes) as total_duration + FROM pomodoro_sessions + WHERE {where_sql} + """, + params, + ) + row = cursor.fetchone() + return self._row_to_dict(row) if row else None + except Exception as e: + logger.error(f"Failed to get Pomodoro session stats: {e}", exc_info=True) + raise + + async def soft_delete(self, session_id: str) -> None: + """ + Soft delete a session + + Args: + session_id: Session ID to delete + """ + try: + with self._get_conn() as conn: + conn.execute( + """ + UPDATE pomodoro_sessions + SET deleted = 1, updated_at = CURRENT_TIMESTAMP + WHERE id = ? + """, + (session_id,), + ) + conn.commit() + logger.debug(f"Soft deleted Pomodoro session: {session_id}") + except Exception as e: + logger.error(f"Failed to soft delete Pomodoro session {session_id}: {e}", exc_info=True) + raise + + async def hard_delete_old(self, days: int = 90) -> int: + """ + Hard delete old completed sessions + + Args: + days: Delete sessions older than this many days + + Returns: + Number of sessions deleted + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + DELETE FROM pomodoro_sessions + WHERE deleted = 1 + AND created_at < datetime('now', '-' || ? 
|| ' days') + """, + (days,), + ) + conn.commit() + deleted_count = cursor.rowcount + logger.debug(f"Hard deleted {deleted_count} old Pomodoro sessions") + return deleted_count + except Exception as e: + logger.error(f"Failed to hard delete old sessions: {e}", exc_info=True) + raise diff --git a/backend/core/db/raw_records.py b/backend/core/db/raw_records.py new file mode 100644 index 0000000..83fdc88 --- /dev/null +++ b/backend/core/db/raw_records.py @@ -0,0 +1,204 @@ +""" +RawRecords Repository - Handles raw record persistence for Pomodoro sessions +Raw records are temporary storage for screenshots, keyboard, and mouse activity +""" + +import json +from datetime import datetime +from pathlib import Path +from typing import Any, Dict, List, Optional + +from core.logger import get_logger + +from .base import BaseRepository + +logger = get_logger(__name__) + + +class RawRecordsRepository(BaseRepository): + """Repository for managing raw records in the database""" + + def __init__(self, db_path: Path): + super().__init__(db_path) + + async def save( + self, + timestamp: str, + record_type: str, + data: str, + pomodoro_session_id: Optional[str] = None, + ) -> Optional[int]: + """ + Save a raw record to database + + Args: + timestamp: ISO format timestamp + record_type: Type of record (SCREENSHOT_RECORD, KEYBOARD_RECORD, MOUSE_RECORD) + data: JSON string of record data + pomodoro_session_id: Optional Pomodoro session ID + + Returns: + Record ID if successful, None otherwise + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + INSERT INTO raw_records ( + timestamp, type, data, pomodoro_session_id, created_at + ) VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP) + """, + (timestamp, record_type, data, pomodoro_session_id), + ) + conn.commit() + record_id = cursor.lastrowid + logger.debug( + f"Saved raw record: {record_id}, " + f"type={record_type}, pomodoro_session={pomodoro_session_id}" + ) + return record_id + except Exception as e: + 
logger.error(f"Failed to save raw record: {e}", exc_info=True) + raise + + async def get_by_session( + self, + session_id: str, + limit: int = 100, + offset: int = 0, + ) -> List[Dict[str, Any]]: + """ + Get raw records for a specific Pomodoro session + + Args: + session_id: Pomodoro session ID + limit: Maximum number of records to return + offset: Number of records to skip + + Returns: + List of raw record dictionaries + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + SELECT * FROM raw_records + WHERE pomodoro_session_id = ? + ORDER BY timestamp ASC + LIMIT ? OFFSET ? + """, + (session_id, limit, offset), + ) + rows = cursor.fetchall() + return self._rows_to_dicts(rows) + except Exception as e: + logger.error( + f"Failed to get raw records for session {session_id}: {e}", + exc_info=True, + ) + raise + + async def count_by_session(self, session_id: str) -> int: + """ + Count raw records for a session + + Args: + session_id: Pomodoro session ID + + Returns: + Number of raw records + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + SELECT COUNT(*) as count FROM raw_records + WHERE pomodoro_session_id = ? + """, + (session_id,), + ) + row = cursor.fetchone() + return row["count"] if row else 0 + except Exception as e: + logger.error( + f"Failed to count raw records for session {session_id}: {e}", + exc_info=True, + ) + raise + + async def delete_by_session(self, session_id: str) -> int: + """ + Delete raw records for a session + + Args: + session_id: Pomodoro session ID + + Returns: + Number of records deleted + """ + try: + with self._get_conn() as conn: + cursor = conn.execute( + """ + DELETE FROM raw_records + WHERE pomodoro_session_id = ? 
+ """, + (session_id,), + ) + conn.commit() + deleted_count = cursor.rowcount + logger.debug( + f"Deleted {deleted_count} raw records for session {session_id}" + ) + return deleted_count + except Exception as e: + logger.error( + f"Failed to delete raw records for session {session_id}: {e}", + exc_info=True, + ) + raise + + async def get_by_time_range( + self, + start_time: str, + end_time: str, + record_type: Optional[str] = None, + ) -> List[Dict[str, Any]]: + """ + Get raw records within a time range + + Args: + start_time: Start timestamp (ISO format) + end_time: End timestamp (ISO format) + record_type: Optional filter by record type + + Returns: + List of raw record dictionaries + """ + try: + with self._get_conn() as conn: + if record_type: + cursor = conn.execute( + """ + SELECT * FROM raw_records + WHERE timestamp >= ? AND timestamp <= ? AND type = ? + ORDER BY timestamp ASC + """, + (start_time, end_time, record_type), + ) + else: + cursor = conn.execute( + """ + SELECT * FROM raw_records + WHERE timestamp >= ? AND timestamp <= ? 
+ ORDER BY timestamp ASC + """, + (start_time, end_time), + ) + rows = cursor.fetchall() + return self._rows_to_dicts(rows) + except Exception as e: + logger.error( + f"Failed to get raw records by time range: {e}", exc_info=True + ) + raise diff --git a/backend/core/events.py b/backend/core/events.py index e60d194..2c0dc83 100644 --- a/backend/core/events.py +++ b/backend/core/events.py @@ -544,3 +544,85 @@ def emit_todo_deleted(todo_id: str, timestamp: Optional[str] = None) -> bool: if success: logger.debug(f"✅ TODO deletion event sent: {todo_id}") return success + + + +def emit_pomodoro_processing_progress( + session_id: str, job_id: str, processed: int +) -> bool: + """ + Send Pomodoro processing progress event to frontend + + Args: + session_id: Pomodoro session ID + job_id: Processing job ID + processed: Number of records processed + + Returns: + True if sent successfully, False otherwise + """ + payload = { + "session_id": session_id, + "job_id": job_id, + "processed": processed, + } + + logger.debug( + f"[emit_pomodoro_processing_progress] Session: {session_id}, " + f"Job: {job_id}, Processed: {processed}" + ) + return _emit("pomodoro-processing-progress", payload) + + +def emit_pomodoro_processing_complete( + session_id: str, job_id: str, total_processed: int +) -> bool: + """ + Send Pomodoro processing completion event to frontend + + Args: + session_id: Pomodoro session ID + job_id: Processing job ID + total_processed: Total number of records processed + + Returns: + True if sent successfully, False otherwise + """ + payload = { + "session_id": session_id, + "job_id": job_id, + "total_processed": total_processed, + } + + logger.debug( + f"[emit_pomodoro_processing_complete] Session: {session_id}, " + f"Job: {job_id}, Total: {total_processed}" + ) + return _emit("pomodoro-processing-complete", payload) + + +def emit_pomodoro_processing_failed( + session_id: str, job_id: str, error: str +) -> bool: + """ + Send Pomodoro processing failure event to frontend 
+ + Args: + session_id: Pomodoro session ID + job_id: Processing job ID + error: Error message + + Returns: + True if sent successfully, False otherwise + """ + payload = { + "session_id": session_id, + "job_id": job_id, + "error": error, + } + + logger.debug( + f"[emit_pomodoro_processing_failed] Session: {session_id}, " + f"Job: {job_id}, Error: {error}" + ) + return _emit("pomodoro-processing-failed", payload) diff --git a/backend/core/pomodoro_manager.py b/backend/core/pomodoro_manager.py new file mode 100644 index 0000000..d595284 --- /dev/null +++ b/backend/core/pomodoro_manager.py @@ -0,0 +1,451 @@ +""" +Pomodoro Manager - Manages Pomodoro session lifecycle + +Responsibilities: +1. Start/stop Pomodoro sessions +2. Coordinate with PipelineCoordinator (enter/exit Pomodoro mode) +3. Trigger deferred batch processing after session completion +4. Track session metadata and handle orphaned sessions +""" + +import asyncio +import uuid +from datetime import datetime +from typing import Any, Dict, Optional + +from core.db import get_db +from core.logger import get_logger +from core.models import RawRecord + +logger = get_logger(__name__) + + +class PomodoroSession: + """Pomodoro session data class""" + + def __init__( + self, + session_id: str, + user_intent: str, + duration_minutes: int, + start_time: datetime, + ): + self.id = session_id + self.user_intent = user_intent + self.duration_minutes = duration_minutes + self.start_time = start_time + + +class PomodoroManager: + """ + Pomodoro session manager + + Handles Pomodoro lifecycle and coordinates with coordinator + """ + + def __init__(self, coordinator): + """ + Initialize Pomodoro manager + + Args: + coordinator: Reference to PipelineCoordinator instance + """ + self.coordinator = coordinator + self.db = get_db() + self.current_session: Optional[PomodoroSession] = None + self.is_active = False + self._processing_tasks: Dict[str, asyncio.Task] = {} + + async def start_pomodoro( + self, user_intent: str, 
duration_minutes: int = 25 + ) -> str: + """ + Start a new Pomodoro session + + Actions: + 1. Create pomodoro_sessions record + 2. Signal coordinator to enter "pomodoro mode" + 3. Coordinator disables continuous processing + 4. PerceptionManager continues capturing but tags records + 5. RawRecords get persisted to DB with session_id + + Args: + user_intent: User's description of what they plan to work on + duration_minutes: Planned duration (default: 25 minutes) + + Returns: + session_id + + Raises: + ValueError: If a Pomodoro session is already active + """ + if self.is_active: + raise ValueError("A Pomodoro session is already active") + + # Check if previous session is still processing + processing_sessions = await self.db.pomodoro_sessions.get_by_processing_status( + "processing", limit=1 + ) + if processing_sessions: + raise ValueError( + "Previous Pomodoro session is still being analyzed. " + "Please wait for completion before starting a new session." + ) + + session_id = str(uuid.uuid4()) + start_time = datetime.now() + + try: + # Save to database + await self.db.pomodoro_sessions.create( + session_id=session_id, + user_intent=user_intent, + planned_duration_minutes=duration_minutes, + start_time=start_time.isoformat(), + status="active", + ) + + # Create session object + self.current_session = PomodoroSession( + session_id=session_id, + user_intent=user_intent, + duration_minutes=duration_minutes, + start_time=start_time, + ) + self.is_active = True + + # Signal coordinator to enter pomodoro mode + await self.coordinator.enter_pomodoro_mode(session_id) + + logger.info( + f"✓ Pomodoro session started: {session_id}, " + f"intent='{user_intent}', duration={duration_minutes}min" + ) + + return session_id + + except Exception as e: + logger.error(f"Failed to start Pomodoro session: {e}", exc_info=True) + # Cleanup on failure + self.is_active = False + self.current_session = None + raise + + async def end_pomodoro(self, status: str = "completed") -> Dict[str, 
Any]: + """ + End current Pomodoro session + + Actions: + 1. Update pomodoro_sessions record + 2. Signal coordinator to exit "pomodoro mode" + 3. Trigger deferred batch processing + 4. Return processing job ID + + Args: + status: Session status ('completed', 'abandoned', 'interrupted') + + Returns: + { + "session_id": str, + "processing_job_id": str, + "raw_records_count": int + } + + Raises: + ValueError: If no active Pomodoro session + """ + if not self.is_active or not self.current_session: + raise ValueError("No active Pomodoro session") + + session_id = self.current_session.id + end_time = datetime.now() + duration = (end_time - self.current_session.start_time).total_seconds() / 60 + + try: + # Check if session is too short (< 2 minutes) + if duration < 2: + logger.warning( + f"Pomodoro session {session_id} too short ({duration:.1f}min), skipping analysis" + ) + await self.db.pomodoro_sessions.update( + session_id=session_id, + end_time=end_time.isoformat(), + actual_duration_minutes=int(duration), + status="too_short", + processing_status="skipped", + ) + + # Exit pomodoro mode + await self.coordinator.exit_pomodoro_mode() + + self.is_active = False + self.current_session = None + + return { + "session_id": session_id, + "processing_job_id": None, + "raw_records_count": 0, + "message": "Session too short, data discarded", + } + + # Update database + await self.db.pomodoro_sessions.update( + session_id=session_id, + end_time=end_time.isoformat(), + actual_duration_minutes=int(duration), + status=status, + processing_status="pending", + ) + + # Exit pomodoro mode + await self.coordinator.exit_pomodoro_mode() + + # Count raw records for this session + raw_count = await self.db.raw_records.count_by_session(session_id) + + logger.info( + f"✓ Pomodoro session ended: {session_id}, " + f"status={status}, duration={duration:.1f}min, records={raw_count}" + ) + + # Trigger batch processing in background + job_id = await self._trigger_batch_processing(session_id) + + 
self.is_active = False + self.current_session = None + + return { + "session_id": session_id, + "processing_job_id": job_id, + "raw_records_count": raw_count, + } + + except Exception as e: + logger.error(f"Failed to end Pomodoro session: {e}", exc_info=True) + raise + + async def _trigger_batch_processing(self, session_id: str) -> str: + """ + Trigger background batch processing for Pomodoro session + + Creates async task that: + 1. Loads all RawRecords with pomodoro_session_id + 2. Processes through normal pipeline (deferred) + 3. Updates processing_status as it progresses + 4. Emits events for frontend to track progress + + Args: + session_id: Pomodoro session ID + + Returns: + job_id: Processing job identifier + """ + job_id = str(uuid.uuid4()) + + # Create background task + task = asyncio.create_task(self._process_pomodoro_batch(session_id, job_id)) + + # Store task reference + self._processing_tasks[job_id] = task + + logger.debug(f"✓ Batch processing triggered: job={job_id}, session={session_id}") + + return job_id + + async def _process_pomodoro_batch(self, session_id: str, job_id: str): + """ + Background task to process Pomodoro session data + + Steps: + 1. Update status to 'processing' + 2. Load RawRecords in chunks (to avoid memory issues) + 3. Process through pipeline + 4. Update status to 'completed' + 5. 
Emit completion event + + Args: + session_id: Pomodoro session ID + job_id: Processing job ID + """ + try: + await self.db.pomodoro_sessions.update( + session_id=session_id, + processing_status="processing", + processing_started_at=datetime.now().isoformat(), + ) + + logger.info(f"→ Processing Pomodoro session: {session_id}") + + # Load raw records in chunks + chunk_size = 100 + offset = 0 + total_processed = 0 + + while True: + records = await self.db.raw_records.get_by_session( + session_id=session_id, + limit=chunk_size, + offset=offset, + ) + + if not records: + break + + # Convert DB records back to RawRecord objects + raw_records = [] + for r in records: + try: + import json + + raw_record = RawRecord( + timestamp=datetime.fromisoformat(r["timestamp"]), + type=r["type"], + data=json.loads(r["data"]), + ) + raw_records.append(raw_record) + except Exception as e: + logger.warning(f"Failed to parse raw record {r['id']}: {e}") + + # Process through pipeline + if raw_records: + await self.coordinator.processing_pipeline.process_raw_records( + raw_records + ) + + total_processed += len(records) + offset += chunk_size + + # Emit progress event + self._emit_progress_event(session_id, job_id, total_processed) + + # Update status + await self.db.pomodoro_sessions.update( + session_id=session_id, + processing_status="completed", + processing_completed_at=datetime.now().isoformat(), + ) + + logger.info( + f"✓ Pomodoro session processed: {session_id}, records={total_processed}" + ) + + # Emit completion event + self._emit_completion_event(session_id, job_id, total_processed) + + # Cleanup task reference + self._processing_tasks.pop(job_id, None) + + except Exception as e: + logger.error( + f"✗ Pomodoro batch processing failed: {e}", exc_info=True + ) + await self.db.pomodoro_sessions.update( + session_id=session_id, + processing_status="failed", + processing_error=str(e), + ) + + # Emit failure event + self._emit_failure_event(session_id, job_id, str(e)) + + # Cleanup 
task reference + self._processing_tasks.pop(job_id, None) + + def _emit_progress_event( + self, session_id: str, job_id: str, processed: int + ) -> None: + """Emit progress event for frontend""" + try: + from core.events import emit_pomodoro_processing_progress + + emit_pomodoro_processing_progress(session_id, job_id, processed) + except Exception as e: + logger.debug(f"Failed to emit progress event: {e}") + + def _emit_completion_event( + self, session_id: str, job_id: str, total_processed: int + ) -> None: + """Emit completion event for frontend""" + try: + from core.events import emit_pomodoro_processing_complete + + emit_pomodoro_processing_complete(session_id, job_id, total_processed) + except Exception as e: + logger.debug(f"Failed to emit completion event: {e}") + + def _emit_failure_event( + self, session_id: str, job_id: str, error: str + ) -> None: + """Emit failure event for frontend""" + try: + from core.events import emit_pomodoro_processing_failed + + emit_pomodoro_processing_failed(session_id, job_id, error) + except Exception as e: + logger.debug(f"Failed to emit failure event: {e}") + + async def check_orphaned_sessions(self) -> int: + """ + Check for orphaned sessions from previous runs + + Orphaned sessions are active sessions that were not properly closed + (e.g., due to app crash or system shutdown). + + This should be called on application startup. 
+ + Returns: + Number of orphaned sessions found and recovered + """ + try: + orphaned = await self.db.pomodoro_sessions.get_by_status("active") + + if not orphaned: + return 0 + + logger.warning(f"Found {len(orphaned)} orphaned Pomodoro session(s)") + + for session in orphaned: + session_id = session["id"] + + # Auto-end as 'interrupted' + await self.db.pomodoro_sessions.update( + session_id=session_id, + end_time=datetime.now().isoformat(), + status="interrupted", + processing_status="pending", + ) + + # Trigger batch processing + await self._trigger_batch_processing(session_id) + + logger.info( + f"✓ Recovered orphaned session: {session_id}, triggering analysis" + ) + + return len(orphaned) + + except Exception as e: + logger.error(f"Failed to check orphaned sessions: {e}", exc_info=True) + return 0 + + async def get_current_session_info(self) -> Optional[Dict[str, Any]]: + """ + Get current session information + + Returns: + Session info dict or None if no active session + """ + if not self.is_active or not self.current_session: + return None + + elapsed_minutes = ( + datetime.now() - self.current_session.start_time + ).total_seconds() / 60 + + return { + "session_id": self.current_session.id, + "user_intent": self.current_session.user_intent, + "start_time": self.current_session.start_time.isoformat(), + "elapsed_minutes": int(elapsed_minutes), + "planned_duration_minutes": self.current_session.duration_minutes, + } diff --git a/backend/core/settings.py b/backend/core/settings.py index 1dd6338..153f394 100644 --- a/backend/core/settings.py +++ b/backend/core/settings.py @@ -376,9 +376,6 @@ def get_image_optimization_config(self) -> Dict[str, Any]: min_interval = float( self.config_loader.get("image_optimization.min_sampling_interval", 2.0) ) - max_images = int( - self.config_loader.get("image_optimization.max_images_per_event", 8) - ) enable_content = self.config_loader.get( "image_optimization.enable_content_analysis", True ) @@ -391,7 +388,6 @@ def 
get_image_optimization_config(self) -> Dict[str, Any]: "strategy": strategy, "phash_threshold": phash_threshold, "min_interval": min_interval, - "max_images": max_images, "enable_content_analysis": enable_content, "enable_text_detection": enable_text, } @@ -421,9 +417,6 @@ def set_image_optimization_config(self, config: Dict[str, Any]) -> bool: "image_optimization.min_sampling_interval", config.get("min_interval", 2.0), ) - self.config_loader.set( - "image_optimization.max_images_per_event", config.get("max_images", 8) - ) self.config_loader.set( "image_optimization.enable_content_analysis", config.get("enable_content_analysis", True), @@ -446,7 +439,6 @@ def _get_default_image_optimization_config() -> Dict[str, Any]: "strategy": "hybrid", "phash_threshold": 0.15, "min_interval": 2.0, - "max_images": 8, "enable_content_analysis": True, "enable_text_detection": False, } diff --git a/backend/core/sqls/migrations.py b/backend/core/sqls/migrations.py index ad9b878..a967e35 100644 --- a/backend/core/sqls/migrations.py +++ b/backend/core/sqls/migrations.py @@ -1,6 +1,13 @@ """ -Database migration SQL statements -Contains all ALTER TABLE and data migration statements +DEPRECATED: This file is no longer used + +Database migration system has been moved to version-based migrations. +See: backend/migrations/ + +All migrations should now be created as versioned files in: +backend/migrations/versions/ + +This file is kept for reference only and will be removed in a future version. 
""" # Events table migrations @@ -176,3 +183,59 @@ ADD_KNOWLEDGE_SOURCE_ACTION_ID_COLUMN = """ ALTER TABLE knowledge ADD COLUMN source_action_id TEXT """ + +# ============ Pomodoro Feature Migrations ============ + +# Add pomodoro_session_id to raw_records +ADD_RAW_RECORDS_POMODORO_SESSION_ID_COLUMN = """ + ALTER TABLE raw_records ADD COLUMN pomodoro_session_id TEXT +""" + +# Add pomodoro_session_id to actions +ADD_ACTIONS_POMODORO_SESSION_ID_COLUMN = """ + ALTER TABLE actions ADD COLUMN pomodoro_session_id TEXT +""" + +# Add pomodoro_session_id to events +ADD_EVENTS_POMODORO_SESSION_ID_COLUMN = """ + ALTER TABLE events ADD COLUMN pomodoro_session_id TEXT +""" + +# Add pomodoro-related columns to activities +ADD_ACTIVITIES_POMODORO_SESSION_ID_COLUMN = """ + ALTER TABLE activities ADD COLUMN pomodoro_session_id TEXT +""" + +ADD_ACTIVITIES_USER_INTENT_COLUMN = """ + ALTER TABLE activities ADD COLUMN user_intent TEXT +""" + +ADD_ACTIVITIES_POMODORO_STATUS_COLUMN = """ + ALTER TABLE activities ADD COLUMN pomodoro_status TEXT +""" + +# Create indexes for pomodoro_session_id columns +CREATE_RAW_RECORDS_POMODORO_SESSION_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_raw_records_pomodoro_session + ON raw_records(pomodoro_session_id) +""" + +CREATE_ACTIONS_POMODORO_SESSION_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_actions_pomodoro_session + ON actions(pomodoro_session_id) +""" + +CREATE_EVENTS_POMODORO_SESSION_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_events_pomodoro_session + ON events(pomodoro_session_id) +""" + +CREATE_ACTIVITIES_POMODORO_SESSION_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_activities_pomodoro_session + ON activities(pomodoro_session_id) +""" + +CREATE_ACTIVITIES_POMODORO_STATUS_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_activities_pomodoro_status + ON activities(pomodoro_status) +""" diff --git a/backend/core/sqls/schema.py b/backend/core/sqls/schema.py index 8f31740..f17bb0d 100644 --- a/backend/core/sqls/schema.py +++ 
b/backend/core/sqls/schema.py @@ -228,6 +228,29 @@ ) """ +CREATE_POMODORO_SESSIONS_TABLE = """ + CREATE TABLE IF NOT EXISTS pomodoro_sessions ( + id TEXT PRIMARY KEY, + user_intent TEXT NOT NULL, + planned_duration_minutes INTEGER DEFAULT 25, + actual_duration_minutes INTEGER, + start_time TEXT NOT NULL, + end_time TEXT, + status TEXT NOT NULL, + processing_status TEXT DEFAULT 'pending', + processing_started_at TEXT, + processing_completed_at TEXT, + processing_error TEXT, + interruption_count INTEGER DEFAULT 0, + interruption_reasons TEXT, + created_at TEXT DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT DEFAULT CURRENT_TIMESTAMP, + deleted BOOLEAN DEFAULT 0, + CHECK(status IN ('active', 'completed', 'abandoned', 'interrupted', 'too_short')), + CHECK(processing_status IN ('pending', 'processing', 'completed', 'failed', 'skipped')) + ) +""" + CREATE_KNOWLEDGE_CREATED_INDEX = """ CREATE INDEX IF NOT EXISTS idx_knowledge_created ON knowledge(created_at DESC) @@ -386,6 +409,28 @@ ON session_preferences(confidence_score DESC) """ +# ============ Pomodoro Sessions Indexes ============ + +CREATE_POMODORO_SESSIONS_STATUS_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_pomodoro_sessions_status + ON pomodoro_sessions(status) +""" + +CREATE_POMODORO_SESSIONS_PROCESSING_STATUS_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_pomodoro_sessions_processing_status + ON pomodoro_sessions(processing_status) +""" + +CREATE_POMODORO_SESSIONS_START_TIME_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_pomodoro_sessions_start_time + ON pomodoro_sessions(start_time DESC) +""" + +CREATE_POMODORO_SESSIONS_CREATED_INDEX = """ + CREATE INDEX IF NOT EXISTS idx_pomodoro_sessions_created + ON pomodoro_sessions(created_at DESC) +""" + # All table creation statements in order ALL_TABLES = [ CREATE_RAW_RECORDS_TABLE, @@ -405,6 +450,8 @@ CREATE_ACTIONS_TABLE, CREATE_ACTION_IMAGES_TABLE, CREATE_SESSION_PREFERENCES_TABLE, + # Pomodoro feature + CREATE_POMODORO_SESSIONS_TABLE, ] # All index creation statements @@ 
-441,4 +488,9 @@ CREATE_ACTION_IMAGES_HASH_INDEX, CREATE_SESSION_PREFERENCES_TYPE_INDEX, CREATE_SESSION_PREFERENCES_CONFIDENCE_INDEX, + # Pomodoro sessions indexes + CREATE_POMODORO_SESSIONS_STATUS_INDEX, + CREATE_POMODORO_SESSIONS_PROCESSING_STATUS_INDEX, + CREATE_POMODORO_SESSIONS_START_TIME_INDEX, + CREATE_POMODORO_SESSIONS_CREATED_INDEX, ] diff --git a/backend/handlers/__init__.py b/backend/handlers/__init__.py index 7f4dbcf..bdfcf9c 100644 --- a/backend/handlers/__init__.py +++ b/backend/handlers/__init__.py @@ -211,6 +211,7 @@ def register_fastapi_routes(app: "FastAPI", prefix: str = "/api") -> None: events, insights, monitoring, + pomodoro, processing, resources, system, @@ -227,6 +228,7 @@ def register_fastapi_routes(app: "FastAPI", prefix: str = "/api") -> None: "events", "insights", "monitoring", + "pomodoro", "processing", "resources", "system", diff --git a/backend/handlers/pomodoro.py b/backend/handlers/pomodoro.py new file mode 100644 index 0000000..206a007 --- /dev/null +++ b/backend/handlers/pomodoro.py @@ -0,0 +1,245 @@ +""" +Pomodoro timer API handlers + +Endpoints: +- POST /pomodoro/start - Start a Pomodoro session +- POST /pomodoro/end - End current Pomodoro session +- GET /pomodoro/status - Get current Pomodoro session status +""" + +from datetime import datetime + +from core.coordinator import get_coordinator +from core.logger import get_logger +from models.base import BaseModel +from models.responses import ( + EndPomodoroData, + EndPomodoroResponse, + GetPomodoroStatusResponse, + PomodoroSessionData, + StartPomodoroResponse, +) + +from . 
import api_handler + +logger = get_logger(__name__) + + +class StartPomodoroRequest(BaseModel): + """Start Pomodoro request""" + + user_intent: str + duration_minutes: int = 25 + + +class EndPomodoroRequest(BaseModel): + """End Pomodoro request""" + + status: str = "completed" # completed, abandoned, interrupted + + +@api_handler( + body=StartPomodoroRequest, + method="POST", + path="/pomodoro/start", + tags=["pomodoro"], +) +async def start_pomodoro(body: StartPomodoroRequest) -> StartPomodoroResponse: + """ + Start a new Pomodoro session + + Args: + body: Request containing user_intent and duration_minutes + + Returns: + StartPomodoroResponse with session data + + Raises: + ValueError: If a Pomodoro session is already active or previous session is still processing + """ + try: + coordinator = get_coordinator() + + if not coordinator.pomodoro_manager: + return StartPomodoroResponse( + success=False, + message="Pomodoro manager not initialized", + error="Pomodoro manager not initialized", + timestamp=datetime.now().isoformat(), + ) + + # Start Pomodoro session + session_id = await coordinator.pomodoro_manager.start_pomodoro( + user_intent=body.user_intent, + duration_minutes=body.duration_minutes, + ) + + # Get session info + session_info = await coordinator.pomodoro_manager.get_current_session_info() + + if not session_info: + return StartPomodoroResponse( + success=False, + message="Failed to retrieve session info", + error="Failed to retrieve session info after starting", + timestamp=datetime.now().isoformat(), + ) + + logger.info( + f"Pomodoro session started via API: {session_id}, intent='{body.user_intent}'" + ) + + return StartPomodoroResponse( + success=True, + message="Pomodoro session started successfully", + data=PomodoroSessionData( + session_id=session_info["session_id"], + user_intent=session_info["user_intent"], + start_time=session_info["start_time"], + elapsed_minutes=session_info["elapsed_minutes"], + 
planned_duration_minutes=session_info["planned_duration_minutes"], + ), + timestamp=datetime.now().isoformat(), + ) + + except ValueError as e: + # Expected errors (session already active, previous processing) + logger.warning(f"Failed to start Pomodoro session: {e}") + return StartPomodoroResponse( + success=False, + message=str(e), + error=str(e), + timestamp=datetime.now().isoformat(), + ) + except Exception as e: + logger.error(f"Unexpected error starting Pomodoro session: {e}", exc_info=True) + return StartPomodoroResponse( + success=False, + message="Failed to start Pomodoro session", + error=str(e), + timestamp=datetime.now().isoformat(), + ) + + +@api_handler( + body=EndPomodoroRequest, + method="POST", + path="/pomodoro/end", + tags=["pomodoro"], +) +async def end_pomodoro(body: EndPomodoroRequest) -> EndPomodoroResponse: + """ + End current Pomodoro session + + Args: + body: Request containing status (completed/abandoned/interrupted) + + Returns: + EndPomodoroResponse with processing job info + + Raises: + ValueError: If no active Pomodoro session + """ + try: + coordinator = get_coordinator() + + if not coordinator.pomodoro_manager: + return EndPomodoroResponse( + success=False, + message="Pomodoro manager not initialized", + error="Pomodoro manager not initialized", + timestamp=datetime.now().isoformat(), + ) + + # End Pomodoro session + result = await coordinator.pomodoro_manager.end_pomodoro(status=body.status) + + logger.info( + f"Pomodoro session ended via API: {result['session_id']}, status={body.status}" + ) + + return EndPomodoroResponse( + success=True, + message="Pomodoro session ended successfully", + data=EndPomodoroData( + session_id=result["session_id"], + processing_job_id=result.get("processing_job_id"), + raw_records_count=result["raw_records_count"], + message=result.get("message", ""), + ), + timestamp=datetime.now().isoformat(), + ) + + except ValueError as e: + # Expected error (no active session) + logger.warning(f"Failed to end 
Pomodoro session: {e}") + return EndPomodoroResponse( + success=False, + message=str(e), + error=str(e), + timestamp=datetime.now().isoformat(), + ) + except Exception as e: + logger.error(f"Unexpected error ending Pomodoro session: {e}", exc_info=True) + return EndPomodoroResponse( + success=False, + message="Failed to end Pomodoro session", + error=str(e), + timestamp=datetime.now().isoformat(), + ) + + +@api_handler(method="GET", path="/pomodoro/status", tags=["pomodoro"]) +async def get_pomodoro_status() -> GetPomodoroStatusResponse: + """ + Get current Pomodoro session status + + Returns: + GetPomodoroStatusResponse with current session info or None if no active session + """ + try: + coordinator = get_coordinator() + + if not coordinator.pomodoro_manager: + return GetPomodoroStatusResponse( + success=False, + message="Pomodoro manager not initialized", + error="Pomodoro manager not initialized", + timestamp=datetime.now().isoformat(), + ) + + # Get current session info + session_info = await coordinator.pomodoro_manager.get_current_session_info() + + if not session_info: + # No active session + return GetPomodoroStatusResponse( + success=True, + message="No active Pomodoro session", + data=None, + timestamp=datetime.now().isoformat(), + ) + + return GetPomodoroStatusResponse( + success=True, + message="Active Pomodoro session found", + data=PomodoroSessionData( + session_id=session_info["session_id"], + user_intent=session_info["user_intent"], + start_time=session_info["start_time"], + elapsed_minutes=session_info["elapsed_minutes"], + planned_duration_minutes=session_info["planned_duration_minutes"], + ), + timestamp=datetime.now().isoformat(), + ) + + except Exception as e: + logger.error( + f"Unexpected error getting Pomodoro status: {e}", exc_info=True + ) + return GetPomodoroStatusResponse( + success=False, + message="Failed to get Pomodoro status", + error=str(e), + timestamp=datetime.now().isoformat(), + ) diff --git a/backend/migrations/__init__.py 
b/backend/migrations/__init__.py new file mode 100644 index 0000000..947e87b --- /dev/null +++ b/backend/migrations/__init__.py @@ -0,0 +1,12 @@ +""" +Database migrations module - Version-based migration system + +This module provides a versioned migration system that: +1. Tracks applied migrations in schema_migrations table +2. Runs migrations in order by version number +3. Supports both SQL-based and Python-based migrations +""" + +from .runner import MigrationRunner + +__all__ = ["MigrationRunner"] diff --git a/backend/migrations/base.py b/backend/migrations/base.py new file mode 100644 index 0000000..00a8e6b --- /dev/null +++ b/backend/migrations/base.py @@ -0,0 +1,51 @@ +""" +Base migration class + +All migrations should inherit from this base class +""" + +import sqlite3 +from abc import ABC, abstractmethod +from typing import Optional + + +class BaseMigration(ABC): + """ + Base class for database migrations + + Each migration must: + 1. Define a unique version string (e.g., "0001", "0002") + 2. Provide a description + 3. Implement the up() method + 4. Optionally implement the down() method for rollbacks + """ + + # Must be overridden in subclass + version: str = "" + description: str = "" + + @abstractmethod + def up(self, cursor: sqlite3.Cursor) -> None: + """ + Execute migration (upgrade database) + + Args: + cursor: SQLite cursor for executing SQL commands + """ + pass + + def down(self, cursor: sqlite3.Cursor) -> None: + """ + Rollback migration (downgrade database) + + Args: + cursor: SQLite cursor for executing SQL commands + + Note: + This is optional. Many migrations cannot be safely rolled back. + If not implemented, rollback will be skipped with a warning. 
+ """ + pass + + def __repr__(self) -> str: + return f"" diff --git a/backend/migrations/runner.py b/backend/migrations/runner.py new file mode 100644 index 0000000..04681d9 --- /dev/null +++ b/backend/migrations/runner.py @@ -0,0 +1,265 @@ +""" +Migration runner - Manages database schema versioning + +Responsibilities: +1. Create schema_migrations table if not exists +2. Discover all migration files +3. Determine which migrations need to run +4. Execute migrations in order +5. Record successful migrations +""" + +import importlib +import sqlite3 +from datetime import datetime +from pathlib import Path +from typing import Any, Dict, List, Type + +from core.logger import get_logger + +from .base import BaseMigration + +logger = get_logger(__name__) + + +class MigrationRunner: + """ + Database migration runner with version tracking + + Usage: + runner = MigrationRunner(db_path) + runner.run_migrations() + """ + + SCHEMA_MIGRATIONS_TABLE = """ + CREATE TABLE IF NOT EXISTS schema_migrations ( + version TEXT PRIMARY KEY, + description TEXT NOT NULL, + applied_at TEXT NOT NULL + ) + """ + + def __init__(self, db_path: Path): + """ + Initialize migration runner + + Args: + db_path: Path to SQLite database + """ + self.db_path = db_path + self.migrations: Dict[str, Type[BaseMigration]] = {} + + def _ensure_schema_migrations_table(self, cursor: sqlite3.Cursor) -> None: + """ + Create schema_migrations table if it doesn't exist + + Args: + cursor: Database cursor + """ + cursor.execute(self.SCHEMA_MIGRATIONS_TABLE) + logger.debug("✓ schema_migrations table ready") + + def _get_applied_versions(self, cursor: sqlite3.Cursor) -> set: + """ + Get set of already-applied migration versions + + Args: + cursor: Database cursor + + Returns: + Set of version strings + """ + cursor.execute("SELECT version FROM schema_migrations") + rows = cursor.fetchall() + return {row[0] for row in rows} + + def _discover_migrations(self) -> List[Type[BaseMigration]]: + """ + Discover all migration 
classes from versions directory + + Returns: + List of migration classes sorted by version + """ + migrations_dir = Path(__file__).parent / "versions" + + if not migrations_dir.exists(): + logger.warning(f"Migrations directory not found: {migrations_dir}") + return [] + + discovered = [] + + # Import all Python files in versions directory + for migration_file in sorted(migrations_dir.glob("*.py")): + if migration_file.name.startswith("_"): + continue # Skip __init__.py and other private files + + module_name = f"migrations.versions.{migration_file.stem}" + + try: + module = importlib.import_module(module_name) + + # Find migration class in module + for attr_name in dir(module): + attr = getattr(module, attr_name) + + # Check if it's a migration class + if ( + isinstance(attr, type) + and issubclass(attr, BaseMigration) + and attr is not BaseMigration + ): + discovered.append(attr) + logger.debug(f"Discovered migration: {attr.version} - {attr.description}") + + except Exception as e: + logger.error(f"Failed to load migration {migration_file}: {e}", exc_info=True) + + # Sort by version + discovered.sort(key=lambda m: m.version) + + return discovered + + def _record_migration( + self, cursor: sqlite3.Cursor, migration: BaseMigration + ) -> None: + """ + Record successful migration in schema_migrations table + + Args: + cursor: Database cursor + migration: Migration instance + """ + cursor.execute( + """ + INSERT INTO schema_migrations (version, description, applied_at) + VALUES (?, ?, ?) 
+ """, + ( + migration.version, + migration.description, + datetime.now().isoformat(), + ), + ) + logger.info(f"✓ Recorded migration: {migration.version}") + + def run_migrations(self) -> int: + """ + Run all pending migrations + + Returns: + Number of migrations executed + """ + try: + conn = sqlite3.connect(str(self.db_path)) + cursor = conn.cursor() + + # Ensure tracking table exists + self._ensure_schema_migrations_table(cursor) + conn.commit() + + # Get applied versions + applied_versions = self._get_applied_versions(cursor) + logger.debug(f"Applied migrations: {applied_versions}") + + # Discover all migrations + all_migrations = self._discover_migrations() + + if not all_migrations: + logger.info("No migrations found") + conn.close() + return 0 + + # Filter to pending migrations + pending_migrations = [ + m for m in all_migrations if m.version not in applied_versions + ] + + if not pending_migrations: + logger.info("✓ All migrations up to date") + conn.close() + return 0 + + logger.info(f"Found {len(pending_migrations)} pending migration(s)") + + # Execute each pending migration + executed_count = 0 + for migration_class in pending_migrations: + migration = migration_class() + + logger.info(f"Running migration {migration.version}: {migration.description}") + + try: + # Execute migration + migration.up(cursor) + + # Record success + self._record_migration(cursor, migration) + conn.commit() + + executed_count += 1 + logger.info(f"✓ Migration {migration.version} completed successfully") + + except Exception as e: + logger.error( + f"✗ Migration {migration.version} failed: {e}", + exc_info=True, + ) + conn.rollback() + raise + + conn.close() + + logger.info(f"✓ Successfully executed {executed_count} migration(s)") + return executed_count + + except Exception as e: + logger.error(f"Migration runner failed: {e}", exc_info=True) + raise + + def get_migration_status(self) -> Dict[str, Any]: + """ + Get current migration status + + Returns: + Dictionary with migration 
status information + """ + try: + conn = sqlite3.connect(str(self.db_path)) + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + + # Ensure tracking table exists + self._ensure_schema_migrations_table(cursor) + + # Get applied migrations + cursor.execute( + """ + SELECT version, description, applied_at + FROM schema_migrations + ORDER BY version + """ + ) + applied = [dict(row) for row in cursor.fetchall()] + + # Discover all migrations + all_migrations = self._discover_migrations() + + applied_versions = {m["version"] for m in applied} + pending = [ + {"version": m.version, "description": m.description} + for m in all_migrations + if m.version not in applied_versions + ] + + conn.close() + + return { + "applied_count": len(applied), + "pending_count": len(pending), + "applied": applied, + "pending": pending, + } + + except Exception as e: + logger.error(f"Failed to get migration status: {e}", exc_info=True) + raise diff --git a/backend/migrations/versions/0001_initial_schema.py b/backend/migrations/versions/0001_initial_schema.py new file mode 100644 index 0000000..c1c730c --- /dev/null +++ b/backend/migrations/versions/0001_initial_schema.py @@ -0,0 +1,33 @@ +""" +Migration 0001: Initial database schema + +Creates all base tables and indexes for the iDO application +""" + +import sqlite3 + +from migrations.base import BaseMigration + + +class Migration(BaseMigration): + version = "0001" + description = "Initial database schema with all base tables" + + def up(self, cursor: sqlite3.Cursor) -> None: + """Create all initial tables and indexes""" + from core.sqls import schema + + # Create all tables + for table_sql in schema.ALL_TABLES: + cursor.execute(table_sql) + + # Create all indexes + for index_sql in schema.ALL_INDEXES: + cursor.execute(index_sql) + + def down(self, cursor: sqlite3.Cursor) -> None: + """ + Rollback not supported for initial schema + Would require dropping all tables + """ + pass diff --git 
a/backend/migrations/versions/0002_add_knowledge_actions_columns.py b/backend/migrations/versions/0002_add_knowledge_actions_columns.py new file mode 100644 index 0000000..27a8f45 --- /dev/null +++ b/backend/migrations/versions/0002_add_knowledge_actions_columns.py @@ -0,0 +1,53 @@ +""" +Migration 0002: Add knowledge extraction columns to actions table + +Adds columns to support knowledge extraction feature +""" + +import sqlite3 + +from migrations.base import BaseMigration + + +class Migration(BaseMigration): + version = "0002" + description = "Add knowledge extraction columns to actions and knowledge tables" + + def up(self, cursor: sqlite3.Cursor) -> None: + """Add columns for knowledge extraction feature""" + + # List of column additions (with error handling for already-exists) + columns_to_add = [ + ( + "actions", + "extract_knowledge", + "ALTER TABLE actions ADD COLUMN extract_knowledge BOOLEAN DEFAULT 0", + ), + ( + "actions", + "knowledge_extracted", + "ALTER TABLE actions ADD COLUMN knowledge_extracted BOOLEAN DEFAULT 0", + ), + ( + "knowledge", + "source_action_id", + "ALTER TABLE knowledge ADD COLUMN source_action_id TEXT", + ), + ] + + for table, column, sql in columns_to_add: + try: + cursor.execute(sql) + except sqlite3.OperationalError as e: + error_msg = str(e).lower() + if "duplicate column" in error_msg or "already exists" in error_msg: + # Column already exists, skip + pass + else: + raise + + def down(self, cursor: sqlite3.Cursor) -> None: + """ + Rollback not supported (SQLite doesn't support DROP COLUMN in older versions) + """ + pass diff --git a/backend/migrations/versions/0003_add_pomodoro_feature.py b/backend/migrations/versions/0003_add_pomodoro_feature.py new file mode 100644 index 0000000..97534b2 --- /dev/null +++ b/backend/migrations/versions/0003_add_pomodoro_feature.py @@ -0,0 +1,125 @@ +""" +Migration 0003: Add Pomodoro feature + +Adds columns to existing tables for Pomodoro session tracking: +- pomodoro_session_id to raw_records, 
actions, events, activities +- user_intent and pomodoro_status to activities + +Also creates indexes for efficient querying +""" + +import sqlite3 + +from migrations.base import BaseMigration + + +class Migration(BaseMigration): + version = "0003" + description = "Add Pomodoro feature columns and indexes" + + def up(self, cursor: sqlite3.Cursor) -> None: + """Add Pomodoro-related columns and indexes""" + + # Column additions + columns_to_add = [ + ( + "raw_records", + "pomodoro_session_id", + "ALTER TABLE raw_records ADD COLUMN pomodoro_session_id TEXT", + ), + ( + "actions", + "pomodoro_session_id", + "ALTER TABLE actions ADD COLUMN pomodoro_session_id TEXT", + ), + ( + "events", + "pomodoro_session_id", + "ALTER TABLE events ADD COLUMN pomodoro_session_id TEXT", + ), + ( + "activities", + "pomodoro_session_id", + "ALTER TABLE activities ADD COLUMN pomodoro_session_id TEXT", + ), + ( + "activities", + "user_intent", + "ALTER TABLE activities ADD COLUMN user_intent TEXT", + ), + ( + "activities", + "pomodoro_status", + "ALTER TABLE activities ADD COLUMN pomodoro_status TEXT", + ), + ] + + for table, column, sql in columns_to_add: + try: + cursor.execute(sql) + except sqlite3.OperationalError as e: + error_msg = str(e).lower() + if "duplicate column" in error_msg or "already exists" in error_msg: + # Column already exists, skip + pass + else: + raise + + # Index creation + indexes_to_create = [ + ( + "idx_raw_records_pomodoro_session", + """ + CREATE INDEX IF NOT EXISTS idx_raw_records_pomodoro_session + ON raw_records(pomodoro_session_id) + """, + ), + ( + "idx_actions_pomodoro_session", + """ + CREATE INDEX IF NOT EXISTS idx_actions_pomodoro_session + ON actions(pomodoro_session_id) + """, + ), + ( + "idx_events_pomodoro_session", + """ + CREATE INDEX IF NOT EXISTS idx_events_pomodoro_session + ON events(pomodoro_session_id) + """, + ), + ( + "idx_activities_pomodoro_session", + """ + CREATE INDEX IF NOT EXISTS idx_activities_pomodoro_session + ON 
activities(pomodoro_session_id) + """, + ), + ( + "idx_activities_pomodoro_status", + """ + CREATE INDEX IF NOT EXISTS idx_activities_pomodoro_status + ON activities(pomodoro_status) + """, + ), + ] + + for index_name, sql in indexes_to_create: + try: + cursor.execute(sql) + except Exception as e: + # Index creation failures are usually safe to ignore + # (index might already exist) + pass + + def down(self, cursor: sqlite3.Cursor) -> None: + """ + Rollback not supported (SQLite doesn't support DROP COLUMN easily) + + To rollback, you would need to: + 1. Create new tables without the columns + 2. Copy data + 3. Drop old tables + 4. Rename new tables + """ + pass diff --git a/backend/migrations/versions/__init__.py b/backend/migrations/versions/__init__.py new file mode 100644 index 0000000..54f45bb --- /dev/null +++ b/backend/migrations/versions/__init__.py @@ -0,0 +1,11 @@ +""" +Migration versions directory + +Each migration file should be named: XXXX_description.py +Where XXXX is a 4-digit version number (e.g., 0001, 0002, etc.) 
+ +Example: + 0001_initial_schema.py + 0002_add_three_layer_architecture.py + 0003_add_pomodoro_feature.py +""" diff --git a/backend/models/responses.py b/backend/models/responses.py index bbf0f75..058304d 100644 --- a/backend/models/responses.py +++ b/backend/models/responses.py @@ -271,7 +271,6 @@ class ImageOptimizationConfigData(BaseModel): strategy: str = "phash" phash_threshold: int = 10 min_interval: float = 0.5 - max_images: int = 10 enable_content_analysis: bool = True enable_text_detection: bool = True @@ -344,3 +343,41 @@ class CompleteInitialSetupResponse(TimedOperationResponse): pass +# Pomodoro Feature Response Models +class PomodoroSessionData(BaseModel): + """Pomodoro session data""" + + session_id: str + user_intent: str + start_time: str + elapsed_minutes: int + planned_duration_minutes: int + + +class StartPomodoroResponse(TimedOperationResponse): + """Response after starting a Pomodoro session""" + + data: Optional[PomodoroSessionData] = None + + +class EndPomodoroData(BaseModel): + """End Pomodoro session result data""" + + session_id: str + processing_job_id: Optional[str] = None + raw_records_count: int = 0 + message: str = "" + + +class EndPomodoroResponse(TimedOperationResponse): + """Response after ending a Pomodoro session""" + + data: Optional[EndPomodoroData] = None + + +class GetPomodoroStatusResponse(TimedOperationResponse): + """Response for getting current Pomodoro session status""" + + data: Optional[PomodoroSessionData] = None + + diff --git a/backend/perception/manager.py b/backend/perception/manager.py index d79a2bf..5f61ab2 100644 --- a/backend/perception/manager.py +++ b/backend/perception/manager.py @@ -6,6 +6,7 @@ """ import asyncio +import time from datetime import datetime from typing import Any, Callable, Dict, Optional @@ -89,6 +90,12 @@ def __init__( self.keyboard_enabled = True self.mouse_enabled = True + # Pomodoro mode state + self.pomodoro_session_id: Optional[str] = None + + # Event loop reference (set when start() 
is called) + self._event_loop: Optional[asyncio.AbstractEventLoop] = None + def _on_screen_lock(self) -> None: """Screen lock/system sleep callback""" if not self.is_running: @@ -148,7 +155,11 @@ def _on_keyboard_event(self, record: RawRecord) -> None: return try: - # Record all keyboard events for subsequent processing to preserve usage context + # Tag with Pomodoro session ID if active (for future use) + if self.pomodoro_session_id: + record.data['pomodoro_session_id'] = self.pomodoro_session_id + + # Always add to memory for real-time viewing and processing self.storage.add_record(record) self.event_buffer.add(record) @@ -170,6 +181,11 @@ def _on_mouse_event(self, record: RawRecord) -> None: try: # Only record important mouse events if self.mouse_capture.is_important_event(record.data): + # Tag with Pomodoro session ID if active (for future use) + if self.pomodoro_session_id: + record.data['pomodoro_session_id'] = self.pomodoro_session_id + + # Always add to memory for real-time viewing and processing self.storage.add_record(record) self.event_buffer.add(record) @@ -201,6 +217,11 @@ def _on_screenshot_event(self, record: RawRecord) -> None: try: if record: # Screenshot may be None (duplicate screenshots) + # Tag with Pomodoro session ID if active (for future use) + if self.pomodoro_session_id: + record.data['pomodoro_session_id'] = self.pomodoro_session_id + + # Always add to memory for real-time viewing and processing self.storage.add_record(record) self.event_buffer.add(record) @@ -226,6 +247,9 @@ async def start(self) -> None: self.is_running = True self.is_paused = False + # Store event loop reference for sync callbacks + self._event_loop = asyncio.get_running_loop() + # Load perception settings from core.settings import get_settings @@ -309,6 +333,9 @@ async def stop(self) -> None: self.is_running = False self.is_paused = False + # Clear event loop reference + self._event_loop = None + # Stop screen state monitor self.screen_state_monitor.stop() @@ -345,19 
+372,29 @@ async def stop(self) -> None: async def _screenshot_loop(self) -> None: """Screenshot loop task""" try: - loop = asyncio.get_event_loop() + iteration = 0 + while self.is_running: - # Execute synchronous screenshot operation in thread pool to avoid blocking event loop - await loop.run_in_executor( - None, - self.screenshot_capture.capture_with_interval, - self.capture_interval, - ) - await asyncio.sleep(0.1) # Brief sleep to avoid excessive CPU usage + iteration += 1 + loop_start = time.time() + + # Directly call capture() without interval checking + # The loop itself controls the timing + try: + self.screenshot_capture.capture() + except Exception as e: + logger.error(f"Screenshot capture failed: {e}", exc_info=True) + + elapsed = time.time() - loop_start + + # Sleep for the interval, accounting for capture time + sleep_time = max(0.1, self.capture_interval - elapsed) + await asyncio.sleep(sleep_time) + except asyncio.CancelledError: logger.debug("Screenshot loop task cancelled") except Exception as e: - logger.error(f"Screenshot loop task failed: {e}") + logger.error(f"Screenshot loop task failed: {e}", exc_info=True) async def _cleanup_loop(self) -> None: """Cleanup loop task""" @@ -523,3 +560,40 @@ def update_perception_settings( logger.debug( f"Perception settings updated: keyboard={self.keyboard_enabled}, mouse={self.mouse_enabled}" ) + + def set_pomodoro_session(self, session_id: str) -> None: + """ + Set Pomodoro session ID for tagging captured records + + Args: + session_id: Pomodoro session identifier + """ + self.pomodoro_session_id = session_id + logger.debug(f"✓ Pomodoro session set: {session_id}") + + def clear_pomodoro_session(self) -> None: + """Clear Pomodoro session ID (exit Pomodoro mode)""" + session_id = self.pomodoro_session_id + self.pomodoro_session_id = None + logger.debug(f"✓ Pomodoro session cleared: {session_id}") + + async def _persist_raw_record(self, record: RawRecord) -> None: + """ + Persist raw record to database 
(Pomodoro mode) + + Args: + record: RawRecord to persist + """ + try: + import json + from core.db import get_db + + db = get_db() + await db.raw_records.save( + timestamp=record.timestamp.isoformat(), + record_type=record.type.value, # Convert enum to string + data=json.dumps(record.data), + pomodoro_session_id=record.data.get('pomodoro_session_id'), + ) + except Exception as e: + logger.error(f"Failed to persist raw record: {e}", exc_info=True) diff --git a/backend/perception/screenshot_capture.py b/backend/perception/screenshot_capture.py index 9a90aa9..0f527aa 100644 --- a/backend/perception/screenshot_capture.py +++ b/backend/perception/screenshot_capture.py @@ -323,7 +323,9 @@ def capture_with_interval(self, interval: float = 1.0): return current_time = time.time() - if current_time - self._last_screenshot_time >= interval: + time_since_last = current_time - self._last_screenshot_time + + if time_since_last >= interval: self.capture() self._last_screenshot_time = current_time diff --git a/src/components/pomodoro/PomodoroTimer.tsx b/src/components/pomodoro/PomodoroTimer.tsx new file mode 100644 index 0000000..9950ec1 --- /dev/null +++ b/src/components/pomodoro/PomodoroTimer.tsx @@ -0,0 +1,242 @@ +import { useState, useEffect, useCallback } from 'react' +import { Clock, Play, Square, Loader2 } from 'lucide-react' +import { useTranslation } from 'react-i18next' + +import { Button } from '@/components/ui/button' +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card' +import { Input } from '@/components/ui/input' +import { Label } from '@/components/ui/label' +import { Progress } from '@/components/ui/progress' +import { toast } from 'sonner' +import { startPomodoro, endPomodoro, getPomodoroStatus } from '@/lib/client/apiClient' +import { usePomodoroStore } from '@/lib/stores/pomodoro' +import { + usePomodoroProcessingProgress, + usePomodoroProcessingComplete, + usePomodoroProcessingFailed +} from '@/hooks/useTauriEvents' 
+ +export function PomodoroTimer() { + const { t } = useTranslation() + const { status, session, error, setStatus, setSession, setError, setProcessingJobId, reset } = usePomodoroStore() + + const [userIntent, setUserIntent] = useState('') + const [durationMinutes, setDurationMinutes] = useState(25) + const [elapsedSeconds, setElapsedSeconds] = useState(0) + const [processingProgress, setProcessingProgress] = useState(0) + + // Timer effect - counts elapsed time when session is active + useEffect(() => { + if (status === 'active' && session) { + const timer = setInterval(() => { + const now = Date.now() + const start = new Date(session.startTime).getTime() + const elapsed = Math.floor((now - start) / 1000) + setElapsedSeconds(elapsed) + }, 1000) + + return () => clearInterval(timer) + } + }, [status, session]) + + // Event listeners for batch processing + usePomodoroProcessingProgress((payload) => { + console.log('[Pomodoro] Processing progress:', payload) + if (payload.job_id === usePomodoroStore.getState().processingJobId) { + setProcessingProgress(payload.processed) + } + }) + + usePomodoroProcessingComplete((payload) => { + console.log('[Pomodoro] Processing complete:', payload) + if (payload.job_id === usePomodoroStore.getState().processingJobId) { + toast.success(t('pomodoro.processing.complete', { count: payload.total_processed })) + reset() + setProcessingProgress(0) + } + }) + + usePomodoroProcessingFailed((payload) => { + console.log('[Pomodoro] Processing failed:', payload) + if (payload.job_id === usePomodoroStore.getState().processingJobId) { + toast.error(t('pomodoro.processing.failed', { error: payload.error })) + setError(payload.error) + setStatus('idle') + setProcessingProgress(0) + } + }) + + // Check for active session on mount + useEffect(() => { + const checkStatus = async () => { + try { + const result = await getPomodoroStatus() + if (result.success && result.data) { + setStatus('active') + setSession(result.data) + } + } catch (err) { + 
console.error('[Pomodoro] Failed to check status:', err) + } + } + + checkStatus() + }, [setStatus, setSession]) + + const handleStart = useCallback(async () => { + if (!userIntent.trim()) { + toast.error(t('pomodoro.error.noIntent')) + return + } + + setStatus('active') + setError(null) + + try { + const result = await startPomodoro({ + userIntent: userIntent.trim(), + durationMinutes + }) + + if (result.success && result.data) { + setSession(result.data) + toast.success(t('pomodoro.started')) + } else { + throw new Error(result.error || 'Failed to start Pomodoro') + } + } catch (err: any) { + console.error('[Pomodoro] Failed to start:', err) + setError(err.message || String(err)) + toast.error(t('pomodoro.error.startFailed', { error: err.message || String(err) })) + setStatus('idle') + } + }, [userIntent, durationMinutes, setStatus, setSession, setError, t]) + + const handleEnd = useCallback(async () => { + if (!session) return + + setStatus('ending') + setError(null) + + try { + const result = await endPomodoro({ + status: 'completed' + }) + + if (result.success && result.data) { + const { processingJobId, rawRecordsCount, message } = result.data + + if (message) { + toast.info(message) + reset() + } else { + toast.success(t('pomodoro.ended', { count: rawRecordsCount })) + setStatus('processing') + setProcessingJobId(processingJobId || null) + } + } else { + throw new Error(result.error || 'Failed to end Pomodoro') + } + } catch (err: any) { + console.error('[Pomodoro] Failed to end:', err) + setError(err.message || String(err)) + toast.error(t('pomodoro.error.endFailed', { error: err.message || String(err) })) + setStatus('active') // Revert to active + } + }, [session, setStatus, setError, setProcessingJobId, reset, t]) + + const formatTime = (seconds: number) => { + const mins = Math.floor(seconds / 60) + const secs = seconds % 60 + return `${mins}:${secs.toString().padStart(2, '0')}` + } + + const progressPercent = session ? 
(elapsedSeconds / (session.plannedDurationMinutes * 60)) * 100 : 0 + + return ( + + + + + {t('pomodoro.title')} + + {t('pomodoro.description')} + + + {status === 'idle' && ( + <> +
+ + setUserIntent(e.target.value)} + maxLength={200} + /> +

{t('pomodoro.intent.hint')}

+
+ +
+ + setDurationMinutes(Math.max(1, Math.min(90, parseInt(e.target.value) || 25)))} + /> +

{t('pomodoro.duration.hint')}

+
+ + + + )} + + {status === 'active' && session && ( + <> +
+
+ {t('pomodoro.status.active')} + {formatTime(elapsedSeconds)} +
+ +

+ {t('pomodoro.intent.current')}: {session.userIntent} +

+
+ + + + )} + + {(status === 'ending' || status === 'processing') && ( +
+ +

+ {status === 'ending' ? t('pomodoro.status.ending') : t('pomodoro.status.processing')} +

+ {status === 'processing' && processingProgress > 0 && ( +

+ {t('pomodoro.processing.progress', { count: processingProgress })} +

+ )} +
+ )} + + {error && ( +
+ {t('pomodoro.error.title')}: {error} +
+ )} +
+
+ ) +} diff --git a/src/hooks/useTauriEvents.ts b/src/hooks/useTauriEvents.ts index b9fc30a..2ee0565 100644 --- a/src/hooks/useTauriEvents.ts +++ b/src/hooks/useTauriEvents.ts @@ -304,3 +304,42 @@ export interface TodoDeletedPayload { export function useTodoDeleted(onDeleted: (payload: TodoDeletedPayload) => void) { useTauriEvent('todo-deleted', onDeleted) } + +/** + * Pomodoro processing progress hook (fires during batch processing) + */ +export interface PomodoroProcessingProgressPayload { + session_id: string + job_id: string + processed: number +} + +export function usePomodoroProcessingProgress(onProgress: (payload: PomodoroProcessingProgressPayload) => void) { + useTauriEvent('pomodoro-processing-progress', onProgress) +} + +/** + * Pomodoro processing complete hook (fires after batch processing finishes) + */ +export interface PomodoroProcessingCompletePayload { + session_id: string + job_id: string + total_processed: number +} + +export function usePomodoroProcessingComplete(onComplete: (payload: PomodoroProcessingCompletePayload) => void) { + useTauriEvent('pomodoro-processing-complete', onComplete) +} + +/** + * Pomodoro processing failed hook (fires if batch processing fails) + */ +export interface PomodoroProcessingFailedPayload { + session_id: string + job_id: string + error: string +} + +export function usePomodoroProcessingFailed(onFailed: (payload: PomodoroProcessingFailedPayload) => void) { + useTauriEvent('pomodoro-processing-failed', onFailed) +} diff --git a/src/lib/client/_apiTypes.d.ts b/src/lib/client/_apiTypes.d.ts index 9b92bab..b437334 100644 --- a/src/lib/client/_apiTypes.d.ts +++ b/src/lib/client/_apiTypes.d.ts @@ -210,6 +210,30 @@ export type Message13 = string export type Error14 = string export type Delayseconds1 = (number | null) export type Timestamp6 = string +export type Userintent = string +export type Durationminutes = number +export type Success16 = boolean +export type Message14 = string +export type Error15 = string +export 
type Sessionid = string +export type Userintent1 = string +export type Starttime3 = string +export type Elapsedminutes = number +export type Planneddurationminutes = number +export type Timestamp7 = string +export type Status1 = string +export type Success17 = boolean +export type Message15 = string +export type Error16 = string +export type Sessionid1 = string +export type Processingjobid = (string | null) +export type Rawrecordscount = number +export type Message16 = string +export type Timestamp8 = string +export type Success18 = boolean +export type Message17 = string +export type Error17 = string +export type Timestamp9 = string export type Days = number export type Startdate1 = string export type Enddate1 = string @@ -217,41 +241,40 @@ export type Startdate2 = string export type Enddate2 = string export type Startdate3 = string export type Enddate3 = string -export type Success16 = boolean -export type Message14 = string -export type Error15 = string +export type Success19 = boolean +export type Message18 = string +export type Error18 = string export type Stats = ({ [k: string]: unknown } | null) export type Hashes = string[] -export type Success17 = boolean -export type Message15 = string -export type Error16 = string +export type Success20 = boolean +export type Message19 = string +export type Error19 = string export type Foundcount = number export type Requestedcount = number export type Maxagehours = number -export type Success18 = boolean -export type Message16 = string -export type Error17 = string +export type Success21 = boolean +export type Message20 = string +export type Error20 = string export type Cleanedcount = number -export type Success19 = boolean -export type Message17 = string -export type Error18 = string +export type Success22 = boolean +export type Message21 = string +export type Error21 = string export type Clearedcount = number -export type Success20 = boolean -export type Message18 = string -export type Error19 = string +export type 
Success23 = boolean +export type Message22 = string +export type Error22 = string export type Enabled2 = boolean export type Strategy = string export type Phashthreshold = number export type Mininterval = number -export type Maximages = number export type Enablecontentanalysis = boolean export type Enabletextdetection = boolean -export type Timestamp7 = string -export type Success21 = boolean -export type Message19 = string -export type Error20 = string +export type Timestamp10 = string +export type Success24 = boolean +export type Message23 = string +export type Error23 = string export type Stats1 = ({ [k: string]: unknown } | null) @@ -262,17 +285,17 @@ export type Enabled3 = (boolean | null) export type Strategy1 = (string | null) export type Phashthreshold1 = (number | null) export type Mininterval1 = (number | null) -export type Maximages1 = (number | null) +export type Maximages = (number | null) export type Enablecontentanalysis1 = (boolean | null) export type Enabletextdetection1 = (boolean | null) -export type Success22 = boolean -export type Message20 = string -export type Error21 = string -export type Timestamp8 = string +export type Success25 = boolean +export type Message24 = string +export type Error24 = string +export type Timestamp11 = string export type Filepath = string -export type Success23 = boolean -export type Message21 = string -export type Error22 = string +export type Success26 = boolean +export type Message25 = string +export type Error25 = string export type Dataurl = string export type Name1 = string /** @@ -285,15 +308,15 @@ export type Inputtokenprice = number export type Outputtokenprice = number export type Currency = string export type Apikey = string -export type Success24 = boolean -export type Message22 = string -export type Error23 = string +export type Success27 = boolean +export type Message26 = string +export type Error26 = string export type Data4 = ({ [k: string]: unknown } | { [k: string]: unknown }[] | null) -export type 
Timestamp9 = (string | null) +export type Timestamp12 = (string | null) export type Modelid2 = string export type Name2 = (string | null) export type Apiurl1 = (string | null) @@ -309,10 +332,10 @@ export type Apikey1 = (string | null) export type Modelid3 = string export type Modelid4 = string export type Modelid5 = string -export type Success25 = boolean -export type Message23 = string -export type Error24 = string -export type Timestamp10 = string +export type Success28 = boolean +export type Message27 = string +export type Error27 = string +export type Timestamp13 = string export type Modelid6 = string export type Model2 = string export type Prompttokens = number @@ -325,63 +348,63 @@ export type Days1 = number export type Startdate4 = (string | null) export type Enddate4 = (string | null) export type Modelconfigid = (string | null) -export type Success26 = boolean -export type Message24 = string -export type Error25 = string -export type Timestamp11 = string +export type Success29 = boolean +export type Message28 = string +export type Error28 = string +export type Timestamp14 = string export type Dimension1 = (string | null) export type Days2 = (number | null) -export type Success27 = boolean -export type Message25 = string -export type Timestamp12 = string -export type Success28 = boolean +export type Success30 = boolean +export type Message29 = string +export type Timestamp15 = string +export type Success31 = boolean export type Path = string -export type Timestamp13 = string -export type Success29 = boolean -export type Message26 = string -export type Error26 = string +export type Timestamp16 = string +export type Success32 = boolean +export type Message30 = string +export type Error29 = string export type Language = string -export type Timestamp14 = string +export type Timestamp17 = string export type Databasepath = (string | null) export type Screenshotsavepath = (string | null) export type Language1 = (string | null) -export type Success30 = boolean 
-export type Message27 = string -export type Timestamp15 = string -export type Success31 = boolean -export type Message28 = string -export type Error27 = string +export type Success33 = boolean +export type Message31 = string +export type Timestamp18 = string +export type Success34 = boolean +export type Message32 = string +export type Error30 = string export type Compressionlevel = number export type Enableregioncropping = boolean export type Cropthreshold = number -export type Timestamp16 = string +export type Timestamp19 = string export type Compressionlevel1 = (string | null) export type Enableregioncropping1 = (boolean | null) export type Cropthreshold1 = (number | null) -export type Success32 = boolean -export type Message29 = string -export type Error28 = string -export type Timestamp17 = string -export type Success33 = boolean -export type Message30 = string -export type Error29 = string +export type Success35 = boolean +export type Message33 = string +export type Error31 = string +export type Timestamp20 = string +export type Success36 = boolean +export type Message34 = string +export type Error32 = string export type Totalprocessed = number export type Totalsavedbytes = number export type Averagecompressionratio = number -export type Timestamp18 = string -export type Success34 = boolean -export type Message31 = string -export type Error30 = string +export type Timestamp21 = string +export type Success37 = boolean +export type Message35 = string +export type Error33 = string export type Hasmodels = boolean export type Hasactivemodel = boolean export type Hascompletedsetup = boolean export type Needssetup = boolean export type Modelcount = number -export type Timestamp19 = string -export type Success35 = boolean -export type Message32 = string -export type Error31 = string -export type Timestamp20 = string +export type Timestamp22 = string +export type Success38 = boolean +export type Message36 = string +export type Error34 = string +export type Timestamp23 
= string export type Show = string export type Hide = string export type Dashboard = string @@ -391,13 +414,13 @@ export type Agents = string export type Settings1 = string export type About = string export type Quit = string -export type Success36 = boolean -export type Message33 = string -export type Error32 = string +export type Success39 = boolean +export type Message37 = string +export type Error35 = string export type Visible = boolean -export type Success37 = boolean -export type Message34 = string -export type Error33 = string +export type Success40 = boolean +export type Message38 = string +export type Error36 = string export type Visible1 = boolean export type Name3 = string export type RootModelStr = string @@ -682,6 +705,18 @@ restart_app: { input: RestartAppRequest output: RestartAppResponse } +start_pomodoro: { +input: StartPomodoroRequest +output: StartPomodoroResponse +} +end_pomodoro: { +input: EndPomodoroRequest +output: EndPomodoroResponse +} +get_pomodoro_status: { +input: void | undefined +output: GetPomodoroStatusResponse +} get_processing_stats: { input: void | undefined output: TimedOperationResponse @@ -1569,6 +1604,68 @@ error?: Error14 delaySeconds?: Delayseconds1 timestamp?: Timestamp6 } +/** + * Start Pomodoro request + */ +export interface StartPomodoroRequest { +userIntent: Userintent +durationMinutes?: Durationminutes +} +/** + * Response after starting a Pomodoro session + */ +export interface StartPomodoroResponse { +success: Success16 +message?: Message14 +error?: Error15 +data?: (PomodoroSessionData | null) +timestamp?: Timestamp7 +} +/** + * Pomodoro session data + */ +export interface PomodoroSessionData { +sessionId: Sessionid +userIntent: Userintent1 +startTime: Starttime3 +elapsedMinutes: Elapsedminutes +plannedDurationMinutes: Planneddurationminutes +} +/** + * End Pomodoro request + */ +export interface EndPomodoroRequest { +status?: Status1 +} +/** + * Response after ending a Pomodoro session + */ +export interface 
EndPomodoroResponse { +success: Success17 +message?: Message15 +error?: Error16 +data?: (EndPomodoroData | null) +timestamp?: Timestamp8 +} +/** + * End Pomodoro session result data + */ +export interface EndPomodoroData { +sessionId: Sessionid1 +processingJobId?: Processingjobid +rawRecordsCount?: Rawrecordscount +message?: Message16 +} +/** + * Response for getting current Pomodoro session status + */ +export interface GetPomodoroStatusResponse { +success: Success18 +message?: Message17 +error?: Error17 +data?: (PomodoroSessionData | null) +timestamp?: Timestamp9 +} /** * Request parameters for cleaning up old data. * @@ -1611,9 +1708,9 @@ endDate: Enddate3 * Response containing image cache statistics */ export interface ImageStatsResponse { -success: Success16 -message?: Message14 -error?: Error15 +success: Success19 +message?: Message18 +error?: Error18 stats?: Stats } /** @@ -1628,9 +1725,9 @@ hashes: Hashes * Response containing cached images in base64 format */ export interface CachedImagesResponse { -success: Success17 -message?: Message15 -error?: Error16 +success: Success20 +message?: Message19 +error?: Error19 images: Images1 foundCount: Foundcount requestedCount: Requestedcount @@ -1650,29 +1747,29 @@ maxAgeHours?: Maxagehours * Response after cleaning up old images */ export interface CleanupImagesResponse { -success: Success18 -message?: Message16 -error?: Error17 +success: Success21 +message?: Message20 +error?: Error20 cleanedCount?: Cleanedcount } /** * Response after clearing memory cache */ export interface ClearMemoryCacheResponse { -success: Success19 -message?: Message17 -error?: Error18 +success: Success22 +message?: Message21 +error?: Error21 clearedCount?: Clearedcount } /** * Response for get_image_optimization_config handler */ export interface GetImageOptimizationConfigResponse { -success: Success20 -message?: Message18 -error?: Error19 +success: Success23 +message?: Message22 +error?: Error22 data?: (ImageOptimizationConfigData | null) 
-timestamp?: Timestamp7 +timestamp?: Timestamp10 } /** * Image optimization configuration data @@ -1682,7 +1779,6 @@ enabled?: Enabled2 strategy?: Strategy phashThreshold?: Phashthreshold minInterval?: Mininterval -maxImages?: Maximages enableContentAnalysis?: Enablecontentanalysis enableTextDetection?: Enabletextdetection } @@ -1690,9 +1786,9 @@ enableTextDetection?: Enabletextdetection * Response containing image optimization statistics */ export interface ImageOptimizationStatsResponse { -success: Success21 -message?: Message19 -error?: Error20 +success: Success24 +message?: Message23 +error?: Error23 stats?: Stats1 config?: Config } @@ -1712,7 +1808,7 @@ enabled?: Enabled3 strategy?: Strategy1 phashThreshold?: Phashthreshold1 minInterval?: Mininterval1 -maxImages?: Maximages1 +maxImages?: Maximages enableContentAnalysis?: Enablecontentanalysis1 enableTextDetection?: Enabletextdetection1 } @@ -1720,11 +1816,11 @@ enableTextDetection?: Enabletextdetection1 * Response for update_image_optimization_config handler */ export interface UpdateImageOptimizationConfigResponseV2 { -success: Success22 -message?: Message20 -error?: Error21 +success: Success25 +message?: Message24 +error?: Error24 data?: (ImageOptimizationConfigData | null) -timestamp?: Timestamp8 +timestamp?: Timestamp11 } /** * Request parameters for reading an image file. @@ -1738,9 +1834,9 @@ filePath: Filepath * Response containing image file data as base64 */ export interface ReadImageFileResponse { -success: Success23 -message?: Message21 -error?: Error22 +success: Success26 +message?: Message25 +error?: Error25 dataUrl?: Dataurl } /** @@ -1770,11 +1866,11 @@ apiKey: Apikey * Generic model management response with optional payload and timestamp. 
*/ export interface ModelOperationResponse { -success: Success24 -message?: Message22 -error?: Error23 +success: Success27 +message?: Message26 +error?: Error26 data?: Data4 -timestamp?: Timestamp9 +timestamp?: Timestamp12 } /** * Request parameters for updating a model configuration. @@ -1829,11 +1925,11 @@ modelId: Modelid5 * Standard dashboard response with optional data payload. */ export interface DashboardResponse { -success: Success25 -message?: Message23 -error?: Error24 +success: Success28 +message?: Message27 +error?: Error27 data?: unknown -timestamp?: Timestamp10 +timestamp?: Timestamp13 } /** * Request parameters for retrieving LLM statistics of a specific model. @@ -1881,11 +1977,11 @@ modelConfigId?: Modelconfigid * Dashboard trend response with dimension metadata. */ export interface LLMUsageTrendResponse { -success: Success26 -message?: Message24 -error?: Error25 +success: Success29 +message?: Message28 +error?: Error28 data?: unknown -timestamp?: Timestamp11 +timestamp?: Timestamp14 dimension?: Dimension1 days?: Days2 } @@ -1893,18 +1989,18 @@ days?: Days2 * Common system operation response */ export interface SystemResponse { -success: Success27 -message?: Message25 +success: Success30 +message?: Message29 data?: unknown -timestamp: Timestamp12 +timestamp: Timestamp15 } /** * Database path response */ export interface DatabasePathResponse { -success: Success28 +success: Success31 data: DatabasePathData -timestamp: Timestamp13 +timestamp: Timestamp16 } /** * Database path data @@ -1916,11 +2012,11 @@ path: Path * Response for get_settings_info handler */ export interface GetSettingsInfoResponse { -success: Success29 -message?: Message26 -error?: Error26 +success: Success32 +message?: Message30 +error?: Error29 data?: (SettingsInfoData | null) -timestamp?: Timestamp14 +timestamp?: Timestamp17 } /** * Settings info data structure @@ -1963,19 +2059,19 @@ language?: Language1 * Update settings response */ export interface UpdateSettingsResponse { 
-success: Success30 -message: Message27 -timestamp: Timestamp15 +success: Success33 +message: Message31 +timestamp: Timestamp18 } /** * Response for get_image_compression_config handler */ export interface GetImageCompressionConfigResponse { -success: Success31 -message?: Message28 -error?: Error27 +success: Success34 +message?: Message32 +error?: Error30 data?: (ImageCompressionConfigData | null) -timestamp?: Timestamp16 +timestamp?: Timestamp19 } /** * Image compression configuration data @@ -2001,21 +2097,21 @@ cropThreshold?: Cropthreshold1 * Response for update_image_compression_config handler */ export interface UpdateImageCompressionConfigResponseV2 { -success: Success32 -message?: Message29 -error?: Error28 +success: Success35 +message?: Message33 +error?: Error31 data?: (ImageCompressionConfigData | null) -timestamp?: Timestamp17 +timestamp?: Timestamp20 } /** * Response for get_image_compression_stats handler */ export interface GetImageCompressionStatsResponse { -success: Success33 -message?: Message30 -error?: Error29 +success: Success36 +message?: Message34 +error?: Error32 data?: (ImageCompressionStatsData | null) -timestamp?: Timestamp18 +timestamp?: Timestamp21 } /** * Image compression statistics data @@ -2029,11 +2125,11 @@ averageCompressionRatio?: Averagecompressionratio * Response for check_initial_setup handler */ export interface CheckInitialSetupResponse { -success: Success34 -message?: Message31 -error?: Error30 +success: Success37 +message?: Message35 +error?: Error33 data?: (InitialSetupData | null) -timestamp?: Timestamp19 +timestamp?: Timestamp22 } /** * Initial setup check data @@ -2049,11 +2145,11 @@ modelCount: Modelcount * Response for complete_initial_setup handler */ export interface CompleteInitialSetupResponse { -success: Success35 -message?: Message32 -error?: Error31 +success: Success38 +message?: Message36 +error?: Error34 data?: unknown -timestamp?: Timestamp20 +timestamp?: Timestamp23 } /** * Request to update tray menu 
labels with i18n translations. @@ -2073,9 +2169,9 @@ quit: Quit * Response from tray update operation. */ export interface TrayUpdateResponse { -success: Success36 -message?: Message33 -error?: Error32 +success: Success39 +message?: Message37 +error?: Error35 } /** * Request to change tray icon visibility. @@ -2087,9 +2183,9 @@ visible: Visible * Response from tray visibility operation. */ export interface TrayVisibilityResponse { -success: Success37 -message?: Message34 -error?: Error33 +success: Success40 +message?: Message38 +error?: Error36 visible: Visible1 } /** diff --git a/src/lib/client/apiClient.ts b/src/lib/client/apiClient.ts index 512c53e..3f42aba 100644 --- a/src/lib/client/apiClient.ts +++ b/src/lib/client/apiClient.ts @@ -928,6 +928,57 @@ export async function restartApp( return await pyInvoke("restart_app", body, options); } +/** + * Start a new Pomodoro session + * + * Args: + * body: Request containing user_intent and duration_minutes + * + * Returns: + * StartPomodoroResponse with session data + * + * Raises: + * ValueError: If a Pomodoro session is already active or previous session is still processing + */ +export async function startPomodoro( + body: Commands["start_pomodoro"]["input"], + options?: InvokeOptions +): Promise { + return await pyInvoke("start_pomodoro", body, options); +} + +/** + * End current Pomodoro session + * + * Args: + * body: Request containing status (completed/abandoned/interrupted) + * + * Returns: + * EndPomodoroResponse with processing job info + * + * Raises: + * ValueError: If no active Pomodoro session + */ +export async function endPomodoro( + body: Commands["end_pomodoro"]["input"], + options?: InvokeOptions +): Promise { + return await pyInvoke("end_pomodoro", body, options); +} + +/** + * Get current Pomodoro session status + * + * Returns: + * GetPomodoroStatusResponse with current session info or None if no active session + */ +export async function getPomodoroStatus( + body: 
Commands["get_pomodoro_status"]["input"], + options?: InvokeOptions +): Promise { + return await pyInvoke("get_pomodoro_status", body, options); +} + /** * Get processing module statistics. * diff --git a/src/lib/stores/pomodoro.ts b/src/lib/stores/pomodoro.ts new file mode 100644 index 0000000..be935e9 --- /dev/null +++ b/src/lib/stores/pomodoro.ts @@ -0,0 +1,57 @@ +import { create } from 'zustand' + +/** + * Pomodoro Session State + * + * Manages Pomodoro timer state and interactions with the backend API + */ + +export interface PomodoroSession { + sessionId: string + userIntent: string + startTime: string + elapsedMinutes: number + plannedDurationMinutes: number +} + +export type PomodoroStatus = 'idle' | 'active' | 'ending' | 'processing' + +interface PomodoroState { + // State + status: PomodoroStatus + session: PomodoroSession | null + error: string | null + processingJobId: string | null + + // Actions + setStatus: (status: PomodoroStatus) => void + setSession: (session: PomodoroSession | null) => void + setError: (error: string | null) => void + setProcessingJobId: (jobId: string | null) => void + reset: () => void +} + +export const usePomodoroStore = create((set) => ({ + // Initial state + status: 'idle', + session: null, + error: null, + processingJobId: null, + + // Actions + setStatus: (status) => set({ status }), + + setSession: (session) => set({ session }), + + setError: (error) => set({ error }), + + setProcessingJobId: (jobId) => set({ processingJobId: jobId }), + + reset: () => + set({ + status: 'idle', + session: null, + error: null, + processingJobId: null + }) +})) diff --git a/src/locales/en.ts b/src/locales/en.ts index cfad778..ed69fad 100644 --- a/src/locales/en.ts +++ b/src/locales/en.ts @@ -933,6 +933,40 @@ export const en = { description: 'AI-powered desktop activity monitoring and task recommendation system', allRightsReserved: 'All rights reserved' }, + pomodoro: { + title: 'Pomodoro Timer', + description: 'Focus Mode: Start a Pomodoro 
session to capture and analyze your focused work.', + intent: { + label: 'What do you plan to work on?', + placeholder: 'e.g., Write project documentation', + hint: 'Describe your intention for this Pomodoro session', + current: 'Current task' + }, + duration: { + label: 'Duration (minutes)', + hint: 'Recommended: 25 minutes' + }, + start: 'Start Pomodoro', + end: 'End Session', + status: { + active: 'Session in progress', + ending: 'Ending session...', + processing: 'Analyzing session data...' + }, + started: 'Pomodoro session started', + ended: 'Session ended. Analyzing {{count}} records...', + processing: { + progress: 'Processed {{count}} records', + complete: 'Analysis complete! Processed {{count}} records.', + failed: 'Analysis failed: {{error}}' + }, + error: { + title: 'Error', + noIntent: 'Please describe what you plan to work on', + startFailed: 'Failed to start Pomodoro: {{error}}', + endFailed: 'Failed to end Pomodoro: {{error}}' + } + }, debug: { welcomeFlowReset: '🔄 Welcome flow reset', setupAlreadyActive: 'ℹ️ Setup is already active', diff --git a/src/locales/zh-CN.ts b/src/locales/zh-CN.ts index 166c747..9de5711 100644 --- a/src/locales/zh-CN.ts +++ b/src/locales/zh-CN.ts @@ -923,6 +923,40 @@ export const zhCN = { description: 'AI 驱动的桌面活动监控与任务推荐系统', allRightsReserved: '版权所有' }, + pomodoro: { + title: '番茄钟', + description: '专注模式:开启番茄钟会话以捕获并分析您的专注工作。', + intent: { + label: '你计划做什么?', + placeholder: '例如:编写项目文档', + hint: '描述你在这个番茄钟期间的工作意图', + current: '当前任务' + }, + duration: { + label: '时长(分钟)', + hint: '推荐:25 分钟' + }, + start: '开始番茄钟', + end: '结束会话', + status: { + active: '会话进行中', + ending: '正在结束会话...', + processing: '正在分析会话数据...' 
+ }, + started: '番茄钟会话已开始', + ended: '会话已结束,正在分析 {{count}} 条记录...', + processing: { + progress: '已处理 {{count}} 条记录', + complete: '分析完成!已处理 {{count}} 条记录。', + failed: '分析失败:{{error}}' + }, + error: { + title: '错误', + noIntent: '请描述你计划做什么', + startFailed: '启动番茄钟失败:{{error}}', + endFailed: '结束番茄钟失败:{{error}}' + } + }, debug: { welcomeFlowReset: '🔄 欢迎流程已重置', setupAlreadyActive: 'ℹ️ 设置已处于激活状态', diff --git a/src/views/Activity.tsx b/src/views/Activity.tsx index 9a60934..f2485ef 100644 --- a/src/views/Activity.tsx +++ b/src/views/Activity.tsx @@ -14,10 +14,12 @@ import { TimelineDay, Activity } from '@/lib/types/activity' import { format, parseISO } from 'date-fns' import { getDateLocale } from '@/lib/utils/date-i18n' import { MergeActivitiesDialog } from '@/components/activity/MergeActivitiesDialog' +import { PomodoroTimer } from '@/components/pomodoro/PomodoroTimer' /** * Activity view with timeline list layout * Features: + * - Pomodoro timer for focused work sessions * - Timeline view with date grouping * - Category filtering (work, personal, distraction, idle) * - Activity statistics per day @@ -316,6 +318,12 @@ export default function ActivityView() { {/* Main Content: Timeline */}
+ {/* Pomodoro Timer */} +<div className="mb-6">
+<PomodoroTimer /> +</div>
+ + {/* Timeline Content */} {isLoading ? (