diff --git a/.env.example b/.env.example index 12cfb9067..5903ab08b 100644 --- a/.env.example +++ b/.env.example @@ -16,19 +16,3 @@ TAVILY_API_KEY=your-tavily-api-key # LangSmith tracing (optional) LANGSMITH_API_KEY=your-langsmith-api-key - -# Supabase (required when LEON_STORAGE_STRATEGY=supabase) -LEON_STORAGE_STRATEGY=supabase -SUPABASE_PUBLIC_URL=https://supabase.mycel.nextmind.space - -# SUPABASE_INTERNAL_URL: direct server-side URL (bypasses public proxy). -# Production (same-host): SUPABASE_INTERNAL_URL=http://:8000 -# Local dev (SSH tunnel): SUPABASE_INTERNAL_URL=http://localhost:18000 -SUPABASE_INTERNAL_URL=http://localhost:18000 - -SUPABASE_ANON_KEY=your-anon-key -LEON_SUPABASE_SERVICE_ROLE_KEY=your-service-role-key -SUPABASE_JWT_SECRET=your-jwt-secret - -# DB schema: staging for local dev and staging envs; omit for production (defaults to public) -LEON_DB_SCHEMA=staging diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 000000000..b030b53a9 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,39 @@ +name: Publish to PyPI + +on: + push: + tags: + - 'v*' + workflow_dispatch: + +jobs: + build-and-publish: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build twine + + - name: Build package + run: python -m build + + - name: Check package + run: twine check dist/* + + - name: Publish to PyPI + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: twine upload dist/* diff --git a/.gitignore b/.gitignore index be4d3c775..4d299bc3d 100644 --- a/.gitignore +++ b/.gitignore @@ -46,8 +46,6 @@ frontend/app/.env.development .claude/.stfolder/ .claude/.vscode/ .claude/settings.local.json -.claude/mcp.json -.mcp.json teams # 
User-level Leon config and skills @@ -94,7 +92,6 @@ worktrees/ /*.png /*.yml /*.yaml -!docker-compose.yml /dogfood-output/ /current-chat.yaml /.claude/skills/ @@ -106,6 +103,5 @@ frontend/.vite/ .playwright-cli/ ops -# Auto-generated +.lark-events/ .playwright-mcp/ -/supabase/ diff --git a/README.md b/README.md index a7fdc9af7..e176d1a9c 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Mycel Banner -**Link: connecting people, agents, and teams for the next era of human-AI collaboration** +**Production-ready agent runtime for building, running, and governing collaborative AI teams** 🇬🇧 English | [🇨🇳 中文](README.zh.md) @@ -15,16 +15,16 @@ --- -Mycel gives your agents a **body** (portable identity & sandbox), **mind** (shareable templates), **memory** (persistent context), and **social life** (a native messaging layer where humans and agents coexist as equals). It's the platform layer for human-AI teams that actually work together. +Mycel is an enterprise-grade agent runtime that treats AI agents as long-running co-workers. Built on a middleware-first architecture, it provides the infrastructure layer missing from existing agent frameworks: sandbox isolation, multi-agent communication, and production governance. ## Why Mycel? -Existing frameworks help you *build* agents. Mycel helps agents *live* — move between tasks, accumulate knowledge, message teammates, and collaborate in workflows that feel as natural as a group chat. +Existing agent frameworks focus on *building* agents. Mycel focuses on *running* them in production: -- **Body** — Agents get a portable identity with sandbox isolation. Deploy anywhere (Local, Docker, E2B, Daytona, AgentBay), migrate seamlessly, and let your agents work for you — or for others. -- **Mind** — A template marketplace for agent personas and skills. Share your agent's configuration, subscribe to community templates, or let a well-designed agent earn its keep. 
-- **Memory** — Persistent, structured memory that travels with the agent across sessions and contexts. -- **Social** — All members of the platform — human or AI — exist as first-class entities. Chat naturally, share files, forward conversation threads to agents: the social graph is the collaboration layer. +- **Middleware Pipeline**: Unified tool injection, validation, security, and observability +- **Sandbox Isolation**: Run agents in Docker/E2B/cloud with automatic state management +- **Multi-Agent Communication**: Agents discover, message, and collaborate with each other — and with humans +- **Production Governance**: Built-in security controls, audit logging, and cost tracking ## Quick Start @@ -59,7 +59,7 @@ uv sync --extra e2b # E2B uv sync --extra daytona # Daytona ``` -Docker sandbox works out of the box (just needs Docker installed). See [Sandbox docs](docs/en/sandbox.mdx) for provider setup. +Docker sandbox works out of the box (just needs Docker installed). See [Sandbox docs](docs/en/sandbox.md) for provider setup. ### 3. 
Start the services @@ -170,11 +170,12 @@ Agents can be extended with external tools and specialized expertise: ## Documentation -- [Configuration](docs/en/configuration.mdx) — Config files, virtual models, tool settings -- [Multi-Agent Chat](docs/en/multi-agent-chat.mdx) — Entity-Chat system, agent communication -- [Sandbox](docs/en/sandbox.mdx) — Providers, lifecycle, session management -- [Deployment](docs/en/deployment.mdx) — Production deployment guide -- [Concepts](docs/en/concepts.mdx) — Core abstractions (Thread, Member, Task, Resource) +- [CLI Reference](docs/en/cli.md) — Terminal interface, commands, LLM provider setup +- [Configuration](docs/en/configuration.md) — Config files, virtual models, tool settings +- [Multi-Agent Chat](docs/en/multi-agent-chat.md) — Entity-Chat system, agent communication +- [Sandbox](docs/en/sandbox.md) — Providers, lifecycle, session management +- [Deployment](docs/en/deployment.md) — Production deployment guide +- [Concepts](docs/en/product-primitives.md) — Core abstractions (Thread, Member, Task, Resource) ## Contact Us diff --git a/README.zh.md b/README.zh.md index 12bb8981a..75dd9618b 100644 --- a/README.zh.md +++ b/README.zh.md @@ -4,7 +4,7 @@ Mycel Banner -**Link:连接人与 Agent,构建下一代人机协同** +**企业级 Agent 运行时,构建、运行和治理协作 AI 团队** [🇬🇧 English](README.md) | 🇨🇳 中文 @@ -15,16 +15,16 @@ --- -Mycel 让你的 Agent 拥有**身体**(可迁移的身份与沙箱)、**思想**(可共享的模板市场)、**记忆**(跨会话的持久上下文)和**社交**(人与 Agent 平等共存的原生消息层)。这是真正意义上的人机协同平台。 +Mycel 是企业级 Agent 运行时,将 AI Agent 视为长期运行的协作伙伴。基于中间件优先架构,提供现有 Agent 框架缺失的基础设施层:沙箱隔离、多 Agent 通讯和生产治理。 ## 为什么选择 Mycel? 
-现有框架帮你*构建* Agent,Mycel 让 Agent 真正*活着*——在任务间自由迁移、积累知识、给队友发消息,用像群聊一样自然的方式协作。 +现有 Agent 框架专注于*构建* Agent,Mycel 专注于在生产环境*运行*它们: -- **身体** — Agent 拥有可迁移的身份和沙箱隔离。支持 Local / Docker / E2B / Daytona / AgentBay,随时迁移,让你的 Agent 为你工作,也能为别人打工。 -- **思想** — Agent 模板市场:分享你的 Agent 配置,订阅社区模板,让设计精良的 Agent 产生真实价值。 -- **记忆** — 持久结构化记忆,跟随 Agent 跨会话、跨上下文流转。 -- **社交** — 平台上所有成员——无论是人还是 AI——都是一等公民实体。像微信一样自然地聊天、发文件、把聊天记录分享给 Agent:社交图谱就是协作层。 +- **中间件管线**:统一的工具注入、校验、安全和可观测性 +- **沙箱隔离**:在 Docker/E2B/云端运行 Agent,自动状态管理 +- **多 Agent 通讯**:Agent 之间互相发现、发送消息、自主协作——人类也参与其中 +- **生产治理**:内置安全控制、审计日志和成本追踪 ## 快速开始 @@ -59,7 +59,7 @@ uv sync --extra e2b # E2B uv sync --extra daytona # Daytona ``` -Docker 沙箱开箱即用(只需安装 Docker)。详见[沙箱文档](docs/zh/sandbox.mdx)。 +Docker 沙箱开箱即用(只需安装 Docker)。详见[沙箱文档](docs/zh/sandbox.md)。 ### 3. 启动服务 @@ -170,11 +170,12 @@ Agent 可通过外部工具和专业技能进行扩展: ## 文档 -- [配置指南](docs/zh/configuration.mdx) — 配置文件、虚拟模型、工具设置 -- [多 Agent 通讯](docs/zh/multi-agent-chat.mdx) — Entity-Chat 系统、Agent 间通讯 -- [沙箱](docs/zh/sandbox.mdx) — 提供商、生命周期、会话管理 -- [部署](docs/zh/deployment.mdx) — 生产部署指南 -- [核心概念](docs/zh/concepts.mdx) — 核心抽象(Thread、Member、Task、Resource) +- [CLI 参考](docs/zh/cli.md) — 终端界面、命令、LLM 提供商配置 +- [配置指南](docs/zh/configuration.md) — 配置文件、虚拟模型、工具设置 +- [多 Agent 通讯](docs/zh/multi-agent-chat.md) — Entity-Chat 系统、Agent 间通讯 +- [沙箱](docs/zh/sandbox.md) — 提供商、生命周期、会话管理 +- [部署](docs/zh/deployment.md) — 生产部署指南 +- [核心概念](docs/zh/product-primitives.md) — 核心抽象(Thread、Member、Task、Resource) ## 联系我们 diff --git a/backend/taskboard/service.py b/backend/taskboard/service.py index e1c99b568..ffd4586df 100644 --- a/backend/taskboard/service.py +++ b/backend/taskboard/service.py @@ -217,7 +217,7 @@ def _get_thread_id(self) -> str: # Handlers (async — ToolRunner awaits coroutines) # ------------------------------------------------------------------ - async def _list_tasks(self, Status: str = "", Priority: str = "") -> str: + async def _list_tasks(self, Status: str = "", Priority: str = "") -> str: # noqa: N803 try: tasks = await 
asyncio.to_thread(task_service.list_tasks) except Exception as e: @@ -231,7 +231,7 @@ async def _list_tasks(self, Status: str = "", Priority: str = "") -> str: return json.dumps({"tasks": tasks, "total": len(tasks)}, ensure_ascii=False) - async def _claim_task(self, TaskId: str) -> str: + async def _claim_task(self, TaskId: str) -> str: # noqa: N803 thread_id = self._get_thread_id() now_ms = int(time.time() * 1000) try: @@ -249,7 +249,7 @@ async def _claim_task(self, TaskId: str) -> str: return json.dumps({"error": f"Task not found: {TaskId}"}) return json.dumps({"task": updated}, ensure_ascii=False) - async def _update_progress(self, TaskId: str, Progress: int, Note: str = "") -> str: + async def _update_progress(self, TaskId: str, Progress: int, Note: str = "") -> str: # noqa: N803 update_kwargs: dict[str, Any] = {"progress": Progress} if Note: @@ -272,7 +272,7 @@ async def _update_progress(self, TaskId: str, Progress: int, Note: str = "") -> return json.dumps({"error": f"Task not found: {TaskId}"}) return json.dumps({"task": updated}, ensure_ascii=False) - async def _complete_task(self, TaskId: str, Result: str) -> str: + async def _complete_task(self, TaskId: str, Result: str) -> str: # noqa: N803 now_ms = int(time.time() * 1000) try: updated = await asyncio.to_thread( @@ -290,7 +290,7 @@ async def _complete_task(self, TaskId: str, Result: str) -> str: return json.dumps({"error": f"Task not found: {TaskId}"}) return json.dumps({"task": updated}, ensure_ascii=False) - async def _fail_task(self, TaskId: str, Reason: str) -> str: + async def _fail_task(self, TaskId: str, Reason: str) -> str: # noqa: N803 now_ms = int(time.time() * 1000) try: updated = await asyncio.to_thread( @@ -307,7 +307,7 @@ async def _fail_task(self, TaskId: str, Reason: str) -> str: return json.dumps({"error": f"Task not found: {TaskId}"}) return json.dumps({"task": updated}, ensure_ascii=False) - async def _create_task(self, Title: str, Description: str = "", Priority: str = "medium") -> 
str: + async def _create_task(self, Title: str, Description: str = "", Priority: str = "medium") -> str: # noqa: N803 try: task = await asyncio.to_thread( task_service.create_task, diff --git a/backend/web/core/lifespan.py b/backend/web/core/lifespan.py index 13a76a4b2..5f56f1312 100644 --- a/backend/web/core/lifespan.py +++ b/backend/web/core/lifespan.py @@ -31,9 +31,9 @@ def _seed_dev_user(app: FastAPI) -> None: log = logging.getLogger(__name__) member_repo = app.state.member_repo - dev_user_id = "dev-user" + DEV_USER_ID = "dev-user" # noqa: N806 - if member_repo.get_by_id(dev_user_id) is not None: + if member_repo.get_by_id(DEV_USER_ID) is not None: return # already seeded log.info("DEV: seeding dev-user member + initial agents") @@ -42,7 +42,7 @@ def _seed_dev_user(app: FastAPI) -> None: # Human member row member_repo.create( MemberRow( - id=dev_user_id, + id=DEV_USER_ID, name="Dev", type=MemberType.HUMAN, created_at=now, @@ -77,7 +77,7 @@ def _seed_dev_user(app: FastAPI) -> None: type=MemberType.MYCEL_AGENT, description=agent_def["description"], config_dir=str(agent_dir), - owner_user_id=dev_user_id, + owner_user_id=DEV_USER_ID, created_at=now, ) ) @@ -111,79 +111,33 @@ async def lifespan(app: FastAPI): ensure_library_dir() # ---- Entity-Chat repos + services ---- - _storage_strategy = os.getenv("LEON_STORAGE_STRATEGY", "sqlite") - - if _storage_strategy == "supabase": - from backend.web.core.supabase_factory import create_supabase_client - from storage.container import StorageContainer - from storage.providers.supabase import ( - SupabaseAccountRepo, - SupabaseChatEntityRepo, - SupabaseChatMessageRepo, - SupabaseChatRepo, - SupabaseContactRepo, - SupabaseEntityRepo, - SupabaseInviteCodeRepo, - SupabaseMemberRepo, - SupabaseRecipeRepo, - SupabaseThreadLaunchPrefRepo, - SupabaseThreadRepo, - SupabaseUserSettingsRepo, - ) - - _supabase_client = create_supabase_client() - app.state.member_repo = SupabaseMemberRepo(_supabase_client) - app.state.account_repo = 
SupabaseAccountRepo(_supabase_client) - app.state.entity_repo = SupabaseEntityRepo(_supabase_client) - app.state.thread_repo = SupabaseThreadRepo(_supabase_client) - app.state.thread_launch_pref_repo = SupabaseThreadLaunchPrefRepo(_supabase_client) - app.state.recipe_repo = SupabaseRecipeRepo(_supabase_client) - app.state.chat_repo = SupabaseChatRepo(_supabase_client) - app.state.chat_entity_repo = SupabaseChatEntityRepo(_supabase_client) - app.state.chat_message_repo = SupabaseChatMessageRepo(_supabase_client) - app.state.invite_code_repo = SupabaseInviteCodeRepo(_supabase_client) - app.state.user_settings_repo = SupabaseUserSettingsRepo(_supabase_client) - app.state._supabase_client = _supabase_client - app.state._storage_container = StorageContainer(strategy="supabase", supabase_client=_supabase_client) - else: - from storage.providers.sqlite.chat_repo import SQLiteChatEntityRepo, SQLiteChatMessageRepo, SQLiteChatRepo - from storage.providers.sqlite.entity_repo import SQLiteEntityRepo - from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path - from storage.providers.sqlite.member_repo import SQLiteAccountRepo, SQLiteMemberRepo - from storage.providers.sqlite.recipe_repo import SQLiteRecipeRepo - from storage.providers.sqlite.thread_launch_pref_repo import SQLiteThreadLaunchPrefRepo - from storage.providers.sqlite.thread_repo import SQLiteThreadRepo - - db = resolve_role_db_path(SQLiteDBRole.MAIN) - chat_db = resolve_role_db_path(SQLiteDBRole.CHAT) - - app.state.member_repo = SQLiteMemberRepo(db) - app.state.account_repo = SQLiteAccountRepo(db) - app.state.entity_repo = SQLiteEntityRepo(db) - app.state.thread_repo = SQLiteThreadRepo(db) - app.state.thread_launch_pref_repo = SQLiteThreadLaunchPrefRepo(db) - app.state.recipe_repo = SQLiteRecipeRepo(db) - app.state.chat_repo = SQLiteChatRepo(chat_db) - app.state.chat_entity_repo = SQLiteChatEntityRepo(chat_db) - app.state.chat_message_repo = SQLiteChatMessageRepo(chat_db) + from 
storage.providers.sqlite.chat_repo import SQLiteChatEntityRepo, SQLiteChatMessageRepo, SQLiteChatRepo + from storage.providers.sqlite.entity_repo import SQLiteEntityRepo + from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path + from storage.providers.sqlite.member_repo import SQLiteAccountRepo, SQLiteMemberRepo + from storage.providers.sqlite.recipe_repo import SQLiteRecipeRepo + from storage.providers.sqlite.thread_launch_pref_repo import SQLiteThreadLaunchPrefRepo + from storage.providers.sqlite.thread_repo import SQLiteThreadRepo + + db = resolve_role_db_path(SQLiteDBRole.MAIN) + chat_db = resolve_role_db_path(SQLiteDBRole.CHAT) + + app.state.member_repo = SQLiteMemberRepo(db) + app.state.account_repo = SQLiteAccountRepo(db) + app.state.entity_repo = SQLiteEntityRepo(db) + app.state.thread_repo = SQLiteThreadRepo(db) + app.state.thread_launch_pref_repo = SQLiteThreadLaunchPrefRepo(db) + app.state.recipe_repo = SQLiteRecipeRepo(db) + app.state.chat_repo = SQLiteChatRepo(chat_db) + app.state.chat_entity_repo = SQLiteChatEntityRepo(chat_db) + app.state.chat_message_repo = SQLiteChatMessageRepo(chat_db) from backend.web.services.auth_service import AuthService - if _storage_strategy == "supabase": - app.state.auth_service = AuthService( - members=app.state.member_repo, - accounts=app.state.account_repo, - entities=app.state.entity_repo, - supabase_client=_supabase_client, - invite_codes=app.state.invite_code_repo, - ) - else: - app.state.auth_service = AuthService( - members=app.state.member_repo, - accounts=app.state.account_repo, - entities=app.state.entity_repo, - supabase_client=None, - ) + app.state.auth_service = AuthService( + members=app.state.member_repo, + accounts=app.state.account_repo, + ) # Dev bypass: seed dev-user + initial agents on first startup from backend.web.core.dependencies import _DEV_SKIP_AUTH @@ -191,39 +145,78 @@ async def lifespan(app: FastAPI): if _DEV_SKIP_AUTH: _seed_dev_user(app) - from 
backend.web.services.chat_events import ChatEventBus - from backend.web.services.typing_tracker import TypingTracker + from messaging.realtime.bridge import SupabaseRealtimeBridge + from messaging.realtime.typing import TypingTracker as MessagingTypingTracker + + app.state.chat_event_bus = SupabaseRealtimeBridge() + app.state.typing_tracker = MessagingTypingTracker(app.state.chat_event_bus) + + # Messaging system — Supabase-backed when SUPABASE env vars are available + _supabase_url = os.getenv("SUPABASE_INTERNAL_URL") or os.getenv("SUPABASE_PUBLIC_URL") + _supabase_key = os.getenv("LEON_SUPABASE_ANON_KEY") or os.getenv("LEON_SUPABASE_SERVICE_ROLE_KEY") + _messaging_supabase_available = bool(_supabase_url and _supabase_key) + + if _messaging_supabase_available: + from backend.web.core.supabase_factory import create_messaging_supabase_client + from storage.providers.supabase.messaging_repo import ( + SupabaseChatMemberRepo, + SupabaseMessageReadRepo, + SupabaseMessagesRepo, + SupabaseRelationshipRepo, + ) - app.state.chat_event_bus = ChatEventBus() - app.state.typing_tracker = TypingTracker(app.state.chat_event_bus) + _supabase = create_messaging_supabase_client() + _chat_member_repo = SupabaseChatMemberRepo(_supabase) + _messages_repo = SupabaseMessagesRepo(_supabase) + _message_read_repo = SupabaseMessageReadRepo(_supabase) + app.state.relationship_repo = SupabaseRelationshipRepo(_supabase) - from backend.web.services.delivery_resolver import DefaultDeliveryResolver + from storage.providers.supabase.contact_repo import SupabaseContactRepo - if _storage_strategy == "supabase": - app.state.contact_repo = SupabaseContactRepo(_supabase_client) + app.state.contact_repo = SupabaseContactRepo(_supabase) else: - from storage.providers.sqlite.contact_repo import SQLiteContactRepo + import logging as _logging + + _logging.getLogger(__name__).warning("Messaging Supabase client not configured — relationship/contact features unavailable.") + _chat_member_repo = None + 
_messages_repo = None + _message_read_repo = None + app.state.relationship_repo = None + app.state.contact_repo = None + + from messaging.delivery.resolver import HireVisitDeliveryResolver + + delivery_resolver = HireVisitDeliveryResolver( + contact_repo=app.state.contact_repo, + chat_member_repo=_chat_member_repo, + relationship_repo=app.state.relationship_repo, + ) - app.state.contact_repo = SQLiteContactRepo(chat_db) + from messaging.relationships.service import RelationshipService - delivery_resolver = DefaultDeliveryResolver(app.state.contact_repo, app.state.chat_entity_repo) + app.state.relationship_service = RelationshipService( + app.state.relationship_repo, + entity_repo=app.state.entity_repo, + ) - from backend.web.services.chat_service import ChatService + from messaging.service import MessagingService - app.state.chat_service = ChatService( + app.state.messaging_service = MessagingService( chat_repo=app.state.chat_repo, - chat_entity_repo=app.state.chat_entity_repo, - chat_message_repo=app.state.chat_message_repo, + chat_member_repo=_chat_member_repo, + messages_repo=_messages_repo, + message_read_repo=_message_read_repo, entity_repo=app.state.entity_repo, member_repo=app.state.member_repo, - event_bus=app.state.chat_event_bus, delivery_resolver=delivery_resolver, + event_bus=app.state.chat_event_bus, ) # Wire chat delivery after event loop is available from core.agents.communication.delivery import make_chat_delivery_fn - app.state.chat_service.set_delivery_fn(make_chat_delivery_fn(app)) + _delivery_fn = make_chat_delivery_fn(app) + app.state.messaging_service.set_delivery_fn(_delivery_fn) # ---- Existing state ---- app.state.queue_manager = MessageQueueManager() @@ -260,11 +253,9 @@ async def lifespan(app: FastAPI): app.state.cron_service = cron_svc # @@@wechat-registry — create registry with delivery callback, auto-start all - from backend.web.services.wechat_service import WeChatConnectionRegistry, migrate_entity_id_dirs + from 
backend.web.services.wechat_service import WeChatConnectionRegistry from core.runtime.middleware.queue.formatters import format_wechat_message - migrate_entity_id_dirs() - async def _wechat_deliver(conn, msg): """Delivery callback — routes WeChat messages to configured thread/chat.""" routing = conn.routing @@ -278,7 +269,7 @@ async def _wechat_deliver(conn, msg): await route_message_to_brain(app, routing.id, content, source="owner", sender_name=sender_name) elif routing.type == "chat": content = format_wechat_message(sender_name, msg.from_user_id, msg.text) - app.state.chat_service.send_message(routing.id, conn.user_id, content) + app.state.chat_service.send_message(routing.id, conn.entity_id, content) app.state.wechat_registry = WeChatConnectionRegistry(delivery_fn=_wechat_deliver) app.state.wechat_registry.auto_start_all() diff --git a/backend/web/core/supabase_factory.py b/backend/web/core/supabase_factory.py index c8dc9abd1..c944a0dab 100644 --- a/backend/web/core/supabase_factory.py +++ b/backend/web/core/supabase_factory.py @@ -4,25 +4,30 @@ import os -import httpx -from supabase import ClientOptions, create_client +from supabase import create_client def create_supabase_client(): - """Build a supabase-py client from runtime environment. - - Uses SUPABASE_INTERNAL_URL when available (direct server-side access, e.g. same-host - or SSH tunnel), falling back to SUPABASE_PUBLIC_URL. trust_env=False ensures the - httpx client never routes through any system/VPN proxy. - """ - # Prefer internal URL (same-host direct connection) over public tunnel URL. 
- url = os.getenv("SUPABASE_INTERNAL_URL") or os.getenv("SUPABASE_PUBLIC_URL") + """Build a supabase-py client using service role key (legacy repos).""" + url = os.getenv("SUPABASE_PUBLIC_URL") key = os.getenv("LEON_SUPABASE_SERVICE_ROLE_KEY") if not url: - raise RuntimeError("SUPABASE_INTERNAL_URL or SUPABASE_PUBLIC_URL is required.") + raise RuntimeError("SUPABASE_PUBLIC_URL is required for Supabase storage runtime.") if not key: raise RuntimeError("LEON_SUPABASE_SERVICE_ROLE_KEY is required for Supabase storage runtime.") - schema = os.getenv("LEON_DB_SCHEMA", "public") - timeout = httpx.Timeout(30.0, connect=10.0) - http_client = httpx.Client(timeout=timeout, trust_env=False) - return create_client(url, key, options=ClientOptions(httpx_client=http_client, schema=schema)) + return create_client(url, key) + + +def create_messaging_supabase_client(): + """Build a supabase-py client for messaging repos using anon key. + + The anon key works for messaging tables which have no RLS policies + in the current self-hosted setup. + """ + url = os.getenv("SUPABASE_PUBLIC_URL") + key = os.getenv("SUPABASE_ANON_KEY") + if not url: + raise RuntimeError("SUPABASE_PUBLIC_URL is required for messaging.") + if not key: + raise RuntimeError("SUPABASE_ANON_KEY is required for messaging.") + return create_client(url, key) diff --git a/backend/web/main.py b/backend/web/main.py index 64f60e0a5..f0c49ac93 100644 --- a/backend/web/main.py +++ b/backend/web/main.py @@ -6,16 +6,9 @@ import sys from pathlib import Path -# Load .env file if ENV_FILE is specified (e.g. 
ENV_FILE=.env for local dev) -_env_file = os.getenv("ENV_FILE") -if _env_file: - from dotenv import load_dotenv - - load_dotenv(_env_file, override=False) - -import uvicorn # noqa: E402 -from fastapi import FastAPI # noqa: E402 -from fastapi.middleware.cors import CORSMiddleware # noqa: E402 +import uvicorn +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware def _ensure_windows_db_env_defaults() -> None: @@ -82,11 +75,10 @@ def _sqlite_root_supports_wal(root: Path) -> bool: from backend.web.core.lifespan import lifespan # noqa: E402 from backend.web.routers import ( # noqa: E402 auth, - chats, connections, + contacts, debug, entities, - invite_codes, marketplace, monitor, panel, @@ -96,6 +88,8 @@ def _sqlite_root_supports_wal(root: Path) -> bool: threads, webhooks, ) +from backend.web.routers import messaging as messaging_router # noqa: E402 +from messaging.relationships.router import router as relationships_router # noqa: E402 # Create FastAPI app app = FastAPI(title="Leon Web Backend", lifespan=lifespan) @@ -111,9 +105,10 @@ def _sqlite_root_supports_wal(root: Path) -> bool: # Include routers app.include_router(auth.router) -app.include_router(invite_codes.router) app.include_router(threads.router) -app.include_router(chats.router) +app.include_router(messaging_router.router) +app.include_router(contacts.router) +app.include_router(relationships_router) app.include_router(entities.router) app.include_router(entities.members_router) app.include_router(sandbox.router) diff --git a/backend/web/routers/auth.py b/backend/web/routers/auth.py index 5c5f87b5b..ea2c586ea 100644 --- a/backend/web/routers/auth.py +++ b/backend/web/routers/auth.py @@ -1,6 +1,5 @@ -"""Authentication endpoints — 3-step registration + login.""" +"""Authentication endpoints — register and login.""" -import asyncio from typing import Annotated, Any from fastapi import APIRouter, Depends, HTTPException @@ -11,67 +10,22 @@ router = APIRouter(prefix="/api/auth", 
tags=["auth"]) -# ── Registration step 1: send OTP ────────────────────────────────────────── - - -class SendOtpRequest(BaseModel): - email: str +class AuthRequest(BaseModel): + username: str password: str - invite_code: str - - -@router.post("/send-otp") -async def send_otp(payload: SendOtpRequest, app: Annotated[Any, Depends(get_app)]) -> dict: - try: - await asyncio.to_thread(_get_auth_service(app).send_otp, payload.email, payload.password, payload.invite_code) - return {"ok": True} - except ValueError as e: - raise HTTPException(400, str(e)) -# ── Registration step 2: verify OTP ──────────────────────────────────────── - - -class VerifyOtpRequest(BaseModel): - email: str - token: str - - -@router.post("/verify-otp") -async def verify_otp(payload: VerifyOtpRequest, app: Annotated[Any, Depends(get_app)]) -> dict: +@router.post("/register") +async def register(payload: AuthRequest, app: Annotated[Any, Depends(get_app)]) -> dict: try: - return await asyncio.to_thread(_get_auth_service(app).verify_register_otp, payload.email, payload.token) + return _get_auth_service(app).register(payload.username, payload.password) except ValueError as e: - raise HTTPException(400, str(e)) - - -# ── Registration step 3: set password + invite code ──────────────────────── - - -class CompleteRegisterRequest(BaseModel): - temp_token: str - invite_code: str - - -@router.post("/complete-register") -async def complete_register(payload: CompleteRegisterRequest, app: Annotated[Any, Depends(get_app)]) -> dict: - try: - return await asyncio.to_thread(_get_auth_service(app).complete_register, payload.temp_token, payload.invite_code) - except ValueError as e: - raise HTTPException(400, str(e)) - - -# ── Login ─────────────────────────────────────────────────────────────────── - - -class LoginRequest(BaseModel): - identifier: str # email 或 mycel_id(纯数字字符串) - password: str + raise HTTPException(409, str(e)) @router.post("/login") -async def login(payload: LoginRequest, app: Annotated[Any, 
Depends(get_app)]) -> dict: +async def login(payload: AuthRequest, app: Annotated[Any, Depends(get_app)]) -> dict: try: - return await asyncio.to_thread(_get_auth_service(app).login, payload.identifier, payload.password) + return _get_auth_service(app).login(payload.username, payload.password) except ValueError as e: raise HTTPException(401, str(e)) diff --git a/backend/web/routers/chats.py b/backend/web/routers/chats.py index 5e7e3ff9e..8a64073eb 100644 --- a/backend/web/routers/chats.py +++ b/backend/web/routers/chats.py @@ -33,7 +33,7 @@ async def list_chats( user_id: Annotated[str, Depends(get_current_user_id)], app: Annotated[Any, Depends(get_app)], ): - """List all chats for the current user (social identity from JWT).""" + """List all chats for the current user.""" return app.state.chat_service.list_chats_for_user(user_id) @@ -43,7 +43,7 @@ async def create_chat( user_id: Annotated[str, Depends(get_current_user_id)], app: Annotated[Any, Depends(get_app)], ): - """Create a chat between users. 2 users = 1:1 chat, 3+ = group chat.""" + """Create a chat between entities. 
2 entities = 1:1 chat, 3+ = group chat.""" chat_service = app.state.chat_service try: if len(body.user_ids) >= 3: @@ -65,33 +65,22 @@ async def get_chat( chat = app.state.chat_repo.get_by_id(chat_id) if not chat: raise HTTPException(404, "Chat not found") - participants = app.state.chat_entity_repo.list_participants(chat_id) + participants = app.state.chat_entity_repo.list_members(chat_id) entity_repo = app.state.entity_repo member_repo = app.state.member_repo entities_info = [] for p in participants: - e = entity_repo.get_by_id(p.user_id) + e = entity_repo.get_by_id(p.entity_id) if e: m = member_repo.get_by_id(e.member_id) entities_info.append( { - "id": p.user_id, + "id": e.id, "name": e.name, "type": e.type, "avatar_url": avatar_url(e.member_id, bool(m.avatar if m else None)), } ) - else: - m = member_repo.get_by_id(p.user_id) - if m: - entities_info.append( - { - "id": p.user_id, - "name": m.name, - "type": "human", - "avatar_url": avatar_url(m.id, bool(m.avatar)), - } - ) return { "id": chat.id, "title": chat.title, @@ -111,23 +100,20 @@ async def list_messages( ): """List messages in a chat.""" msgs = app.state.chat_message_repo.list_by_chat(chat_id, limit=limit, before=before) + # Batch entity lookup to avoid N+1 entity_repo = app.state.entity_repo - member_repo = app.state.member_repo - sender_ids = {m.sender_id for m in msgs} - sender_names: dict[str, str] = {} + sender_ids = {m.sender_id for m in msgs} # sender_id is the storage field name + sender_map = {} for sid in sender_ids: e = entity_repo.get_by_id(sid) if e: - sender_names[sid] = e.name - else: - m = member_repo.get_by_id(sid) - sender_names[sid] = m.name if m else "unknown" + sender_map[sid] = e return [ { "id": m.id, "chat_id": m.chat_id, "sender_id": m.sender_id, - "sender_name": sender_names.get(m.sender_id, "unknown"), + "sender_name": sender_map[m.sender_id].name if m.sender_id in sender_map else "unknown", "content": m.content, "mentioned_ids": m.mentioned_ids, "created_at": m.created_at, 
@@ -159,8 +145,15 @@ async def send_message( """Send a message in a chat.""" if not body.content.strip(): raise HTTPException(400, "Content cannot be empty") - # Verify sender_id belongs to the authenticated user - _verify_participant_ownership(app, body.sender_id, user_id) + # Verify sender_id belongs to the authenticated member + sender = app.state.entity_repo.get_by_id(body.sender_id) + if not sender: + raise HTTPException(404, "Sender entity not found") + # Entity belongs to member directly, or to an agent owned by member + if sender.member_id != user_id: + agent_member = app.state.member_repo.get_by_id(sender.member_id) + if not agent_member or agent_member.owner_user_id != user_id: + raise HTTPException(403, "Sender entity does not belong to you") chat_service = app.state.chat_service msg = chat_service.send_message(chat_id, body.sender_id, body.content, body.mentioned_ids) return { @@ -221,19 +214,22 @@ class SetContactBody(BaseModel): relation: Literal["normal", "blocked", "muted"] -def _verify_participant_ownership(app: Any, participant_id: str, user_id: str) -> None: - """Raise 403 if participant_id does not belong to the authenticated user. +def _verify_entity_ownership(app: Any, entity_id: str, user_id: str) -> None: + """Raise 403 if entity does not belong to the authenticated member. - For humans: participant_id == user_id (direct match). - For agents: participant_id == member_id, and agent_member.owner_user_id == user_id. + Ownership: entity belongs to member directly, OR entity belongs to + an agent member owned by the authenticated member. 
""" - if participant_id == user_id: + entity = app.state.entity_repo.get_by_id(entity_id) + if not entity: + raise HTTPException(403, "Entity does not belong to you") + if entity.member_id == user_id: return - # Check if it's an agent member owned by this user - agent_member = app.state.member_repo.get_by_id(participant_id) + # Check if entity belongs to an agent owned by this user + agent_member = app.state.member_repo.get_by_id(entity.member_id) if agent_member and agent_member.owner_user_id == user_id: return - raise HTTPException(403, "Participant does not belong to you") + raise HTTPException(403, "Entity does not belong to you") @router.post("/contacts") @@ -243,7 +239,7 @@ async def set_contact( app: Annotated[Any, Depends(get_app)], ): """Set a directional contact relationship (block/mute/normal).""" - _verify_participant_ownership(app, body.owner_id, user_id) + _verify_entity_ownership(app, body.owner_id, user_id) import time from storage.contracts import ContactRow @@ -269,7 +265,7 @@ async def delete_contact( app: Annotated[Any, Depends(get_app)], ): """Delete a contact relationship.""" - _verify_participant_ownership(app, owner_id, user_id) + _verify_entity_ownership(app, owner_id, user_id) contact_repo = app.state.contact_repo contact_repo.delete(owner_id, target_id) return {"status": "deleted"} @@ -293,8 +289,8 @@ async def mute_chat( user_id: Annotated[str, Depends(get_current_user_id)], app: Annotated[Any, Depends(get_app)], ): - """Mute/unmute a chat for the current user.""" - _verify_participant_ownership(app, body.user_id, user_id) + """Mute/unmute a chat for a specific entity.""" + _verify_entity_ownership(app, body.user_id, user_id) chat_entity_repo = app.state.chat_entity_repo chat_entity_repo.update_mute(chat_id, body.user_id, body.muted, body.mute_until) return {"status": "ok", "muted": body.muted} @@ -310,7 +306,7 @@ async def delete_chat( chat = app.state.chat_repo.get_by_id(chat_id) if not chat: raise HTTPException(404, "Chat not found") 
- if not app.state.chat_entity_repo.is_participant_in_chat(chat_id, user_id): + if not app.state.chat_entity_repo.is_member_in_chat(chat_id, user_id): raise HTTPException(403, "Not a participant of this chat") app.state.chat_repo.delete(chat_id) return {"status": "deleted"} diff --git a/backend/web/routers/connections.py b/backend/web/routers/connections.py index c5fa0adc2..e1e48684f 100644 --- a/backend/web/routers/connections.py +++ b/backend/web/routers/connections.py @@ -1,6 +1,6 @@ """Connection endpoints — manage external platform connections (WeChat, etc.). -@@@per-user — all endpoints scoped by user_id (the user's social identity). +@@@per-user — all endpoints scoped by user_id. """ from typing import Annotated, Any @@ -127,7 +127,10 @@ async def wechat_routing_targets( user_id: Annotated[str, Depends(get_current_user_id)], app: Annotated[Any, Depends(get_app)], ) -> dict: - """List available threads and chats for the routing picker.""" + """List available threads and chats for the routing picker. + + user_id: needed for thread ownership lookup and chat participation lookup. 
+ """ from backend.web.utils.serializers import avatar_url raw_threads = app.state.thread_repo.list_by_owner_user_id(user_id) diff --git a/backend/web/routers/contacts.py b/backend/web/routers/contacts.py new file mode 100644 index 000000000..f60caee16 --- /dev/null +++ b/backend/web/routers/contacts.py @@ -0,0 +1,71 @@ +"""Contacts API router — /api/contacts endpoints.""" + +from __future__ import annotations + +import logging +import time +from typing import Annotated, Any, Literal + +from fastapi import APIRouter, Depends +from pydantic import BaseModel + +from backend.web.core.dependencies import get_app, get_current_user_id +from storage.contracts import ContactRow + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/api/contacts", tags=["contacts"]) + + +class SetContactBody(BaseModel): + target_id: str + relation: Literal["normal", "blocked", "muted"] + + +@router.get("") +async def list_contacts( + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + """List contacts (blocked/muted) for the current user.""" + rows = app.state.contact_repo.list_for_user(user_id) + return [ + { + "owner_user_id": row.owner_id, + "target_user_id": row.target_id, + "relation": row.relation, + "created_at": row.created_at, + "updated_at": row.updated_at, + } + for row in rows + ] + + +@router.post("") +async def set_contact( + body: SetContactBody, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + """Upsert contact (block/mute/normal).""" + app.state.contact_repo.upsert( + ContactRow( + owner_id=user_id, + target_id=body.target_id, + relation=body.relation, + created_at=time.time(), + updated_at=time.time(), + ) + ) + return {"status": "ok", "relation": body.relation} + + +@router.delete("/{target_id}") +async def delete_contact( + target_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + """Remove 
contact entry.""" + app.state.contact_repo.delete(user_id, target_id) + return {"status": "deleted"} diff --git a/backend/web/routers/entities.py b/backend/web/routers/entities.py index 96f636955..ec4fcb99c 100644 --- a/backend/web/routers/entities.py +++ b/backend/web/routers/entities.py @@ -160,46 +160,32 @@ async def list_entities( app: Annotated[Any, Depends(get_app)], ): """List chattable entities for discovery (New Chat picker). - Humans are represented by their user_id; agents by their member_id. - Excludes the current user (you don't chat with yourself).""" + Excludes only the current user's own human entity (you don't chat with yourself).""" entity_repo = app.state.entity_repo member_repo = app.state.member_repo + # Only exclude self (human entity). Own agents are allowed — user can pull them into group chats. + exclude_member_ids = {user_id} + + all_entities = entity_repo.list_all() members = member_repo.list_all() member_map = {m.id: m for m in members} - + member_avatars = {m.id: bool(m.avatar) for m in members} + # @@@entity-is-social-identity — response uses entity_id only, no member_id leak. + # member_id is internal (template), entity_id is the social identity. 
items = [] - - # Human participants: all human members except self - for m in members: - if m.type != "human" or m.id == user_id: - continue - items.append( - { - "id": m.id, # user_id IS the social identity for humans - "name": m.name, - "type": "human", - "avatar_url": avatar_url(m.id, bool(m.avatar)), - "owner_name": None, - "member_name": m.name, - "thread_id": None, - "is_main": None, - "branch_index": None, - } - ) - - # Agent participants: from entity_repo (agent entities have id = member_id) - all_entities = entity_repo.list_by_type("agent") for entity in all_entities: + if entity.member_id in exclude_member_ids: + continue member = member_map.get(entity.member_id) owner = member_map.get(member.owner_user_id) if member and member.owner_user_id else None thread = app.state.thread_repo.get_by_id(entity.thread_id) if entity.thread_id else None items.append( { - "id": entity.id, # entity.id = member_id = social identity for agents + "id": entity.id, "name": entity.name, "type": entity.type, - "avatar_url": avatar_url(entity.member_id, bool(member.avatar if member else None)), + "avatar_url": avatar_url(entity.member_id, member_avatars.get(entity.member_id, False)), "owner_name": owner.name if owner else None, "member_name": member.name if member else None, "thread_id": entity.thread_id, @@ -210,16 +196,84 @@ async def list_entities( return items -@router.get("/{user_id}/agent-thread") +def _get_entity_by_id_or_member(app: Any, id_or_member: str): + """Resolve entity by entity_id first, then by member_id (main thread entity).""" + entity = app.state.entity_repo.get_by_id(id_or_member) + if entity: + return entity + # Try as member_id: find the main entity for this member + entities = app.state.entity_repo.get_by_member_id(id_or_member) + if entities: + # Prefer the main thread entity (lowest seq) + main = sorted(entities, key=lambda e: e.id)[0] + return main + return None + + +@router.get("/{entity_id}/profile") +async def get_entity_profile( + entity_id: str, + 
app: Annotated[Any, Depends(get_app)], +): + """Public agent profile — no auth required. Only type=='agent'.""" + entity = _get_entity_by_id_or_member(app, entity_id) + if not entity: + raise HTTPException(404, "Entity not found") + if entity.type != "agent": + raise HTTPException(403, "Only agent profiles are public") + member = app.state.member_repo.get_by_id(entity.member_id) if entity.member_id else None + return { + "id": entity.member_id, + "name": entity.name, + "type": "agent", + "avatar_url": avatar_url(entity.member_id, bool(member.avatar if member else None)), + "description": member.description if member else None, + } + + +@router.get("/{entity_id}/invite-link") +async def get_invite_link( + entity_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + """Generate invite link for an agent entity. Owner only.""" + entity = _get_entity_by_id_or_member(app, entity_id) + if not entity: + raise HTTPException(404, "Entity not found") + if entity.type != "agent": + raise HTTPException(400, "Invite links only for agents") + member = app.state.member_repo.get_by_id(entity.member_id) if entity.member_id else None + if not member or member.owner_user_id != user_id: + raise HTTPException(403, "Not your agent") + member_id = entity.member_id + return { + "url": f"/a/{member_id}", + "entity_id": member_id, + } + + +@router.get("/{entity_id}/agent-thread") async def get_agent_thread( - user_id: str, - current_user_id: Annotated[str, Depends(get_current_user_id)], + entity_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], app: Annotated[Any, Depends(get_app)], ): - """Get the thread_id for an agent's main thread. user_id here is the agent's member_id.""" - entity = app.state.entity_repo.get_by_id(user_id) + """Get the thread_id for an entity's agent. 
Accepts human or agent entity.""" + entity = app.state.entity_repo.get_by_id(entity_id) if not entity: raise HTTPException(404, "Entity not found") + # If this is already an agent with a thread, return directly if entity.type == "agent" and entity.thread_id: - return {"user_id": user_id, "thread_id": entity.thread_id} - raise HTTPException(404, "No agent thread found") + return {"entity_id": entity_id, "thread_id": entity.thread_id} + # If this is a human entity, find the agent entity owned by the same member + member = app.state.member_repo.get_by_id(entity.member_id) + if member: + # Find agent members owned by this member + agents = app.state.member_repo.list_by_owner_user_id(member.id) + for agent_member in agents: + agent_entities = app.state.entity_repo.get_by_member_id(agent_member.id) + for ae in agent_entities: + if ae.type == "agent" and ae.thread_id: + return {"entity_id": ae.id, "thread_id": ae.thread_id} + raise HTTPException(404, "No agent thread found for this entity") diff --git a/backend/web/routers/marketplace.py b/backend/web/routers/marketplace.py index 898708195..e96256201 100644 --- a/backend/web/routers/marketplace.py +++ b/backend/web/routers/marketplace.py @@ -3,7 +3,7 @@ import asyncio from typing import Annotated, Any -from fastapi import APIRouter, Depends, HTTPException, Request +from fastapi import APIRouter, Depends, HTTPException from backend.web.core.dependencies import get_current_user_id from backend.web.models.marketplace import ( @@ -17,16 +17,21 @@ router = APIRouter(prefix="/api/marketplace", tags=["marketplace"]) -async def _verify_member_ownership(member_id: str, user_id: str, member_repo: Any) -> None: - """Raise 403 if *user_id* does not own *member_id*.""" +async def _verify_member_ownership(member_id: str, user_id: str) -> None: + """Raise 403 if *user_id* does not own *member_id* in the SQLite registry.""" + from storage.providers.sqlite.member_repo import SQLiteMemberRepo def _check() -> None: - member = 
member_repo.get_by_id(member_id) - if member is None or member.owner_user_id != user_id: - raise HTTPException( - status_code=403, - detail="Not authorized to publish this member", - ) + repo = SQLiteMemberRepo() + try: + member = repo.get_by_id(member_id) + if member is None or member.owner_user_id != user_id: + raise HTTPException( + status_code=403, + detail="Not authorized to publish this member", + ) + finally: + repo.close() await asyncio.to_thread(_check) @@ -35,10 +40,8 @@ def _check() -> None: async def publish_to_marketplace( req: PublishToMarketplaceRequest, user_id: Annotated[str, Depends(get_current_user_id)], - request: Request, ) -> dict[str, Any]: - member_repo = request.app.state.member_repo - await _verify_member_ownership(req.member_id, user_id, member_repo) + await _verify_member_ownership(req.member_id, user_id) from backend.web.services.profile_service import get_profile @@ -75,10 +78,8 @@ async def download_from_marketplace( async def upgrade_from_marketplace( req: UpgradeFromMarketplaceRequest, user_id: Annotated[str, Depends(get_current_user_id)], - request: Request, ) -> dict[str, Any]: - member_repo = request.app.state.member_repo - await _verify_member_ownership(req.member_id, user_id, member_repo) + await _verify_member_ownership(req.member_id, user_id) result = await asyncio.to_thread( marketplace_client.upgrade, diff --git a/backend/web/routers/messaging.py b/backend/web/routers/messaging.py new file mode 100644 index 000000000..553944bdb --- /dev/null +++ b/backend/web/routers/messaging.py @@ -0,0 +1,357 @@ +"""Messaging API router — replaces chats.py. + +All operations go through MessagingService (Supabase-backed). +No legacy fallback. 
+""" + +from __future__ import annotations + +import asyncio +import json +import logging +from datetime import UTC, datetime +from typing import Annotated, Any, Literal + +from fastapi import APIRouter, Depends, HTTPException, Query +from pydantic import BaseModel + +from backend.web.core.dependencies import get_app, get_current_user_id +from backend.web.utils.serializers import avatar_url + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/api/chats", tags=["chats"]) + + +# --------------------------------------------------------------------------- +# Request models +# --------------------------------------------------------------------------- + + +class CreateChatBody(BaseModel): + user_ids: list[str] + title: str | None = None + + +class SendMessageBody(BaseModel): + content: str + sender_id: str + mentioned_ids: list[str] | None = None + message_type: str = "human" + signal: str | None = None + + +class SetContactBody(BaseModel): + owner_id: str + target_id: str + relation: Literal["normal", "blocked", "muted"] + + +class MuteChatBody(BaseModel): + user_id: str + muted: bool + mute_until: float | None = None + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _messaging(app: Any): + svc = getattr(app.state, "messaging_service", None) + if svc is None: + raise HTTPException(503, "MessagingService not initialized") + return svc + + +def _verify_member_ownership(app: Any, member_id: str, user_id: str) -> None: + member = app.state.member_repo.get_by_id(member_id) + if not member: + raise HTTPException(403, "Member not found") + if member.id == user_id: + return # human member sending as themselves + if member.owner_user_id == user_id: + return # agent owned by current user + raise HTTPException(403, "Member does not belong to you") + + +def _msg_response(m: dict[str, Any], member_repo: Any) -> dict[str, Any]: + 
sender = member_repo.get_by_id(m.get("sender_id", "")) + return { + "id": m["id"], + "chat_id": m["chat_id"], + "sender_id": m.get("sender_id"), + "sender_name": sender.name if sender else "unknown", + "content": m["content"], + "message_type": m.get("message_type", "human"), + "mentioned_ids": m.get("mentioned_ids") or m.get("mentions") or [], + "signal": m.get("signal"), + "retracted_at": m.get("retracted_at"), + "created_at": m.get("created_at"), + } + + +# --------------------------------------------------------------------------- +# Chat list / create +# --------------------------------------------------------------------------- + + +@router.get("") +async def list_chats( + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + return _messaging(app).list_chats_for_user(user_id) + + +@router.post("") +async def create_chat( + body: CreateChatBody, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + try: + if len(body.user_ids) >= 3: + chat = _messaging(app).create_group_chat(body.user_ids, body.title) + else: + chat = _messaging(app).find_or_create_chat(body.user_ids, body.title) + return { + "id": chat["id"], + "title": chat.get("title"), + "status": chat.get("status"), + "created_at": chat.get("created_at"), + } + except ValueError as e: + raise HTTPException(400, str(e)) + + +# --------------------------------------------------------------------------- +# Chat detail +# --------------------------------------------------------------------------- + + +@router.get("/{chat_id}") +async def get_chat( + chat_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + chat = app.state.chat_repo.get_by_id(chat_id) + if not chat: + raise HTTPException(404, "Chat not found") + members = _messaging(app)._members_repo.list_members(chat_id) + entities_info = [] + for m in members: + uid = m.get("user_id") + e = 
app.state.entity_repo.get_by_id(uid) if uid else None + if e: + mem = app.state.member_repo.get_by_id(e.member_id) + entities_info.append( + { + "id": e.id, + "name": e.name, + "type": e.type, + "avatar_url": avatar_url(e.member_id, bool(mem.avatar if mem else None)), + } + ) + return { + "id": chat.id, + "title": chat.title, + "status": chat.status, + "created_at": chat.created_at, + "entities": entities_info, + } + + +# --------------------------------------------------------------------------- +# Messages +# --------------------------------------------------------------------------- + + +@router.get("/{chat_id}/messages") +async def list_messages( + chat_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], + limit: int = Query(50, ge=1, le=200), + before: str | None = Query(None), +): + msgs = _messaging(app).list_messages(chat_id, limit=limit, before=before, viewer_id=user_id) + return [_msg_response(m, app.state.member_repo) for m in msgs] + + +@router.post("/{chat_id}/messages") +async def send_message( + chat_id: str, + body: SendMessageBody, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + if not body.content.strip(): + raise HTTPException(400, "Content cannot be empty") + _verify_member_ownership(app, body.sender_id, user_id) + msg = _messaging(app).send( + chat_id, + body.sender_id, + body.content, + mentions=body.mentioned_ids, + signal=body.signal, + message_type=body.message_type, + ) + return _msg_response(msg, app.state.entity_repo) + + +@router.post("/{chat_id}/messages/{message_id}/retract") +async def retract_message( + chat_id: str, + message_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + ok = _messaging(app).retract(message_id, user_id) + if not ok: + raise HTTPException(400, "Cannot retract: not sender, already retracted, or 2-min window expired") + return {"status": 
"retracted"} + + +@router.delete("/{chat_id}/messages/{message_id}") +async def delete_message_for_self( + chat_id: str, + message_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + _messaging(app).delete_for(message_id, user_id) + return {"status": "deleted"} + + +@router.post("/{chat_id}/read") +async def mark_read( + chat_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + _messaging(app).mark_read(chat_id, user_id) + return {"status": "ok"} + + +# --------------------------------------------------------------------------- +# Delete chat +# --------------------------------------------------------------------------- + + +@router.delete("/{chat_id}") +async def delete_chat( + chat_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + chat = app.state.chat_repo.get_by_id(chat_id) + if not chat: + raise HTTPException(404, "Chat not found") + if not _messaging(app)._members_repo.is_member(chat_id, user_id): + raise HTTPException(403, "Not a participant of this chat") + app.state.chat_repo.delete(chat_id) + return {"status": "deleted"} + + +# --------------------------------------------------------------------------- +# SSE stream (typing indicators fallback, messages come via Supabase Realtime) +# --------------------------------------------------------------------------- + + +@router.get("/{chat_id}/events") +async def stream_chat_events( + chat_id: str, + token: str | None = None, + app: Annotated[Any, Depends(get_app)] = None, +): + from backend.web.core.dependencies import _DEV_SKIP_AUTH + + if not _DEV_SKIP_AUTH: + if not token: + raise HTTPException(401, "Missing token") + try: + app.state.auth_service.verify_token(token) + except ValueError as e: + raise HTTPException(401, str(e)) + + from fastapi.responses import StreamingResponse + + event_bus = app.state.chat_event_bus + queue = 
event_bus.subscribe(chat_id) + + async def event_generator(): + try: + yield "retry: 5000\n\n" + while True: + try: + event = await asyncio.wait_for(queue.get(), timeout=30) + event_type = event.get("event", "message") + data = event.get("data", {}) + yield f"event: {event_type}\ndata: {json.dumps(data, ensure_ascii=False)}\n\n" + except TimeoutError: + yield ": keepalive\n\n" + finally: + event_bus.unsubscribe(chat_id, queue) + + return StreamingResponse(event_generator(), media_type="text/event-stream") + + +# --------------------------------------------------------------------------- +# Contact management +# --------------------------------------------------------------------------- + + +@router.post("/contacts") +async def set_contact( + body: SetContactBody, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + _verify_member_ownership(app, body.owner_id, user_id) + import time + + from storage.contracts import ContactRow + + app.state.contact_repo.upsert( + ContactRow( + owner_id=body.owner_id, + target_id=body.target_id, + relation=body.relation, + created_at=time.time(), + updated_at=time.time(), + ) + ) + return {"status": "ok", "relation": body.relation} + + +@router.delete("/contacts/{owner_id}/{target_id}") +async def delete_contact( + owner_id: str, + target_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + _verify_member_ownership(app, owner_id, user_id) + app.state.contact_repo.delete(owner_id, target_id) + return {"status": "deleted"} + + +# --------------------------------------------------------------------------- +# Chat mute +# --------------------------------------------------------------------------- + + +@router.post("/{chat_id}/mute") +async def mute_chat( + chat_id: str, + body: MuteChatBody, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + _verify_member_ownership(app, 
body.user_id, user_id) + mute_until_iso = datetime.fromtimestamp(body.mute_until, tz=UTC).isoformat() if body.mute_until else None + _messaging(app)._members_repo.update_mute(chat_id, body.user_id, body.muted, mute_until_iso) + return {"status": "ok", "muted": body.muted} diff --git a/backend/web/routers/panel.py b/backend/web/routers/panel.py index 3fe2f481b..0623d584f 100644 --- a/backend/web/routers/panel.py +++ b/backend/web/routers/panel.py @@ -33,10 +33,8 @@ @router.get("/members") async def list_members( user_id: Annotated[str, Depends(get_current_user_id)], - request: Request, ) -> dict[str, Any]: - member_repo = getattr(request.app.state, "member_repo", None) - items = await asyncio.to_thread(member_service.list_members, user_id, member_repo=member_repo) + items = await asyncio.to_thread(member_service.list_members, user_id) return {"items": items} @@ -52,25 +50,13 @@ async def get_member(member_id: str) -> dict[str, Any]: async def create_member( req: CreateMemberRequest, user_id: Annotated[str, Depends(get_current_user_id)], - request: Request, ) -> dict[str, Any]: - member_repo = getattr(request.app.state, "member_repo", None) - return await asyncio.to_thread(member_service.create_member, req.name, req.description, owner_user_id=user_id, member_repo=member_repo) + return await asyncio.to_thread(member_service.create_member, req.name, req.description, owner_user_id=user_id) @router.put("/members/{member_id}") -async def update_member(member_id: str, req: UpdateMemberRequest, request: Request) -> dict[str, Any]: - member_repo = getattr(request.app.state, "member_repo", None) - entity_repo = getattr(request.app.state, "entity_repo", None) - thread_repo = getattr(request.app.state, "thread_repo", None) - item = await asyncio.to_thread( - member_service.update_member, - member_id, - member_repo=member_repo, - entity_repo=entity_repo, - thread_repo=thread_repo, - **req.model_dump(), - ) +async def update_member(member_id: str, req: UpdateMemberRequest) -> 
dict[str, Any]: + item = await asyncio.to_thread(member_service.update_member, member_id, **req.model_dump()) if not item: raise HTTPException(404, "Member not found") return item @@ -95,11 +81,10 @@ async def publish_member(member_id: str, req: PublishMemberRequest) -> dict[str, @router.delete("/members/{member_id}") -async def delete_member(member_id: str, request: Request) -> dict[str, Any]: +async def delete_member(member_id: str) -> dict[str, Any]: if member_id == "__leon__": raise HTTPException(403, "Cannot delete builtin member") - member_repo = getattr(request.app.state, "member_repo", None) - ok = await asyncio.to_thread(member_service.delete_member, member_id, member_repo=member_repo) + ok = await asyncio.to_thread(member_service.delete_member, member_id) if not ok: raise HTTPException(404, "Member not found") return {"success": True} diff --git a/backend/web/routers/sandbox.py b/backend/web/routers/sandbox.py index 1b7a3d02a..3749ca0a0 100644 --- a/backend/web/routers/sandbox.py +++ b/backend/web/routers/sandbox.py @@ -5,7 +5,7 @@ import sys from typing import Annotated, Any -from fastapi import APIRouter, Depends, HTTPException, Query, Request +from fastapi import APIRouter, Depends, HTTPException, Query from backend.web.core.dependencies import get_current_user_id from backend.web.services import sandbox_service @@ -124,16 +124,8 @@ async def list_sandbox_sessions() -> dict[str, Any]: @router.get("/leases/mine") async def list_my_leases( user_id: Annotated[str, Depends(get_current_user_id)], - request: Request, ) -> dict[str, Any]: - thread_repo = getattr(request.app.state, "thread_repo", None) - member_repo = getattr(request.app.state, "member_repo", None) - leases = await asyncio.to_thread( - sandbox_service.list_user_leases, - user_id, - thread_repo=thread_repo, - member_repo=member_repo, - ) + leases = await asyncio.to_thread(sandbox_service.list_user_leases, user_id) return {"leases": leases} diff --git a/backend/web/routers/settings.py 
b/backend/web/routers/settings.py index f765c0962..d4f0ad77d 100644 --- a/backend/web/routers/settings.py +++ b/backend/web/routers/settings.py @@ -56,21 +56,6 @@ def save_settings(settings: WorkspaceSettings) -> None: json.dump(settings.model_dump(), f, indent=2, ensure_ascii=False) -def _get_settings_repo(request: Request): - """Return the user_settings_repo wired by lifespan, or None in sqlite mode.""" - return getattr(request.app.state, "user_settings_repo", None) - - -def _try_get_user_id(request: Request) -> str | None: - """Extract user_id from JWT without raising; returns None if unavailable.""" - try: - from backend.web.core.dependencies import _extract_jwt_payload - - return _extract_jwt_payload(request)["user_id"] - except Exception: - return None - - # ============================================================================ # Models config (models.json) # ============================================================================ @@ -129,21 +114,9 @@ class UserSettings(BaseModel): @router.get("") -async def get_settings(request: Request) -> UserSettings: - """Get combined settings (workspace + default_model from Supabase or preferences.json, models from models.json).""" - repo = _get_settings_repo(request) - user_id = _try_get_user_id(request) if repo else None - - if repo and user_id: - row = repo.get(user_id) - ws = WorkspaceSettings( - default_workspace=row.get("default_workspace"), - recent_workspaces=row.get("recent_workspaces") or [], - default_model=row.get("default_model") or "leon:large", - ) - else: - ws = load_settings() - +async def get_settings() -> UserSettings: + """Get combined settings (workspace + default_model from preferences.json, models from models.json).""" + ws = load_settings() models = load_merged_models() # Build compat view @@ -196,7 +169,7 @@ async def browse_filesystem(path: str = Query(default="~"), include_files: bool @router.get("/read") async def read_local_file(path: str = Query(...)) -> dict[str, Any]: """Read a 
local file's content (for SandboxBrowser in resources page).""" - _read_max_bytes = 100 * 1024 + _READ_MAX_BYTES = 100 * 1024 # noqa: N806 try: target = Path(path).expanduser().resolve() if not target.exists(): @@ -204,8 +177,8 @@ async def read_local_file(path: str = Query(...)) -> dict[str, Any]: if target.is_dir(): raise HTTPException(status_code=400, detail="Path is a directory") raw = target.read_bytes() - truncated = len(raw) > _read_max_bytes - content = raw[:_read_max_bytes].decode(errors="replace") + truncated = len(raw) > _READ_MAX_BYTES + content = raw[:_READ_MAX_BYTES].decode(errors="replace") return {"path": str(target), "content": content, "truncated": truncated} except HTTPException: raise @@ -214,7 +187,7 @@ async def read_local_file(path: str = Query(...)) -> dict[str, Any]: @router.post("/workspace") -async def set_default_workspace(request: WorkspaceRequest, req: Request) -> dict[str, Any]: +async def set_default_workspace(request: WorkspaceRequest) -> dict[str, Any]: """Set default workspace path.""" workspace_path = Path(request.workspace).expanduser().resolve() if not workspace_path.exists(): @@ -222,45 +195,35 @@ async def set_default_workspace(request: WorkspaceRequest, req: Request) -> dict if not workspace_path.is_dir(): raise HTTPException(status_code=400, detail="Workspace path is not a directory") - workspace_str = str(workspace_path) + settings = load_settings() + settings.default_workspace = str(workspace_path) - repo = _get_settings_repo(req) - user_id = _try_get_user_id(req) if repo else None - if repo and user_id: - repo.set_default_workspace(user_id, workspace_str) - else: - settings = load_settings() - settings.default_workspace = workspace_str - if workspace_str in settings.recent_workspaces: - settings.recent_workspaces.remove(workspace_str) - settings.recent_workspaces.insert(0, workspace_str) - settings.recent_workspaces = settings.recent_workspaces[:5] - save_settings(settings) + workspace_str = str(workspace_path) + if 
workspace_str in settings.recent_workspaces: + settings.recent_workspaces.remove(workspace_str) + settings.recent_workspaces.insert(0, workspace_str) + settings.recent_workspaces = settings.recent_workspaces[:5] + save_settings(settings) return {"success": True, "workspace": workspace_str} @router.post("/workspace/recent") -async def add_recent_workspace(request: WorkspaceRequest, req: Request) -> dict[str, Any]: +async def add_recent_workspace(request: WorkspaceRequest) -> dict[str, Any]: """Add a workspace to recent list.""" workspace_path = Path(request.workspace).expanduser().resolve() if not workspace_path.exists() or not workspace_path.is_dir(): raise HTTPException(status_code=400, detail="Invalid workspace path") + settings = load_settings() workspace_str = str(workspace_path) - repo = _get_settings_repo(req) - user_id = _try_get_user_id(req) if repo else None - if repo and user_id: - repo.add_recent_workspace(user_id, workspace_str) - else: - settings = load_settings() - if workspace_str in settings.recent_workspaces: - settings.recent_workspaces.remove(workspace_str) - settings.recent_workspaces.insert(0, workspace_str) - settings.recent_workspaces = settings.recent_workspaces[:5] - save_settings(settings) + if workspace_str in settings.recent_workspaces: + settings.recent_workspaces.remove(workspace_str) + settings.recent_workspaces.insert(0, workspace_str) + settings.recent_workspaces = settings.recent_workspaces[:5] + save_settings(settings) return {"success": True} @@ -269,16 +232,11 @@ class DefaultModelRequest(BaseModel): @router.post("/default-model") -async def set_default_model(request: DefaultModelRequest, req: Request) -> dict[str, Any]: +async def set_default_model(request: DefaultModelRequest) -> dict[str, Any]: """Set default virtual model preference.""" - repo = _get_settings_repo(req) - user_id = _try_get_user_id(req) if repo else None - if repo and user_id: - repo.set_default_model(user_id, request.model) - else: - settings = 
load_settings() - settings.default_model = request.model - save_settings(settings) + settings = load_settings() + settings.default_model = request.model + save_settings(settings) return {"success": True, "default_model": request.model} diff --git a/backend/web/routers/threads.py b/backend/web/routers/threads.py index 33a75b8aa..8efce1a88 100644 --- a/backend/web/routers/threads.py +++ b/backend/web/routers/threads.py @@ -4,13 +4,13 @@ import json import logging import uuid -from datetime import UTC from typing import Annotated, Any from fastapi import APIRouter, Depends, HTTPException, Request from fastapi.responses import JSONResponse from sse_starlette.sse import EventSourceResponse +from backend.web.core.config import LOCAL_WORKSPACE_ROOT # noqa: E402 from backend.web.core.dependencies import ( get_app, get_current_user_id, @@ -47,14 +47,17 @@ get_terminal_status, ) from backend.web.utils.helpers import delete_thread_in_db -from backend.web.utils.serializers import avatar_url, serialize_message -from core.runtime.middleware.monitor import AgentState -from sandbox.config import MountSpec -from sandbox.recipes import normalize_recipe_snapshot, provider_type_from_name -from sandbox.thread_context import set_current_thread_id +from backend.web.utils.serializers import serialize_message from storage.contracts import EntityRow logger = logging.getLogger(__name__) +from datetime import UTC # noqa: E402 + +from backend.web.utils.serializers import avatar_url # noqa: E402 +from core.runtime.middleware.monitor import AgentState # noqa: E402 +from sandbox.config import MountSpec # noqa: E402 +from sandbox.recipes import normalize_recipe_snapshot, provider_type_from_name # noqa: E402 +from sandbox.thread_context import set_current_thread_id # noqa: E402 router = APIRouter(prefix="/api/threads", tags=["threads"]) @@ -116,12 +119,9 @@ async def _prepare_attachment_message( # @@@sync-fail-honest - don't tell agent files are in sandbox if sync failed if sync_ok: - message = 
f"[User uploaded {len(attachments)} file(s) to {files_dir}/: {', '.join(attachments)}]\n\n{original_message}" + message = f"[User uploaded {len(attachments)} file(s) to {files_dir}/: {', '.join(attachments)}]\n\n{original_message}" # noqa: E501 else: - message = ( - f"[User uploaded {len(attachments)} file(s) but sync to sandbox failed. " - f"Files may not be available in {files_dir}/.]\n\n{original_message}" - ) + message = f"[User uploaded {len(attachments)} file(s) but sync to sandbox failed. Files may not be available in {files_dir}/.]\n\n{original_message}" # noqa: E501 return message, message_metadata @@ -167,7 +167,7 @@ async def _validate_mount_capability_gate( if mismatch is None: return None - # @@@request-stage-capability-gate - Fail at create-thread request stage so unsupported mount semantics never enter runtime lifecycle. + # @@@request-stage-capability-gate - Fail at create-thread request stage so unsupported mount semantics never enter runtime lifecycle. # noqa: E501 return JSONResponse( status_code=400, content={ @@ -194,7 +194,7 @@ def _thread_payload(app: Any, thread_id: str, sandbox_type: str) -> dict[str, An if thread is None: raise HTTPException(404, "Thread not found") member = app.state.member_repo.get_by_id(thread["member_id"]) - entity = app.state.entity_repo.get_by_id(thread["member_id"]) + entity = app.state.entity_repo.get_by_thread_id(thread_id) if member is None or entity is None: raise HTTPException(500, f"Thread {thread_id} missing member/entity") return { @@ -275,7 +275,6 @@ def _resolve_existing_lease_cwd(lease_id: str, fallback_cwd: str | None) -> str: if fallback_cwd: return fallback_cwd - from backend.web.core.config import LOCAL_WORKSPACE_ROOT from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path from storage.providers.sqlite.terminal_repo import SQLiteTerminalRepo @@ -327,15 +326,7 @@ def _create_owned_thread( owned_lease: dict[str, Any] | None = None if selected_lease_id: owned_lease = next( - ( - 
lease - for lease in sandbox_service.list_user_leases( - owner_user_id, - thread_repo=app.state.thread_repo, - member_repo=app.state.member_repo, - ) - if lease["lease_id"] == selected_lease_id - ), + (lease for lease in sandbox_service.list_user_leases(owner_user_id) if lease["lease_id"] == selected_lease_id), None, ) if owned_lease is None: @@ -344,13 +335,13 @@ def _create_owned_thread( # @@@non-atomic-create - these 3 steps (seq++, thread, entity) are not atomic. seq = app.state.member_repo.increment_entity_seq(agent_member_id) - new_thread_id = f"{agent_member_id}-{seq}" + thread_id = f"{agent_member_id}-{seq}" has_main = app.state.thread_repo.get_main_thread(agent_member_id) is not None resolved_is_main = is_main or not has_main branch_index = 0 if resolved_is_main else app.state.thread_repo.get_next_branch_index(agent_member_id) app.state.thread_repo.create( - thread_id=new_thread_id, + thread_id=thread_id, member_id=agent_member_id, sandbox_type=sandbox_type, cwd=payload.cwd, @@ -362,45 +353,35 @@ def _create_owned_thread( # @@@entity-name-convention - entity display names derive from member + thread role, never sandbox strings. entity_name = canonical_entity_name(agent_member.name, is_main=resolved_is_main, branch_index=branch_index) - - # @@@entity-id-is-member-id - agent entity id = member_id (per-agent, not per-thread). - # thread_id field on the entity points to the current main thread. - # If entity already exists, update thread_id (main thread changed); otherwise create. 
- existing_entity = app.state.entity_repo.get_by_id(agent_member_id) - if existing_entity is not None: - if resolved_is_main: - app.state.entity_repo.update(agent_member_id, thread_id=new_thread_id, name=entity_name) - # Branch threads don't update the entity — it represents the main identity - else: - app.state.entity_repo.create( - EntityRow( - id=agent_member_id, - type="agent", - member_id=agent_member_id, - name=entity_name, - thread_id=new_thread_id if resolved_is_main else None, - created_at=time.time(), - ) + app.state.entity_repo.create( + EntityRow( + id=thread_id, + type="agent", + member_id=agent_member_id, + name=entity_name, + thread_id=thread_id, + created_at=time.time(), ) + ) # Set thread state - app.state.thread_sandbox[new_thread_id] = sandbox_type + app.state.thread_sandbox[thread_id] = sandbox_type if payload.cwd: - app.state.thread_cwd[new_thread_id] = payload.cwd + app.state.thread_cwd[thread_id] = payload.cwd if selected_lease_id: # @@@reuse-lease-binding - Reuse an existing lease by attaching a fresh terminal for the new thread. bound_cwd = _bind_thread_to_existing_lease( - new_thread_id, + thread_id, selected_lease_id, cwd=payload.cwd, ) - app.state.thread_cwd[new_thread_id] = bound_cwd + app.state.thread_cwd[thread_id] = bound_cwd else: # @@@lease-early-creation - Create volume + lease + terminal at thread creation # so volume exists BEFORE any file uploads. 
_create_thread_sandbox_resources( - new_thread_id, + thread_id, sandbox_type, payload.recipe.model_dump() if payload.recipe else None, ) @@ -412,7 +393,7 @@ def _create_owned_thread( "recipe": owned_lease.get("recipe"), "lease_id": owned_lease["lease_id"], "model": payload.model, - "workspace": app.state.thread_cwd.get(new_thread_id), + "workspace": app.state.thread_cwd.get(thread_id), } else: successful_config = { @@ -424,12 +405,12 @@ def _create_owned_thread( ), "lease_id": None, "model": payload.model, - "workspace": app.state.thread_cwd.get(new_thread_id) or payload.cwd, + "workspace": app.state.thread_cwd.get(thread_id) or payload.cwd, } save_last_successful_config(app, owner_user_id, agent_member_id, successful_config) return { - "thread_id": new_thread_id, + "thread_id": thread_id, "sandbox": sandbox_type, "member_id": agent_member_id, "member_name": agent_member.name, @@ -623,16 +604,12 @@ async def delete_thread( logger.warning("Failed to destroy sandbox resources for thread %s: %s", thread_id, exc) await asyncio.to_thread(delete_thread_in_db, thread_id) # Also delete from threads table (entity-chat addition) - thread_data = app.state.thread_repo.get_by_id(thread_id) - member_id = thread_data["member_id"] if thread_data else None app.state.thread_repo.delete(thread_id) - # Entity is keyed by member_id (shared across threads) — update its thread_id - # to the next main thread, or clear it if no threads remain - if member_id: - entity = app.state.entity_repo.get_by_id(member_id) - if entity and entity.thread_id == thread_id: - next_main = app.state.thread_repo.get_main_thread(member_id) - app.state.entity_repo.update(member_id, thread_id=next_main["id"] if next_main else None) + # Delete associated entity + try: + app.state.entity_repo.delete(thread_id) + except Exception: + logger.error("Failed to delete entity for thread %s", thread_id, exc_info=True) # Clean up thread-specific state app.state.thread_sandbox.pop(thread_id, None) diff --git 
a/backend/web/routers/webhooks.py b/backend/web/routers/webhooks.py index 334dddc93..b3103a960 100644 --- a/backend/web/routers/webhooks.py +++ b/backend/web/routers/webhooks.py @@ -7,11 +7,11 @@ from backend.web.services.sandbox_service import init_providers_and_managers from backend.web.utils.helpers import _get_container, extract_webhook_instance_id -from sandbox.lease import lease_from_row from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path -from storage.providers.sqlite.lease_repo import SQLiteLeaseRepo SANDBOX_DB_PATH = resolve_role_db_path(SQLiteDBRole.SANDBOX) +from sandbox.lease import lease_from_row # noqa: E402 +from storage.providers.sqlite.lease_repo import SQLiteLeaseRepo # noqa: E402 router = APIRouter(prefix="/api/webhooks", tags=["webhooks"]) diff --git a/backend/web/services/agent_pool.py b/backend/web/services/agent_pool.py index 50ecb5dbf..819bd8604 100644 --- a/backend/web/services/agent_pool.py +++ b/backend/web/services/agent_pool.py @@ -98,23 +98,22 @@ async def get_or_create_agent(app_obj: FastAPI, sandbox_type: str, thread_id: st chat_repos = None if hasattr(app_obj.state, "entity_repo") and thread_data: entity_repo = app_obj.state.entity_repo - member_repo = getattr(app_obj.state, "member_repo", None) - # Entity id = member_id in the new model; look up by member_id, not thread_id - agent_member_id = thread_data.get("member_id") - agent_entity = entity_repo.get_by_id(agent_member_id) if agent_member_id else None + agent_entity = entity_repo.get_by_thread_id(thread_id) if agent_entity: - # agent social identity = member_id - agent_member = member_repo.get_by_id(agent_entity.member_id) if member_repo else None - # owner social identity = owner's user_id (same as their member_id for humans) - owner_user_id = agent_member.owner_user_id if agent_member else "" + # @@@admin-chain — find owner's user_id via Member domain (template ownership). 
+ # Thread→Entity→Member(template)→owner_user_id + agent_member = ( + app_obj.state.member_repo.get_by_id(agent_entity.member_id) if hasattr(app_obj.state, "member_repo") else None + ) + owner_member_id = agent_member.owner_user_id if agent_member and agent_member.owner_user_id else "" chat_repos = { - "user_id": agent_entity.member_id, # agent's social identity = member_id - "owner_user_id": owner_user_id, + "member_id": agent_entity.id, + "owner_member_id": owner_member_id, "entity_repo": entity_repo, "chat_service": getattr(app_obj.state, "chat_service", None), "chat_entity_repo": getattr(app_obj.state, "chat_entity_repo", None), "chat_message_repo": getattr(app_obj.state, "chat_message_repo", None), - "member_repo": member_repo, + "member_repo": getattr(app_obj.state, "member_repo", None), "chat_event_bus": getattr(app_obj.state, "chat_event_bus", None), } diff --git a/backend/web/services/auth_service.py b/backend/web/services/auth_service.py index 85c9c21c6..6f253ff56 100644 --- a/backend/web/services/auth_service.py +++ b/backend/web/services/auth_service.py @@ -1,18 +1,29 @@ -"""Authentication service — Supabase Auth backed register, login, JWT verify.""" +"""Authentication service — register, login, JWT.""" from __future__ import annotations import logging -import os import time +import uuid +import bcrypt import jwt -from storage.contracts import AccountRepo, EntityRepo, InviteCodeRepo, MemberRepo, MemberRow, MemberType +from storage.contracts import ( + AccountRepo, + AccountRow, + MemberRepo, + MemberRow, + MemberType, +) +from storage.providers.sqlite.member_repo import generate_member_id logger = logging.getLogger(__name__) -SUPABASE_JWT_ALGORITHM = "HS256" +# @@@jwt-secret - hardcoded for MVP. Move to config/env before production. 
+JWT_SECRET = "leon-dev-secret-change-me" +JWT_ALGORITHM = "HS256" +JWT_EXPIRE_SECONDS = 86400 * 7 # 7 days class AuthService: @@ -20,234 +31,156 @@ def __init__( self, members: MemberRepo, accounts: AccountRepo, - entities: EntityRepo, - supabase_client=None, - invite_codes: InviteCodeRepo | None = None, ) -> None: self._members = members self._accounts = accounts - self._entities = entities - self._sb = supabase_client # None in sqlite-only mode - self._invite_codes = invite_codes - - # ------------------------------------------------------------------ - # Registration flow (standard Supabase signUp) - # Step 1: send_otp(email, password) → signUp creates user, GoTrue sends OTP - # Step 2: verify_register_otp(...) → verifyOtp(type:signup), returns temp_token - # Step 3: complete_register(...) → validate invite, create member records - # ------------------------------------------------------------------ - - def send_otp(self, email: str, password: str, invite_code: str) -> None: - """Validate invite code, create user via signUp (sends confirmation OTP to email).""" - if self._sb is None: - raise RuntimeError("Supabase client required.") - if self._invite_codes is None or not self._invite_codes.is_valid(invite_code): - raise ValueError("邀请码无效或已过期") - from supabase_auth.errors import AuthApiError - try: - self._sb.auth.sign_up({"email": email, "password": password}) - except AuthApiError as e: - msg = e.message or "" - if "already registered" in msg or "already exists" in msg: - raise ValueError("该邮箱已注册,请直接登录") from e - raise ValueError("发送验证码失败,请稍后重试") from e - - def verify_register_otp(self, email: str, token: str) -> dict: - """Verify signup OTP. 
Returns temp_token to be used in complete_register.""" - if self._sb is None: - raise RuntimeError("Supabase client required.") - from supabase_auth.errors import AuthApiError - - try: - resp = self._sb.auth.verify_otp({"email": email, "token": token, "type": "signup"}) - except AuthApiError as e: - raise ValueError(f"验证码错误: {e.message}") from e - if resp.user is None or resp.session is None: - raise ValueError("验证码无效或已过期") - return {"temp_token": resp.session.access_token} - - def complete_register(self, temp_token: str, invite_code: str) -> dict: - """Complete registration: validate invite code, create member records.""" - if self._sb is None: - raise RuntimeError("Supabase client required.") - - # 1. Decode temp_token to get user_id - jwt_secret = os.getenv("SUPABASE_JWT_SECRET") - if not jwt_secret: - raise RuntimeError("SUPABASE_JWT_SECRET not set.") - try: - payload = jwt.decode(temp_token, jwt_secret, algorithms=[SUPABASE_JWT_ALGORITHM], options={"verify_aud": False}) - except jwt.InvalidTokenError as e: - raise ValueError("会话已过期,请重新验证邮箱") from e - auth_user_id = payload["sub"] - - # 2. Validate invite code (re-check; repo handles expired/used) - if self._invite_codes is None or not self._invite_codes.is_valid(invite_code): - raise ValueError("邀请码无效或已过期") - - # 3. Create member records (idempotent guard) - email_from_payload = payload.get("email", "") - existing = self._members.get_by_id(auth_user_id) - if existing is None: - mycel_id = self._sb.rpc("next_mycel_id").execute().data - now = time.time() - display_name = email_from_payload.split("@")[0] - - # Create member row - self._members.create( - MemberRow( - id=auth_user_id, - name=display_name, - type=MemberType.HUMAN, - email=email_from_payload, - mycel_id=mycel_id, - created_at=now, - ) + def register(self, username: str, password: str) -> dict: + """Register a new human user. + + Returns: {token, user, agent} + Creates: human member, account, agent members. 
+ """ + if self._accounts.get_by_username(username) is not None: + raise ValueError(f"Username '{username}' already taken") + + now = time.time() + + # @@@non-atomic-register - steps 1-7 are not atomic. Acceptable for dev. + # Wrap in DB transaction when migrating to Supabase. + # 1. Human member + user_id = generate_member_id() + self._members.create( + MemberRow( + id=user_id, + name=username, + type=MemberType.HUMAN, + created_at=now, ) - - # Initial agents - first_agent_info = self._create_initial_agents(auth_user_id, now) - else: - display_name = existing.name - mycel_id = existing.mycel_id - owned_agents = self._members.list_by_owner_user_id(auth_user_id) - first_agent_info = ( - {"id": owned_agents[0].id, "name": owned_agents[0].name, "type": "mycel_agent", "avatar": None} if owned_agents else None + ) + + # 2. Account (bcrypt hash) + password_hash = bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() + account_id = str(uuid.uuid4()) + self._accounts.create( + AccountRow( + id=account_id, + user_id=user_id, + username=username, + password_hash=password_hash, + created_at=now, ) + ) - # 4. Mark invite code used (atomic via repo) - if self._invite_codes is not None: - self._invite_codes.use(invite_code, auth_user_id) - - logger.info("Registered user %s (mycel_id=%s)", email_from_payload, mycel_id) - return { - "token": temp_token, - "user": {"id": auth_user_id, "name": display_name, "mycel_id": mycel_id, "email": email_from_payload, "avatar": None}, - "agent": first_agent_info, - } - - def login(self, identifier: str, password: str) -> dict: - """Login with email or mycel_id + password.""" - if self._sb is None: - raise RuntimeError("Supabase client required for login. 
Set LEON_STORAGE_STRATEGY=supabase.") - - # Resolve email - email = self._resolve_email(identifier) - - from supabase_auth.errors import AuthApiError - - # Sign in via Supabase - try: - resp = self._sb.auth.sign_in_with_password({"email": email, "password": password}) - except AuthApiError: - raise ValueError("邮箱或密码错误") - if resp.user is None or resp.session is None: - raise ValueError("邮箱或密码错误") - - auth_user_id = str(resp.user.id) - token = resp.session.access_token - - # Load member info - member = self._members.get_by_id(auth_user_id) - if member is None: - raise ValueError("账号数据异常,请联系支持") - - # Load entities + agents - owned_agents = self._members.list_by_owner_user_id(auth_user_id) - agent_info = None - if owned_agents: - a = owned_agents[0] - agent_info = {"id": a.id, "name": a.name, "type": a.type.value, "avatar": a.avatar} - - logger.info("Login: %s (mycel_id=%s)", email, member.mycel_id) - return { - "token": token, - "user": { - "id": auth_user_id, - "name": member.name, - "mycel_id": member.mycel_id, - "email": member.email, - "avatar": member.avatar, - }, - "agent": agent_info, - } - - def verify_token(self, token: str) -> dict: - """Verify Supabase JWT. 
Returns {user_id}.""" - jwt_secret = os.getenv("SUPABASE_JWT_SECRET") - if not jwt_secret: - raise RuntimeError("SUPABASE_JWT_SECRET env var required for token verification.") - try: - payload = jwt.decode( - token, - jwt_secret, - algorithms=[SUPABASE_JWT_ALGORITHM], - options={"verify_aud": False}, - ) - return {"user_id": payload["sub"]} - except jwt.ExpiredSignatureError: - raise ValueError("Token 已过期,请重新登录") - except jwt.InvalidTokenError as e: - raise ValueError(f"Token 无效: {e}") - - # ------------------------------------------------------------------ - # Internal helpers - # ------------------------------------------------------------------ - - def _resolve_email(self, identifier: str) -> str: - """Turn mycel_id (numeric string) or email into email address.""" - if identifier.strip().lstrip("0123456789") == "" and identifier.strip().isdigit(): - member = self._members.get_by_mycel_id(int(identifier.strip())) - if member is None or member.email is None: - raise ValueError("用户不存在") - return member.email - return identifier.strip() - - def _create_initial_agents(self, owner_user_id: str, now: float) -> dict | None: - """Create Toad and Morel agents for a new user. Returns first agent info.""" + # 3. Create two initial agent members: Toad and Morel from pathlib import Path from backend.web.services.member_service import MEMBERS_DIR, _write_agent_md, _write_json - from storage.providers.sqlite.member_repo import generate_member_id + # @@@initial-agent-names - keep template names plain; owner disambiguation belongs in discovery UI metadata. 
initial_agents = [ {"name": "Toad", "description": "Curious and energetic assistant", "avatar": "toad.jpeg"}, {"name": "Morel", "description": "Thoughtful senior analyst", "avatar": "morel.jpeg"}, ] + assets_dir = Path(__file__).resolve().parents[3] / "assets" - first_agent_info = None + first_agent_info = None for i, agent_def in enumerate(initial_agents): - agent_id = generate_member_id() - agent_dir = MEMBERS_DIR / agent_id + agent_member_id = generate_member_id() + agent_dir = MEMBERS_DIR / agent_member_id agent_dir.mkdir(parents=True, exist_ok=True) _write_agent_md(agent_dir / "agent.md", name=agent_def["name"], description=agent_def["description"]) _write_json( agent_dir / "meta.json", - {"status": "active", "version": "1.0.0", "created_at": int(now * 1000), "updated_at": int(now * 1000)}, + { + "status": "active", + "version": "1.0.0", + "created_at": int(now * 1000), + "updated_at": int(now * 1000), + }, ) self._members.create( MemberRow( - id=agent_id, + id=agent_member_id, name=agent_def["name"], type=MemberType.MYCEL_AGENT, description=agent_def["description"], config_dir=str(agent_dir), - owner_user_id=owner_user_id, + owner_user_id=user_id, created_at=now, ) ) + + # @@@avatar-same-pipeline — reuse shared PIL pipeline from entities.py src_avatar = assets_dir / agent_def["avatar"] if src_avatar.exists(): try: from backend.web.routers.entities import process_and_save_avatar - avatar_path = process_and_save_avatar(src_avatar, agent_id) - self._members.update(agent_id, avatar=avatar_path, updated_at=now) + avatar_path = process_and_save_avatar(src_avatar, agent_member_id) + self._members.update(agent_member_id, avatar=avatar_path, updated_at=now) except Exception as e: - logger.warning("Avatar copy failed for %s: %s", agent_def["name"], e) + logger.warning("Failed to process default avatar for %s: %s", agent_def["name"], e) + if i == 0: - first_agent_info = {"id": agent_id, "name": agent_def["name"], "type": "mycel_agent", "avatar": None} + first_agent_info 
= { + "id": agent_member_id, + "name": agent_def["name"], + "type": "mycel_agent", + "avatar": None, + } + + logger.info("Created agent '%s' (member=%s) for user '%s'", agent_def["name"], agent_member_id[:8], username) + + token = self._make_token(user_id) + + logger.info("Registered user '%s' (user=%s)", username, user_id[:8]) + + return { + "token": token, + "user": {"id": user_id, "name": username, "type": "human", "avatar": None}, + "agent": first_agent_info, + } + + def login(self, username: str, password: str) -> dict: + """Login and return JWT + member info.""" + account = self._accounts.get_by_username(username) + if account is None or account.password_hash is None: + raise ValueError("Invalid username or password") + + if not bcrypt.checkpw(password.encode(), account.password_hash.encode()): + raise ValueError("Invalid username or password") + + user = self._members.get_by_id(account.user_id) + if user is None: + raise ValueError("Account has no associated user") + + # Find the user's agent + owned_agents = self._members.list_by_owner_user_id(user.id) + agent_info = None + if owned_agents: + a = owned_agents[0] + agent_info = {"id": a.id, "name": a.name, "type": a.type.value, "avatar": a.avatar} + + token = self._make_token(user.id) + + return { + "token": token, + "user": {"id": user.id, "name": user.name, "type": user.type.value, "avatar": user.avatar}, + "agent": agent_info, + } + + def verify_token(self, token: str) -> dict: + """Verify JWT and return payload dict with user_id. 
Raises ValueError on failure.""" + try: + payload = jwt.decode(token, JWT_SECRET, algorithms=[JWT_ALGORITHM]) + return {"user_id": payload["user_id"]} + except jwt.ExpiredSignatureError: + raise ValueError("Token expired") + except jwt.InvalidTokenError: + raise ValueError("Invalid token") - return first_agent_info + def _make_token(self, user_id: str) -> str: + payload = {"user_id": user_id, "exp": time.time() + JWT_EXPIRE_SECONDS} + return jwt.encode(payload, JWT_SECRET, algorithm=JWT_ALGORITHM) diff --git a/backend/web/services/chat_service.py b/backend/web/services/chat_service.py index 51a5ebbeb..63494b080 100644 --- a/backend/web/services/chat_service.py +++ b/backend/web/services/chat_service.py @@ -1,4 +1,4 @@ -"""Chat service — entity-to-entity communication.""" +"""Chat service — user/member-to-user/member communication.""" from __future__ import annotations @@ -8,7 +8,6 @@ from collections.abc import Callable from typing import Any -from backend.web.utils.serializers import avatar_url from storage.contracts import ( ChatEntityRepo, ChatMessageRepo, @@ -44,39 +43,31 @@ def __init__( self._delivery_fn = delivery_fn self._delivery_resolver = delivery_resolver - def _resolve_name(self, user_id: str) -> str: - """Resolve display name: entity_repo (agents) → member_repo (humans).""" - e = self._entities.get_by_id(user_id) - if e: - return e.name - m = self._members.get_by_id(user_id) if self._members else None - return m.name if m else "unknown" + def find_or_create_chat(self, member_ids: list[str], title: str | None = None) -> ChatRow: + """Find existing 1:1 chat between two members, or create one.""" + if len(member_ids) != 2: + raise ValueError("Use create_group_chat() for 3+ members") - def find_or_create_chat(self, user_ids: list[str], title: str | None = None) -> ChatRow: - """Find existing 1:1 chat between two social identities, or create one.""" - if len(user_ids) != 2: - raise ValueError("Use create_group_chat() for 3+ participants") - - existing_id = 
self._chat_entities.find_chat_between(user_ids[0], user_ids[1]) + existing_id = self._chat_entities.find_chat_between(member_ids[0], member_ids[1]) if existing_id: return self._chats.get_by_id(existing_id) now = time.time() chat_id = str(uuid.uuid4()) self._chats.create(ChatRow(id=chat_id, title=title, created_at=now)) - for uid in user_ids: - self._chat_entities.add_participant(chat_id, uid, now) + for mid in member_ids: + self._chat_entities.add_member(chat_id, mid, now) return self._chats.get_by_id(chat_id) - def create_group_chat(self, user_ids: list[str], title: str | None = None) -> ChatRow: - """Create a group chat with 3+ participants.""" - if len(user_ids) < 3: - raise ValueError("Group chat requires 3+ participants") + def create_group_chat(self, member_ids: list[str], title: str | None = None) -> ChatRow: + """Create a group chat with 3+ members.""" + if len(member_ids) < 3: + raise ValueError("Group chat requires 3+ members") now = time.time() chat_id = str(uuid.uuid4()) self._chats.create(ChatRow(id=chat_id, title=title, created_at=now)) - for uid in user_ids: - self._chat_entities.add_participant(chat_id, uid, now) + for mid in member_ids: + self._chat_entities.add_member(chat_id, mid, now) return self._chats.get_by_id(chat_id) def send_message( @@ -108,7 +99,8 @@ def send_message( ) self._messages.create(msg) - sender_name = self._resolve_name(sender_id) + sender = self._entities.get_by_id(sender_id) + sender_name = sender.name if sender else "unknown" if self._event_bus: self._event_bus.publish( @@ -127,37 +119,38 @@ def send_message( }, ) - self._deliver_to_agents(chat_id, sender_id, sender_name, content, mentions, signal=signal) + self._deliver_to_agents(chat_id, sender_id, content, mentions, signal=signal) return msg def _deliver_to_agents( self, chat_id: str, sender_id: str, - sender_name: str, content: str, mentioned_ids: list[str] | None = None, signal: str | None = None, ) -> None: - """For each non-sender agent participant in the chat, 
deliver to their brain thread.""" + """For each non-sender agent entity in the chat, deliver to their brain thread.""" mentions = set(mentioned_ids or []) - participants = self._chat_entities.list_participants(chat_id) - sender_avatar_url = None - sender_mid = sender_id + participants = self._chat_entities.list_members(chat_id) sender_entity = self._entities.get_by_id(sender_id) + sender_name = sender_entity.name if sender_entity else "unknown" + # @@@sender-avatar — compute once for all recipients + sender_avatar_url = None if sender_entity: - sender_mid = sender_entity.member_id - m = self._members.get_by_id(sender_mid) if self._members else None - sender_avatar_url = avatar_url(sender_mid, bool(m.avatar if m else None)) + from backend.web.utils.serializers import avatar_url + + sender_member = self._members.get_by_id(sender_entity.member_id) if self._members else None + sender_avatar_url = avatar_url(sender_entity.member_id, bool(sender_member.avatar if sender_member else None)) for ce in participants: - if ce.user_id == sender_id: + if ce.entity_id == sender_id: continue - entity = self._entities.get_by_id(ce.user_id) + entity = self._entities.get_by_id(ce.entity_id) if not entity or entity.type != "agent" or not entity.thread_id: logger.debug( "[deliver] SKIP %s type=%s thread=%s", - ce.user_id, + ce.entity_id, getattr(entity, "type", None), getattr(entity, "thread_id", None), ) @@ -167,9 +160,9 @@ def _deliver_to_agents( if self._delivery_resolver: from storage.contracts import DeliveryAction - is_mentioned = ce.user_id in mentions + is_mentioned = ce.entity_id in mentions action = self._delivery_resolver.resolve( - ce.user_id, + ce.entity_id, chat_id, sender_id, is_mentioned=is_mentioned, @@ -178,7 +171,7 @@ def _deliver_to_agents( logger.info( "[deliver] POLICY %s for %s (sender=%s chat=%s mentioned=%s)", action.value, - ce.user_id, + ce.entity_id, sender_id, chat_id[:8], is_mentioned, @@ -197,45 +190,37 @@ def set_delivery_fn(self, fn) -> None: 
self._delivery_fn = fn def list_chats_for_user(self, user_id: str) -> list[dict]: - """List all chats for a user (social identity) with summary info.""" + """List all chats for a user with summary info.""" chat_ids = self._chat_entities.list_chats_for_user(user_id) result = [] for cid in chat_ids: chat = self._chats.get_by_id(cid) if not chat or chat.status != "active": continue - participants = self._chat_entities.list_participants(cid) + participants = self._chat_entities.list_members(cid) entities_info = [] for p in participants: - e = self._entities.get_by_id(p.user_id) + e = self._entities.get_by_id(p.entity_id) if e: + from backend.web.utils.serializers import avatar_url + m = self._members.get_by_id(e.member_id) if self._members else None entities_info.append( { - "id": p.user_id, + "id": e.id, "name": e.name, "type": e.type, "avatar_url": avatar_url(e.member_id, bool(m.avatar if m else None)), } ) - else: - m = self._members.get_by_id(p.user_id) if self._members else None - if m: - entities_info.append( - { - "id": p.user_id, - "name": m.name, - "type": "human", - "avatar_url": avatar_url(m.id, bool(m.avatar)), - } - ) msgs = self._messages.list_by_chat(cid, limit=1) last_msg = None if msgs: m = msgs[0] + sender = self._entities.get_by_id(m.sender_id) last_msg = { "content": m.content, - "sender_name": self._resolve_name(m.sender_id), + "sender_name": sender.name if sender else "unknown", "created_at": m.created_at, } unread = self._messages.count_unread(cid, user_id) diff --git a/backend/web/services/delivery_resolver.py b/backend/web/services/delivery_resolver.py index 43e6e6bd7..8cc796992 100644 --- a/backend/web/services/delivery_resolver.py +++ b/backend/web/services/delivery_resolver.py @@ -61,9 +61,9 @@ def resolve( def _is_chat_muted(self, user_id: str, chat_id: str) -> bool: """Check if user has muted this specific chat.""" - participants = self._chat_entities.list_participants(chat_id) - for ce in participants: - if ce.user_id == user_id: + 
members = self._chat_entities.list_members(chat_id) + for ce in members: + if ce.entity_id == user_id: muted = getattr(ce, "muted", False) if not muted: return False diff --git a/backend/web/services/display_builder.py b/backend/web/services/display_builder.py index 25f034ed5..00d21bda2 100644 --- a/backend/web/services/display_builder.py +++ b/backend/web/services/display_builder.py @@ -16,9 +16,6 @@ from dataclasses import dataclass, field from typing import Any, Literal -from backend.web.utils.serializers import extract_text_content as _extract_text_content -from backend.web.utils.serializers import strip_system_tags as _strip_system_tags - logger = logging.getLogger(__name__) # --------------------------------------------------------------------------- @@ -38,6 +35,13 @@ # Helpers — ported from message-mapper.ts # --------------------------------------------------------------------------- +from backend.web.utils.serializers import ( # noqa: E402 + extract_text_content as _extract_text_content, +) +from backend.web.utils.serializers import ( # noqa: E402 + strip_system_tags as _strip_system_tags, +) + _CHAT_MESSAGE_RE = re.compile(r"]*>([\s\S]*?)") diff --git a/backend/web/services/event_store.py b/backend/web/services/event_store.py index 998b08018..17a0edfa7 100644 --- a/backend/web/services/event_store.py +++ b/backend/web/services/event_store.py @@ -46,7 +46,7 @@ def _resolve_run_event_repo(run_event_repo: RunEventRepo | None) -> RunEventRepo _default_run_event_repo_path = None container = build_storage_container(main_db_path=_DB_PATH) - # @@@event-store-single-path - keep one persistence boundary; when caller omits repo, resolve default repo from storage container. + # @@@event-store-single-path - keep one persistence boundary; when caller omits repo, resolve default repo from storage container. 
# noqa: E501 _default_run_event_repo = container.run_event_repo() _default_run_event_repo_path = _DB_PATH return _default_run_event_repo diff --git a/backend/web/services/file_channel_service.py b/backend/web/services/file_channel_service.py index 69516334e..a8aeac3d6 100644 --- a/backend/web/services/file_channel_service.py +++ b/backend/web/services/file_channel_service.py @@ -10,10 +10,10 @@ import json import logging -from backend.web.utils.helpers import _get_container - logger = logging.getLogger(__name__) +from backend.web.utils.helpers import _get_container # noqa: E402 + def _resolve_volume_source(thread_id: str): """Resolve VolumeSource for a thread via lease chain. diff --git a/backend/web/services/library_service.py b/backend/web/services/library_service.py index 2919f8dd6..bf2f7e05c 100644 --- a/backend/web/services/library_service.py +++ b/backend/web/services/library_service.py @@ -20,7 +20,7 @@ def ensure_library_dir() -> None: (LIBRARY_DIR / "skills").mkdir(exist_ok=True) (LIBRARY_DIR / "agents").mkdir(exist_ok=True) legacy_recipe_dir = LIBRARY_DIR / "recipes" - # @@@recipe-storage-cutover - recipes now live in SQLite only; delete the dead file tree so it cannot masquerade as live state. + # @@@recipe-storage-cutover - recipes now live in SQLite only; delete the dead file tree so it cannot masquerade as live state. 
# noqa: E501 if legacy_recipe_dir.exists(): if legacy_recipe_dir.is_dir(): shutil.rmtree(legacy_recipe_dir) diff --git a/backend/web/services/marketplace_client.py b/backend/web/services/marketplace_client.py index 49de82258..b8c6bfd40 100644 --- a/backend/web/services/marketplace_client.py +++ b/backend/web/services/marketplace_client.py @@ -15,7 +15,7 @@ logger = logging.getLogger(__name__) -HUB_URL = os.environ.get("MYCEL_HUB_URL", "http://localhost:8090") +HUB_URL = os.environ.get("MYCEL_HUB_URL", "http://localhost:8080") _hub_client = httpx.Client(timeout=30.0) diff --git a/backend/web/services/member_service.py b/backend/web/services/member_service.py index ac295e4f4..f929fa442 100644 --- a/backend/web/services/member_service.py +++ b/backend/web/services/member_service.py @@ -336,25 +336,17 @@ def _ensure_leon_dir() -> Path: # ── CRUD operations ── -def list_members(owner_user_id: str | None = None, member_repo: Any = None) -> list[dict[str, Any]]: - """List agent members. If owner_user_id given, only that user's agents (no builtin Leon). - - Args: - owner_user_id: Filter to agents owned by this user. - member_repo: Injected MemberRepo (respects LEON_STORAGE_STRATEGY). Falls back to SQLite. - """ +def list_members(owner_user_id: str | None = None) -> list[dict[str, Any]]: + """List agent members. 
If owner_user_id given, only that user's agents (no builtin Leon).""" # @@@auth-scope — scoped by owner from DB, config from filesystem if owner_user_id: - if member_repo is None: - from storage.providers.sqlite.member_repo import SQLiteMemberRepo + from storage.providers.sqlite.member_repo import SQLiteMemberRepo - repo = SQLiteMemberRepo() - try: - agents = repo.list_by_owner_user_id(owner_user_id) - finally: - repo.close() - else: - agents = member_repo.list_by_owner_user_id(owner_user_id) + repo = SQLiteMemberRepo() + try: + agents = repo.list_by_owner_user_id(owner_user_id) + finally: + repo.close() results = [] for agent in agents: agent_dir = MEMBERS_DIR / agent.id @@ -391,9 +383,9 @@ def get_member(member_id: str) -> dict[str, Any] | None: return _member_to_dict(member_dir) -def create_member(name: str, description: str = "", owner_user_id: str | None = None, member_repo: Any = None) -> dict[str, Any]: +def create_member(name: str, description: str = "", owner_user_id: str | None = None) -> dict[str, Any]: from storage.contracts import MemberRow, MemberType - from storage.providers.sqlite.member_repo import generate_member_id + from storage.providers.sqlite.member_repo import SQLiteMemberRepo, generate_member_id now = time.time() now_ms = int(now * 1000) @@ -411,38 +403,28 @@ def create_member(name: str, description: str = "", owner_user_id: str | None = }, ) - # Persist to members table so list_members finds it + # Persist to SQLite members table so list_members finds it if owner_user_id: - row = MemberRow( - id=member_id, - name=name, - type=MemberType.MYCEL_AGENT, - description=description, - config_dir=str(member_dir), - owner_user_id=owner_user_id, - created_at=now, - ) - if member_repo is not None: - member_repo.create(row) - else: - from storage.providers.sqlite.member_repo import SQLiteMemberRepo - - repo = SQLiteMemberRepo() - try: - repo.create(row) - finally: - repo.close() + repo = SQLiteMemberRepo() + try: + repo.create( + MemberRow( + 
id=member_id, + name=name, + type=MemberType.MYCEL_AGENT, + description=description, + config_dir=str(member_dir), + owner_user_id=owner_user_id, + created_at=now, + ) + ) + finally: + repo.close() return get_member(member_id) # type: ignore -def update_member( - member_id: str, - member_repo: Any = None, - entity_repo: Any = None, - thread_repo: Any = None, - **fields: Any, -) -> dict[str, Any] | None: +def update_member(member_id: str, **fields: Any) -> dict[str, Any] | None: if member_id == "__leon__": member_dir = _ensure_leon_dir() else: @@ -472,40 +454,39 @@ def update_member( meta["updated_at"] = int(time.time() * 1000) _write_json(member_dir / "meta.json", meta) - # Sync name to DB + # Sync name to SQLite if "name" in updates: - if member_repo is None: - from storage.providers.sqlite.member_repo import SQLiteMemberRepo - - member_repo = SQLiteMemberRepo() - if entity_repo is None: - from storage.providers.sqlite.entity_repo import SQLiteEntityRepo - - entity_repo = SQLiteEntityRepo() - if thread_repo is None: - from storage.providers.sqlite.thread_repo import SQLiteThreadRepo - - thread_repo = SQLiteThreadRepo() - - member_repo.update(member_id, name=updates["name"]) - member = member_repo.get_by_id(member_id) - if member is None: - raise ValueError(f"Member {member_id} not found after update") - for entity in entity_repo.get_by_member_id(member_id): - if entity.thread_id is None: - entity_repo.update(entity.id, name=member.name) - continue - thread = thread_repo.get_by_id(entity.thread_id) - if thread is None: - raise ValueError(f"Entity {entity.id} references missing thread {entity.thread_id}") - entity_repo.update( - entity.id, - name=canonical_entity_name( - member.name, - is_main=bool(thread["is_main"]), - branch_index=int(thread["branch_index"]), - ), - ) + from storage.providers.sqlite.entity_repo import SQLiteEntityRepo + from storage.providers.sqlite.member_repo import SQLiteMemberRepo + from storage.providers.sqlite.thread_repo import 
SQLiteThreadRepo + + repo = SQLiteMemberRepo() + entity_repo = SQLiteEntityRepo() + thread_repo = SQLiteThreadRepo() + try: + repo.update(member_id, name=updates["name"]) + member = repo.get_by_id(member_id) + if member is None: + raise ValueError(f"Member {member_id} not found after update") + for entity in entity_repo.get_by_member_id(member_id): + if entity.thread_id is None: + entity_repo.update(entity.id, name=member.name) + continue + thread = thread_repo.get_by_id(entity.thread_id) + if thread is None: + raise ValueError(f"Entity {entity.id} references missing thread {entity.thread_id}") + entity_repo.update( + entity.id, + name=canonical_entity_name( + member.name, + is_main=bool(thread["is_main"]), + branch_index=int(thread["branch_index"]), + ), + ) + finally: + thread_repo.close() + entity_repo.close() + repo.close() return get_member(member_id) @@ -698,7 +679,7 @@ def publish_member(member_id: str, bump_type: str = "patch") -> dict[str, Any] | return get_member(member_id) -def delete_member(member_id: str, member_repo: Any = None) -> bool: +def delete_member(member_id: str) -> bool: if member_id == "__leon__": return False member_dir = MEMBERS_DIR / member_id @@ -707,17 +688,14 @@ def delete_member(member_id: str, member_repo: Any = None) -> bool: shutil.rmtree(member_dir) - # Also remove from DB - if member_repo is not None: - member_repo.delete(member_id) - else: - from storage.providers.sqlite.member_repo import SQLiteMemberRepo + # Also remove from SQLite + from storage.providers.sqlite.member_repo import SQLiteMemberRepo - repo = SQLiteMemberRepo() - try: - repo.delete(member_id) - finally: - repo.close() + repo = SQLiteMemberRepo() + try: + repo.delete(member_id) + finally: + repo.close() return True @@ -739,11 +717,10 @@ def install_from_snapshot( installed_version: str, owner_user_id: str, existing_member_id: str | None = None, - member_repo: Any = None, ) -> str: """Create or update a local member from a marketplace snapshot.""" from 
storage.contracts import MemberRow, MemberType - from storage.providers.sqlite.member_repo import generate_member_id + from storage.providers.sqlite.member_repo import SQLiteMemberRepo, generate_member_id now = time.time() now_ms = int(now * 1000) @@ -832,26 +809,22 @@ def install_from_snapshot( } _write_json(member_dir / "meta.json", meta) - # Register in DB (new installs only) + # Register in SQLite (new installs only) if not existing_member_id and owner_user_id: - row = MemberRow( - id=member_id, - name=name, - type=MemberType.MYCEL_AGENT, - description=description, - config_dir=str(member_dir), - owner_user_id=owner_user_id, - created_at=now, - ) - if member_repo is not None: - member_repo.create(row) - else: - from storage.providers.sqlite.member_repo import SQLiteMemberRepo - - repo = SQLiteMemberRepo() - try: - repo.create(row) - finally: - repo.close() + repo = SQLiteMemberRepo() + try: + repo.create( + MemberRow( + id=member_id, + name=name, + type=MemberType.MYCEL_AGENT, + description=description, + config_dir=str(member_dir), + owner_user_id=owner_user_id, + created_at=now, + ) + ) + finally: + repo.close() return member_id diff --git a/backend/web/services/monitor_service.py b/backend/web/services/monitor_service.py index 31f59b729..16027dfbf 100644 --- a/backend/web/services/monitor_service.py +++ b/backend/web/services/monitor_service.py @@ -6,9 +6,9 @@ from datetime import UTC, datetime from typing import Any -from backend.web.core.storage_factory import make_sandbox_monitor_repo from backend.web.services.sandbox_service import init_providers_and_managers, load_all_sessions from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path +from storage.providers.sqlite.sandbox_monitor_repo import SQLiteSandboxMonitorRepo # --------------------------------------------------------------------------- # Mapping helpers (private) @@ -271,7 +271,7 @@ def _map_event_detail(event_id: str, event: dict[str, Any]) -> dict[str, Any]: def 
list_threads() -> dict[str, Any]: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: return _map_threads(repo.query_threads()) finally: @@ -279,7 +279,7 @@ def list_threads() -> dict[str, Any]: def get_thread(thread_id: str) -> dict[str, Any]: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: summary = repo.query_thread_summary(thread_id) if not summary: @@ -290,7 +290,7 @@ def get_thread(thread_id: str) -> dict[str, Any]: def list_leases() -> dict[str, Any]: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: return _map_leases(repo.query_leases()) finally: @@ -298,7 +298,7 @@ def list_leases() -> dict[str, Any]: def get_lease(lease_id: str) -> dict[str, Any]: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: lease = repo.query_lease(lease_id) if not lease: @@ -311,7 +311,7 @@ def get_lease(lease_id: str) -> dict[str, Any]: def list_diverged() -> dict[str, Any]: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: return _map_diverged(repo.query_diverged()) finally: @@ -319,7 +319,7 @@ def list_diverged() -> dict[str, Any]: def list_events(limit: int = 100) -> dict[str, Any]: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: return _map_events(repo.query_events(limit)) finally: @@ -327,7 +327,7 @@ def list_events(limit: int = 100) -> dict[str, Any]: def get_event(event_id: str) -> dict[str, Any]: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: event = repo.query_event(event_id) finally: @@ -349,7 +349,7 @@ def runtime_health_snapshot() -> dict[str, Any]: tables: dict[str, int] = {"chat_sessions": 0, "sandbox_leases": 0, "lease_events": 0} if db_exists: - repo = make_sandbox_monitor_repo() + repo = SQLiteSandboxMonitorRepo() try: tables = repo.count_rows(list(tables)) finally: diff --git a/backend/web/services/resource_service.py b/backend/web/services/resource_service.py 
index 236db63ab..45b33a2a4 100644 --- a/backend/web/services/resource_service.py +++ b/backend/web/services/resource_service.py @@ -8,7 +8,11 @@ from typing import Any from backend.web.core.config import SANDBOXES_DIR -from backend.web.core.storage_factory import list_resource_snapshots, make_sandbox_monitor_repo, upsert_resource_snapshot +from backend.web.core.storage_factory import ( + list_resource_snapshots, + make_sandbox_monitor_repo, + upsert_resource_snapshot, +) from backend.web.services.config_loader import SandboxConfigLoader from backend.web.services.sandbox_service import available_sandbox_types, build_provider_from_config_name from backend.web.utils.serializers import avatar_url @@ -405,7 +409,7 @@ def list_resource_providers() -> dict[str, Any]: normalized_sessions.append( { # @@@resource-session-identity - monitor rows can legitimately have empty chat session ids. - # Use stable lease+thread identity so React keys do not collapse when one lease has multiple threads. + # Use stable lease+thread identity so React keys do not collapse when one lease has multiple threads. 
# noqa: E501 "id": str(session.get("session_id") or f"{lease_id}:{thread_id or 'unbound'}"), "leaseId": lease_id, "threadId": thread_id, diff --git a/backend/web/services/sandbox_service.py b/backend/web/services/sandbox_service.py index 2e5e06cf0..654c550dc 100644 --- a/backend/web/services/sandbox_service.py +++ b/backend/web/services/sandbox_service.py @@ -8,21 +8,26 @@ from pathlib import Path from typing import Any -from backend.web.core.config import LOCAL_WORKSPACE_ROOT, SANDBOXES_DIR -from backend.web.core.storage_factory import make_sandbox_monitor_repo -from backend.web.utils.helpers import is_virtual_thread_id -from backend.web.utils.serializers import avatar_url -from sandbox.config import SandboxConfig -from sandbox.manager import SandboxManager -from sandbox.provider import ProviderCapability -from sandbox.recipes import default_recipe_id, list_builtin_recipes, normalize_recipe_snapshot, provider_type_from_name -from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path -from storage.providers.sqlite.member_repo import SQLiteMemberRepo -from storage.providers.sqlite.thread_repo import SQLiteThreadRepo - logger = logging.getLogger(__name__) +from backend.web.core.config import LOCAL_WORKSPACE_ROOT, SANDBOXES_DIR # noqa: E402 +from backend.web.utils.helpers import is_virtual_thread_id # noqa: E402 +from backend.web.utils.serializers import avatar_url # noqa: E402 +from sandbox.config import SandboxConfig # noqa: E402 +from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path # noqa: E402 + SANDBOX_DB_PATH = resolve_role_db_path(SQLiteDBRole.SANDBOX) +from sandbox.manager import SandboxManager # noqa: E402 +from sandbox.provider import ProviderCapability # noqa: E402 +from sandbox.recipes import ( # noqa: E402 + default_recipe_id, + list_builtin_recipes, + normalize_recipe_snapshot, + provider_type_from_name, +) +from storage.providers.sqlite.member_repo import SQLiteMemberRepo # noqa: E402 +from 
storage.providers.sqlite.sandbox_monitor_repo import SQLiteSandboxMonitorRepo # noqa: E402 +from storage.providers.sqlite.thread_repo import SQLiteThreadRepo # noqa: E402 _SANDBOX_INVENTORY_LOCK = threading.Lock() _SANDBOX_INVENTORY: tuple[dict[str, Any], dict[str, Any]] | None = None @@ -49,15 +54,12 @@ def list_default_recipes() -> list[dict[str, Any]]: def list_user_leases( user_id: str, *, - thread_repo: Any = None, - member_repo: Any = None, main_db_path: str | Path | None = None, sandbox_db_path: str | Path | None = None, ) -> list[dict[str, Any]]: - monitor_repo = make_sandbox_monitor_repo() - _thread_repo = thread_repo or SQLiteThreadRepo(db_path=main_db_path) - _member_repo = member_repo or SQLiteMemberRepo(db_path=main_db_path) - own_repos = thread_repo is None # only close if we created them + monitor_repo = SQLiteSandboxMonitorRepo(db_path=sandbox_db_path) + thread_repo = SQLiteThreadRepo(db_path=main_db_path) + member_repo = SQLiteMemberRepo(db_path=main_db_path) try: rows = monitor_repo.list_leases_with_threads() grouped: dict[str, dict[str, Any]] = {} @@ -82,10 +84,10 @@ def list_user_leases( thread_id = str(row.get("thread_id") or "").strip() if not thread_id or thread_id in group["thread_ids"]: continue - thread = _thread_repo.get_by_id(thread_id) + thread = thread_repo.get_by_id(thread_id) if thread is None: continue - member = _member_repo.get_by_id(thread["member_id"]) + member = member_repo.get_by_id(thread["member_id"]) if member is None or member.owner_user_id != user_id: continue group["thread_ids"].append(thread_id) @@ -117,9 +119,8 @@ def list_user_leases( leases.append(lease) return leases finally: - if own_repos: - _member_repo.close() - _thread_repo.close() + member_repo.close() + thread_repo.close() monitor_repo.close() diff --git a/backend/web/services/streaming_service.py b/backend/web/services/streaming_service.py index 9e6e71a77..88ba1fb48 100644 --- a/backend/web/services/streaming_service.py +++ 
b/backend/web/services/streaming_service.py @@ -9,22 +9,22 @@ from collections.abc import AsyncGenerator from typing import Any -from backend.web.services.event_buffer import RunEventBuffer, ThreadEventBuffer -from backend.web.services.event_store import cleanup_old_runs -from backend.web.utils.serializers import extract_text_content -from core.runtime.middleware.monitor import AgentState -from sandbox.thread_context import set_current_run_id, set_current_thread_id -from storage.contracts import RunEventRepo - logger = logging.getLogger(__name__) +from backend.web.services.event_buffer import RunEventBuffer, ThreadEventBuffer # noqa: E402 +from backend.web.services.event_store import cleanup_old_runs # noqa: E402 +from backend.web.utils.serializers import extract_text_content # noqa: E402 +from core.runtime.middleware.monitor import AgentState # noqa: E402 +from sandbox.thread_context import set_current_run_id, set_current_thread_id # noqa: E402 +from storage.contracts import RunEventRepo # noqa: E402 + def _resolve_run_event_repo(agent: Any) -> RunEventRepo | None: storage_container = getattr(agent, "storage_container", None) if storage_container is None: return None - # @@@runtime-storage-consumer - runtime run lifecycle must consume injected storage container, not assignment-only wiring. + # @@@runtime-storage-consumer - runtime run lifecycle must consume injected storage container, not assignment-only wiring. 
# noqa: E501 return storage_container.run_event_repo() @@ -279,8 +279,8 @@ async def activity_sink(event: dict) -> None: data["_seq"] = seq event = {**event, "data": json.dumps(data, ensure_ascii=False)} # Only SSE-valid fields: extra metadata (agent_id, agent_name) stays in event_store - _sse_fields = frozenset({"event", "data", "id", "retry", "comment"}) - sse_event = {k: v for k, v in event.items() if k in _sse_fields} + _SSE_FIELDS = frozenset({"event", "data", "id", "retry", "comment"}) # noqa: N806 + sse_event = {k: v for k, v in event.items() if k in _SSE_FIELDS} await thread_buf.put(sse_event) # @@@display-builder — compute display delta for activity events (notices, etc.) @@ -673,7 +673,7 @@ async def run_agent_stream(input_data: dict | None = _initial_input): yield chunk logger.debug("[stream] thread=%s STREAM DONE chunks=%d", thread_id[:15], chunk_count) - max_stream_retries = 10 + MAX_STREAM_RETRIES = 10 # noqa: N806 def _is_retryable_stream_error(err: Exception) -> bool: try: @@ -730,7 +730,7 @@ def _is_retryable_stream_error(err: Exception) -> bool: if msg_class == "AIMessageChunk": # @@@compact-leak-guard — skip chunks from compact's summary LLM call. # Compact sets isCompacting flag; these chunks are internal, not agent output. 
- if hasattr(agent, "runtime") and agent.runtime.state.flags.is_compacting: + if hasattr(agent, "runtime") and agent.runtime.state.flags.isCompacting: continue content = extract_text_content(getattr(msg_chunk, "content", "")) chunk_msg_id = getattr(msg_chunk, "id", None) @@ -901,7 +901,7 @@ def _is_retryable_stream_error(err: Exception) -> bool: if stream_err is None: break # 正常完成,退出外层重试循环 - if _is_retryable_stream_error(stream_err) and stream_attempt < max_stream_retries: + if _is_retryable_stream_error(stream_err) and stream_attempt < MAX_STREAM_RETRIES: stream_attempt += 1 wait = max(min(2**stream_attempt, 30) + random.uniform(-1.0, 1.0), 1.0) await emit( @@ -910,7 +910,7 @@ def _is_retryable_stream_error(err: Exception) -> bool: "data": json.dumps( { "attempt": stream_attempt, - "max_attempts": max_stream_retries, + "max_attempts": MAX_STREAM_RETRIES, "wait_seconds": round(wait, 1), }, ensure_ascii=False, diff --git a/backend/web/services/thread_launch_config_service.py b/backend/web/services/thread_launch_config_service.py index 00060e222..cd4c294ba 100644 --- a/backend/web/services/thread_launch_config_service.py +++ b/backend/web/services/thread_launch_config_service.py @@ -38,11 +38,7 @@ def save_last_successful_config(app: Any, owner_user_id: str, member_id: str, pa def resolve_default_config(app: Any, owner_user_id: str, member_id: str) -> dict[str, Any]: prefs = app.state.thread_launch_pref_repo.get(owner_user_id, member_id) or {} - leases = sandbox_service.list_user_leases( - owner_user_id, - thread_repo=app.state.thread_repo, - member_repo=app.state.member_repo, - ) + leases = sandbox_service.list_user_leases(owner_user_id) providers = [item for item in sandbox_service.available_sandbox_types() if item.get("available")] recipes = list_library("recipe", owner_user_id=owner_user_id, recipe_repo=app.state.recipe_repo) member_threads = app.state.thread_repo.list_by_member(member_id) diff --git a/backend/web/services/typing_tracker.py 
b/backend/web/services/typing_tracker.py index a88d3f900..840b69684 100644 --- a/backend/web/services/typing_tracker.py +++ b/backend/web/services/typing_tracker.py @@ -19,7 +19,7 @@ @dataclass class _ChatEntry: chat_id: str - user_id: str # social identity: user_id for humans, member_id for agents + member_id: str class TypingTracker: @@ -29,14 +29,14 @@ def __init__(self, chat_event_bus: ChatEventBus) -> None: self._chat_bus = chat_event_bus self._active: dict[str, _ChatEntry] = {} - def start_chat(self, thread_id: str, chat_id: str, user_id: str) -> None: + def start_chat(self, thread_id: str, chat_id: str, member_id: str) -> None: """Start typing indicator for a chat-based delivery.""" - self._active[thread_id] = _ChatEntry(chat_id, user_id) + self._active[thread_id] = _ChatEntry(chat_id, member_id) self._chat_bus.publish( chat_id, { "event": "typing_start", - "data": {"user_id": user_id}, + "data": {"member_id": member_id}, }, ) @@ -48,6 +48,6 @@ def stop(self, thread_id: str) -> None: entry.chat_id, { "event": "typing_stop", - "data": {"user_id": entry.user_id}, + "data": {"member_id": entry.member_id}, }, ) diff --git a/backend/web/services/wechat_service.py b/backend/web/services/wechat_service.py index b19261d79..56d118fa1 100644 --- a/backend/web/services/wechat_service.py +++ b/backend/web/services/wechat_service.py @@ -5,7 +5,7 @@ Auth: Bearer token obtained via QR code scan. @@@per-user — each human user_id gets its own WeChatConnection. -user_id is the social identity in Leon's network (Supabase auth UUID for humans). +user_id is the member identity in Leon's network. Polling auto-starts at backend boot via lifespan.py for all users with saved credentials. @@@no-globals — WeChatConnectionRegistry lives on app.state, not module-level. 
diff --git a/backend/web/utils/helpers.py b/backend/web/utils/helpers.py index b652e04f1..3b50045d8 100644 --- a/backend/web/utils/helpers.py +++ b/backend/web/utils/helpers.py @@ -82,10 +82,8 @@ def _get_container() -> StorageContainer: _cached_thread_repo = None -def _get_thread_repo(thread_repo=None): - """Get cached ThreadRepo instance, or use injected repo.""" - if thread_repo is not None: - return thread_repo +def _get_thread_repo(): + """Get cached ThreadRepo instance.""" global _cached_thread_repo if _cached_thread_repo is not None: return _cached_thread_repo @@ -95,18 +93,18 @@ def _get_thread_repo(thread_repo=None): return _cached_thread_repo -def save_thread_config(thread_id: str, thread_repo=None, **fields: Any) -> None: - """Update specific fields of thread config.""" +def save_thread_config(thread_id: str, **fields: Any) -> None: + """Update specific fields of thread in SQLite.""" allowed = {"sandbox_type", "cwd", "model", "observation_provider"} updates = {k: v for k, v in fields.items() if k in allowed} if not updates: return - _get_thread_repo(thread_repo).update(thread_id, **updates) + _get_thread_repo().update(thread_id, **updates) -def load_thread_config(thread_id: str, thread_repo=None) -> dict[str, Any] | None: - """Load thread data. Returns dict or None.""" - return _get_thread_repo(thread_repo).get_by_id(thread_id) +def load_thread_config(thread_id: str) -> dict[str, Any] | None: + """Load thread data from SQLite. Returns dict or None.""" + return _get_thread_repo().get_by_id(thread_id) def get_active_observation_provider() -> str | None: @@ -138,7 +136,7 @@ def resolve_local_workspace_path( tc = load_thread_config(thread_id) if tc: thread_cwd = tc.get("cwd") - # @@@workspace-base-normalize - relative LOCAL_WORKSPACE_ROOT must be normalized, or target.relative_to(base) always fails. + # @@@workspace-base-normalize - relative LOCAL_WORKSPACE_ROOT must be normalized, or target.relative_to(base) always fails. 
# noqa: E501 base = Path(thread_cwd).resolve() if thread_cwd else local_workspace_root.resolve() if not raw_path: diff --git a/core/agents/communication/chat_tool_service.py b/core/agents/communication/chat_tool_service.py index f5464abb4..85310f1b1 100644 --- a/core/agents/communication/chat_tool_service.py +++ b/core/agents/communication/chat_tool_service.py @@ -1,7 +1,7 @@ """Chat tool service — 7 tools for entity-to-entity communication. -Tools use user_ids as parameters (human = Supabase auth UUID, agent = member_id). -Two users share at most one chat; the system auto-resolves user_id → chat. +Tools use user_ids/member_ids as parameters. +Two entities share at most one chat; the system auto-resolves entity_id -> chat. """ from __future__ import annotations @@ -91,14 +91,14 @@ def _parse_time_endpoint(s: str, now: float) -> float | None: class ChatToolService: """Registers 5 chat tools into ToolRegistry. - Each tool closure captures user_id (the calling agent's social identity = member_id). + Each tool closure captures entity_id (the calling agent's identity). 
""" def __init__( self, registry: ToolRegistry, user_id: str, - owner_user_id: str, + owner_id: str, *, entity_repo: Any = None, chat_service: Any = None, @@ -109,7 +109,7 @@ def __init__( runtime_fn: Any = None, ) -> None: self._user_id = user_id - self._owner_user_id = owner_user_id + self._owner_id = owner_id self._entities = entity_repo self._chat_service = chat_service self._chat_entities = chat_entity_repo @@ -126,18 +126,11 @@ def _register(self, registry: ToolRegistry) -> None: self._register_chat_search(registry) self._register_directory(registry) - def _resolve_name(self, user_id: str) -> str: - """Resolve display name: entity_repo (agents) → member_repo (humans).""" - e = self._entities.get_by_id(user_id) - if e: - return e.name - m = self._members.get_by_id(user_id) if self._members else None - return m.name if m else "unknown" - def _format_msgs(self, msgs: list, eid: str) -> str: lines = [] for m in msgs: - name = self._resolve_name(m.sender_id) + sender = self._entities.get_by_id(m.sender_id) + name = sender.name if sender else "unknown" tag = "you" if m.sender_id == eid else name lines.append(f"[{tag}]: {m.content}") return "\n".join(lines) @@ -182,7 +175,7 @@ def handle(unread_only: bool = False, limit: int = 20) -> str: id_str = f" [chat_id: {c['id']}]" else: other_id = others[0]["id"] if others else "" - id_str = f" [user_id: {other_id}]" if other_id else "" + id_str = f" [entity_id: {other_id}]" if other_id else "" lines.append(f"- {name}{id_str}{unread_str}{last_preview}") return "\n".join(lines) @@ -213,16 +206,17 @@ def handle(unread_only: bool = False, limit: int = 20) -> str: def _register_chat_read(self, registry: ToolRegistry) -> None: eid = self._user_id - def handle(user_id: str | None = None, chat_id: str | None = None, range: str | None = None) -> str: + def handle(entity_id: str | None = None, chat_id: str | None = None, range: str | None = None) -> str: if chat_id: pass # use chat_id directly - elif user_id: - chat_id = 
self._chat_entities.find_chat_between(eid, user_id) + elif entity_id: + chat_id = self._chat_entities.find_chat_between(eid, entity_id) if not chat_id: - name = self._resolve_name(user_id) + target = self._entities.get_by_id(entity_id) + name = target.name if target else entity_id return f"No chat history with {name}." else: - return "Provide user_id or chat_id." + return "Provide entity_id or chat_id." # @@@range-dispatch — if range is provided, use it regardless of unread state. if range: @@ -272,13 +266,11 @@ def handle(user_id: str | None = None, chat_id: str | None = None, range: str | "parameters": { "type": "object", "properties": { - "user_id": {"type": "string", "description": "user_id for 1:1 chat history"}, + "entity_id": {"type": "string", "description": "Entity_id for 1:1 chat history"}, "chat_id": {"type": "string", "description": "Chat_id for group chat history"}, "range": { "type": "string", - "description": ( - "History range. Negative index '-X:-Y' or time '-1h:', '2026-03-20:'. Positive indices NOT allowed." - ), + "description": "History range. Negative index '-X:-Y' or time '-1h:', '2026-03-20:'. 
Positive indices NOT allowed.", # noqa: E501 }, }, }, @@ -293,7 +285,7 @@ def _register_chat_send(self, registry: ToolRegistry) -> None: def handle( content: str, - user_id: str | None = None, + entity_id: str | None = None, chat_id: str | None = None, signal: str = "open", mentions: list[str] | None = None, @@ -303,19 +295,22 @@ def handle( target_name = "chat" if chat_id: - if not self._chat_entities.is_participant_in_chat(chat_id, eid): + if not self._chat_entities.is_member_in_chat(chat_id, eid): raise RuntimeError(f"You are not a member of chat {chat_id}") - elif user_id: - if user_id == eid: + elif entity_id: + if entity_id == eid: raise RuntimeError("Cannot send a message to yourself.") - target_name = self._resolve_name(user_id) - resolved_chat_id = self._chat_entities.find_chat_between(eid, user_id) + target = self._entities.get_by_id(entity_id) + if not target: + raise RuntimeError(f"Entity not found: {entity_id}") + target_name = target.name + resolved_chat_id = self._chat_entities.find_chat_between(eid, entity_id) if not resolved_chat_id: # New chat — no unread possible, create and send - chat = self._chat_service.find_or_create_chat([eid, user_id]) + chat = self._chat_service.find_or_create_chat([eid, entity_id]) resolved_chat_id = chat.id else: - raise RuntimeError("Provide user_id (for 1:1) or chat_id (for group)") + raise RuntimeError("Provide entity_id (for 1:1) or chat_id (for group)") # @@@read-before-write-gate — reject if unread messages exist unread = self._messages.count_unread(resolved_chat_id, eid) @@ -337,7 +332,7 @@ def handle( schema={ "name": "chat_send", "description": ( - "Send a message. Use user_id for 1:1 chats, chat_id for group chats.\n\n" + "Send a message. 
Use entity_id for 1:1 chats, chat_id for group chats.\n\n" "You MUST call chat_read() first if you have unread messages — sending will fail otherwise.\n\n" "Signal protocol — append to content:\n" " (no tag) = I expect a reply from you\n" @@ -349,7 +344,7 @@ def handle( "type": "object", "properties": { "content": {"type": "string", "description": "Message content"}, - "user_id": {"type": "string", "description": "Target user_id (for 1:1 chat)"}, + "entity_id": {"type": "string", "description": "Target entity_id (for 1:1 chat)"}, "chat_id": {"type": "string", "description": "Target chat_id (for group chat)"}, "signal": { "type": "string", @@ -374,16 +369,17 @@ def handle( def _register_chat_search(self, registry: ToolRegistry) -> None: eid = self._user_id - def handle(query: str, user_id: str | None = None) -> str: + def handle(query: str, entity_id: str | None = None) -> str: chat_id = None - if user_id: - chat_id = self._chat_entities.find_chat_between(eid, user_id) + if entity_id: + chat_id = self._chat_entities.find_chat_between(eid, entity_id) results = self._messages.search(query, chat_id=chat_id, limit=20) if not results: return f"No messages matching '{query}'." lines = [] for m in results: - name = self._resolve_name(m.sender_id) + sender = self._entities.get_by_id(m.sender_id) + name = sender.name if sender else "unknown" lines.append(f"[{name}] {m.content[:100]}") return "\n".join(lines) @@ -393,14 +389,14 @@ def handle(query: str, user_id: str | None = None) -> str: mode=ToolMode.INLINE, schema={ "name": "chat_search", - "description": "Search messages. Optionally filter by user_id.", + "description": "Search messages. 
Optionally filter by entity_id.", "parameters": { "type": "object", "properties": { "query": {"type": "string", "description": "Search query"}, - "user_id": { + "entity_id": { "type": "string", - "description": "Optional: only search in chat with this user", + "description": "Optional: only search in chat with this entity", }, }, "required": ["query"], @@ -415,35 +411,24 @@ def _register_directory(self, registry: ToolRegistry) -> None: eid = self._user_id def handle(search: str | None = None, type: str | None = None) -> str: + all_entities = self._entities.list_all() + entities = [e for e in all_entities if e.id != eid] + if type: + entities = [e for e in entities if e.type == type] + if search: + q = search.lower() + entities = [e for e in entities if q in e.name.lower()] + if not entities: + return "No entities found." lines = [] - all_members = self._members.list_all() if self._members else [] - member_map = {m.id: m for m in all_members} - - if type is None or type == "human": - for m in all_members: - if m.id == eid or m.type != "human": - continue - if search and search.lower() not in m.name.lower(): - continue - lines.append(f"- {m.name} [human] user_id={m.id}") - - if type is None or type == "agent": - all_entities = self._entities.list_all() - for e in all_entities: - if e.id == eid or e.type != "agent": - continue - if search and search.lower() not in e.name.lower(): - continue - member = member_map.get(e.member_id) - owner_info = "" - if member and member.owner_user_id: - owner = member_map.get(member.owner_user_id) - if owner: - owner_info = f" (owner: {owner.name})" - lines.append(f"- {e.name} [{e.type}] user_id={e.id}{owner_info}") - - if not lines: - return "No users found." 
+ for e in entities: + member = self._members.get_by_id(e.member_id) + owner_info = "" + if e.type == "agent" and member and member.owner_id: + owner_member = self._members.get_by_id(member.owner_id) + if owner_member: + owner_info = f" (owner: {owner_member.name})" + lines.append(f"- {e.name} [{e.type}] entity_id={e.id}{owner_info}") return "\n".join(lines) registry.register( @@ -452,7 +437,7 @@ def handle(search: str | None = None, type: str | None = None) -> str: mode=ToolMode.INLINE, schema={ "name": "directory", - "description": "Browse the user directory. Returns user_ids for use with chat_send, chat_read.", + "description": "Browse the entity directory. Returns user_ids for use with chat_send, chat_read.", "parameters": { "type": "object", "properties": { diff --git a/core/agents/registry.py b/core/agents/registry.py index f74f4f4ec..00614e2c3 100644 --- a/core/agents/registry.py +++ b/core/agents/registry.py @@ -10,7 +10,7 @@ from dataclasses import dataclass from pathlib import Path -from backend.web.core.storage_factory import make_agent_registry_repo +from storage.providers.sqlite.agent_registry_repo import SQLiteAgentRegistryRepo @dataclass @@ -29,11 +29,11 @@ class AgentRegistry: Persisted at ~/.leon/agent_registry.db """ - DEFAULT_DB_PATH = None # resolved by storage_factory + DEFAULT_DB_PATH = SQLiteAgentRegistryRepo.DEFAULT_DB_PATH def __init__(self, db_path: Path | None = None): self._lock = asyncio.Lock() - self._repo = make_agent_registry_repo() + self._repo = SQLiteAgentRegistryRepo(db_path or self.DEFAULT_DB_PATH) async def register(self, entry: AgentEntry) -> None: async with self._lock: diff --git a/core/agents/service.py b/core/agents/service.py index e7baff89b..2be0f11c8 100644 --- a/core/agents/service.py +++ b/core/agents/service.py @@ -46,10 +46,7 @@ }, "description": { "type": "string", - "description": ( - "Short description of what agent will do. Required when run_in_background is true; " - "shown in the background task indicator." 
- ), + "description": "Short description of what agent will do. Required when run_in_background is true; shown in the background task indicator.", # noqa: E501 }, "run_in_background": { "type": "boolean", diff --git a/core/runtime/agent.py b/core/runtime/agent.py index e4d7299c6..1d3faa9e0 100644 --- a/core/runtime/agent.py +++ b/core/runtime/agent.py @@ -164,9 +164,9 @@ def __init__( # Resolve virtual model name active_model = self.models_config.active.model if self.models_config.active else model_name if not active_model: - from config.schema import DEFAULT_MODEL # noqa: E402 + from config.schema import DEFAULT_MODEL as _fallback # noqa: N811 - active_model = DEFAULT_MODEL + active_model = _fallback # Member model override: agent.md's model field takes precedence over global config if hasattr(self, "_agent_override") and self._agent_override and self._agent_override.model: active_model = self._agent_override.model @@ -215,10 +215,8 @@ def __init__( # Initialize checkpointer and MCP tools self._aiosqlite_conn, mcp_tools = self._init_async_components() - # If in async context (running loop detected), _init_async_components - # skips init and returns (None, []). Distinguish from Postgres path - # which also returns conn=None but DID initialize successfully. 
- self._needs_async_init = self._aiosqlite_conn is None and self.checkpointer is None + # If in async context, mark as needing async initialization + self._needs_async_init = self._aiosqlite_conn is None # Set checkpointer to None if in async context (will be initialized later) if self._needs_async_init: @@ -244,20 +242,19 @@ def __init__( # @@@entity-identity — inject chat identity so agent knows who it is in the social layer if self._chat_repos: repos = self._chat_repos - uid = repos.get("user_id") - owner_uid = repos.get("owner_user_id", "") - if uid: + member_id = repos.get("member_id") + owner_member_id = repos.get("owner_member_id", "") + if member_id: entity_repo = repos.get("entity_repo") - entity = entity_repo.get_by_id(uid) if entity_repo else None - member_repo = repos.get("member_repo") - owner_row = member_repo.get_by_id(owner_uid) if member_repo and owner_uid else None - name = entity.name if entity else uid - owner_name = owner_row.name if owner_row else "unknown" + entity = entity_repo.get_by_id(member_id) if entity_repo else None + owner_entity = entity_repo.get_by_id(owner_member_id) if entity_repo and owner_member_id else None + name = entity.name if entity else member_id + owner_name = owner_entity.name if owner_entity else "unknown" self.system_prompt += ( f"\n\n**Chat Identity:**\n" f"- Your name: {name}\n" - f"- Your user_id: {uid}\n" - f"- Your owner: {owner_name} (user_id: {owner_uid})\n" + f"- Your member_id: {member_id}\n" + f"- Your owner: {owner_name} (member_id: {owner_member_id})\n" f"- When you receive a chat notification, READ the message with chat_read(), " f"then REPLY with chat_send(). 
Your text output goes to your owner's thread, " f"not to the chat — only chat_send() delivers to the other party.\n" @@ -305,7 +302,7 @@ async def ainit(self): # Initialize async components self._aiosqlite_conn = await self._init_checkpointer() - _mcp_tools = await self._init_mcp_tools() + await self._init_mcp_tools() # Update agent with checkpointer self.agent.checkpointer = self.checkpointer @@ -810,6 +807,7 @@ def _build_middleware_stack(self) -> list: # Get backends from sandbox fs_backend = self._sandbox.fs() + self._sandbox.shell() # 1. Monitor — second from outside; observes all model calls/responses. # Must come before PromptCaching/Memory/Steering so token counts @@ -867,7 +865,7 @@ def _add_memory_middleware(self, middleware: list) -> None: compaction_config = self.config.memory.compaction db_path = self.db_path - # @@@memory-storage-consumer - memory summary persistence must consume injected storage container, not fixed sqlite path. + # @@@memory-storage-consumer - memory summary persistence must consume injected storage container, not fixed sqlite path. # noqa: E501 summary_repo = self.storage_container.summary_repo() if self.storage_container is not None else None self._memory_middleware = MemoryMiddleware( context_limit=context_limit, @@ -1023,20 +1021,20 @@ def _init_services(self) -> None: except ImportError: self._taskboard_service = None - # @@@chat-tools - register chat tools for agents with user identity + # @@@chat-tools - register chat tools for agents with entity identity if self._chat_repos: repos = self._chat_repos - user_id = repos.get("user_id") - owner_user_id = repos.get("owner_user_id", "") - if user_id: + member_id = repos.get("member_id") + owner_member_id = repos.get("owner_member_id", "") + if member_id: from core.agents.communication.chat_tool_service import ChatToolService # @@@lazy-runtime — runtime isn't set yet at _init_services() time. # Pass a callable that resolves runtime lazily at tool call time. 
self._chat_tool_service = ChatToolService( registry=self._tool_registry, - user_id=user_id, - owner_user_id=owner_user_id, + user_id=member_id, + owner_id=owner_member_id, entity_repo=repos.get("entity_repo"), chat_service=repos.get("chat_service"), chat_entity_repo=repos.get("chat_entity_repo"), @@ -1047,18 +1045,18 @@ def _init_services(self) -> None: ) # @@@wechat-tools — register WeChat tools via lazy connection lookup - owner_uid = self._chat_repos.get("owner_user_id", "") if self._chat_repos else "" - if owner_uid: + owner_eid = self._chat_repos.get("owner_member_id", "") if self._chat_repos else "" + if owner_eid: try: from core.tools.wechat.service import WeChatToolService - def _get_wechat_conn(uid=owner_uid): + def _get_wechat_conn(eid=owner_eid): """Lazy lookup — returns None if registry not on app.state yet.""" try: from backend.web.main import app registry = getattr(app.state, "wechat_registry", None) - return registry.get(uid) if registry else None + return registry.get(eid) if registry else None except Exception: return None @@ -1127,33 +1125,15 @@ async def _init_mcp_tools(self) -> list: return [] async def _init_checkpointer(self): - """Initialize async checkpointer for conversation persistence. + """Initialize async checkpointer for conversation persistence""" + from storage.providers.sqlite.kernel import connect_sqlite_async - Uses Postgres (via Supabase) when LEON_STORAGE_STRATEGY=supabase, - otherwise falls back to local SQLite. - """ - strategy = os.getenv("LEON_STORAGE_STRATEGY", "sqlite") - pg_url = os.getenv("LEON_POSTGRES_URL") - - if strategy == "supabase" and pg_url: - from langgraph.checkpoint.postgres.aio import AsyncPostgresSaver - - # from_conn_string is an async context manager; enter it and keep - # the reference so the connection pool stays open for the agent's lifetime. 
- self._pg_saver_ctx = AsyncPostgresSaver.from_conn_string(pg_url) - self.checkpointer = await self._pg_saver_ctx.__aenter__() - await self.checkpointer.setup() - return None # no SQLite conn to track - else: - from storage.providers.sqlite.kernel import connect_sqlite_async - - db_path = self.db_path - db_path.parent.mkdir(parents=True, exist_ok=True) - conn = await connect_sqlite_async(db_path) - self.checkpointer = AsyncSqliteSaver(conn) - await self.checkpointer.setup() - return conn - return conn + db_path = self.db_path + db_path.parent.mkdir(parents=True, exist_ok=True) + conn = await connect_sqlite_async(db_path) + self.checkpointer = AsyncSqliteSaver(conn) + await self.checkpointer.setup() + return conn def _is_tool_allowed(self, tool) -> bool: # Extract original tool name without mcp__ prefix @@ -1186,7 +1166,7 @@ def _build_system_prompt(self) -> str: prompt += self._build_common_prompt_sections() if self.allowed_file_extensions: - prompt += f"\n6. **File Type Restriction**: Only these extensions allowed: {', '.join(self.allowed_file_extensions)}\n" + prompt += f"\n6. **File Type Restriction**: Only these extensions allowed: {', '.join(self.allowed_file_extensions)}\n" # noqa: E501 return prompt @@ -1225,7 +1205,7 @@ def _build_rules_section(self) -> str: # Rule 1: Environment-specific if is_sandbox: if self._sandbox.name == "docker": - location_rule = "All file and command operations run in a local Docker container, NOT on the user's host filesystem." + location_rule = "All file and command operations run in a local Docker container, NOT on the user's host filesystem." # noqa: E501 else: location_rule = "All file and command operations run in a remote sandbox, NOT on the user's local machine." rules.append(f"1. **Sandbox Environment**: {location_rule} The sandbox is an isolated Linux environment.") @@ -1239,13 +1219,15 @@ def _build_rules_section(self) -> str: # Rule 3: Security if is_sandbox: - rules.append("3. **Security**: The sandbox is isolated. 
You can install packages, run any commands, and modify files freely.") + rules.append( + "3. **Security**: The sandbox is isolated. You can install packages, run any commands, and modify files freely." # noqa: E501 + ) else: rules.append("3. **Security**: Dangerous commands are blocked. All operations are logged.") # Rule 4: Tool priority rules.append( - """4. **Tool Priority**: When a built-in tool and an MCP tool (`mcp__*`) have the same functionality, use the built-in tool.""" + """4. **Tool Priority**: When a built-in tool and an MCP tool (`mcp__*`) have the same functionality, use the built-in tool.""" # noqa: E501 ) # Rule 5: Dedicated tools over shell @@ -1257,7 +1239,7 @@ def _build_rules_section(self) -> str: - Reserve `Bash` for: git, package managers, build tools, tests, and other system operations.""") # Rule 6: Background task description - rules.append("""6. **Background Task Description**: When using `Bash` or `Agent` with `run_in_background: true`, always include a clear `description` parameter. # noqa: E501 + rules.append("""6. **Background Task Description**: When using `Bash` or `Agent` with `run_in_background: true`, always include a clear `description` parameter. - The description is shown to the user in the background task indicator. - Keep it concise (5–10 words), action-oriented, e.g. "Run test suite", "Analyze API codebase". 
- Without a description, the raw command or agent name is shown, which is hard to read.""") diff --git a/core/runtime/middleware/memory/middleware.py b/core/runtime/middleware/memory/middleware.py index 8775e1c21..42e20c868 100644 --- a/core/runtime/middleware/memory/middleware.py +++ b/core/runtime/middleware/memory/middleware.py @@ -195,7 +195,7 @@ async def awrap_model_call( async def _do_compact(self, messages: list[Any], thread_id: str | None = None) -> list[Any]: """Execute compaction: summarize old messages, return compacted list.""" if self._runtime: - self._runtime.set_flag("is_compacting", True) + self._runtime.set_flag("isCompacting", True) try: to_summarize, to_keep = self.compactor.split_messages(messages) if len(to_summarize) < 2: @@ -239,7 +239,7 @@ async def _do_compact(self, messages: list[Any], thread_id: str | None = None) - return [summary_msg] + to_keep finally: if self._runtime: - self._runtime.set_flag("is_compacting", False) + self._runtime.set_flag("isCompacting", False) async def force_compact(self, messages: list[Any]) -> dict[str, Any] | None: """Manual compaction trigger (/compact command). 
Ignores threshold.""" @@ -252,7 +252,7 @@ async def force_compact(self, messages: list[Any]) -> dict[str, Any] | None: return None if self._runtime: - self._runtime.set_flag("is_compacting", True) + self._runtime.set_flag("isCompacting", True) try: summary_text = await self.compactor.compact(to_summarize, self._resolved_model) self._cached_summary = summary_text @@ -265,7 +265,7 @@ async def force_compact(self, messages: list[Any]) -> dict[str, Any] | None: } finally: if self._runtime: - self._runtime.set_flag("is_compacting", False) + self._runtime.set_flag("isCompacting", False) def _estimate_tokens(self, messages: list[Any]) -> int: """Estimate total tokens for messages (chars // 2).""" diff --git a/core/runtime/middleware/memory/summary_store.py b/core/runtime/middleware/memory/summary_store.py index 6fcff004c..fef2b4ea8 100644 --- a/core/runtime/middleware/memory/summary_store.py +++ b/core/runtime/middleware/memory/summary_store.py @@ -64,7 +64,7 @@ def __init__(self, db_path: Path | None = None, summary_repo: SummaryRepo | None if summary_repo is not None: self._repo = summary_repo else: - # @@@connect_injection - keep _connect as an indirection point so existing retry/rollback tests can patch it. + # @@@connect_injection - keep _connect as an indirection point so existing retry/rollback tests can patch it. 
# noqa: E501 self._repo = SQLiteSummaryRepo(db_path, connect_fn=lambda p: _connect(p)) self._ensure_tables() diff --git a/core/runtime/middleware/monitor/runtime.py b/core/runtime/middleware/monitor/runtime.py index 181629f56..c48b3ab77 100644 --- a/core/runtime/middleware/monitor/runtime.py +++ b/core/runtime/middleware/monitor/runtime.py @@ -6,12 +6,12 @@ from collections.abc import Callable from typing import Any -from .context_monitor import ContextMonitor -from .state_monitor import AgentFlags, AgentState, StateMonitor -from .token_monitor import TokenMonitor - logger = logging.getLogger(__name__) +from .context_monitor import ContextMonitor # noqa: E402 +from .state_monitor import AgentFlags, AgentState, StateMonitor # noqa: E402 +from .token_monitor import TokenMonitor # noqa: E402 + class AgentRuntime: """聚合所有 Monitor 的数据,提供统一的状态访问接口""" @@ -136,11 +136,11 @@ def get_status_line(self) -> str: parts = [f"[{self.current_state.value.upper()}]"] flag_names = [ - ("is_streaming", "streaming"), - ("is_compacting", "compacting"), - ("is_waiting", "waiting"), - ("is_blocked", "blocked"), - ("has_error", "error"), + ("isStreaming", "streaming"), + ("isCompacting", "compacting"), + ("isWaiting", "waiting"), + ("isBlocked", "blocked"), + ("hasError", "error"), ] for flag_attr, label in flag_names: if getattr(self.flags, flag_attr): diff --git a/core/runtime/middleware/monitor/state_monitor.py b/core/runtime/middleware/monitor/state_monitor.py index 51c8dcd56..bc1ead28a 100644 --- a/core/runtime/middleware/monitor/state_monitor.py +++ b/core/runtime/middleware/monitor/state_monitor.py @@ -27,13 +27,13 @@ class AgentState(Enum): class AgentFlags: """Agent 状态标志位""" - is_streaming: bool = False - is_compacting: bool = False - is_waiting: bool = False - is_blocked: bool = False - can_interrupt: bool = True - has_error: bool = False - needs_recovery: bool = False + isStreaming: bool = False # noqa: N815 + isCompacting: bool = False # noqa: N815 + isWaiting: bool = False # 
noqa: N815 + isBlocked: bool = False # noqa: N815 + canInterrupt: bool = True # noqa: N815 + hasError: bool = False # noqa: N815 + needsRecovery: bool = False # noqa: N815 # 状态转移规则 @@ -109,7 +109,7 @@ def mark_ready(self) -> bool: def mark_error(self, error: Exception | None = None) -> bool: """标记为错误状态""" - self.flags.has_error = True + self.flags.hasError = True if error is not None: # @@@error-snapshot - Capture a small, inspectable error snapshot for debugging. self.last_error_type = type(error).__name__ @@ -147,11 +147,11 @@ def get_metrics(self) -> dict[str, Any]: return { "state": self.state.value, "flags": { - "streaming": self.flags.is_streaming, - "compacting": self.flags.is_compacting, - "waiting": self.flags.is_waiting, - "blocked": self.flags.is_blocked, - "error": self.flags.has_error, + "streaming": self.flags.isStreaming, + "compacting": self.flags.isCompacting, + "waiting": self.flags.isWaiting, + "blocked": self.flags.isBlocked, + "error": self.flags.hasError, }, "error": { "type": self.last_error_type, diff --git a/core/runtime/middleware/queue/manager.py b/core/runtime/middleware/queue/manager.py index fd155b94d..53625512f 100644 --- a/core/runtime/middleware/queue/manager.py +++ b/core/runtime/middleware/queue/manager.py @@ -48,14 +48,7 @@ def enqueue( is_steer: bool = False, ) -> None: """Persist a message. 
Fires wake handler after INSERT.""" - self._repo.enqueue( - thread_id, - content, - notification_type, - source=source, - sender_id=sender_id, - sender_name=sender_name, - ) + self._repo.enqueue(thread_id, content, notification_type, source=source, sender_id=sender_id, sender_name=sender_name) with self._wake_lock: handler = self._wake_handlers.get(thread_id) if handler: diff --git a/core/runtime/middleware/queue/middleware.py b/core/runtime/middleware/queue/middleware.py index ccb9c30be..8a61829c1 100644 --- a/core/runtime/middleware/queue/middleware.py +++ b/core/runtime/middleware/queue/middleware.py @@ -10,8 +10,10 @@ from collections.abc import Awaitable, Callable from typing import Any -from langchain_core.messages import HumanMessage, ToolMessage -from langchain_core.runnables import RunnableConfig +logger = logging.getLogger(__name__) + +from langchain_core.messages import HumanMessage, ToolMessage # noqa: E402 +from langchain_core.runnables import RunnableConfig # noqa: E402 try: from langchain.agents.middleware.types import ( @@ -31,9 +33,7 @@ class AgentMiddleware: ModelCallResult = Any ToolCallRequest = Any -from .manager import MessageQueueManager - -logger = logging.getLogger(__name__) +from .manager import MessageQueueManager # noqa: E402 class SteeringMiddleware(AgentMiddleware): diff --git a/core/tools/command/middleware.py b/core/tools/command/middleware.py index dcd6453a4..0aa5145c4 100644 --- a/core/tools/command/middleware.py +++ b/core/tools/command/middleware.py @@ -11,17 +11,17 @@ from pathlib import Path from typing import Any -from langchain.agents.middleware import AgentMiddleware, AgentState -from langchain.agents.middleware.types import ModelRequest, ModelResponse -from langchain.tools import ToolRuntime, tool -from langgraph.runtime import Runtime +logger = logging.getLogger(__name__) -from sandbox.shell_output import normalize_pty_result +from langchain.agents.middleware import AgentMiddleware, AgentState # noqa: E402 +from 
langchain.agents.middleware.types import ModelRequest, ModelResponse # noqa: E402 +from langchain.tools import ToolRuntime, tool # noqa: E402 +from langgraph.runtime import Runtime # noqa: E402 -from .base import AsyncCommand, BaseExecutor -from .dispatcher import get_executor, get_shell_info +from sandbox.shell_output import normalize_pty_result # noqa: E402 -logger = logging.getLogger(__name__) +from .base import AsyncCommand, BaseExecutor # noqa: E402 +from .dispatcher import get_executor, get_shell_info # noqa: E402 RUN_COMMAND_TOOL_NAME = "run_command" COMMAND_STATUS_TOOL_NAME = "command_status" @@ -112,10 +112,10 @@ def __init__( async def run_command_tool( *, runtime: ToolRuntime[CommandState], - CommandLine: str, - Cwd: str | None = None, - Blocking: bool = True, - Timeout: int | None = None, + CommandLine: str, # noqa: N803 + Cwd: str | None = None, # noqa: N803 + Blocking: bool = True, # noqa: N803 + Timeout: int | None = None, # noqa: N803 ) -> str: """Execute shell command. OS auto-detects shell (mac→zsh, linux→bash, win→powershell). @@ -136,8 +136,8 @@ async def run_command_tool( async def command_status_tool( *, runtime: ToolRuntime[CommandState], - CommandId: str, - WaitDurationSeconds: int = 0, + CommandId: str, # noqa: N803 + WaitDurationSeconds: int = 0, # noqa: N803 ) -> str: """Check status of a non-blocking command. diff --git a/core/tools/command/service.py b/core/tools/command/service.py index 475289b9c..d63f5dac2 100644 --- a/core/tools/command/service.py +++ b/core/tools/command/service.py @@ -73,14 +73,11 @@ def _register(self, registry: ToolRegistry) -> None: }, "description": { "type": "string", - "description": ( - "Human-readable description of what this command does. " - "Required when run_in_background is true; shown in the background task indicator." - ), + "description": "Human-readable description of what this command does. 
Required when run_in_background is true; shown in the background task indicator.", # noqa: E501 }, "run_in_background": { "type": "boolean", - "description": "Run in background (default: false). Returns task ID for status queries.", + "description": "Run in background (default: false). Returns task ID for status queries.", # noqa: E501 }, "timeout": { "type": "integer", diff --git a/core/tools/filesystem/middleware.py b/core/tools/filesystem/middleware.py index 0844d892a..7adf9d7b7 100644 --- a/core/tools/filesystem/middleware.py +++ b/core/tools/filesystem/middleware.py @@ -242,7 +242,7 @@ def _read_file_impl(self, file_path: str, offset: int = 0, limit: int | None = N file_path=file_path, file_type=None, # type: ignore[arg-type] error=( - f"File content ({file_size:,} bytes) exceeds maximum allowed size ({limits.max_size_bytes:,} bytes).\n" + f"File content ({file_size:,} bytes) exceeds maximum allowed size ({limits.max_size_bytes:,} bytes).\n" # noqa: E501 f"Use offset and limit parameters to read specific sections.\n" f"Total lines: {total_lines}" ), @@ -255,7 +255,7 @@ def _read_file_impl(self, file_path: str, offset: int = 0, limit: int | None = N file_path=file_path, file_type=None, # type: ignore[arg-type] error=( - f"File content (~{estimated_tokens:,} tokens) exceeds maximum allowed tokens ({limits.max_tokens:,}).\n" + f"File content (~{estimated_tokens:,} tokens) exceeds maximum allowed tokens ({limits.max_tokens:,}).\n" # noqa: E501 f"Use offset and limit parameters to read specific sections.\n" f"Total lines: {total_lines}" ), @@ -299,7 +299,7 @@ def _make_read_tool_message(self, result: ReadResult, tool_call_id: str) -> Tool """Create ToolMessage from ReadResult, using content_blocks for images.""" if result.content_blocks: image_desc = ( - f"Image file: {result.file_path}\nSize: {result.total_size:,} bytes\nReturned as image content block for vision model." 
+ f"Image file: {result.file_path}\nSize: {result.total_size:,} bytes\nReturned as image content block for vision model." # noqa: E501 ) return ToolMessage( content=image_desc, @@ -361,7 +361,7 @@ def _edit_file_impl(self, file_path: str, old_string: str, new_string: str) -> s count = content.count(old_string) if count > 1: - return f"String appears {count} times in file (not unique)\n Use multi_edit or provide more context to make it unique" + return f"String appears {count} times in file (not unique)\n Use multi_edit or provide more context to make it unique" # noqa: E501 new_content = content.replace(old_string, new_string) result = self.backend.write_file(str(resolved), new_content) @@ -468,9 +468,7 @@ def _get_tool_schemas(self) -> list[dict]: "type": "function", "function": { "name": self.TOOL_READ_FILE, - "description": ( - "Read file content (text/code/images/PDF/PPTX/Notebook). Images return as content_blocks. Path must be absolute." - ), + "description": "Read file content (text/code/images/PDF/PPTX/Notebook). Images return as content_blocks. 
Path must be absolute.", # noqa: E501 "parameters": { "type": "object", "properties": { diff --git a/core/tools/filesystem/service.py b/core/tools/filesystem/service.py index a8cf1c9c6..c203738bb 100644 --- a/core/tools/filesystem/service.py +++ b/core/tools/filesystem/service.py @@ -294,7 +294,7 @@ def _read_file(self, file_path: str, offset: int = 0, limit: int | None = None) if file_size > limits.max_size_bytes: total_lines = self._count_lines(resolved) return ( - f"File content ({file_size:,} bytes) exceeds maximum allowed size ({limits.max_size_bytes:,} bytes).\n" + f"File content ({file_size:,} bytes) exceeds maximum allowed size ({limits.max_size_bytes:,} bytes).\n" # noqa: E501 f"Use offset and limit parameters to read specific sections.\n" f"Total lines: {total_lines}" ) @@ -302,7 +302,7 @@ def _read_file(self, file_path: str, offset: int = 0, limit: int | None = None) if estimated_tokens > limits.max_tokens: total_lines = self._count_lines(resolved) return ( - f"File content (~{estimated_tokens:,} tokens) exceeds maximum allowed tokens ({limits.max_tokens:,}).\n" + f"File content (~{estimated_tokens:,} tokens) exceeds maximum allowed tokens ({limits.max_tokens:,}).\n" # noqa: E501 f"Use offset and limit parameters to read specific sections.\n" f"Total lines: {total_lines}" ) diff --git a/core/tools/task/service.py b/core/tools/task/service.py index b6e9f6f96..a5dacacf1 100644 --- a/core/tools/task/service.py +++ b/core/tools/task/service.py @@ -12,9 +12,9 @@ from pathlib import Path from typing import Any -from backend.web.core.storage_factory import make_tool_task_repo from core.runtime.registry import ToolEntry, ToolMode, ToolRegistry from core.tools.task.types import Task, TaskStatus +from storage.providers.sqlite.tool_task_repo import SQLiteToolTaskRepo logger = logging.getLogger(__name__) @@ -143,7 +143,7 @@ def __init__( db_path: Path | None = None, thread_id: str | None = None, ): - self._repo = make_tool_task_repo(db_path or DEFAULT_DB_PATH) + 
self._repo = SQLiteToolTaskRepo(db_path or DEFAULT_DB_PATH) self._default_thread_id = thread_id # override for tests / single-agent TUI self._register(registry) logger.info("TaskService initialized (db=%s)", db_path or DEFAULT_DB_PATH) diff --git a/core/tools/web/middleware.py b/core/tools/web/middleware.py index fedf1708e..7f722e060 100644 --- a/core/tools/web/middleware.py +++ b/core/tools/web/middleware.py @@ -101,10 +101,10 @@ def __init__( async def _web_search_impl( self, - Query: str, - MaxResults: int | None = None, - IncludeDomains: list[str] | None = None, - ExcludeDomains: list[str] | None = None, + Query: str, # noqa: N803 + MaxResults: int | None = None, # noqa: N803 + IncludeDomains: list[str] | None = None, # noqa: N803 + ExcludeDomains: list[str] | None = None, # noqa: N803 ) -> SearchResult: """ 实现 web_search(多提供商降级) @@ -132,7 +132,7 @@ async def _web_search_impl( return SearchResult(query=Query, error="All search providers failed") - async def _fetch_impl(self, Url: str, Prompt: str) -> str: + async def _fetch_impl(self, Url: str, Prompt: str) -> str: # noqa: N803 """ Fetch URL content and extract information using AI. @@ -176,7 +176,7 @@ async def _ai_extract(self, content: str, prompt: str, url: str) -> str: model = self._extraction_model if model is None: preview = content[:5000] if len(content) > 5000 else content - return f"AI extraction unavailable. Configure an extraction model (e.g. leon:mini) in settings. Raw content:\n\n{preview}" + return f"AI extraction unavailable. Configure an extraction model (e.g. leon:mini) in settings. Raw content:\n\n{preview}" # noqa: E501 extraction_prompt = ( f"You are extracting information from a web page.\n" @@ -236,7 +236,7 @@ def _get_tool_definitions(self) -> list[dict]: "type": "function", "function": { "name": self.TOOL_FETCH, - "description": "Fetch a URL and extract specific information using AI. 
Returns processed content, not raw HTML.", + "description": "Fetch a URL and extract specific information using AI. Returns processed content, not raw HTML.", # noqa: E501 "parameters": { "type": "object", "properties": { diff --git a/core/tools/web/service.py b/core/tools/web/service.py index 077db9b70..5aa16bd8d 100644 --- a/core/tools/web/service.py +++ b/core/tools/web/service.py @@ -99,7 +99,7 @@ def _register(self, registry: ToolRegistry) -> None: mode=ToolMode.INLINE, schema={ "name": "WebFetch", - "description": "Fetch a URL and extract specific information using AI. Returns processed content, not raw HTML.", + "description": "Fetch a URL and extract specific information using AI. Returns processed content, not raw HTML.", # noqa: E501 "parameters": { "type": "object", "properties": { diff --git a/core/tools/wechat/service.py b/core/tools/wechat/service.py index 9cb57e233..331d1d7fb 100644 --- a/core/tools/wechat/service.py +++ b/core/tools/wechat/service.py @@ -1,7 +1,7 @@ """WeChat tool service — registers wechat_send and wechat_contacts into ToolRegistry. Thin wrapper: actual API calls go through WeChatConnection (backend). -Tools are scoped to the agent's owner's user_id (the human who connected WeChat). +Tools are scoped to the agent's owner's entity_id (the human who connected WeChat). """ from __future__ import annotations @@ -97,7 +97,7 @@ def handle() -> str: mode=ToolMode.INLINE, schema={ "name": "wechat_contacts", - "description": "List WeChat contacts who have messaged the bot. Returns user_ids for use with wechat_send.", + "description": "List WeChat contacts who have messaged the bot. 
Returns user_ids for use with wechat_send.", # noqa: E501 "parameters": { "type": "object", "properties": {}, diff --git a/docs/en/cli.md b/docs/en/cli.md new file mode 100644 index 000000000..215132b25 --- /dev/null +++ b/docs/en/cli.md @@ -0,0 +1,129 @@ +🇬🇧 English | [🇨🇳 中文](../zh/cli.md) + +# CLI / TUI Reference + +Mycel includes a terminal interface for quick interactions, scripting, and sandbox management. The primary interface is the [Web UI](../../README.md#quick-start) — the CLI is a complementary tool for power users and development. + +## Installation + +```bash +pip install leonai +# or +uv tool install leonai +``` + +## First Run + +```bash +leonai +``` + +If no API key is detected, the interactive config wizard starts automatically: + +1. **API_KEY** (required) — Your OpenAI-compatible API key +2. **BASE_URL** (optional) — API endpoint, defaults to `https://api.openai.com/v1` +3. **MODEL_NAME** (optional) — Model to use, defaults to `claude-sonnet-4-5-20250929` + +Configuration is saved to `~/.leon/config.env`. 
+ +```bash +leonai config # Re-run wizard +leonai config show # View current settings +``` + +## Usage + +```bash +leonai # Start a new conversation +leonai -c # Continue last conversation +leonai --thread # Resume a specific thread +leonai --model gpt-4o # Use a specific model +leonai --workspace /path/to/dir # Set working directory +``` + +## Thread Management + +```bash +leonai thread ls # List all conversations +leonai thread history # View conversation history +leonai thread rewind # Rewind to checkpoint +leonai thread rm # Delete a thread +``` + +## Non-interactive Mode + +```bash +leonai run "Explain this codebase" # Single message +echo "Summarize this" | leonai run --stdin # Read from stdin +leonai run -i # Interactive without TUI +``` + +## Sandbox via CLI + +### Starting with a Sandbox + +```bash +leonai --sandbox docker # Start with Docker sandbox +leonai --sandbox e2b # Start with E2B cloud sandbox +leonai --sandbox daytona # Start with Daytona sandbox +leonai --sandbox agentbay # Start with AgentBay sandbox +``` + +When resuming a thread (`-c` or `--thread`), the sandbox provider is auto-detected from the database — no need to pass `--sandbox` again. + +Resolution order: CLI flag → auto-detect from thread → `LEON_SANDBOX` env var → `local` (no sandbox). + +### Session Management + +```bash +leonai sandbox # Open sandbox manager TUI +leonai sandbox ls # List active sessions +leonai sandbox new docker # Create a new Docker session +leonai sandbox pause # Pause session (state preserved) +leonai sandbox resume # Resume paused session +leonai sandbox rm # Delete session +leonai sandbox metrics # View CPU/RAM/disk usage +leonai sandbox delete # Alias for rm +leonai sandbox destroy-all-sessions # Destroy all (requires confirmation) +``` + +Session IDs can be abbreviated — any unique prefix works. 
+ +### Headless / Scripting + +```bash +leonai run --sandbox docker -d "Run echo hello" # Single command +leonai run --sandbox e2b -i # Interactive without TUI +``` + +### TUI Manager Keybindings + +Launch with `leonai sandbox` (no subcommand): + +| Key | Action | +|-----|--------| +| `r` | Refresh session list | +| `n` | Create new session | +| `d` | Delete selected session | +| `p` | Pause selected session | +| `u` | Resume selected session | +| `m` | Show metrics | +| `q` | Quit | + +## LLM Provider Examples + +Mycel uses the OpenAI-compatible API format. Any provider that speaks this protocol works. + +| Provider | BASE_URL | MODEL_NAME | +|----------|----------|------------| +| OpenAI | `https://api.openai.com/v1` | `gpt-4o` | +| OpenRouter | `https://openrouter.ai/api/v1` | `anthropic/claude-sonnet-4-5-20250929` | +| DeepSeek | `https://api.deepseek.com/v1` | `deepseek-chat` | + +Environment variables override `~/.leon/config.env`: + +```bash +export OPENAI_API_KEY="your-key" +export OPENAI_BASE_URL="https://api.openai.com/v1" +export MODEL_NAME="gpt-4o" +``` diff --git a/docs/en/configuration.md b/docs/en/configuration.md new file mode 100644 index 000000000..25e9a65c7 --- /dev/null +++ b/docs/en/configuration.md @@ -0,0 +1,666 @@ +English | [中文](../zh/configuration.md) + +# Mycel Configuration Guide + +Mycel uses a split configuration system: **runtime.json** for behavior settings, **models.json** for model/provider identity, and **config.env** for quick API key setup. Each config file follows a three-tier merge with system defaults, user overrides, and project overrides. 
+ +## Quick Setup (First Run) + +On first launch without an API key, Mycel automatically opens the config wizard: + +```bash +leonai config # Interactive wizard: API key, base URL, model name +leonai config show # Show current config.env values +``` + +The wizard writes `~/.leon/config.env` with three values: + +```env +OPENAI_API_KEY=sk-xxx +OPENAI_BASE_URL=https://api.openai.com/v1 +MODEL_NAME=claude-sonnet-4-5-20250929 +``` + +This is enough to start using Mycel. The sections below cover advanced configuration. + +## Config File Locations + +Mycel has three separate config domains, each with its own file: + +| Domain | Filename | Purpose | +|--------|----------|---------| +| Runtime behavior | `runtime.json` | Tools, memory, MCP, skills, security | +| Model identity | `models.json` | Providers, API keys, virtual model mapping | +| Observation | `observation.json` | Langfuse / LangSmith tracing | +| Quick setup | `config.env` | API key + base URL (loaded to env vars) | +| Sandbox | `~/.leon/sandboxes/.json` | Per-sandbox-provider config | + +Each JSON config file is loaded from three tiers (highest priority first): + +1. **Project config**: `.leon/` in workspace root +2. **User config**: `~/.leon/` in home directory +3. **System defaults**: Built-in defaults in `config/defaults/` + +CLI arguments (`--model`, `--workspace`, etc.) override everything. + +### Merge Strategy + +- **runtime / memory / tools**: Deep merge across all tiers (fields from higher-priority tiers override lower) +- **mcp / skills**: Lookup merge (first tier that defines it wins, no merging) +- **system_prompt**: Lookup (project > user > system) +- **providers / mapping** (models.json): Deep merge per-key +- **pool** (models.json): Last wins (no list merge) +- **catalog / virtual_models** (models.json): System-only, never overridden + +## Runtime Configuration (runtime.json) + +Controls agent behavior, tools, memory, MCP, and skills. 
**Not** where model/provider identity goes (that's `models.json`). + +Full structure with defaults (from `config/defaults/runtime.json`): + +```json +{ + "context_limit": 0, + "enable_audit_log": true, + "allowed_extensions": null, + "block_dangerous_commands": true, + "block_network_commands": false, + "queue_mode": "steer", + "temperature": null, + "max_tokens": null, + "model_kwargs": {}, + "memory": { + "pruning": { + "enabled": true, + "soft_trim_chars": 3000, + "hard_clear_threshold": 10000, + "protect_recent": 3, + "trim_tool_results": true + }, + "compaction": { + "enabled": true, + "reserve_tokens": 16384, + "keep_recent_tokens": 20000, + "min_messages": 20 + } + }, + "system_prompt": null, + "tools": { + "filesystem": { + "enabled": true, + "tools": { + "read_file": { "enabled": true, "max_file_size": 10485760 }, + "write_file": true, + "edit_file": true, + "list_dir": true + } + }, + "search": { + "enabled": true, + "max_results": 50, + "tools": { + "grep": { "enabled": true, "max_file_size": 10485760 }, + "glob": true + } + }, + "web": { + "enabled": true, + "timeout": 15, + "tools": { + "web_search": { + "enabled": true, + "max_results": 5, + "tavily_api_key": null, + "exa_api_key": null, + "firecrawl_api_key": null + }, + "fetch": { + "enabled": true, + "jina_api_key": null + } + } + }, + "command": { + "enabled": true, + "tools": { + "run_command": { "enabled": true, "default_timeout": 120 }, + "command_status": true + } + } + }, + "mcp": { + "enabled": true, + "servers": {} + }, + "skills": { + "enabled": true, + "paths": ["~/.leon/skills"], + "skills": {} + } +} +``` + +> **Note:** The file is flat -- there is no `"runtime"` wrapper key. The config loader wraps these fields internally at load time. Fields like `spill_buffer`, `tool_modes`, `workspace_root` are optional overrides not present in the defaults file; see [Tools](#tools) below for details. 
+ +### Runtime Fields + +| Field | Type | Default | Description | +|-------|------|---------|-------------| +| `temperature` | float (0-2) | null (model default) | Sampling temperature | +| `max_tokens` | int | null (model default) | Max output tokens | +| `context_limit` | int | 0 | Context window limit in tokens. 0 = auto-detect from model | +| `enable_audit_log` | bool | true | Enable audit logging | +| `allowed_extensions` | list | null | Restrict file access to these extensions. null = all | +| `block_dangerous_commands` | bool | true | Block dangerous shell commands (rm -rf, etc.) | +| `block_network_commands` | bool | false | Block network commands | + +### Memory + +**Pruning** trims old tool results to save context space: + +| Field | Default | Description | +|-------|---------|-------------| +| `soft_trim_chars` | 3000 | Soft-trim tool results longer than this | +| `hard_clear_threshold` | 10000 | Hard-clear tool results longer than this | +| `protect_recent` | 3 | Keep last N tool messages untrimmed | +| `trim_tool_results` | true | Enable tool result trimming | + +**Compaction** summarizes old conversation history via LLM: + +| Field | Default | Description | +|-------|---------|-------------| +| `reserve_tokens` | 16384 | Reserve space for new messages | +| `keep_recent_tokens` | 20000 | Keep recent messages verbatim | +| `min_messages` | 20 | Minimum messages before compaction triggers | + +### Tools + +Each tool group (filesystem, search, web, command) has an `enabled` flag and a `tools` sub-object. Both the group and individual tool must be enabled for the tool to be available. 
+ +Available tools and their config-level names: + +| Config Name | UI/Tool Catalog Name | Group | +|------------|----------------------|-------| +| `read_file` | Read | filesystem | +| `write_file` | Write | filesystem | +| `edit_file` | Edit | filesystem | +| `list_dir` | list_dir | filesystem | +| `grep` | Grep | search | +| `glob` | Glob | search | +| `web_search` | WebSearch | web | +| `fetch` | WebFetch | web | +| `run_command` | Bash | command | +| `command_status` | - | command | + +**Spill buffer** automatically writes large tool outputs to temp files instead of inlining them in conversation. This is an optional override -- it is not part of the system defaults file: + +```json +{ + "tools": { + "spill_buffer": { + "default_threshold": 50000, + "thresholds": { + "Grep": 20000, + "run_command": 100000 + } + } + } +} +``` + +**Tool modes** can be set per-tool to `"inline"` (default) or `"deferred"`. Also an optional override, not in defaults: + +```json +{ + "tools": { + "tool_modes": { + "TaskCreate": "deferred", + "TaskList": "deferred" + } + } +} +``` + +### Example: Project-level runtime.json + +`.leon/runtime.json` in your project root: + +```json +{ + "allowed_extensions": ["py", "js", "ts", "json", "yaml", "md"], + "block_dangerous_commands": true, + "tools": { + "web": { "enabled": false }, + "command": { + "tools": { + "run_command": { "default_timeout": 300 } + } + } + }, + "system_prompt": "You are a Python expert working on a FastAPI project." +} +``` + +## Models Configuration (models.json) + +Controls which model to use, provider credentials, and virtual model mapping. 
+ +### Structure + +```json +{ + "active": { + "model": "claude-sonnet-4-5-20250929", + "provider": null, + "based_on": null, + "context_limit": null + }, + "providers": { + "anthropic": { + "api_key": "${ANTHROPIC_API_KEY}", + "base_url": "https://api.anthropic.com" + }, + "openai": { + "api_key": "${OPENAI_API_KEY}", + "base_url": "https://api.openai.com/v1" + } + }, + "mapping": { ... }, + "pool": { + "enabled": [], + "custom": [], + "custom_config": {} + } +} +``` + +### Providers + +Define API credentials per provider. The `active.provider` field determines which provider's credentials are used: + +```json +{ + "providers": { + "openrouter": { + "api_key": "${OPENROUTER_API_KEY}", + "base_url": "https://openrouter.ai/api/v1" + } + }, + "active": { + "model": "anthropic/claude-sonnet-4-5", + "provider": "openrouter" + } +} +``` + +### API Key Resolution + +Mycel looks for an API key in this order: +1. Active provider's `api_key` from `models.json` +2. Any provider with an `api_key` in `models.json` +3. Environment variables: `ANTHROPIC_API_KEY` > `OPENAI_API_KEY` > `OPENROUTER_API_KEY` + +### Provider Auto-Detection + +When no explicit `provider` is set, Mycel auto-detects from environment: +- `ANTHROPIC_API_KEY` set -> provider = `anthropic` +- `OPENAI_API_KEY` set -> provider = `openai` +- `OPENROUTER_API_KEY` set -> provider = `openai` + +### Custom Models + +Add models not in the built-in catalog via the `pool.custom` list: + +```json +{ + "pool": { + "custom": ["deepseek-chat", "qwen-72b"], + "custom_config": { + "deepseek-chat": { + "based_on": "gpt-4o", + "context_limit": 65536 + } + } + } +} +``` + +`based_on` tells Mycel which model family to use for tokenizer/context detection. `context_limit` overrides the auto-detected context window. 
+ +## Virtual Models + +Mycel provides four virtual model aliases (`leon:*`) that map to concrete models with preset parameters: + +| Virtual Name | Concrete Model | Provider | Extras | Use Case | +|-------------|---------------|----------|--------|----------| +| `leon:mini` | claude-haiku-4-5-20250929 | anthropic | - | Fast, simple tasks | +| `leon:medium` | claude-sonnet-4-5-20250929 | anthropic | - | Balanced, daily work | +| `leon:large` | claude-opus-4-6 | anthropic | - | Complex reasoning | +| `leon:max` | claude-opus-4-6 | anthropic | temperature=0.0 | Maximum precision | + +Usage: + +```bash +leonai --model leon:mini +leonai --model leon:large +``` + +Or in `~/.leon/models.json`: + +```json +{ + "active": { + "model": "leon:large" + } +} +``` + +### Overriding Virtual Model Mapping + +You can remap virtual models to different concrete models in your user or project `models.json`: + +```json +{ + "mapping": { + "leon:medium": { + "model": "gpt-4o", + "provider": "openai" + } + } +} +``` + +When you override just the `model` without specifying `provider`, the inherited provider is cleared (you need to re-specify it if it differs from auto-detection). + +## Agent Profiles + +Mycel ships with four built-in agent profiles defined as Markdown files with YAML frontmatter: + +| Name | Description | +|------|-------------| +| `general` | Full-capability general agent, default sub-agent | +| `bash` | Shell command specialist | +| `explore` | Codebase exploration and analysis | +| `plan` | Task planning and decomposition | + +Usage: + +```bash +leonai --agent general +leonai --agent explore +``` + +### Agent File Format + +Agents are `.md` files with YAML frontmatter: + +```markdown +--- +name: my-agent +description: What this agent does +tools: + - "*" +model: leon:large +--- + +Your system prompt goes here. This is the body of the Markdown file. 
+``` + +Frontmatter fields: + +| Field | Required | Description | +|-------|----------|-------------| +| `name` | yes | Agent identifier | +| `description` | no | Human-readable description | +| `tools` | no | Tool whitelist. `["*"]` = all tools (default) | +| `model` | no | Model override for this agent | + +### Agent Loading Priority + +Agents are loaded from multiple directories (later overrides earlier by name): + +1. Built-in agents: `config/defaults/agents/*.md` +2. User agents: `~/.leon/agents/*.md` +3. Project agents: `.leon/agents/*.md` +4. Member agents: `~/.leon/members//agent.md` (highest priority) + +## Tool Configuration + +The full tool catalog includes tools beyond the runtime.json config groups: + +| Tool | Group | Mode | Description | +|------|-------|------|-------------| +| Read | filesystem | inline | Read file contents | +| Write | filesystem | inline | Write file | +| Edit | filesystem | inline | Edit file (exact replacement) | +| list_dir | filesystem | inline | List directory contents | +| Grep | search | inline | Regex search (ripgrep-based) | +| Glob | search | inline | Glob pattern file search | +| Bash | command | inline | Execute shell commands | +| WebSearch | web | inline | Internet search | +| WebFetch | web | inline | Fetch web page with AI extraction | +| Agent | agent | inline | Spawn sub-agent | +| SendMessage | agent | inline | Send message to another agent | +| TaskOutput | agent | inline | Get background task output | +| TaskStop | agent | inline | Stop background task | +| TaskCreate | todo | deferred | Create todo task | +| TaskGet | todo | deferred | Get task details | +| TaskList | todo | deferred | List all tasks | +| TaskUpdate | todo | deferred | Update task status | +| load_skill | skills | inline | Load a skill | +| tool_search | system | inline | Search available tools | + +Tools in `deferred` mode run asynchronously without blocking the conversation. 
+ +## MCP Configuration + +MCP servers are configured in `runtime.json` under the `mcp` key. Each server can use either stdio (command + args) or HTTP transport (url): + +```json +{ + "mcp": { + "enabled": true, + "servers": { + "github": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-github"], + "env": { + "GITHUB_TOKEN": "${GITHUB_TOKEN}" + }, + "allowed_tools": null + }, + "remote-server": { + "url": "https://mcp.example.com/sse", + "allowed_tools": ["search", "fetch"] + } + } + } +} +``` + +MCP server fields: + +| Field | Description | +|-------|-------------| +| `command` | Executable to launch (stdio transport) | +| `args` | Command arguments | +| `env` | Environment variables passed to the server process | +| `url` | URL for streamable HTTP transport (alternative to command) | +| `allowed_tools` | Whitelist of tool names. null = all tools exposed | + +### Member-level MCP + +Members (`~/.leon/members//`) can have their own `.mcp.json` following the same format as Claude's MCP config: + +```json +{ + "mcpServers": { + "supabase": { + "command": "npx", + "args": ["-y", "@supabase/mcp-server"], + "env": { "SUPABASE_URL": "..." } + } + } +} +``` + +## Skills Configuration + +```json +{ + "skills": { + "enabled": true, + "paths": ["~/.leon/skills", "./skills"], + "skills": { + "code-review": true, + "debugging": false + } + } +} +``` + +Skill paths are directories containing skill subdirectories. Each skill has a `SKILL.md` file. The `skills` map enables/disables individual skills by name. + +Skill paths must exist on disk -- the validator requires each directory in `paths` to be present. Mycel does not create them automatically. 
+ +## Observation Configuration (observation.json) + +Configure observability providers for tracing agent runs: + +```json +{ + "active": "langfuse", + "langfuse": { + "secret_key": "${LANGFUSE_SECRET_KEY}", + "public_key": "${LANGFUSE_PUBLIC_KEY}", + "host": "https://cloud.langfuse.com" + }, + "langsmith": { + "api_key": "${LANGSMITH_API_KEY}", + "project": "leon", + "endpoint": null + } +} +``` + +Set `active` to `"langfuse"`, `"langsmith"`, or `null` (disabled). + +## Sandbox Configuration + +Sandbox configs live at `~/.leon/sandboxes/.json`. Each file defines a sandbox provider: + +```json +{ + "provider": "daytona", + "on_exit": "pause", + "daytona": { + "api_key": "your-key", + "api_url": "https://app.daytona.io/api", + "target": "local", + "cwd": "/home/daytona" + } +} +``` + +Supported providers: `local`, `docker`, `e2b`, `daytona`, `agentbay`. + +Select at launch: + +```bash +leonai --sandbox daytona # Uses ~/.leon/sandboxes/daytona.json +leonai --sandbox docker # Uses ~/.leon/sandboxes/docker.json +export LEON_SANDBOX=e2b # Or set via env var +``` + +Provider-specific fields: + +| Provider | Fields | +|----------|--------| +| docker | `image`, `mount_path`, `docker_host` | +| e2b | `api_key`, `template`, `cwd`, `timeout` | +| daytona | `api_key`, `api_url`, `target`, `cwd` | +| agentbay | `api_key`, `region_id`, `context_path`, `image_id` | + +## Environment Variables + +### In config.env + +`~/.leon/config.env` is a simple key=value file loaded into environment variables at startup (only if the variable is not already set): + +```env +OPENAI_API_KEY=sk-xxx +OPENAI_BASE_URL=https://openrouter.ai/api/v1 +MODEL_NAME=claude-sonnet-4-5-20250929 +``` + +The `OPENAI_BASE_URL` value is auto-normalized to include `/v1` if missing. 
+ +### In JSON config files + +All string values in `runtime.json`, `models.json`, and `observation.json` support: + +- `${VAR}` -- environment variable expansion +- `~` -- home directory expansion + +```json +{ + "providers": { + "anthropic": { + "api_key": "${ANTHROPIC_API_KEY}" + } + } +} +``` + +### Relevant Environment Variables + +| Variable | Purpose | +|----------|---------| +| `OPENAI_API_KEY` | API key (OpenAI-compatible format) | +| `OPENAI_BASE_URL` | API base URL | +| `ANTHROPIC_API_KEY` | Anthropic API key | +| `ANTHROPIC_BASE_URL` | Anthropic base URL | +| `OPENROUTER_API_KEY` | OpenRouter API key | +| `MODEL_NAME` | Override model name | +| `LEON_SANDBOX` | Default sandbox name | +| `LEON_SANDBOX_DB_PATH` | Override sandbox database path | +| `TAVILY_API_KEY` | Tavily web search API key | +| `JINA_API_KEY` | Jina AI fetch API key | +| `EXA_API_KEY` | Exa search API key | +| `FIRECRAWL_API_KEY` | Firecrawl API key | +| `AGENTBAY_API_KEY` | AgentBay API key | +| `E2B_API_KEY` | E2B API key | +| `DAYTONA_API_KEY` | Daytona API key | + +## CLI Reference + +```bash +leonai # Start new session (TUI) +leonai -c # Continue last session +leonai --model leon:large # Override model +leonai --agent explore # Use agent preset +leonai --workspace /path # Set workspace root +leonai --sandbox docker # Use sandbox config +leonai --thread # Resume specific thread + +leonai config # Interactive config wizard +leonai config show # Show current config.env + +leonai thread ls # List all threads +leonai thread history # Show thread history +leonai thread rewind # Rewind to checkpoint +leonai thread rm # Delete thread + +leonai sandbox # Sandbox manager TUI +leonai sandbox ls # List sandbox sessions +leonai sandbox new [provider] # Create session +leonai sandbox pause # Pause session +leonai sandbox resume # Resume session +leonai sandbox rm # Delete session +leonai sandbox metrics # Show resource metrics + +leonai run "message" # Non-interactive single message +leonai run 
--stdin # Read messages from stdin +leonai run -i # Interactive mode (no TUI) +leonai run -d # With debug output +``` diff --git a/docs/en/deployment.md b/docs/en/deployment.md new file mode 100644 index 000000000..f661709f3 --- /dev/null +++ b/docs/en/deployment.md @@ -0,0 +1,328 @@ +# Mycel Deployment Guide + +English | [中文](../zh/deployment.md) + +## Prerequisites + +### Required +- Python 3.11 or higher +- `uv` package manager ([installation guide](https://docs.astral.sh/uv/getting-started/installation/)) +- Git + +### Optional (by provider) +- **Docker**: Docker daemon for local sandbox provider +- **E2B**: API key from [e2b.dev](https://e2b.dev) +- **Daytona**: API key from [daytona.io](https://daytona.io) or self-hosted instance +- **AgentBay**: API key and region access + +--- + +## Installation + +### 1. Clone Repository + +```bash +git clone https://github.com/yourusername/leonai.git +cd leonai +``` + +### 2. Install Dependencies + +```bash +# Install all dependencies including sandbox providers +uv pip install -e ".[all]" + +# Or install specific providers only +uv pip install -e ".[e2b]" # E2B only +uv pip install -e ".[daytona]" # Daytona only +uv pip install -e ".[sandbox]" # All sandbox providers +``` + +--- + +## Configuration + +### User Config Directory + +Mycel stores configuration in `~/.leon/`: + +``` +~/.leon/ +├── config.json # Main configuration +├── config.env # Environment variables +├── models.json # LLM provider mappings +├── sandboxes/ # Sandbox provider configs +│ ├── docker.json +│ ├── e2b.json +│ ├── daytona_saas.json +│ └── daytona_selfhost.json +└── leon.db # SQLite database +``` + +### Environment Variables + +Create `~/.leon/config.env`: + +```bash +# LLM Provider (OpenRouter example) +ANTHROPIC_API_KEY=your_openrouter_key +ANTHROPIC_BASE_URL=https://openrouter.ai/api/v1 + +# Sandbox Providers +E2B_API_KEY=your_e2b_key +DAYTONA_API_KEY=your_daytona_key +AGENTBAY_API_KEY=your_agentbay_key + +# Optional: Supabase (if using remote 
storage) +SUPABASE_URL=https://your-project.supabase.co +SUPABASE_KEY=your_supabase_key +``` + +--- + +## Sandbox Provider Setup + +### Local (Default) + +No configuration needed. Uses local filesystem. + +```bash +# Test local sandbox +leon --sandbox local +``` + +### Docker + +**Requirements:** Docker daemon running + +Create `~/.leon/sandboxes/docker.json`: + +```json +{ + "provider": "docker", + "on_exit": "destroy", + "docker": { + "image": "python:3.11-slim", + "mount_path": "/workspace" + } +} +``` + +**Troubleshooting:** +- If Docker CLI hangs, check proxy environment variables +- Mycel strips `http_proxy`/`https_proxy` when calling Docker CLI +- Use `docker_host` config to override Docker socket path + +### E2B + +**Requirements:** E2B API key + +Create `~/.leon/sandboxes/e2b.json`: + +```json +{ + "provider": "e2b", + "on_exit": "pause", + "e2b": { + "api_key": "${E2B_API_KEY}", + "template": "base", + "cwd": "/home/user", + "timeout": 300 + } +} +``` + +### Daytona SaaS + +**Requirements:** Daytona account and API key + +Create `~/.leon/sandboxes/daytona_saas.json`: + +```json +{ + "provider": "daytona", + "on_exit": "pause", + "daytona": { + "api_key": "${DAYTONA_API_KEY}", + "api_url": "https://app.daytona.io/api", + "target": "local", + "cwd": "/home/daytona" + } +} +``` + +### Daytona Self-Hosted + +**Requirements:** Self-hosted Daytona instance + +**Critical:** Self-hosted Daytona requires: +1. Runner container with bash at `/usr/bin/bash` +2. Workspace image with bash at `/usr/bin/bash` +3. Runner on bridge network (for workspace container access) +4. 
Daytona Proxy accessible on port 4000 (for file operations) + +Create `~/.leon/sandboxes/daytona_selfhost.json`: + +```json +{ + "provider": "daytona", + "on_exit": "pause", + "daytona": { + "api_key": "${DAYTONA_API_KEY}", + "api_url": "http://localhost:3986/api", + "target": "us", + "cwd": "/workspace" + } +} +``` + +**Docker Compose Configuration:** + +```yaml +services: + daytona-runner: + image: your-runner-image-with-bash + environment: + - RUNNER_DOMAIN=runner # NOT localhost! + networks: + - default + - bridge # Required for workspace access + # ... other config + +networks: + bridge: + external: true +``` + +**Network Configuration:** + +The runner must be on both the compose network AND the default bridge network where workspace containers run. Add to `/etc/hosts` on runner: + +``` +127.0.0.1 proxy.localhost +``` + +**Troubleshooting:** +- "fork/exec /usr/bin/bash: no such file" → Workspace image missing bash +- "Failed to create sandbox within 60s" → Network isolation, check runner networks +- File operations fail → Daytona Proxy (port 4000) not accessible + +### AgentBay + +**Requirements:** AgentBay API key and region access + +Create `~/.leon/sandboxes/agentbay.json`: + +```json +{ + "provider": "agentbay", + "on_exit": "pause", + "agentbay": { + "api_key": "${AGENTBAY_API_KEY}", + "region_id": "ap-southeast-1", + "context_path": "/home/wuying" + } +} +``` + +--- + +## Verification + +### Health Check + +```bash +# Check Mycel installation +leon --version + +# List available sandboxes +leonai sandbox ls + +# Test sandbox provider +leon --sandbox docker +``` + +### Test Command Execution + +```python +from sandbox import SandboxConfig, create_sandbox + +config = SandboxConfig.load("docker") +sbx = create_sandbox(config) + +# Create session +session = sbx.create_session() + +# Execute command +result = sbx.execute(session.session_id, "echo 'Hello from sandbox'") +print(result.output) + +# Cleanup +sbx.destroy_session(session.session_id) +``` + +--- + 
+## Common Issues + +### "Could not import module 'main'" + +Backend startup failed. Check: +- Are you in the correct directory? +- Is the virtual environment activated? +- Use full path to uvicorn: `.venv/bin/uvicorn` + +### "SOCKS proxy error" from LLM client + +Shell environment has `all_proxy=socks5://...` set. Unset before starting: + +```bash +env -u ALL_PROXY -u all_proxy uvicorn main:app +``` + +### Docker provider hangs + +Proxy environment variables inherited by Docker CLI. Mycel strips these automatically, but if issues persist, check `docker_host` configuration. + +### Daytona PTY bootstrap fails + +Check: +1. Workspace image has bash at `/usr/bin/bash` +2. Runner has bash at `/usr/bin/bash` +3. Runner is on bridge network +4. Daytona Proxy (port 4000) is accessible + +--- + +## Production Deployment + +### Database + +Mycel uses SQLite by default (`~/.leon/leon.db`). For production: + +1. **Backup regularly:** + ```bash + cp ~/.leon/leon.db ~/.leon/leon.db.backup + ``` + +2. **Consider PostgreSQL** for multi-user deployments (requires code changes) + +### Security + +- Store API keys in `~/.leon/config.env`, never in code +- Use environment variable substitution in config files: `"${API_KEY}"` +- Restrict file permissions: `chmod 600 ~/.leon/config.env` + +### Monitoring + +- Backend logs: Check stdout/stderr from uvicorn +- Sandbox logs: Provider-specific (Docker logs, E2B dashboard, etc.) 
+- Database: Monitor `~/.leon/leon.db` size and query performance + +--- + +## Next Steps + +- See [SANDBOX.md](../sandbox/SANDBOX.md) for detailed sandbox provider documentation +- See [TROUBLESHOOTING.md](../TROUBLESHOOTING.md) for common issues and solutions +- See example configs in `examples/sandboxes/` diff --git a/docs/en/multi-agent-chat.md b/docs/en/multi-agent-chat.md new file mode 100644 index 000000000..02ebc6592 --- /dev/null +++ b/docs/en/multi-agent-chat.md @@ -0,0 +1,204 @@ +English | [中文](../zh/multi-agent-chat.md) + +# Multi-Agent Chat + +Mycel includes an Entity-Chat system that enables structured communication between humans and AI agents, and between agents themselves. This guide covers the core concepts, how to create agents, and how the messaging system works. + +## Core Concepts + +The Entity-Chat system has three layers: + +### Members + +A **Member** is a template -- the "class" that defines an agent's identity and capabilities. Members are stored as file bundles under `~/.leon/members//`: + +``` +~/.leon/members/m_AbCdEfGhIjKl/ + agent.md # Identity: name, description, model, system prompt (YAML frontmatter) + meta.json # Status (draft/active), version, timestamps + runtime.json # Enabled tools and skills + rules/ # Behavioral rules (one .md per rule) + agents/ # Sub-agent definitions + skills/ # Skill directories + .mcp.json # MCP server configuration +``` + +Member types: +- `human` -- A human user +- `mycel_agent` -- An AI agent built with Mycel + +Each agent member has an **owner** (the human member who created it). The built-in `Mycel` member (`__leon__`) is available to everyone. + +### Entities + +An **Entity** is a social identity -- the "instance" that participates in chats. Think of it as a profile in a messaging app. 
+ +- Each Member can have multiple Entities (e.g., the same agent template deployed in different contexts) +- An Entity has a `type` (`human` or `agent`), a `name`, an optional avatar, and a `thread_id` linking it to its agent brain +- Entity IDs follow the format `{member_id}-{seq}` (member ID + sequence number) + +The key distinction: **Member = who you are. Entity = how you appear in chat.** + +### Threads + +A **Thread** is an agent's running brain -- its conversation state, memory, and execution context. Each agent Entity is bound to exactly one Thread. When a message arrives, the system routes it to the Entity's Thread, waking the agent to process it. + +Human Entities do not have Threads -- humans interact through the Web UI directly. + +## Architecture Overview + +``` +Human (Web UI) + | + v +[Entity: human] ---chat_send---> [Entity: agent] + | + v + [Thread: agent brain] + | + Agent processes message, + uses chat tools to respond + | + v + [Entity: agent] ---chat_send---> [Entity: human] + | + v + Web UI (SSE push) +``` + +Messages flow through Chats (conversations between Entities). A Chat between two Entities is automatically created on first contact. Group chats with 3+ entities are also supported. + +## Creating an Agent Member (Web UI) + +1. Open the Web UI and navigate to the Members page +2. Click "Create" to start a new agent member +3. Fill in the basics: + - **Name** -- The agent's display name + - **Description** -- What this agent does +4. Configure the agent: + - **System Prompt** -- The agent's core instructions (written in the `agent.md` body) + - **Tools** -- Enable/disable specific tools (file operations, search, web, commands) + - **Rules** -- Add behavioral rules as individual markdown files + - **Sub-Agents** -- Define specialized sub-agents with their own tool sets + - **MCP Servers** -- Connect external tool servers + - **Skills** -- Enable marketplace skills +5. 
Set the status to "active" and publish + +The backend creates: +- A `MemberRow` in SQLite (`members` table) with a generated `m_` ID +- A file bundle under `~/.leon/members//` +- An Entity and Thread are created when the agent is first used in a chat + +## How Agents Communicate + +Agents have five built-in chat tools registered in their tool registry: + +### `directory` + +Browse all known entities. Returns entity IDs needed for other tools. + +``` +directory(search="Alice", type="human") +-> - Alice [human] entity_id=m_abc123-1 +``` + +### `chats` + +List the agent's active chats with unread counts and last message preview. + +``` +chats(unread_only=true) +-> - Alice [entity_id: m_abc123-1] (3 unread) -- last: "Can you help me with..." +``` + +### `chat_read` + +Read message history in a chat. Automatically marks messages as read. + +``` +chat_read(entity_id="m_abc123-1", limit=10) +-> [Alice]: Can you help me with this bug? + [you]: Sure, let me take a look. +``` + +### `chat_send` + +Send a message. The agent must read unread messages before sending (enforced by the system). + +``` +chat_send(content="Here's the fix.", entity_id="m_abc123-1") +``` + +**Signal protocol** controls conversation flow: +- No signal (default) -- "I expect a reply" +- `signal: "yield"` -- "I'm done; reply only if you want to" +- `signal: "close"` -- "Conversation over, do not reply" + +### `chat_search` + +Search through message history across all chats or within a specific chat. + +``` +chat_search(query="bug fix", entity_id="m_abc123-1") +``` + +## How Human-Agent Chat Works + +When a human sends a message through the Web UI: + +1. The frontend calls `POST /api/chats/{chat_id}/messages` with the message content and the human's entity ID +2. The `ChatService` stores the message and publishes it to the `ChatEventBus` (SSE for real-time UI updates) +3. 
For each non-sender agent entity in the chat, the delivery system: + - Checks the **delivery strategy** (contact-level block/mute, chat-level mute, @mention overrides) + - If delivery is allowed, formats a lightweight notification (no message content -- the agent must `chat_read` to see it) + - Enqueues the notification into the agent's message queue + - Wakes the agent's Thread if it was idle (cold-wake) +4. The agent wakes, sees the notification, calls `chat_read` to get the actual messages, processes them, and responds via `chat_send` +5. The agent's response flows back through the same pipeline -- stored, broadcast via SSE, delivered to other participants + +### Real-time Updates + +The Web UI subscribes to `GET /api/chats/{chat_id}/events` (Server-Sent Events) for live updates: +- `message` events for new messages +- Typing indicators when an agent is processing +- All events are pushed without polling + +## Contact and Delivery System + +Entities can manage relationships with other entities: + +- **Normal** -- Full delivery (default) +- **Muted** -- Messages stored but no notification sent to the agent. @mentions override mute. +- **Blocked** -- Messages are silently dropped for this entity + +Chat-level muting is also supported -- mute a specific chat without affecting the contact relationship. + +These controls let you manage noisy agents or prevent unwanted interactions without deleting chats. 
+ +## API Reference + +Key endpoints for the Entity-Chat system: + +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/api/entities` | GET | List all chattable entities | +| `/api/members` | GET | List agent members (templates) | +| `/api/chats` | GET | List chats for current user | +| `/api/chats` | POST | Create a new chat (1:1 or group) | +| `/api/chats/{id}/messages` | GET | List messages in a chat | +| `/api/chats/{id}/messages` | POST | Send a message | +| `/api/chats/{id}/read` | POST | Mark chat as read | +| `/api/chats/{id}/events` | GET | SSE stream for real-time events | +| `/api/chats/{id}/mute` | POST | Mute/unmute a chat | +| `/api/entities/contacts` | POST | Set contact relationship (block/mute/normal) | + +## Data Storage + +The Entity-Chat system uses SQLite databases: + +| Database | Tables | +|----------|--------| +| `~/.leon/leon.db` | `members`, `entities`, `accounts` | +| `~/.leon/chat.db` | `chats`, `chat_entities`, `chat_messages`, `contacts` | + +Member configuration files live on the filesystem under `~/.leon/members/`. The SQLite tables store relational data (ownership, identity, chat state) while the file bundles store the agent's full configuration. diff --git a/docs/en/product-primitives.md b/docs/en/product-primitives.md new file mode 100644 index 000000000..6f7fcf5b2 --- /dev/null +++ b/docs/en/product-primitives.md @@ -0,0 +1,144 @@ +# Mycel Product Primitives + +🇬🇧 English | [🇨🇳 中文](../zh/product-primitives.md) + +## Core Philosophy + +> An Agent has all the capabilities it needs -- the key is whether it has the corresponding resources. + +Capabilities are innate; resources are granted. With resources, an agent can act. Without them, it cannot. 
+ +## Six Primitives + +| Primitive | Term | Meaning | Example | +|-----------|------|---------|---------| +| **Thread** | Thread | A single interaction session | A user's conversation with an Agent | +| **Member** | Member | The Agent performing work | Main Agent, Sub-Agent | +| **Task** | Task | Work to be completed | User instructions, decomposed subtasks | +| **Resource** | Resource | A fundamental interaction surface available to the Agent | File system, terminal, browser, phone | +| **Connection** | Connection | An external service the Agent connects to | GitHub, Slack, Jira (MCP) | +| **Model** | Model | The AI brain | Mini / Medium / Large / Max | + +### Relationship Diagram + +``` +Thread +├── Member (who does the work) +│ ├── Main Agent +│ └── Sub-Agent × N +├── Task (what to do) +│ ├── Task A → assigned to Member 1 +│ └── Task B → assigned to Member 2 +├── Resource (what to use) ← usage rights assigned to Members +│ ├── File system +│ ├── Terminal +│ └── Browser +├── Connection (which external services are connected) +│ ├── GitHub +│ └── Slack +└── Model (which brain to think with) +``` + +## The Essential Difference Between "Resources" and "Connections" + +### Resource + +The **fundamental channels** through which an Agent interacts with the world. Each resource opens an entire interaction dimension: + +| Resource | World It Opens | What the Agent Can Do | +|----------|---------------|----------------------| +| File system | Data world | Read/write files, manage projects | +| Terminal | Command world | Execute system commands, run programs | +| Browser | Web world | Browse pages, operate web applications | +| Phone | App world | Operate mobile apps, test applications | +| Camera | Visual world | See the physical environment (future) | +| Microphone | Audio world | Receive voice input (future) | + +### Connection + +**External services** the Agent connects to (via MCP protocol). 
Point-to-point data channels: + +- GitHub, Slack, Jira, databases, Supabase, etc. +- Plug one in, gain one more; unplug it, lose one +- Does not change the Agent's interaction dimensions -- only adds information sources + +### Distinction Criteria + +| | Resource | Connection | +|---|---|---| +| Essence | Interaction dimension | Data pipeline | +| Granularity | An entire world | A single service | +| Interaction mode | Perception + control | Request-response | +| User perception | "What the Agent can do" | "What services the Agent is connected to" | + +## Ownership and Usage Rights + +- The platform/user **owns** resources (ownership) +- When a thread is created, it **authorizes** which resources are available (usage rights) +- The main Agent can **delegate** resource usage rights to Sub-Agents +- Different Agents can have different resource permissions + +## Resource Page Design Direction + +### Principles + +1. **Resources are the star, Providers are implementation details** -- Users care about "what the Agent has", not "which cloud vendor it uses" +2. **Atomic granularity** -- Each resource is presented independently, enabled/disabled independently +3. **Provider abstraction** -- Don't expose configuration forms; use icons + cards instead + +### User Perspective (Goal) + +``` +Resources Source +├── ✓ File system ~/projects/app Local +├── ✓ Terminal Local +├── ○ Browser (click to enable) Playwright +└── ○ Phone (click to connect) Not configured + +Connections +├── ✓ GitHub +├── ✓ Supabase +└── ○ Slack (not connected) +``` + +### Where Providers Fit + +Providers (Local / AgentBay / Docker / E2B / Daytona) determine **where the file system and terminal come from**: + +- Choose Local → File system = local disk, Terminal = local shell +- Choose AgentBay → File system = cloud VM, Terminal = cloud shell, + Browser +- Choose Docker → File system = inside container, Terminal = container shell + +A Provider is a **source attribute** of a resource, not a top-level concept. 
It appears in settings as "Runtime Mode": + +``` +Runtime Mode + ● Local (file system and terminal on your computer) + ○ Cloud (file system and terminal on a cloud machine) +``` + +### Abstracting the Capability Matrix + +The problem with the current design (provider × capability matrix table): +- The perspective is Provider-first ("what does this Provider support") +- It should be Resource-first ("I need this resource -- who can provide it") +- The dot matrix is too "database-style" -- should be replaced with icons + cards + toggles + +## Terminology Mapping + +| User Sees | Code / Technical Concept | Notes | +|-----------|-------------------------|-------| +| Resource | Sandbox capabilities | File system, terminal, browser, phone | +| Connection | MCP Server | External service integration | +| Runtime Mode | Sandbox Provider | Local / AgentBay / Docker | +| Thread | Thread | thread_id | +| Member | Agent / Sub-Agent | LeonAgent instance | +| Task | Task | TaskMiddleware | +| Model | Model | leon:mini/medium/large/max | + +## Design Anti-Patterns + +- Do not use the word "sandbox" in the user interface +- Do not make users choose a Provider every time they create a new thread +- Do not expose Provider configuration forms directly to users +- Do not conflate resources and connections (they are different layers) diff --git a/docs/en/sandbox.md b/docs/en/sandbox.md new file mode 100644 index 000000000..c1458dd45 --- /dev/null +++ b/docs/en/sandbox.md @@ -0,0 +1,221 @@ +🇬🇧 English | [🇨🇳 中文](../zh/sandbox.md) + +# Sandbox + +Mycel's sandbox system runs agent operations (file I/O, shell commands) in isolated environments instead of the host machine. Five providers are supported: **Local** (host passthrough), **Docker** (container), **E2B** (cloud), **Daytona** (cloud or self-hosted), and **AgentBay** (Alibaba Cloud). + +## Quick Start (Web UI) + +### 1. Configure a Provider + +Go to **Settings → Sandbox** in the Web UI. You'll see cards for each provider. 
Expand a card and fill in the required fields: + +| Provider | Required Fields | +|----------|----------------| +| **Docker** | Image name (default: `python:3.12-slim`), mount path | +| **E2B** | API key | +| **Daytona** | API key, API URL | +| **AgentBay** | API key | + +Click **Save**. The configuration is stored in `~/.leon/sandboxes/.json`. + +### 2. Create a Thread with Sandbox + +When starting a new conversation, use the **sandbox dropdown** in the top-left of the input area. Select your configured provider (e.g. `docker`). Then type your message and send. + +The thread is bound to that sandbox at creation — all subsequent agent runs in this thread use the same sandbox. + +### 3. Monitor Resources + +Go to the **Resources** page (sidebar icon). You'll see: + +- **Provider cards** — status (active/ready/unavailable) for each provider +- **Sandbox cards** — each running/paused sandbox with agent avatars, duration, and metrics (CPU/RAM/Disk) +- **Detail sheet** — click a sandbox card to see agents using it, detailed metrics, and a file browser + +## Example Configurations + +See [`examples/sandboxes/`](../../examples/sandboxes/) for ready-to-use config templates for all providers. Copy to `~/.leon/sandboxes/` or configure directly in the Web UI Settings. + +## Provider Configuration + +### Docker + +Requires Docker installed on the host. No API key needed. + +```json +{ + "provider": "docker", + "docker": { + "image": "python:3.12-slim", + "mount_path": "/workspace" + }, + "on_exit": "pause" +} +``` + +| Field | Default | Description | +|-------|---------|-------------| +| `docker.image` | `python:3.12-slim` | Docker image | +| `docker.mount_path` | `/workspace` | Working directory inside container | +| `on_exit` | `pause` | `pause` (preserve state) or `destroy` (clean slate) | + +### E2B + +Cloud sandbox service. Requires an [E2B](https://e2b.dev) API key. 
+ +```json +{ + "provider": "e2b", + "e2b": { + "api_key": "e2b_...", + "template": "base", + "cwd": "/home/user", + "timeout": 300 + }, + "on_exit": "pause" +} +``` + +### Daytona + +Supports both [Daytona](https://daytona.io) SaaS and self-hosted instances. + +**SaaS:** +```json +{ + "provider": "daytona", + "daytona": { + "api_key": "dtn_...", + "api_url": "https://app.daytona.io/api", + "cwd": "/home/daytona" + }, + "on_exit": "pause" +} +``` + +**Self-hosted:** +```json +{ + "provider": "daytona", + "daytona": { + "api_key": "dtn_...", + "api_url": "https://your-server.com/api", + "target": "local", + "cwd": "/home/daytona" + }, + "on_exit": "pause" +} +``` + +### AgentBay + +Alibaba Cloud sandbox (China region). Requires an AgentBay API key. + +```json +{ + "provider": "agentbay", + "agentbay": { + "api_key": "akm-...", + "region_id": "ap-southeast-1", + "context_path": "/home/wuying" + }, + "on_exit": "pause" +} +``` + +### Extra Dependencies + +Cloud sandbox providers require extra Python packages: + +```bash +uv sync --extra sandbox # AgentBay +uv sync --extra e2b # E2B +uv sync --extra daytona # Daytona +``` + +Docker works out of the box (uses the Docker CLI). + +### API Key Resolution + +API keys are resolved in order: + +1. Config file field (`e2b.api_key`, `daytona.api_key`, etc.) +2. Environment variable (`E2B_API_KEY`, `DAYTONA_API_KEY`, `AGENTBAY_API_KEY`) +3. `~/.leon/config.env` + +## Session Lifecycle + +Each thread is bound to one sandbox. Sessions follow a lifecycle: + +``` +idle → active → paused → destroyed +``` + +### `on_exit` Behavior + +| Value | Behavior | +|-------|----------| +| `pause` | Pause session on exit. Resume on next startup. Files, packages, processes preserved. | +| `destroy` | Kill session on exit. Clean slate next time. | + +`pause` is the default — you keep everything across restarts. 
+ +### Web UI Session Management + +From the **Resources** page: + +- View all sessions across all providers in a unified grid +- Click a session card → detail sheet with metrics + file browser +- Pause / Resume / Destroy via API (endpoints below) + +**API Endpoints:** + +| Action | Endpoint | +|--------|----------| +| List resources | `GET /api/monitor/resources` | +| Force refresh | `POST /api/monitor/resources/refresh` | +| Pause session | `POST /api/sandbox/sessions/{id}/pause?provider={type}` | +| Resume session | `POST /api/sandbox/sessions/{id}/resume?provider={type}` | +| Destroy session | `DELETE /api/sandbox/sessions/{id}?provider={type}` | + +## CLI Reference + +For terminal-based sandbox management, see the [CLI docs](cli.md#sandbox-management). + +Summary of CLI commands: + +```bash +leonai sandbox # TUI manager +leonai sandbox ls # List sessions +leonai sandbox new docker # Create session +leonai sandbox pause # Pause +leonai sandbox resume # Resume +leonai sandbox rm # Delete +leonai sandbox metrics # Show metrics +``` + +## Architecture + +The sandbox is an infrastructure layer below the middleware stack. It provides backends that existing middleware uses: + +``` +Agent + ├── sandbox.fs() → FileSystemBackend (used by FileSystemMiddleware) + └── sandbox.shell() → BaseExecutor (used by CommandMiddleware) +``` + +Middleware owns **policy** (validation, path rules, hooks). The backend owns **I/O** (where operations execute). Swapping the backend changes where operations happen without touching middleware logic. 
+ +### Session Tracking + +Sessions are tracked in SQLite (`~/.leon/sandbox.db`): + +| Table | Purpose | +|-------|---------| +| `sandbox_leases` | Lease lifecycle — provider, desired/observed state | +| `sandbox_instances` | Provider-side session IDs | +| `abstract_terminals` | Virtual terminals bound to thread + lease | +| `lease_resource_snapshots` | CPU, memory, disk metrics | + +Thread → sandbox mapping goes through `abstract_terminals.thread_id` → `abstract_terminals.lease_id`. diff --git a/docs/zh/cli.md b/docs/zh/cli.md new file mode 100644 index 000000000..a775d3efa --- /dev/null +++ b/docs/zh/cli.md @@ -0,0 +1,129 @@ +[🇬🇧 English](../en/cli.md) | 🇨🇳 中文 + +# CLI / TUI 参考 + +Mycel 包含终端界面,用于快速交互、脚本化操作和沙箱管理。项目的主界面是 [Web UI](../../README.zh.md#快速开始)——CLI 是面向开发者和高级用户的补充工具。 + +## 安装 + +```bash +pip install leonai +# 或 +uv tool install leonai +``` + +## 首次运行 + +```bash +leonai +``` + +如果未检测到 API 密钥,交互式配置向导会自动启动: + +1. **API_KEY**(必填)— OpenAI 兼容的 API 密钥 +2. **BASE_URL**(可选)— API 端点,默认 `https://api.openai.com/v1` +3. 
**MODEL_NAME**(可选)— 使用的模型,默认 `claude-sonnet-4-5-20250929` + +配置保存到 `~/.leon/config.env`。 + +```bash +leonai config # 重新运行向导 +leonai config show # 查看当前设置 +``` + +## 使用 + +```bash +leonai # 开始新对话 +leonai -c # 继续上次对话 +leonai --thread # 恢复指定对话 +leonai --model gpt-4o # 使用指定模型 +leonai --workspace /path/to/dir # 设置工作目录 +``` + +## 对话管理 + +```bash +leonai thread ls # 列出所有对话 +leonai thread history # 查看对话历史 +leonai thread rewind # 回退到检查点 +leonai thread rm # 删除对话 +``` + +## 非交互模式 + +```bash +leonai run "解释这个代码库" # 单条消息 +echo "总结一下" | leonai run --stdin # 从 stdin 读取 +leonai run -i # 无 TUI 交互模式 +``` + +## 通过 CLI 使用沙箱 + +### 启动时指定沙箱 + +```bash +leonai --sandbox docker # Docker 容器 +leonai --sandbox e2b # E2B 云沙箱 +leonai --sandbox daytona # Daytona 沙箱 +leonai --sandbox agentbay # AgentBay 沙箱 +``` + +恢复对话(`-c` 或 `--thread`)时,沙箱 Provider 从数据库自动检测,无需再次传 `--sandbox`。 + +解析顺序:CLI 参数 → 从对话自动检测 → `LEON_SANDBOX` 环境变量 → `local`(无沙箱)。 + +### 会话管理 + +```bash +leonai sandbox # 打开沙箱管理 TUI +leonai sandbox ls # 列出活跃会话 +leonai sandbox new docker # 创建新 Docker 会话 +leonai sandbox pause # 暂停会话(状态保留) +leonai sandbox resume # 恢复暂停的会话 +leonai sandbox rm # 删除会话 +leonai sandbox metrics # 查看 CPU/RAM/磁盘 +leonai sandbox delete # rm 的别名 +leonai sandbox destroy-all-sessions # 销毁所有(需确认) +``` + +会话 ID 可以缩写——任何唯一前缀都有效。 + +### Headless / 脚本化 + +```bash +leonai run --sandbox docker -d "Run echo hello" # 单条命令 +leonai run --sandbox e2b -i # 无 TUI 交互模式 +``` + +### TUI 管理器快捷键 + +用 `leonai sandbox`(不带子命令)启动: + +| 按键 | 操作 | +|------|------| +| `r` | 刷新会话列表 | +| `n` | 创建新会话 | +| `d` | 删除选中的会话 | +| `p` | 暂停选中的会话 | +| `u` | 恢复选中的会话 | +| `m` | 显示指标 | +| `q` | 退出 | + +## LLM 提供商示例 + +Mycel 使用 OpenAI 兼容 API 格式,支持任何兼容的提供商。 + +| 提供商 | BASE_URL | MODEL_NAME | +|--------|----------|------------| +| OpenAI | `https://api.openai.com/v1` | `gpt-4o` | +| OpenRouter | `https://openrouter.ai/api/v1` | `anthropic/claude-sonnet-4-5-20250929` | +| DeepSeek | `https://api.deepseek.com/v1` | `deepseek-chat` | + +环境变量优先于 `~/.leon/config.env`: 
+
+```bash
+export OPENAI_API_KEY="your-key"
+export OPENAI_BASE_URL="https://api.openai.com/v1"
+export MODEL_NAME="gpt-4o"
+```
diff --git a/docs/zh/configuration.md b/docs/zh/configuration.md
new file mode 100644
index 000000000..a073c0975
--- /dev/null
+++ b/docs/zh/configuration.md
@@ -0,0 +1,666 @@
+[English](../en/configuration.md) | 中文
+
+# Mycel 配置指南
+
+Mycel 使用分离式配置系统:**runtime.json** 控制行为设置,**models.json** 控制模型/提供商身份,**config.env** 用于快速 API 密钥设置。每个配置文件遵循三层合并策略:系统默认值、用户覆盖和项目覆盖。
+
+## 快速设置(首次运行)
+
+首次启动时如果没有 API 密钥,Mycel 会自动打开配置向导:
+
+```bash
+leonai config # 交互式向导:API 密钥、Base URL、模型名称
+leonai config show # 显示当前 config.env 的值
+```
+
+向导会将三个值写入 `~/.leon/config.env`:
+
+```env
+OPENAI_API_KEY=sk-xxx
+OPENAI_BASE_URL=https://api.openai.com/v1
+MODEL_NAME=claude-sonnet-4-5-20250929
+```
+
+这些就足够开始使用 Mycel 了。以下章节涵盖高级配置。
+
+## 配置文件位置
+
+Mycel 有几个独立的配置域,各自有对应的文件:
+
+| 域 | 文件名 | 用途 |
+|--------|----------|---------|
+| 运行时行为 | `runtime.json` | 工具、记忆、MCP、技能、安全 |
+| 模型身份 | `models.json` | 提供商、API 密钥、虚拟模型映射 |
+| 可观测性 | `observation.json` | Langfuse / LangSmith 追踪 |
+| 快速设置 | `config.env` | API 密钥 + Base URL(加载为环境变量) |
+| 沙箱 | `~/.leon/sandboxes/<name>.json` | 每个沙箱提供商的配置 |
+
+每个 JSON 配置文件从三个层级加载(优先级从高到低):
+
+1. **项目配置**:工作区根目录下的 `.leon/`
+2. **用户配置**:主目录下的 `~/.leon/`
+3. 
**系统默认值**:`config/defaults/` 中的内置默认值 + +CLI 参数(`--model`、`--workspace` 等)优先级最高,覆盖一切。 + +### 合并策略 + +- **runtime / memory / tools**:所有层级深度合并(高优先级层的字段覆盖低优先级层) +- **mcp / skills**:查找合并(第一个定义它的层级生效,不合并) +- **system_prompt**:查找(项目 > 用户 > 系统) +- **providers / mapping**(models.json):按键深度合并 +- **pool**(models.json):后者覆盖(不合并列表) +- **catalog / virtual_models**(models.json):仅系统级,不可覆盖 + +## 运行时配置(runtime.json) + +控制智能体行为、工具、记忆、MCP 和技能。模型/提供商身份**不在**此处配置(那是 `models.json` 的职责)。 + +完整结构及默认值(来自 `config/defaults/runtime.json`): + +```json +{ + "context_limit": 0, + "enable_audit_log": true, + "allowed_extensions": null, + "block_dangerous_commands": true, + "block_network_commands": false, + "queue_mode": "steer", + "temperature": null, + "max_tokens": null, + "model_kwargs": {}, + "memory": { + "pruning": { + "enabled": true, + "soft_trim_chars": 3000, + "hard_clear_threshold": 10000, + "protect_recent": 3, + "trim_tool_results": true + }, + "compaction": { + "enabled": true, + "reserve_tokens": 16384, + "keep_recent_tokens": 20000, + "min_messages": 20 + } + }, + "system_prompt": null, + "tools": { + "filesystem": { + "enabled": true, + "tools": { + "read_file": { "enabled": true, "max_file_size": 10485760 }, + "write_file": true, + "edit_file": true, + "list_dir": true + } + }, + "search": { + "enabled": true, + "max_results": 50, + "tools": { + "grep": { "enabled": true, "max_file_size": 10485760 }, + "glob": true + } + }, + "web": { + "enabled": true, + "timeout": 15, + "tools": { + "web_search": { + "enabled": true, + "max_results": 5, + "tavily_api_key": null, + "exa_api_key": null, + "firecrawl_api_key": null + }, + "fetch": { + "enabled": true, + "jina_api_key": null + } + } + }, + "command": { + "enabled": true, + "tools": { + "run_command": { "enabled": true, "default_timeout": 120 }, + "command_status": true + } + } + }, + "mcp": { + "enabled": true, + "servers": {} + }, + "skills": { + "enabled": true, + "paths": ["~/.leon/skills"], + "skills": {} + } +} +``` + +> 
**注意:** 文件是扁平结构——没有 `"runtime"` 包装键。配置加载器在加载时会内部包装这些字段。`spill_buffer`、`tool_modes`、`workspace_root` 等字段是可选覆盖项,不在默认文件中;详见下方[工具](#工具)章节。 + +### 运行时字段 + +| 字段 | 类型 | 默认值 | 说明 | +|-------|------|---------|-------------| +| `temperature` | float (0-2) | null(模型默认) | 采样温度 | +| `max_tokens` | int | null(模型默认) | 最大输出 token 数 | +| `context_limit` | int | 0 | 上下文窗口限制(token 数)。0 = 从模型自动检测 | +| `enable_audit_log` | bool | true | 启用审计日志 | +| `allowed_extensions` | list | null | 限制文件访问的扩展名列表。null = 全部 | +| `block_dangerous_commands` | bool | true | 阻止危险的 shell 命令(如 rm -rf 等) | +| `block_network_commands` | bool | false | 阻止网络命令 | + +### 记忆 + +**裁剪(Pruning)** 修剪旧的工具结果以节省上下文空间: + +| 字段 | 默认值 | 说明 | +|-------|---------|-------------| +| `soft_trim_chars` | 3000 | 超过此长度的工具结果进行软修剪 | +| `hard_clear_threshold` | 10000 | 超过此长度的工具结果进行硬清除 | +| `protect_recent` | 3 | 保留最近 N 条工具消息不修剪 | +| `trim_tool_results` | true | 启用工具结果修剪 | + +**压缩(Compaction)** 通过 LLM 总结旧的对话历史: + +| 字段 | 默认值 | 说明 | +|-------|---------|-------------| +| `reserve_tokens` | 16384 | 为新消息预留的空间 | +| `keep_recent_tokens` | 20000 | 保留最近消息的原文 | +| `min_messages` | 20 | 触发压缩前的最少消息数 | + +### 工具 + +每个工具组(filesystem、search、web、command)都有一个 `enabled` 标志和一个 `tools` 子对象。工具组和单个工具都必须启用,工具才可用。 + +可用工具及其配置名称: + +| 配置名称 | UI/工具目录名称 | 组 | +|------------|----------------------|-------| +| `read_file` | Read | filesystem | +| `write_file` | Write | filesystem | +| `edit_file` | Edit | filesystem | +| `list_dir` | list_dir | filesystem | +| `grep` | Grep | search | +| `glob` | Glob | search | +| `web_search` | WebSearch | web | +| `fetch` | WebFetch | web | +| `run_command` | Bash | command | +| `command_status` | - | command | + +**溢出缓冲区(Spill buffer)** 自动将大型工具输出写入临时文件,而不是内联到对话中。这是可选覆盖项,不在系统默认文件中: + +```json +{ + "tools": { + "spill_buffer": { + "default_threshold": 50000, + "thresholds": { + "Grep": 20000, + "run_command": 100000 + } + } + } +} +``` + +**工具模式** 可以为每个工具设置为 `"inline"`(默认)或 `"deferred"`。同样是可选覆盖项,不在默认文件中: + +```json +{ + 
"tools": { + "tool_modes": { + "TaskCreate": "deferred", + "TaskList": "deferred" + } + } +} +``` + +### 示例:项目级 runtime.json + +项目根目录下的 `.leon/runtime.json`: + +```json +{ + "allowed_extensions": ["py", "js", "ts", "json", "yaml", "md"], + "block_dangerous_commands": true, + "tools": { + "web": { "enabled": false }, + "command": { + "tools": { + "run_command": { "default_timeout": 300 } + } + } + }, + "system_prompt": "You are a Python expert working on a FastAPI project." +} +``` + +## 模型配置(models.json) + +控制使用哪个模型、提供商凭据和虚拟模型映射。 + +### 结构 + +```json +{ + "active": { + "model": "claude-sonnet-4-5-20250929", + "provider": null, + "based_on": null, + "context_limit": null + }, + "providers": { + "anthropic": { + "api_key": "${ANTHROPIC_API_KEY}", + "base_url": "https://api.anthropic.com" + }, + "openai": { + "api_key": "${OPENAI_API_KEY}", + "base_url": "https://api.openai.com/v1" + } + }, + "mapping": { ... }, + "pool": { + "enabled": [], + "custom": [], + "custom_config": {} + } +} +``` + +### 提供商 + +为每个提供商定义 API 凭据。`active.provider` 字段决定使用哪个提供商的凭据: + +```json +{ + "providers": { + "openrouter": { + "api_key": "${OPENROUTER_API_KEY}", + "base_url": "https://openrouter.ai/api/v1" + } + }, + "active": { + "model": "anthropic/claude-sonnet-4-5", + "provider": "openrouter" + } +} +``` + +### API 密钥解析顺序 + +Mycel 按以下顺序查找 API 密钥: +1. `models.json` 中当前提供商的 `api_key` +2. `models.json` 中任何有 `api_key` 的提供商 +3. 
环境变量:`ANTHROPIC_API_KEY` > `OPENAI_API_KEY` > `OPENROUTER_API_KEY` + +### 提供商自动检测 + +未明确设置 `provider` 时,Mycel 从环境变量自动检测: +- 设置了 `ANTHROPIC_API_KEY` -> provider = `anthropic` +- 设置了 `OPENAI_API_KEY` -> provider = `openai` +- 设置了 `OPENROUTER_API_KEY` -> provider = `openai` + +### 自定义模型 + +通过 `pool.custom` 列表添加不在内置目录中的模型: + +```json +{ + "pool": { + "custom": ["deepseek-chat", "qwen-72b"], + "custom_config": { + "deepseek-chat": { + "based_on": "gpt-4o", + "context_limit": 65536 + } + } + } +} +``` + +`based_on` 告诉 Mycel 使用哪个模型族进行分词器/上下文检测。`context_limit` 覆盖自动检测的上下文窗口大小。 + +## 虚拟模型 + +Mycel 提供四个虚拟模型别名(`leon:*`),映射到具体模型并带有预设参数: + +| 虚拟名称 | 具体模型 | 提供商 | 额外参数 | 适用场景 | +|-------------|---------------|----------|--------|----------| +| `leon:mini` | claude-haiku-4-5-20250929 | anthropic | - | 快速、简单任务 | +| `leon:medium` | claude-sonnet-4-5-20250929 | anthropic | - | 均衡、日常工作 | +| `leon:large` | claude-opus-4-6 | anthropic | - | 复杂推理 | +| `leon:max` | claude-opus-4-6 | anthropic | temperature=0.0 | 最高精度 | + +用法: + +```bash +leonai --model leon:mini +leonai --model leon:large +``` + +或在 `~/.leon/models.json` 中: + +```json +{ + "active": { + "model": "leon:large" + } +} +``` + +### 覆盖虚拟模型映射 + +你可以在用户或项目的 `models.json` 中将虚拟模型重新映射到不同的具体模型: + +```json +{ + "mapping": { + "leon:medium": { + "model": "gpt-4o", + "provider": "openai" + } + } +} +``` + +当你只覆盖 `model` 而不指定 `provider` 时,继承的提供商会被清除(如果与自动检测不同,需要重新指定)。 + +## 智能体预设 + +Mycel 内置四个智能体预设,定义为带有 YAML frontmatter 的 Markdown 文件: + +| 名称 | 说明 | +|------|-------------| +| `general` | 全功能通用智能体,默认子智能体 | +| `bash` | Shell 命令专家 | +| `explore` | 代码库探索与分析 | +| `plan` | 任务规划与分解 | + +用法: + +```bash +leonai --agent general +leonai --agent explore +``` + +### 智能体文件格式 + +智能体是带有 YAML frontmatter 的 `.md` 文件: + +```markdown +--- +name: my-agent +description: What this agent does +tools: + - "*" +model: leon:large +--- + +Your system prompt goes here. This is the body of the Markdown file. 
+``` + +frontmatter 字段: + +| 字段 | 必填 | 说明 | +|-------|----------|-------------| +| `name` | 是 | 智能体标识符 | +| `description` | 否 | 人类可读的说明 | +| `tools` | 否 | 工具白名单。`["*"]` = 所有工具(默认) | +| `model` | 否 | 此智能体的模型覆盖 | + +### 智能体加载优先级 + +智能体从多个目录加载(后者按名称覆盖前者): + +1. 内置智能体:`config/defaults/agents/*.md` +2. 用户智能体:`~/.leon/agents/*.md` +3. 项目智能体:`.leon/agents/*.md` +4. 成员智能体:`~/.leon/members//agent.md`(最高优先级) + +## 工具配置 + +完整的工具目录包含 runtime.json 配置组之外的工具: + +| 工具 | 组 | 模式 | 说明 | +|------|-------|------|-------------| +| Read | filesystem | inline | 读取文件内容 | +| Write | filesystem | inline | 写入文件 | +| Edit | filesystem | inline | 编辑文件(精确替换) | +| list_dir | filesystem | inline | 列出目录内容 | +| Grep | search | inline | 正则搜索(基于 ripgrep) | +| Glob | search | inline | Glob 模式文件搜索 | +| Bash | command | inline | 执行 shell 命令 | +| WebSearch | web | inline | 互联网搜索 | +| WebFetch | web | inline | 获取网页并用 AI 提取内容 | +| Agent | agent | inline | 派生子智能体 | +| SendMessage | agent | inline | 向其他智能体发送消息 | +| TaskOutput | agent | inline | 获取后台任务输出 | +| TaskStop | agent | inline | 停止后台任务 | +| TaskCreate | todo | deferred | 创建待办任务 | +| TaskGet | todo | deferred | 获取任务详情 | +| TaskList | todo | deferred | 列出所有任务 | +| TaskUpdate | todo | deferred | 更新任务状态 | +| load_skill | skills | inline | 加载技能 | +| tool_search | system | inline | 搜索可用工具 | + +`deferred` 模式的工具异步运行,不会阻塞对话。 + +## MCP 配置 + +MCP 服务器在 `runtime.json` 的 `mcp` 键下配置。每个服务器可以使用 stdio(command + args)或 HTTP 传输(url): + +```json +{ + "mcp": { + "enabled": true, + "servers": { + "github": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-github"], + "env": { + "GITHUB_TOKEN": "${GITHUB_TOKEN}" + }, + "allowed_tools": null + }, + "remote-server": { + "url": "https://mcp.example.com/sse", + "allowed_tools": ["search", "fetch"] + } + } + } +} +``` + +MCP 服务器字段: + +| 字段 | 说明 | +|-------|-------------| +| `command` | 要启动的可执行文件(stdio 传输) | +| `args` | 命令参数 | +| `env` | 传递给服务器进程的环境变量 | +| `url` | 可流式 HTTP 传输的 URL(command 的替代方案) | +| 
`allowed_tools` | 工具名称白名单。null = 暴露所有工具 | + +### 成员级 MCP + +成员(`~/.leon/members/<member_id>/`)可以有自己的 `.mcp.json`,遵循与 Claude 的 MCP 配置相同的格式: + +```json +{ + "mcpServers": { + "supabase": { + "command": "npx", + "args": ["-y", "@supabase/mcp-server"], + "env": { "SUPABASE_URL": "..." } + } + } +} +``` + +## 技能配置 + +```json +{ + "skills": { + "enabled": true, + "paths": ["~/.leon/skills", "./skills"], + "skills": { + "code-review": true, + "debugging": false + } + } +} +``` + +技能路径是包含技能子目录的目录。每个技能有一个 `SKILL.md` 文件。`skills` 映射按名称启用/禁用单个技能。 + +技能路径必须在磁盘上存在——验证器要求 `paths` 中的每个目录都已创建。Mycel 不会自动创建它们。 + +## 可观测性配置(observation.json) + +配置用于追踪智能体运行的可观测性提供商: + +```json +{ + "active": "langfuse", + "langfuse": { + "secret_key": "${LANGFUSE_SECRET_KEY}", + "public_key": "${LANGFUSE_PUBLIC_KEY}", + "host": "https://cloud.langfuse.com" + }, + "langsmith": { + "api_key": "${LANGSMITH_API_KEY}", + "project": "leon", + "endpoint": null + } +} +``` + +将 `active` 设置为 `"langfuse"`、`"langsmith"` 或 `null`(禁用)。 + +## 沙箱配置 + +沙箱配置位于 `~/.leon/sandboxes/<name>.json`。每个文件定义一个沙箱提供商: + +```json +{ + "provider": "daytona", + "on_exit": "pause", + "daytona": { + "api_key": "your-key", + "api_url": "https://app.daytona.io/api", + "target": "local", + "cwd": "/home/daytona" + } +} +``` + +支持的提供商:`local`、`docker`、`e2b`、`daytona`、`agentbay`。 + +启动时选择: + +```bash +leonai --sandbox daytona # 使用 ~/.leon/sandboxes/daytona.json +leonai --sandbox docker # 使用 ~/.leon/sandboxes/docker.json +export LEON_SANDBOX=e2b # 或通过环境变量设置 +``` + +各提供商的特有字段: + +| 提供商 | 字段 | +|----------|--------| +| docker | `image`、`mount_path`、`docker_host` | +| e2b | `api_key`、`template`、`cwd`、`timeout` | +| daytona | `api_key`、`api_url`、`target`、`cwd` | +| agentbay | `api_key`、`region_id`、`context_path`、`image_id` | + +## 环境变量 + +### config.env 中的变量 + +`~/.leon/config.env` 是一个简单的 key=value 文件,在启动时加载为环境变量(仅在变量尚未设置时): + +```env +OPENAI_API_KEY=sk-xxx +OPENAI_BASE_URL=https://openrouter.ai/api/v1 +MODEL_NAME=claude-sonnet-4-5-20250929 +``` + 
+`OPENAI_BASE_URL` 的值会自动规范化,缺少 `/v1` 时自动补齐。 + +### JSON 配置文件中的变量 + +`runtime.json`、`models.json` 和 `observation.json` 中的所有字符串值支持: + +- `${VAR}` —— 环境变量展开 +- `~` —— 主目录展开 + +```json +{ + "providers": { + "anthropic": { + "api_key": "${ANTHROPIC_API_KEY}" + } + } +} +``` + +### 相关环境变量 + +| 变量 | 用途 | +|----------|---------| +| `OPENAI_API_KEY` | API 密钥(OpenAI 兼容格式) | +| `OPENAI_BASE_URL` | API Base URL | +| `ANTHROPIC_API_KEY` | Anthropic API 密钥 | +| `ANTHROPIC_BASE_URL` | Anthropic Base URL | +| `OPENROUTER_API_KEY` | OpenRouter API 密钥 | +| `MODEL_NAME` | 覆盖模型名称 | +| `LEON_SANDBOX` | 默认沙箱名称 | +| `LEON_SANDBOX_DB_PATH` | 覆盖沙箱数据库路径 | +| `TAVILY_API_KEY` | Tavily 网络搜索 API 密钥 | +| `JINA_API_KEY` | Jina AI 抓取 API 密钥 | +| `EXA_API_KEY` | Exa 搜索 API 密钥 | +| `FIRECRAWL_API_KEY` | Firecrawl API 密钥 | +| `AGENTBAY_API_KEY` | AgentBay API 密钥 | +| `E2B_API_KEY` | E2B API 密钥 | +| `DAYTONA_API_KEY` | Daytona API 密钥 | + +## CLI 参考 + +```bash +leonai # 启动新会话(TUI) +leonai -c # 继续上次会话 +leonai --model leon:large # 覆盖模型 +leonai --agent explore # 使用智能体预设 +leonai --workspace /path # 设置工作区根目录 +leonai --sandbox docker # 使用沙箱配置 +leonai --thread # 恢复特定线程 + +leonai config # 交互式配置向导 +leonai config show # 显示当前 config.env + +leonai thread ls # 列出所有线程 +leonai thread history # 显示线程历史 +leonai thread rewind # 回退到检查点 +leonai thread rm # 删除线程 + +leonai sandbox # 沙箱管理器 TUI +leonai sandbox ls # 列出沙箱会话 +leonai sandbox new [provider] # 创建会话 +leonai sandbox pause # 暂停会话 +leonai sandbox resume # 恢复会话 +leonai sandbox rm # 删除会话 +leonai sandbox metrics # 显示资源指标 + +leonai run "message" # 非交互式单条消息 +leonai run --stdin # 从标准输入读取消息 +leonai run -i # 交互模式(无 TUI) +leonai run -d # 带调试输出 +``` diff --git a/docs/zh/deployment.md b/docs/zh/deployment.md new file mode 100644 index 000000000..a52cc7043 --- /dev/null +++ b/docs/zh/deployment.md @@ -0,0 +1,330 @@ +# Mycel 部署指南 + +[English](../en/deployment.md) | 中文 + +## 前置要求 + +### 必需 + +- Python 3.11 或更高版本 +- `uv` 
包管理器([安装指南](https://docs.astral.sh/uv/getting-started/installation/)) +- Git + +### 可选(按 Provider) + +- **Docker**:本地 Sandbox Provider 需要 Docker daemon +- **E2B**:从 [e2b.dev](https://e2b.dev) 获取 API key +- **Daytona**:从 [daytona.io](https://daytona.io) 获取 API key 或使用自托管实例 +- **AgentBay**:API key 和区域访问权限 + +--- + +## 安装 + +### 1. 克隆仓库 + +```bash +git clone https://github.com/yourusername/leonai.git +cd leonai +``` + +### 2. 安装依赖 + +```bash +# 安装所有依赖,包括 Sandbox Provider +uv pip install -e ".[all]" + +# 或仅安装特定 Provider +uv pip install -e ".[e2b]" # 仅 E2B +uv pip install -e ".[daytona]" # 仅 Daytona +uv pip install -e ".[sandbox]" # 所有 Sandbox Provider +``` + +--- + +## 配置 + +### 用户配置目录 + +Mycel 将配置存储在 `~/.leon/`: + +``` +~/.leon/ +├── config.json # 主配置 +├── config.env # 环境变量 +├── models.json # LLM Provider 映射 +├── sandboxes/ # Sandbox Provider 配置 +│ ├── docker.json +│ ├── e2b.json +│ ├── daytona_saas.json +│ └── daytona_selfhost.json +└── leon.db # SQLite 数据库 +``` + +### 环境变量 + +创建 `~/.leon/config.env`: + +```bash +# LLM Provider(OpenRouter 示例) +ANTHROPIC_API_KEY=your_openrouter_key +ANTHROPIC_BASE_URL=https://openrouter.ai/api/v1 + +# Sandbox Provider +E2B_API_KEY=your_e2b_key +DAYTONA_API_KEY=your_daytona_key +AGENTBAY_API_KEY=your_agentbay_key + +# 可选:Supabase(如果使用远程存储) +SUPABASE_URL=https://your-project.supabase.co +SUPABASE_KEY=your_supabase_key +``` + +--- + +## Sandbox Provider 设置 + +### Local(默认) + +无需配置。使用本地文件系统。 + +```bash +# 测试本地 Sandbox +leon --sandbox local +``` + +### Docker + +**前置要求:** Docker daemon 运行中 + +创建 `~/.leon/sandboxes/docker.json`: + +```json +{ + "provider": "docker", + "on_exit": "destroy", + "docker": { + "image": "python:3.11-slim", + "mount_path": "/workspace" + } +} +``` + +**故障排除:** +- 如果 Docker CLI 卡住,检查代理环境变量 +- Mycel 调用 Docker CLI 时会自动去除 `http_proxy`/`https_proxy` +- 使用 `docker_host` 配置覆盖 Docker socket 路径 + +### E2B + +**前置要求:** E2B API key + +创建 `~/.leon/sandboxes/e2b.json`: + +```json +{ + "provider": "e2b", + "on_exit": "pause", + 
"e2b": { + "api_key": "${E2B_API_KEY}", + "template": "base", + "cwd": "/home/user", + "timeout": 300 + } +} +``` + +### Daytona SaaS + +**前置要求:** Daytona 账户和 API key + +创建 `~/.leon/sandboxes/daytona_saas.json`: + +```json +{ + "provider": "daytona", + "on_exit": "pause", + "daytona": { + "api_key": "${DAYTONA_API_KEY}", + "api_url": "https://app.daytona.io/api", + "target": "local", + "cwd": "/home/daytona" + } +} +``` + +### Daytona 自托管 + +**前置要求:** 自托管 Daytona 实例 + +**关键要求:** 自托管 Daytona 需要: +1. Runner 容器中有 bash(路径 `/usr/bin/bash`) +2. Workspace 镜像中有 bash(路径 `/usr/bin/bash`) +3. Runner 连接到 bridge 网络(以访问 Workspace 容器) +4. Daytona Proxy 在端口 4000 可访问(用于文件操作) + +创建 `~/.leon/sandboxes/daytona_selfhost.json`: + +```json +{ + "provider": "daytona", + "on_exit": "pause", + "daytona": { + "api_key": "${DAYTONA_API_KEY}", + "api_url": "http://localhost:3986/api", + "target": "us", + "cwd": "/workspace" + } +} +``` + +**Docker Compose 配置:** + +```yaml +services: + daytona-runner: + image: your-runner-image-with-bash + environment: + - RUNNER_DOMAIN=runner # 不是 localhost! + networks: + - default + - bridge # 访问 Workspace 容器必需 + # ... 
其他配置 + +networks: + bridge: + external: true +``` + +**网络配置:** + +Runner 必须同时在 Compose 网络和 Workspace 容器所在的默认 bridge 网络上。在 Runner 的 `/etc/hosts` 中添加: + +``` +127.0.0.1 proxy.localhost +``` + +**故障排除:** +- "fork/exec /usr/bin/bash: no such file" → Workspace 镜像缺少 bash +- "Failed to create sandbox within 60s" → 网络隔离问题,检查 Runner 网络 +- 文件操作失败 → Daytona Proxy(端口 4000)不可访问 + +### AgentBay + +**前置要求:** AgentBay API key 和区域访问权限 + +创建 `~/.leon/sandboxes/agentbay.json`: + +```json +{ + "provider": "agentbay", + "on_exit": "pause", + "agentbay": { + "api_key": "${AGENTBAY_API_KEY}", + "region_id": "ap-southeast-1", + "context_path": "/home/wuying" + } +} +``` + +--- + +## 验证 + +### 健康检查 + +```bash +# 检查 Mycel 安装 +leon --version + +# 列出可用 Sandbox +leonai sandbox ls + +# 测试 Sandbox Provider +leon --sandbox docker +``` + +### 测试命令执行 + +```python +from sandbox import SandboxConfig, create_sandbox + +config = SandboxConfig.load("docker") +sbx = create_sandbox(config) + +# 创建会话 +session = sbx.create_session() + +# 执行命令 +result = sbx.execute(session.session_id, "echo 'Hello from sandbox'") +print(result.output) + +# 清理 +sbx.destroy_session(session.session_id) +``` + +--- + +## 常见问题 + +### "Could not import module 'main'" + +后端启动失败。检查: +- 是否在正确的目录下? +- 虚拟环境是否已激活? +- 使用完整路径运行 uvicorn:`.venv/bin/uvicorn` + +### LLM 客户端报 "SOCKS proxy error" + +Shell 环境设置了 `all_proxy=socks5://...`。启动前取消设置: + +```bash +env -u ALL_PROXY -u all_proxy uvicorn main:app +``` + +### Docker Provider 卡住 + +Docker CLI 继承了代理环境变量。Mycel 会自动去除这些变量,但如果问题持续,检查 `docker_host` 配置。 + +### Daytona PTY 引导失败 + +检查: +1. Workspace 镜像在 `/usr/bin/bash` 有 bash +2. Runner 在 `/usr/bin/bash` 有 bash +3. Runner 在 bridge 网络上 +4. Daytona Proxy(端口 4000)可访问 + +--- + +## 生产部署 + +### 数据库 + +Mycel 默认使用 SQLite(`~/.leon/leon.db`)。生产环境建议: + +1. **定期备份:** + ```bash + cp ~/.leon/leon.db ~/.leon/leon.db.backup + ``` + +2. 
**多用户部署考虑 PostgreSQL**(需要代码修改) + +### 安全 + +- 将 API key 存储在 `~/.leon/config.env` 中,不要写在代码里 +- 在配置文件中使用环境变量替换:`"${API_KEY}"` +- 限制文件权限:`chmod 600 ~/.leon/config.env` + +### 监控 + +- 后端日志:检查 uvicorn 的 stdout/stderr +- Sandbox 日志:Provider 相关(Docker 日志、E2B 控制台等) +- 数据库:监控 `~/.leon/leon.db` 大小和查询性能 + +--- + +## 后续步骤 + +- 查看 [SANDBOX.md](../sandbox/SANDBOX.md) 了解详细的 Sandbox Provider 文档 +- 查看 [TROUBLESHOOTING.md](../TROUBLESHOOTING.md) 了解常见问题和解决方案 +- 查看 `examples/sandboxes/` 中的示例配置 diff --git a/docs/zh/multi-agent-chat.md b/docs/zh/multi-agent-chat.md new file mode 100644 index 000000000..58b0e81ff --- /dev/null +++ b/docs/zh/multi-agent-chat.md @@ -0,0 +1,204 @@ +[English](../en/multi-agent-chat.md) | 中文 + +# 多智能体聊天 + +Mycel 包含一个 Entity-Chat 系统,支持人类与 AI 智能体之间、以及智能体之间的结构化通信。本指南涵盖核心概念、如何创建智能体,以及消息系统的工作原理。 + +## 核心概念 + +Entity-Chat 系统分为三层: + +### 成员(Member) + +**成员**是一个模板——定义智能体身份和能力的"类"。成员以文件包的形式存储在 `~/.leon/members/<member_id>/` 下: + +``` +~/.leon/members/m_AbCdEfGhIjKl/ + agent.md # 身份:名称、描述、模型、系统提示词(YAML frontmatter) + meta.json # 状态(draft/active)、版本、时间戳 + runtime.json # 启用的工具和技能 + rules/ # 行为规则(每条规则一个 .md 文件) + agents/ # 子智能体定义 + skills/ # 技能目录 + .mcp.json # MCP 服务器配置 +``` + +成员类型: +- `human` —— 人类用户 +- `mycel_agent` —— 用 Mycel 构建的 AI 智能体 + +每个智能体成员都有一个**所有者**(创建它的人类成员)。内置的 `Mycel` 成员(`__leon__`)对所有人可用。 + +### 实体(Entity) + +**实体**是社交身份——参与聊天的"实例"。可以理解为即时通讯应用中的个人资料。 + +- 每个成员可以有多个实体(例如,同一个智能体模板部署在不同场景中) +- 实体具有 `type`(`human` 或 `agent`)、`name`、可选的头像,以及链接到其智能体大脑的 `thread_id` +- 实体 ID 格式为 `{member_id}-{seq}`(成员 ID + 序列号) + +核心区别:**成员 = 你是谁。实体 = 你在聊天中的呈现方式。** + +### 线程(Thread) + +**线程**是智能体正在运行的大脑——它的对话状态、记忆和执行上下文。每个智能体实体绑定到唯一一个线程。当消息到达时,系统将其路由到实体的线程,唤醒智能体进行处理。 + +人类实体没有线程——人类通过 Web UI 直接交互。 + +## 架构概览 + +``` +Human (Web UI) + | + v +[Entity: human] ---chat_send---> [Entity: agent] + | + v + [Thread: agent brain] + | + Agent processes message, + uses chat tools to respond + | + v + [Entity: agent] ---chat_send---> [Entity: human] + | + v + Web UI (SSE push) +``` + 
+消息通过聊天(实体之间的对话)流转。两个实体之间的聊天在首次联系时自动创建。也支持 3 个及以上实体的群聊。 + +## 创建智能体成员(Web UI) + +1. 打开 Web UI,导航到成员页面 +2. 点击"创建"开始新建智能体成员 +3. 填写基本信息: + - **名称** —— 智能体的显示名称 + - **描述** —— 此智能体的功能说明 +4. 配置智能体: + - **系统提示词** —— 智能体的核心指令(写在 `agent.md` 的正文中) + - **工具** —— 启用/禁用特定工具(文件操作、搜索、网络、命令) + - **规则** —— 以单独的 Markdown 文件添加行为规则 + - **子智能体** —— 定义具有独立工具集的专用子智能体 + - **MCP 服务器** —— 连接外部工具服务器 + - **技能** —— 启用市场技能 +5. 将状态设置为"active"并发布 + +后端会创建: +- SQLite(`members` 表)中带有生成的 `m_` ID 的 `MemberRow` +- `~/.leon/members//` 下的文件包 +- 智能体首次在聊天中使用时,会创建实体和线程 + +## 智能体如何通信 + +智能体在其工具注册表中有五个内置聊天工具: + +### `directory` + +浏览所有已知实体。返回其他工具所需的实体 ID。 + +``` +directory(search="Alice", type="human") +-> - Alice [human] entity_id=m_abc123-1 +``` + +### `chats` + +列出智能体的活跃聊天,包含未读数和最新消息预览。 + +``` +chats(unread_only=true) +-> - Alice [entity_id: m_abc123-1] (3 unread) -- last: "Can you help me with..." +``` + +### `chat_read` + +读取聊天中的消息历史。自动将消息标记为已读。 + +``` +chat_read(entity_id="m_abc123-1", limit=10) +-> [Alice]: Can you help me with this bug? + [you]: Sure, let me take a look. +``` + +### `chat_send` + +发送消息。智能体必须先读取未读消息才能发送(系统强制执行)。 + +``` +chat_send(content="Here's the fix.", entity_id="m_abc123-1") +``` + +**信号协议**控制对话流程: +- 无信号(默认)—— "我期待回复" +- `signal: "yield"` —— "我说完了;你想回复就回复" +- `signal: "close"` —— "对话结束,请勿回复" + +### `chat_search` + +在所有聊天或特定聊天中搜索消息历史。 + +``` +chat_search(query="bug fix", entity_id="m_abc123-1") +``` + +## 人机聊天的工作原理 + +当人类通过 Web UI 发送消息时: + +1. 前端调用 `POST /api/chats/{chat_id}/messages`,携带消息内容和人类的实体 ID +2. `ChatService` 存储消息并发布到 `ChatEventBus`(SSE 用于实时 UI 更新) +3. 对于聊天中每个非发送者的智能体实体,投递系统: + - 检查**投递策略**(联系人级别的屏蔽/静音、聊天级别的静音、@提及覆盖) + - 如果允许投递,格式化一个轻量通知(不含消息内容——智能体必须调用 `chat_read` 来查看) + - 将通知加入智能体的消息队列 + - 如果智能体的线程处于空闲状态,则唤醒它(冷启动) +4. 智能体被唤醒,看到通知,调用 `chat_read` 获取实际消息,处理后通过 `chat_send` 回复 +5. 
智能体的回复通过相同的管道流回——存储、通过 SSE 广播、投递给其他参与者 + +### 实时更新 + +Web UI 订阅 `GET /api/chats/{chat_id}/events`(Server-Sent Events)以获取实时更新: +- `message` 事件用于新消息 +- 智能体处理时的输入指示器 +- 所有事件均为推送,无需轮询 + +## 联系人与投递系统 + +实体可以管理与其他实体的关系: + +- **正常(Normal)** —— 完整投递(默认) +- **静音(Muted)** —— 消息会存储但不向智能体发送通知。@提及可以覆盖静音。 +- **屏蔽(Blocked)** —— 该实体的消息被静默丢弃 + +也支持聊天级别的静音——静音特定聊天而不影响联系人关系。 + +这些控制让你可以管理嘈杂的智能体或阻止不需要的交互,而无需删除聊天。 + +## API 参考 + +Entity-Chat 系统的关键端点: + +| 端点 | 方法 | 说明 | +|----------|--------|-------------| +| `/api/entities` | GET | 列出所有可聊天的实体 | +| `/api/members` | GET | 列出智能体成员(模板) | +| `/api/chats` | GET | 列出当前用户的聊天 | +| `/api/chats` | POST | 创建新聊天(1:1 或群聊) | +| `/api/chats/{id}/messages` | GET | 列出聊天中的消息 | +| `/api/chats/{id}/messages` | POST | 发送消息 | +| `/api/chats/{id}/read` | POST | 标记聊天为已读 | +| `/api/chats/{id}/events` | GET | 实时事件的 SSE 流 | +| `/api/chats/{id}/mute` | POST | 静音/取消静音聊天 | +| `/api/entities/contacts` | POST | 设置联系人关系(屏蔽/静音/正常) | + +## 数据存储 + +Entity-Chat 系统使用 SQLite 数据库: + +| 数据库 | 表 | +|----------|--------| +| `~/.leon/leon.db` | `members`、`entities`、`accounts` | +| `~/.leon/chat.db` | `chats`、`chat_entities`、`chat_messages`、`contacts` | + +成员配置文件存储在 `~/.leon/members/` 下的文件系统中。SQLite 表存储关系数据(所有权、身份、聊天状态),而文件包存储智能体的完整配置。 diff --git a/docs/zh/product-primitives.md b/docs/zh/product-primitives.md new file mode 100644 index 000000000..d11b16562 --- /dev/null +++ b/docs/zh/product-primitives.md @@ -0,0 +1,144 @@ +# Mycel 产品原语设计 + +[English](../en/product-primitives.md) | 中文 + +## 核心哲学 + +> Agent 拥有一切的能力,关键在于有没有对应的资源。 + +能力是天生的,资源是给的。有资源就能用,没资源就不能用。 + +## 六大原语 + +| 原语 | 英文 | 含义 | 例子 | +|------|------|------|------| +| **对话** | Thread | 一次交互过程 | 用户与 Agent 的会话 | +| **成员** | Member | 执行工作的 Agent | 主 Agent、Sub-Agent | +| **任务** | Task | 要完成的工作 | 用户指令、拆分出的子任务 | +| **资源** | Resource | Agent 可使用的基础交互面 | 文件系统、终端、浏览器、手机 | +| **连接** | Connection | Agent 接入的外部服务 | GitHub、Slack、Jira(MCP) | +| **模型** | Model | AI 大脑 | Mini / Medium / Large / Max | + +### 关系图 + +``` +对话 +├── 成员(谁来干) 
+│ ├── 主 Agent +│ └── Sub-Agent × N +├── 任务(干什么) +│ ├── 任务 A → 分配给成员 1 +│ └── 任务 B → 分配给成员 2 +├── 资源(用什么干)← 使用权分配给成员 +│ ├── 文件系统 +│ ├── 终端 +│ └── 浏览器 +├── 连接(接了什么外部服务) +│ ├── GitHub +│ └── Slack +└── 模型(用什么脑子想) +``` + +## "资源"与"连接"的本质区别 + +### 资源(Resource) + +Agent 与世界交互的**根本通道**。每一个资源都打开一整个交互维度: + +| 资源 | 打开的世界 | Agent 能做什么 | +|------|-----------|---------------| +| 文件系统 | 数据世界 | 读写文件、管理项目 | +| 终端 | 命令世界 | 执行系统命令、运行程序 | +| 浏览器 | Web 世界 | 浏览网页、操作 Web 应用 | +| 手机 | App 世界 | 操作移动应用、测试 App | +| 摄像头 | 视觉世界 | 看到物理环境(未来) | +| 麦克风 | 听觉世界 | 接收语音输入(未来) | + +### 连接(Connection) + +Agent 接入的**外部服务**(通过 MCP 协议)。点对点的数据通道: + +- GitHub、Slack、Jira、数据库、Supabase 等 +- 接上就多一个,拔掉就少一个 +- 不改变 Agent 的交互维度,只增加信息来源 + +### 区分标准 + +| | 资源 | 连接 | +|---|---|---| +| 本质 | 交互维度 | 数据管道 | +| 粒度 | 一整个世界 | 单个服务 | +| 交互方式 | 感知 + 操控 | 请求 - 响应 | +| 用户感知 | "Agent 能做什么" | "Agent 接了什么服务" | + +## 所有权与使用权 + +- 平台/用户**拥有**资源(所有权) +- 对话创建时**授权**哪些资源可用(使用权) +- 主 Agent 可以**分配**资源使用权给 Sub-Agent +- 不同 Agent 可以有不同的资源权限 + +## 资源页设计方向 + +### 原则 + +1. **资源是主角,Provider 是实现细节** — 用户关心"Agent 有什么",不关心"用的哪家云" +2. **原子粒度** — 每个资源独立呈现、独立启用/关闭 +3. 
**Provider 抽象化** — 不暴露配置表单,用 icon + 卡片呈现 + +### 用户视角(目标) + +``` +资源 来源 +├── ✓ 文件系统 ~/projects/app 本地 +├── ✓ 终端 本地 +├── ○ 浏览器(点击启用) Playwright +└── ○ 手机(点击连接) 未配置 + +连接 +├── ✓ GitHub +├── ✓ Supabase +└── ○ Slack(未连接) +``` + +### Provider 的位置 + +Provider(Local / AgentBay / Docker / E2B / Daytona)决定了**文件系统和终端从哪来**: + +- 选 Local → 文件系统 = 本地磁盘,终端 = 本地 Shell +- 选 AgentBay → 文件系统 = 云端 VM,终端 = 云端 Shell,+ 浏览器 +- 选 Docker → 文件系统 = 容器内,终端 = 容器 Shell + +Provider 是资源的**来源属性**,不是顶层概念。在设置中作为"运行方式"呈现: + +``` +运行方式 + ● 本地(文件系统和终端在你的电脑上) + ○ 云端(文件系统和终端在云端机器上) +``` + +### 能力矩阵的抽象 + +当前设计(provider × 能力矩阵表)的问题: +- 视角是 Provider-first("这个 Provider 支持什么") +- 应该是 Resource-first("我要这个资源,谁能提供") +- 圆点矩阵太"数据库"风格,应换成 icon + 卡片 + 开关 + +## 术语对照 + +| 用户看到的 | 代码/技术概念 | 说明 | +|-----------|-------------|------| +| 资源 | Sandbox capabilities | 文件系统、终端、浏览器、手机 | +| 连接 | MCP Server | 外部服务接入 | +| 运行方式 | Sandbox Provider | Local / AgentBay / Docker | +| 对话 | Thread | thread_id | +| 成员 | Agent / Sub-Agent | LeonAgent 实例 | +| 任务 | Task | TaskMiddleware | +| 模型 | Model | leon:mini/medium/large/max | + +## 设计禁区 + +- 不要在用户界面出现"沙盒"这个词 +- 不要让用户每次新建对话都选 Provider +- 不要把 Provider 配置表单直接暴露给用户 +- 不要把资源和连接混为一谈(它们是不同层次) diff --git a/docs/zh/sandbox.md b/docs/zh/sandbox.md new file mode 100644 index 000000000..4782eb538 --- /dev/null +++ b/docs/zh/sandbox.md @@ -0,0 +1,221 @@ +[🇬🇧 English](../en/sandbox.md) | 🇨🇳 中文 + +# 沙箱 + +Mycel 的沙箱系统将 Agent 操作(文件 I/O、Shell 命令)运行在隔离环境中,而非宿主机上。支持 5 种 Provider:**Local**(主机直通)、**Docker**(容器)、**E2B**(云端)、**Daytona**(云端或自建)、**AgentBay**(阿里云)。 + +## 快速开始(Web UI) + +### 1. 配置 Provider + +在 Web UI 中进入 **设置 → 沙箱**。你会看到每个 Provider 的配置卡片,展开后填写必要字段: + +| Provider | 必填字段 | +|----------|---------| +| **Docker** | 镜像名(默认 `python:3.12-slim`)、挂载路径 | +| **E2B** | API 密钥 | +| **Daytona** | API 密钥、API URL | +| **AgentBay** | API 密钥 | + +点击 **保存**。配置存储在 `~/.leon/sandboxes/.json`。 + +### 2. 
创建使用沙箱的对话 + +开始新对话时,在输入框左上角的**沙箱下拉菜单**中选择已配置的 Provider(如 `docker`)。然后输入消息并发送。 + +对话在创建时绑定到该沙箱——后续所有 Agent 运行都使用同一个沙箱。 + +### 3. 监控资源 + +进入侧边栏的**资源**页面,你会看到: + +- **Provider 卡片** — 每个 Provider 的状态(活跃/就绪/不可用) +- **沙箱卡片** — 每个运行中/暂停的沙箱,包含 Agent 头像、持续时间和指标(CPU/RAM/Disk) +- **详情面板** — 点击沙箱卡片查看使用它的 Agent、详细指标和文件浏览器 + +## 示例配置 + +参见 [`examples/sandboxes/`](../../examples/sandboxes/),包含所有 Provider 的即用配置模板。复制到 `~/.leon/sandboxes/` 或直接在 Web UI 设置中配置。 + +## Provider 配置 + +### Docker + +需要主机安装 Docker。无需 API 密钥。 + +```json +{ + "provider": "docker", + "docker": { + "image": "python:3.12-slim", + "mount_path": "/workspace" + }, + "on_exit": "pause" +} +``` + +| 字段 | 默认值 | 说明 | +|------|--------|------| +| `docker.image` | `python:3.12-slim` | Docker 镜像 | +| `docker.mount_path` | `/workspace` | 容器内工作目录 | +| `on_exit` | `pause` | `pause`(保留状态)或 `destroy`(清空重来) | + +### E2B + +云端沙箱服务。需要 [E2B](https://e2b.dev) API 密钥。 + +```json +{ + "provider": "e2b", + "e2b": { + "api_key": "e2b_...", + "template": "base", + "cwd": "/home/user", + "timeout": 300 + }, + "on_exit": "pause" +} +``` + +### Daytona + +支持 [Daytona](https://daytona.io) SaaS 和自建实例。 + +**SaaS:** +```json +{ + "provider": "daytona", + "daytona": { + "api_key": "dtn_...", + "api_url": "https://app.daytona.io/api", + "cwd": "/home/daytona" + }, + "on_exit": "pause" +} +``` + +**自建:** +```json +{ + "provider": "daytona", + "daytona": { + "api_key": "dtn_...", + "api_url": "https://your-server.com/api", + "target": "local", + "cwd": "/home/daytona" + }, + "on_exit": "pause" +} +``` + +### AgentBay + +阿里云沙箱(中国区域)。需要 AgentBay API 密钥。 + +```json +{ + "provider": "agentbay", + "agentbay": { + "api_key": "akm-...", + "region_id": "ap-southeast-1", + "context_path": "/home/wuying" + }, + "on_exit": "pause" +} +``` + +### 额外依赖 + +云端沙箱 Provider 需要额外 Python 包: + +```bash +uv sync --extra sandbox # AgentBay +uv sync --extra e2b # E2B +uv sync --extra daytona # Daytona +``` + +Docker 开箱即用(使用 Docker CLI)。 + +### API 密钥解析 + +API 密钥按以下顺序查找: 
+ +1. 配置文件字段(`e2b.api_key`、`daytona.api_key` 等) +2. 环境变量(`E2B_API_KEY`、`DAYTONA_API_KEY`、`AGENTBAY_API_KEY`) +3. `~/.leon/config.env` + +## 会话生命周期 + +每个对话绑定一个沙箱。会话遵循生命周期: + +``` +闲置 → 激活 → 暂停 → 销毁 +``` + +### `on_exit` 行为 + +| 值 | 行为 | +|----|------| +| `pause` | 退出时暂停会话。下次启动恢复。文件、安装的包、进程都保留。 | +| `destroy` | 退出时销毁会话。下次从零开始。 | + +`pause` 是默认值——跨重启保留所有状态。 + +### Web UI 会话管理 + +在**资源**页面: + +- 统一网格视图查看所有 Provider 的所有会话 +- 点击会话卡片 → 详情面板,包含指标和文件浏览器 +- 通过 API 暂停 / 恢复 / 销毁 + +**API 端点:** + +| 操作 | 端点 | +|------|------| +| 查看资源 | `GET /api/monitor/resources` | +| 强制刷新 | `POST /api/monitor/resources/refresh` | +| 暂停会话 | `POST /api/sandbox/sessions/{id}/pause?provider={type}` | +| 恢复会话 | `POST /api/sandbox/sessions/{id}/resume?provider={type}` | +| 销毁会话 | `DELETE /api/sandbox/sessions/{id}?provider={type}` | + +## CLI 参考 + +终端下的沙箱管理请见 [CLI 文档](cli.md#沙箱管理)。 + +命令摘要: + +```bash +leonai sandbox # TUI 管理器 +leonai sandbox ls # 列出会话 +leonai sandbox new docker # 创建会话 +leonai sandbox pause # 暂停 +leonai sandbox resume # 恢复 +leonai sandbox rm # 删除 +leonai sandbox metrics # 查看指标 +``` + +## 架构 + +沙箱是中间件栈下方的基础设施层。它提供后端供现有中间件使用: + +``` +Agent + ├── sandbox.fs() → FileSystemBackend(FileSystemMiddleware 使用) + └── sandbox.shell() → BaseExecutor(CommandMiddleware 使用) +``` + +中间件负责**策略**(校验、路径规则、hook)。后端负责**I/O**(操作实际执行位置)。切换后端改变执行位置而不影响中间件逻辑。 + +### 会话追踪 + +会话记录在 SQLite(`~/.leon/sandbox.db`)中: + +| 表 | 用途 | +|----|------| +| `sandbox_leases` | Lease 生命周期 — Provider、期望/观测状态 | +| `sandbox_instances` | Provider 侧的会话 ID | +| `abstract_terminals` | 绑定到 Thread + Lease 的虚拟终端 | +| `lease_resource_snapshots` | CPU、内存、磁盘指标 | + +Thread → 沙箱的映射通过 `abstract_terminals.thread_id` → `abstract_terminals.lease_id`。 diff --git a/eval/harness/runner.py b/eval/harness/runner.py index 9fc00a899..2679d186c 100644 --- a/eval/harness/runner.py +++ b/eval/harness/runner.py @@ -4,16 +4,12 @@ import asyncio from datetime import UTC, datetime -from typing import TYPE_CHECKING from eval.collector import MetricsCollector 
from eval.harness.client import EvalClient from eval.models import EvalResult, EvalScenario, TrajectoryCapture from eval.storage import TrajectoryStore -if TYPE_CHECKING: - from eval.models import RunTrajectory - class EvalRunner: """Run eval scenarios against a Leon backend instance.""" @@ -110,7 +106,7 @@ def _build_trajectory( captures: list[TrajectoryCapture], started_at: str, finished_at: str, - ) -> RunTrajectory: + ) -> RunTrajectory: # noqa: F821 """Merge multiple TrajectoryCaptures into a single RunTrajectory.""" from eval.models import LLMCallRecord, RunTrajectory, ToolCallRecord diff --git a/eval/tracer.py b/eval/tracer.py index 0048a297e..1fa42e06c 100644 --- a/eval/tracer.py +++ b/eval/tracer.py @@ -7,14 +7,11 @@ from __future__ import annotations from datetime import UTC, datetime -from typing import TYPE_CHECKING, Any +from typing import Any from langchain_core.tracers.base import BaseTracer from langchain_core.tracers.schemas import Run -if TYPE_CHECKING: - from eval.models import LLMCallRecord, RunTrajectory, ToolCallRecord - class TrajectoryTracer(BaseTracer): """Capture agent execution trajectory via LangChain callback system. @@ -45,7 +42,7 @@ def _persist_run(self, run: Run) -> None: """Called when a root run completes. Collect the full Run tree.""" self.traced_runs.append(run) - def to_trajectory(self) -> RunTrajectory: + def to_trajectory(self) -> RunTrajectory: # noqa: F821 """Convert collected Run trees into a RunTrajectory.""" import json @@ -77,7 +74,7 @@ def to_trajectory(self) -> RunTrajectory: status="completed", ) - def enrich_from_runtime(self, trajectory: RunTrajectory, runtime: Any) -> None: + def enrich_from_runtime(self, trajectory: RunTrajectory, runtime: Any) -> None: # noqa: F821 """Enrich trajectory with token data from MonitorMiddleware runtime. 
Streaming mode doesn't populate Run.outputs with usage_metadata, @@ -138,7 +135,7 @@ def _walk_run_tree( for child in run.child_runs: self._walk_run_tree(child, llm_calls, tool_calls) - def _extract_llm_record(self, run: Run) -> LLMCallRecord | None: + def _extract_llm_record(self, run: Run) -> LLMCallRecord | None: # noqa: F821 """Extract LLMCallRecord from a chat_model Run.""" from eval.models import LLMCallRecord @@ -214,7 +211,7 @@ def _extract_llm_record(self, run: Run) -> LLMCallRecord | None: tool_calls_requested=tool_calls_requested, ) - def _extract_tool_record(self, run: Run) -> ToolCallRecord | None: + def _extract_tool_record(self, run: Run) -> ToolCallRecord | None: # noqa: F821 """Extract ToolCallRecord from a tool Run.""" import json diff --git a/examples/integration/langchain_tool_image_openai.py b/examples/integration/langchain_tool_image_openai.py index 0c3da232d..4726e569d 100644 --- a/examples/integration/langchain_tool_image_openai.py +++ b/examples/integration/langchain_tool_image_openai.py @@ -80,8 +80,8 @@ def main() -> None: if not base_url.endswith("/v1"): base_url = f"{base_url}/v1" - ChatOpenAI = _maybe_import_langchain_openai() - HumanMessage, ToolMessage, tool = _maybe_import_langchain_tools() + ChatOpenAI = _maybe_import_langchain_openai() # noqa: N806 + HumanMessage, ToolMessage, tool = _maybe_import_langchain_tools() # noqa: N806 @tool(description="Return repo image.png as an OpenAI-compatible image content block.") def make_test_image() -> list[dict[str, str]]: diff --git a/frontend/app/index.html b/frontend/app/index.html index c01c00d8c..5df279dba 100644 --- a/frontend/app/index.html +++ b/frontend/app/index.html @@ -4,8 +4,6 @@ - - Mycel diff --git a/frontend/app/package-lock.json b/frontend/app/package-lock.json index 8af285c77..7b7c45be5 100644 --- a/frontend/app/package-lock.json +++ b/frontend/app/package-lock.json @@ -35,6 +35,7 @@ "@radix-ui/react-toggle": "^1.1.10", "@radix-ui/react-toggle-group": "^1.1.11", 
"@radix-ui/react-tooltip": "^1.2.8", + "@supabase/supabase-js": "^2.101.1", "@types/diff": "^7.0.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -4613,6 +4614,92 @@ "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", "license": "MIT" }, + "node_modules/@supabase/auth-js": { + "version": "2.101.1", + "resolved": "https://registry.npmjs.org/@supabase/auth-js/-/auth-js-2.101.1.tgz", + "integrity": "sha512-Kd0Wey+RkFHgyVep7adS6UOE2pN6MJ3mZ32PAXSvfw6IjUkFRC7IQpdZZjUOcUe5pXr1ejufCRgF6lsGINe4Tw==", + "license": "MIT", + "dependencies": { + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/functions-js": { + "version": "2.101.1", + "resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.101.1.tgz", + "integrity": "sha512-OZWU7YtaG+NNNFZK8p/FuJ6gpq7pFyrG2fLOopP73HAIDHDGpOttPJapvO8ADu3RkqfQfkwrB354vPkSBbZ20A==", + "license": "MIT", + "dependencies": { + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/phoenix": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@supabase/phoenix/-/phoenix-0.4.0.tgz", + "integrity": "sha512-RHSx8bHS02xwfHdAbX5Lpbo6PXbgyf7lTaXTlwtFDPwOIw64NnVRwFAXGojHhjtVYI+PEPNSWwkL90f4agN3bw==", + "license": "MIT" + }, + "node_modules/@supabase/postgrest-js": { + "version": "2.101.1", + "resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-2.101.1.tgz", + "integrity": "sha512-UW1RajH5jbZoK+ldAJ1I6VZ+HWwZ2oaKjEQ6Gn+AQ67CHQVxGl8wNQoLYyumbyaExm41I+wn7arulcY1eHeZJw==", + "license": "MIT", + "dependencies": { + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/realtime-js": { + "version": "2.101.1", + "resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.101.1.tgz", + "integrity": 
"sha512-Oa6dno0OB9I+hv5do5zsZHbFu41ViZnE9IWjmkeeF/8fPmB5fWoHGqeTYEC3/0DAgtpUoFJa4FpvzFH0SBHo1Q==", + "license": "MIT", + "dependencies": { + "@supabase/phoenix": "^0.4.0", + "@types/ws": "^8.18.1", + "tslib": "2.8.1", + "ws": "^8.18.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/storage-js": { + "version": "2.101.1", + "resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.101.1.tgz", + "integrity": "sha512-WhTaUOBgeEvnKLy95Cdlp6+D5igSF/65yC727w1olxbet5nzUvMlajKUWyzNtQu2efrz2cQ7FcdVBdQqgT9YKQ==", + "license": "MIT", + "dependencies": { + "iceberg-js": "^0.8.1", + "tslib": "2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@supabase/supabase-js": { + "version": "2.101.1", + "resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.101.1.tgz", + "integrity": "sha512-Jnhm3LfuACwjIzvk2pfUbGQn7pa7hi6MFzfSyPrRYWVCCu69RPLCFyHSBl7HSBwadbQ3UZOznnD3gPca3ePrRA==", + "license": "MIT", + "dependencies": { + "@supabase/auth-js": "2.101.1", + "@supabase/functions-js": "2.101.1", + "@supabase/postgrest-js": "2.101.1", + "@supabase/realtime-js": "2.101.1", + "@supabase/storage-js": "2.101.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/@types/babel__core": { "version": "7.20.5", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", @@ -4786,9 +4873,7 @@ "version": "24.10.4", "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.4.tgz", "integrity": "sha512-vnDVpYPMzs4wunl27jHrfmwojOGKya0xyM3sH+UE5iv5uPS6vX7UIoh6m+vQc5LGBq52HBKPIn/zcSZVzeDEZg==", - "dev": true, "license": "MIT", - "peer": true, "dependencies": { "undici-types": "~7.16.0" } @@ -4821,6 +4906,15 @@ "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", "license": "MIT" }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": 
"sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.52.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.52.0.tgz", @@ -6717,6 +6811,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/iceberg-js": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/iceberg-js/-/iceberg-js-0.8.1.tgz", + "integrity": "sha512-1dhVQZXhcHje7798IVM+xoo/1ZdVfzOMIc8/rgVSijRK38EDqOJoGula9N/8ZI5RD8QTxNQtK/Gozpr+qUqRRA==", + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -9545,7 +9648,6 @@ "version": "7.16.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", - "dev": true, "license": "MIT" }, "node_modules/unicode-canonical-property-names-ecmascript": { @@ -9968,6 +10070,27 @@ "node": ">=0.10.0" } }, + "node_modules/ws": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz", + "integrity": "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", diff --git a/frontend/app/package.json b/frontend/app/package.json index 52199cd30..8a9556af9 100644 --- a/frontend/app/package.json +++ b/frontend/app/package.json @@ 
-37,6 +37,7 @@ "@radix-ui/react-toggle": "^1.1.10", "@radix-ui/react-toggle-group": "^1.1.11", "@radix-ui/react-tooltip": "^1.2.8", + "@supabase/supabase-js": "^2.101.1", "@types/diff": "^7.0.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", diff --git a/frontend/app/src/api/types.ts b/frontend/app/src/api/types.ts index 08d990935..49990dfd8 100644 --- a/frontend/app/src/api/types.ts +++ b/frontend/app/src/api/types.ts @@ -315,8 +315,12 @@ export interface ChatMessage { sender_id: string; sender_name: string; content: string; + message_type: MessageType; mentioned_ids: string[]; + signal: "open" | "yield" | "close" | null; + retracted_at: string | null; created_at: number; + _status?: MessageStatus; } export interface TaskAgentRequest { @@ -349,3 +353,42 @@ export interface SandboxUploadResult { size_bytes: number; sha256: string; } + +// --- Social / Relationship types --- + +export type RelationshipState = + | "none" | "pending_a_to_b" | "pending_b_to_a" | "visit" | "hire"; + +export interface Relationship { + id: string; + other_user_id: string; + state: RelationshipState; + direction: "a_to_b" | "b_to_a" | null; + is_requester: boolean; + hire_granted_at: string | null; + hire_revoked_at: string | null; + created_at: string; + updated_at: string; +} + +export type ContactRelation = "normal" | "blocked" | "muted"; + +export interface Contact { + owner_user_id: string; + target_user_id: string; + relation: ContactRelation; + created_at: string; + updated_at: string | null; +} + +export interface AgentProfile { + id: string; + name: string; + type: "agent"; + avatar_url?: string; + description?: string; +} + +export type MessageStatus = "sending" | "sent" | "read"; + +export type MessageType = "human" | "ai" | "ai_process" | "system" | "notification"; diff --git a/frontend/app/src/components/AgentProfileSheet.tsx b/frontend/app/src/components/AgentProfileSheet.tsx new file mode 100644 index 000000000..d121892f3 --- /dev/null +++ 
b/frontend/app/src/components/AgentProfileSheet.tsx @@ -0,0 +1,151 @@ +/** + * AgentProfileSheet — right-side sheet for agent profile + quick relationship actions. + */ + +import { useEffect, useState } from "react"; +import { MessageSquare, Users, ExternalLink } from "lucide-react"; +import { Sheet, SheetContent, SheetHeader, SheetTitle } from "@/components/ui/sheet"; +import MemberAvatar from "@/components/MemberAvatar"; +import { authFetch, useAuthStore } from "@/store/auth-store"; +import { useNavigate } from "react-router-dom"; +import { toast } from "sonner"; +import type { AgentProfile, Relationship } from "@/api/types"; + +interface AgentProfileSheetProps { + entityId: string | null; + open: boolean; + onOpenChange: (open: boolean) => void; +} + +export default function AgentProfileSheet({ entityId, open, onOpenChange }: AgentProfileSheetProps) { + const myEntityId = useAuthStore(s => s.entityId); + const navigate = useNavigate(); + const [profile, setProfile] = useState(null); + const [relationship, setRelationship] = useState(null); + const [acting, setActing] = useState(false); + + const fetchData = () => { + if (!entityId || !open) return; + fetch(`/api/entities/${entityId}/profile`) + .then(r => r.ok ? r.json() : null) + .then(setProfile) + .catch(() => setProfile(null)); + + if (myEntityId) { + authFetch("/api/relationships") + .then(r => r.json()) + .then((rels: Relationship[]) => { + setRelationship(rels.find(r => r.other_user_id === entityId) ?? 
null); + }) + .catch(() => {}); + } + }; + + useEffect(() => { fetchData(); }, [entityId, open, myEntityId]); + + const handleRequest = async () => { + if (!entityId) return; + setActing(true); + try { + const res = await authFetch("/api/relationships/request", { + method: "POST", + body: JSON.stringify({ target_user_id: entityId }), + }); + if (!res.ok) { toast.error("申请失败"); return; } + toast.success("已发送 Visit 申请"); + // Refresh + const rels: Relationship[] = await authFetch("/api/relationships").then(r => r.json()); + setRelationship(rels.find(r => r.other_user_id === entityId) ?? null); + } catch { toast.error("网络错误"); } + finally { setActing(false); } + }; + + const handleCancelRequest = async () => { + if (!relationship) return; + setActing(true); + try { + const res = await authFetch(`/api/relationships/${relationship.id}/revoke`, { method: "POST" }); + if (!res.ok) { toast.error("操作失败"); return; } + toast.success("已取消申请"); + setRelationship(null); + } catch { toast.error("网络错误"); } + finally { setActing(false); } + }; + + const state = relationship?.state ?? "none"; + const isPending = state.startsWith("pending"); + const isRequester = relationship?.is_requester ?? false; + const hasActiveRel = state === "hire" || state === "visit"; + + return ( + + + + Agent 信息 + +
+ {!profile ? ( +

加载中...

+ ) : ( + <> +
+ +
+

{profile.name}

+ Agent +
+ {profile.description && ( +

{profile.description}

+ )} +
+ + {state !== "none" && ( +
+ {state === "hire" && Hire 关系} + {state === "visit" && Visit 关系} + {isPending && isRequester && 申请中} + {isPending && !isRequester && 等待你确认} +
+ )} + +
+ + {state === "none" && ( + + )} + {isPending && isRequester && ( + + )} + {hasActiveRel && ( + + )} +
+ + )} +
+
+
+ ); +} diff --git a/frontend/app/src/components/NotificationBell.tsx b/frontend/app/src/components/NotificationBell.tsx new file mode 100644 index 000000000..d5054d9c3 --- /dev/null +++ b/frontend/app/src/components/NotificationBell.tsx @@ -0,0 +1,135 @@ +/** + * NotificationBell — shows pending relationship approval requests. + * Appears in sidebar, above avatar popover. + */ + +import { useCallback, useEffect, useState } from "react"; +import { Bell } from "lucide-react"; +import { Popover, PopoverTrigger, PopoverContent } from "@/components/ui/popover"; +import MemberAvatar from "@/components/MemberAvatar"; +import { authFetch, useAuthStore } from "@/store/auth-store"; +import { supabase } from "@/lib/supabase"; +import { toast } from "sonner"; +import { useNavigate } from "react-router-dom"; +import type { Relationship } from "@/api/types"; + +interface PendingItem { + relId: string; + entityId: string; +} + +interface NotificationBellProps { + showLabel?: boolean; +} + +export default function NotificationBell({ showLabel }: NotificationBellProps) { + const myEntityId = useAuthStore(s => s.entityId); + const navigate = useNavigate(); + const [pending, setPending] = useState([]); + const [open, setOpen] = useState(false); + const [acting, setActing] = useState(null); + + const fetchPending = useCallback(async () => { + if (!myEntityId) return; + try { + const res = await authFetch("/api/relationships"); + if (!res.ok) return; + const rels: Relationship[] = await res.json(); + const items = rels + .filter(r => !r.is_requester && r.state.startsWith("pending")) + .map(r => ({ relId: r.id, entityId: r.other_user_id })); + setPending(items); + } catch { /* silent */ } + }, [myEntityId]); + + useEffect(() => { fetchPending(); }, [fetchPending]); + + useEffect(() => { + if (!supabase || !myEntityId) return; + const channel = supabase + .channel(`notifications:${myEntityId}`) + .on("postgres_changes", { event: "*", schema: "public", table: "relationships", filter: 
`principal_a=eq.${myEntityId}` }, fetchPending) + .on("postgres_changes", { event: "*", schema: "public", table: "relationships", filter: `principal_b=eq.${myEntityId}` }, fetchPending) + .subscribe(); + return () => { supabase?.removeChannel(channel); }; + }, [myEntityId, fetchPending]); + + const handleApprove = async (relId: string) => { + setActing(relId); + try { + const res = await authFetch(`/api/relationships/${relId}/approve`, { method: "POST" }); + if (!res.ok) { toast.error("操作失败"); return; } + toast.success("已批准"); + fetchPending(); + } catch { toast.error("网络错误"); } + finally { setActing(null); } + }; + + const handleReject = async (relId: string) => { + setActing(relId); + try { + const res = await authFetch(`/api/relationships/${relId}/reject`, { method: "POST" }); + if (!res.ok) { toast.error("操作失败"); return; } + toast.success("已拒绝"); + fetchPending(); + } catch { toast.error("网络错误"); } + finally { setActing(null); } + }; + + const count = pending.length; + + return ( + + + + + +
+

通知

+
+ {pending.length === 0 ? ( +
暂无待处理请求
+ ) : ( +
+ {pending.map(item => ( +
+ +
+

{item.entityId.slice(0, 12)}… 请求 Visit

+
+
+ + +
+
+ ))} +
+ )} +
+ +
+
+
+ ); +} diff --git a/frontend/app/src/components/RelationshipPanel.tsx b/frontend/app/src/components/RelationshipPanel.tsx new file mode 100644 index 000000000..f9b1a3bb7 --- /dev/null +++ b/frontend/app/src/components/RelationshipPanel.tsx @@ -0,0 +1,308 @@ +/** + * RelationshipPanel — Hire/Visit relationship management for an agent. + * + * Shows on AgentDetailPage. Uses entity_id (not member_id) for relationships. + * Supports: request Visit, approve/reject pending, upgrade to Hire, revoke. + */ + +import { useCallback, useEffect, useState } from "react"; +import { Users, ArrowUpCircle, ArrowDownCircle, XCircle, CheckCircle, Clock } from "lucide-react"; +import { authFetch, useAuthStore } from "@/store/auth-store"; +import { supabase } from "@/lib/supabase"; +import { toast } from "sonner"; +import { AlertDialog, AlertDialogAction, AlertDialogCancel, AlertDialogContent, AlertDialogDescription, AlertDialogFooter, AlertDialogHeader, AlertDialogTitle } from "@/components/ui/alert-dialog"; + +type RelationshipState = "none" | "pending_a_to_b" | "pending_b_to_a" | "visit" | "hire"; + +interface Relationship { + id: string; + other_user_id: string; + state: RelationshipState; + direction: string | null; + hire_granted_at: string | null; + updated_at: string; +} + +interface Props { + agentMemberId: string; +} + +const STATE_LABEL: Record = { + none: "无关系", + pending_a_to_b: "申请中", + pending_b_to_a: "待审批", + visit: "Visit", + hire: "Hire", +}; + +const STATE_COLOR: Record = { + none: "text-muted-foreground", + pending_a_to_b: "text-warning", + pending_b_to_a: "text-info", + visit: "text-success", + hire: "text-success", +}; + +export default function RelationshipPanel({ agentMemberId }: Props) { + const myEntityId = useAuthStore(s => s.entityId); + const [agentEntityId, setAgentEntityId] = useState(null); + const [relationship, setRelationship] = useState(null); + const [loading, setLoading] = useState(true); + const [acting, setActing] = useState(false); + const 
[confirmAction, setConfirmAction] = useState<{ + label: string; + desc: string; + fn: () => void; + } | null>(null); + + // Resolve agent entity_id from member_id + useEffect(() => { + authFetch("/api/entities") + .then(r => r.json()) + .then((entities: { id: string; member_id: string; type: string }[]) => { + const match = entities.find(e => e.member_id === agentMemberId && e.type === "agent"); + setAgentEntityId(match?.id ?? null); + }) + .catch(() => setAgentEntityId(null)); + }, [agentMemberId]); + + const fetchRelationship = useCallback(() => { + if (!agentEntityId || !myEntityId) { setLoading(false); return; } + authFetch("/api/relationships") + .then(r => r.json()) + .then((rows: Relationship[]) => { + const rel = rows.find(r => r.other_user_id === agentEntityId) ?? null; + setRelationship(rel); + }) + .catch(() => setRelationship(null)) + .finally(() => setLoading(false)); + }, [agentEntityId, myEntityId]); + + useEffect(() => { fetchRelationship(); }, [fetchRelationship]); + + // Realtime: subscribe to relationship changes for instant approval notifications + useEffect(() => { + if (!supabase || !myEntityId) return; + // Filter by principal_a to avoid reacting to unrelated relationship changes + const channel = supabase + .channel(`relationships_watch:${myEntityId}`) + .on( + "postgres_changes", + { event: "*", schema: "public", table: "relationships", filter: `principal_a=eq.${myEntityId}` }, + () => { fetchRelationship(); }, + ) + .on( + "postgres_changes", + { event: "*", schema: "public", table: "relationships", filter: `principal_b=eq.${myEntityId}` }, + () => { fetchRelationship(); }, + ) + .subscribe(); + return () => { supabase?.removeChannel(channel); }; + }, [myEntityId, fetchRelationship]); + + const act = useCallback(async (action: () => Promise, successMsg: string) => { + setActing(true); + try { + const res = await action(); + if (!res.ok) { + const data = await res.json().catch(() => ({})); + toast.error(data.detail || `操作失败 
(${res.status})`); + return; + } + toast.success(successMsg); + fetchRelationship(); + } catch { + toast.error("网络错误"); + } finally { + setActing(false); + } + }, [fetchRelationship]); + + const handleRequest = () => + act( + () => authFetch("/api/relationships/request", { method: "POST", body: JSON.stringify({ target_user_id: agentEntityId }) }), + "已发送 Visit 申请", + ); + + const handleApprove = () => + act( + () => authFetch(`/api/relationships/${relationship!.id}/approve`, { method: "POST" }), + "已批准", + ); + + const handleReject = () => + act( + () => authFetch(`/api/relationships/${relationship!.id}/reject`, { method: "POST" }), + "已拒绝", + ); + + const handleUpgrade = () => + act( + () => authFetch(`/api/relationships/${relationship!.id}/upgrade`, { method: "POST", body: JSON.stringify({}) }), + "已升级为 Hire", + ); + + const handleRevoke = () => + act( + () => authFetch(`/api/relationships/${relationship!.id}/revoke`, { method: "POST" }), + "已收回授权", + ); + + const handleDowngrade = () => + act( + () => authFetch(`/api/relationships/${relationship!.id}/downgrade`, { method: "POST" }), + "已降级为 Visit", + ); + + if (!myEntityId || !agentEntityId) return null; + if (loading) { + return ( +
加载关系状态...
+ ); + } + + const state: RelationshipState = relationship?.state ?? "none"; + // Determine if current user is the "approver" (other side of a pending request) + const isPendingIncoming = ( + (state === "pending_a_to_b" && relationship?.direction === "a_to_b" && agentEntityId < myEntityId) || + (state === "pending_b_to_a" && relationship?.direction === "b_to_a" && agentEntityId > myEntityId) + ); + + return ( +
+
+ + 关系状态 + + {STATE_LABEL[state]} + +
+ + {/* Relationship description */} +
+ {state === "none" && ( +

申请 Visit 后,此 Agent 的消息将进入通知队列(不直接唤醒)。

+ )} + {(state === "pending_a_to_b" || state === "pending_b_to_a") && !isPendingIncoming && ( +

申请已发出,等待对方确认。

+ )} + {isPendingIncoming && ( +

对方申请了 Visit,请审批。

+ )} + {state === "visit" && ( +

Visit 已授予:此 Agent 的消息进入通知队列。升级为 Hire 可直接唤醒。

+ )} + {state === "hire" && ( +

Hire 已授予:此 Agent 消息直达主线程,立即唤醒响应。

+ )} +
+ + {/* Actions */} +
+ {state === "none" && ( + + )} + + {isPendingIncoming && ( + <> + + + + )} + + {state === "visit" && ( + <> + + + + )} + + {state === "hire" && ( + <> + + + + )} +
+ + setConfirmAction(null)}> + + + {confirmAction?.label} + {confirmAction?.desc} + + + 取消 + { confirmAction?.fn(); setConfirmAction(null); }} + className="bg-destructive text-destructive-foreground hover:bg-destructive/90" + > + 确认 + + + + +
+ ); +} diff --git a/frontend/app/src/lib/supabase.ts b/frontend/app/src/lib/supabase.ts new file mode 100644 index 000000000..11a09cdec --- /dev/null +++ b/frontend/app/src/lib/supabase.ts @@ -0,0 +1,46 @@ +/** + * Supabase client singleton for frontend Realtime subscriptions. + * + * URL and anon key are injected at build time via Vite env vars: + * VITE_SUPABASE_URL + * VITE_SUPABASE_ANON_KEY + * + * For local dev without Supabase, both vars can be empty — the client + * will be null and subscriptions will be skipped (SSE fallback remains). + */ + +import { createClient, type SupabaseClient } from "@supabase/supabase-js"; + +const url = import.meta.env.VITE_SUPABASE_URL as string | undefined; +const anonKey = import.meta.env.VITE_SUPABASE_ANON_KEY as string | undefined; + +export const supabase: SupabaseClient | null = + url && anonKey ? createClient(url, anonKey) : null; + +export type ChatMessagePayload = { + id: string; + chat_id: string; + sender_id: string; + content: string; + content_type: string; + message_type: string; + signal: string | null; + mentions: string[]; + retracted_at: string | null; + created_at: string; +}; + +export type MessageReadPayload = { + message_id: string; + user_id: string; + read_at: string; +}; + +export type RelationshipPayload = { + id: string; + principal_a: string; + principal_b: string; + state: string; + direction: string | null; + updated_at: string; +}; diff --git a/frontend/app/src/pages/AgentDetailPage.tsx b/frontend/app/src/pages/AgentDetailPage.tsx index 473bb855d..2a292a634 100644 --- a/frontend/app/src/pages/AgentDetailPage.tsx +++ b/frontend/app/src/pages/AgentDetailPage.tsx @@ -196,7 +196,7 @@ export default function AgentDetail() {
{/* Header */}
- diff --git a/frontend/app/src/pages/AgentPublicPage.tsx b/frontend/app/src/pages/AgentPublicPage.tsx new file mode 100644 index 000000000..35465202d --- /dev/null +++ b/frontend/app/src/pages/AgentPublicPage.tsx @@ -0,0 +1,112 @@ +/** + * AgentPublicPage — public agent profile page, no auth required. + * Route: /a/:entityId + */ + +import { useEffect, useState } from "react"; +import { useParams, useNavigate } from "react-router-dom"; +import MemberAvatar from "@/components/MemberAvatar"; +import { authFetch, useAuthStore } from "@/store/auth-store"; +import { toast } from "sonner"; +import type { AgentProfile } from "@/api/types"; + +export default function AgentPublicPage() { + const { entityId } = useParams<{ entityId: string }>(); + const navigate = useNavigate(); + const token = useAuthStore(s => s.token); + const [profile, setProfile] = useState(null); + const [loading, setLoading] = useState(true); + const [applying, setApplying] = useState(false); + + useEffect(() => { + if (!entityId) return; + fetch(`/api/entities/${entityId}/profile`) + .then(r => { + if (!r.ok) throw new Error("Agent not found"); + return r.json(); + }) + .then(setProfile) + .catch(() => setProfile(null)) + .finally(() => setLoading(false)); + }, [entityId]); + + const handleApply = async () => { + if (!token) { + navigate(`/?redirect=/a/${entityId}`); + return; + } + if (!entityId) return; + setApplying(true); + try { + const res = await authFetch("/api/relationships/request", { + method: "POST", + body: JSON.stringify({ target_user_id: entityId }), + }); + if (res.status === 401) { + navigate(`/?redirect=/a/${entityId}`); + return; + } + if (!res.ok) { + const data = await res.json().catch(() => ({})); + toast.error(data.detail || "申请失败"); + return; + } + toast.success("已发送 Visit 申请"); + } catch { + toast.error("网络错误"); + } finally { + setApplying(false); + } + }; + + if (loading) { + return ( +
+

加载中...

+
+ ); + } + + if (!profile) { + return ( +
+

Agent 不存在

+
+ ); + } + + return ( +
+
+
+ +
+

{profile.name}

+ Agent +
+ {profile.description && ( +

{profile.description}

+ )} +
+ +
+

联系

+ +
+ +

由 Mycel 提供技术支持

+
+
+ ); +} diff --git a/frontend/app/src/pages/ChatConversationPage.tsx b/frontend/app/src/pages/ChatConversationPage.tsx index 6f5a5b5c1..3ff295edd 100644 --- a/frontend/app/src/pages/ChatConversationPage.tsx +++ b/frontend/app/src/pages/ChatConversationPage.tsx @@ -190,7 +190,10 @@ function ChatConversationInner({ chatId }: { chatId: string }) { sender_id: myEntityId, sender_name: useAuthStore.getState().user?.name || "me", content: text, + message_type: "human", mentioned_ids: [], + signal: null, + retracted_at: null, created_at: Date.now() / 1000, }; setMessages(prev => [...prev, optimisticMsg]); diff --git a/frontend/app/src/pages/ContactsPage.tsx b/frontend/app/src/pages/ContactsPage.tsx new file mode 100644 index 000000000..d20ca2704 --- /dev/null +++ b/frontend/app/src/pages/ContactsPage.tsx @@ -0,0 +1,228 @@ +/** + * ContactsPage — 通讻录 + * Three tabs: 待确认 | 联系人 | 已屏蔽 + */ + +import { useCallback, useEffect, useState } from "react"; +import { useNavigate } from "react-router-dom"; +import { Check, X, MessageSquare, ShieldOff } from "lucide-react"; +import MemberAvatar from "@/components/MemberAvatar"; +import { authFetch } from "@/store/auth-store"; +import { toast } from "sonner"; +import type { Relationship, Contact } from "@/api/types"; + +type Tab = "pending" | "contacts" | "blocked"; + +export default function ContactsPage() { + const navigate = useNavigate(); + const [tab, setTab] = useState("pending"); + const [relationships, setRelationships] = useState([]); + const [contacts, setContacts] = useState([]); + const [acting, setActing] = useState(null); + + const fetchRelationships = useCallback(async () => { + try { + const res = await authFetch("/api/relationships"); + if (res.ok) setRelationships(await res.json()); + } catch { /* silent */ } + }, []); + + const fetchContacts = useCallback(async () => { + try { + const res = await authFetch("/api/contacts"); + if (res.ok) setContacts(await res.json()); + } catch { /* silent */ } + }, []); + + 
useEffect(() => { + fetchRelationships(); + fetchContacts(); + }, [fetchRelationships, fetchContacts]); + + const pendingForMe = relationships.filter(r => !r.is_requester && r.state.startsWith("pending")); + const activeContacts = relationships + .filter(r => r.state === "hire" || r.state === "visit") + .sort((a, b) => (a.state === "hire" ? -1 : b.state === "hire" ? 1 : 0)); + const blockedContacts = contacts.filter(c => c.relation === "blocked"); + + const act = async (fn: () => Promise, successMsg: string, onDone: () => void) => { + try { + const res = await fn(); + if (!res.ok) { toast.error("操作失败"); return; } + toast.success(successMsg); + onDone(); + } catch { toast.error("网络错误"); } + }; + + const handleApprove = (relId: string) => { + setActing(relId); + act( + () => authFetch(`/api/relationships/${relId}/approve`, { method: "POST" }), + "已批准", + fetchRelationships, + ).finally(() => setActing(null)); + }; + + const handleReject = (relId: string) => { + setActing(relId); + act( + () => authFetch(`/api/relationships/${relId}/reject`, { method: "POST" }), + "已拒绝", + fetchRelationships, + ).finally(() => setActing(null)); + }; + + const handleRevoke = (relId: string) => { + setActing(relId); + act( + () => authFetch(`/api/relationships/${relId}/revoke`, { method: "POST" }), + "已撤回", + fetchRelationships, + ).finally(() => setActing(null)); + }; + + const handleUnblock = (targetId: string) => { + setActing(targetId); + act( + () => authFetch(`/api/contacts/${targetId}`, { method: "DELETE" }), + "已解除屏蔽", + fetchContacts, + ).finally(() => setActing(null)); + }; + + const tabs: { id: Tab; label: string; count?: number }[] = [ + { id: "pending", label: "待确认", count: pendingForMe.length }, + { id: "contacts", label: "联系人" }, + { id: "blocked", label: "已屏蔽" }, + ]; + + return ( +
+ {/* Header */} +
+

通讯录

+
+ {tabs.map(t => ( + + ))} +
+
+ + {/* Content */} +
+ {tab === "pending" && ( +
+ {pendingForMe.length === 0 && ( +
暂无待确认请求
+ )} + {pendingForMe.map(rel => ( +
+ +
+

{rel.other_user_id}

+

申请 Visit 权限

+
+
+ + +
+
+ ))} +
+ )} + + {tab === "contacts" && ( +
+ {activeContacts.length === 0 && ( +
暂无联系人
+ )} + {activeContacts.map(rel => ( +
+ +
+
+

{rel.other_user_id}

+ {rel.state === "hire" && ( + Hire + )} + {rel.state === "visit" && ( + Visit + )} +
+
+
+ + +
+
+ ))} +
+ )} + + {tab === "blocked" && ( +
+ {blockedContacts.length === 0 && ( +
暂无屏蔽记录
+ )} + {blockedContacts.map(c => ( +
+ +
+

{c.target_user_id}

+

已屏蔽

+
+ +
+ ))} +
+ )} +
+
+ ); +} diff --git a/frontend/app/src/pages/MarketplaceDetailPage.tsx b/frontend/app/src/pages/MarketplaceDetailPage.tsx index 38a32f4fa..b88703852 100644 --- a/frontend/app/src/pages/MarketplaceDetailPage.tsx +++ b/frontend/app/src/pages/MarketplaceDetailPage.tsx @@ -22,6 +22,8 @@ export default function MarketplaceDetailPage() { const fetchVersionSnapshot = useMarketplaceStore((s) => s.fetchVersionSnapshot); const clearSnapshot = useMarketplaceStore((s) => s.clearSnapshot); const [installOpen, setInstallOpen] = useState(false); + const [activeTab, setActiveTab] = useState<"overview" | "versions" | "files">("overview"); + const tabLabels: Record = { overview: "概览", versions: "版本", files: "文件" }; useEffect(() => { if (id) { @@ -32,11 +34,11 @@ export default function MarketplaceDetailPage() { }, [id, fetchDetail, fetchLineage, clearDetail]); useEffect(() => { - if (detail && detail.versions.length > 0 && (detail.type === "skill" || detail.type === "agent")) { + if (activeTab === "files" && detail && detail.versions.length > 0) { fetchVersionSnapshot(detail.id, detail.versions[0].version); } return () => clearSnapshot(); - }, [detail?.id, detail?.type, fetchVersionSnapshot, clearSnapshot]); + }, [activeTab, detail?.id, fetchVersionSnapshot, clearSnapshot]); if (detailLoading) { return ( @@ -66,11 +68,11 @@ export default function MarketplaceDetailPage() {
{/* Back button */} {/* Header */} @@ -106,40 +108,89 @@ export default function MarketplaceDetailPage() {
)} -
- {/* Lineage */} - navigate(`/marketplace/${nodeId}`)} - /> + {/* Tabs */} +
+ {(["overview", "versions", ...(detail.type === "skill" || detail.type === "agent" ? ["files" as const] : [])] as const).map((tab) => ( + + ))} +
+ + {/* Tab content */} + {activeTab === "overview" && ( +
+ {/* Lineage */} + navigate(`/marketplace/${nodeId}`)} + /> + + {/* Latest version info */} + {detail.versions.length > 0 && ( +
+

最新版本

+

v{detail.versions[0].version}

+ {detail.versions[0].release_notes && ( +

{detail.versions[0].release_notes}

+ )} +
+ )} +
+ )} - {/* Version history */} - {detail.versions.length > 0 && ( -
-

版本历史

- {detail.versions.map((v) => ( -
-
- v{v.version} - {new Date(v.created_at).toLocaleDateString()} -
- {v.release_notes && ( -

{v.release_notes}

- )} + {activeTab === "versions" && ( +
+ {detail.versions.map((v) => ( +
+
+ v{v.version} + {new Date(v.created_at).toLocaleDateString()}
- ))} + {v.release_notes && ( +

{v.release_notes}

+ )} +
+ ))} + {detail.versions.length === 0 && ( +

暂无已发布的版本

+ )} +
+ )} + + {activeTab === "files" && ( +
+ {/* File tree */} +
+

文件结构

+
+
+ 📁 + {detail.slug}/ +
+
+ 📄 + SKILL.md +
+
+ 📄 + meta.json +
+
- )} - {/* File content for skill / agent */} - {(detail.type === "skill" || detail.type === "agent") && ( + {/* SKILL.md preview */}
- - {detail.type === "skill" ? "SKILL.md" : "agent.md"} - + SKILL.md
{snapshotLoading ? (
@@ -153,8 +204,8 @@ export default function MarketplaceDetailPage() {

暂无内容

)}
- )} -
+
+ )}
{/* Install dialog */} diff --git a/frontend/app/src/pages/MarketplacePage.tsx b/frontend/app/src/pages/MarketplacePage.tsx index e5e85d1f3..ed529d16b 100644 --- a/frontend/app/src/pages/MarketplacePage.tsx +++ b/frontend/app/src/pages/MarketplacePage.tsx @@ -1,6 +1,6 @@ -import React, { useState, useEffect, useCallback } from "react"; -import { useNavigate, useSearchParams } from "react-router-dom"; -import { Search, Store, Package, TrendingUp, Clock, Star, RefreshCw, Zap, Users, Trash2 } from "lucide-react"; +import { useState, useEffect, useCallback } from "react"; +import { useNavigate } from "react-router-dom"; +import { Search, Store, Package, TrendingUp, Clock, Star, RefreshCw } from "lucide-react"; import { useMarketplaceStore } from "@/store/marketplace-store"; import { useAppStore } from "@/store/app-store"; import { useIsMobile } from "@/hooks/use-mobile"; @@ -9,7 +9,6 @@ import UpdateDialog from "@/components/marketplace/UpdateDialog"; import type { Member } from "@/store/types"; type Tab = "explore" | "installed"; -type InstalledSubTab = "member" | "skill" | "agent"; type TypeFilter = "all" | "member" | "agent" | "skill" | "env"; const typeFilters: { id: TypeFilter; label: string }[] = [ @@ -29,13 +28,7 @@ const sortOptions = [ export default function MarketplacePage() { const isMobile = useIsMobile(); const navigate = useNavigate(); - const [searchParams, setSearchParams] = useSearchParams(); - - const tab = (searchParams.get("tab") as Tab) || "explore"; - const installedSubTab = (searchParams.get("sub") as InstalledSubTab) || "member"; - - const setTab = (t: Tab) => setSearchParams((p) => { p.set("tab", t); p.delete("sub"); return p; }, { replace: true }); - const setInstalledSubTab = (s: InstalledSubTab) => setSearchParams((p) => { p.set("sub", s); return p; }, { replace: true }); + const [tab, setTab] = useState("explore"); // Explore state const items = useMarketplaceStore((s) => s.items); @@ -48,10 +41,6 @@ export default function 
MarketplacePage() { // Installed state const memberList = useAppStore((s) => s.memberList); - const librarySkills = useAppStore((s) => s.librarySkills); - const libraryAgents = useAppStore((s) => s.libraryAgents); - const fetchLibrary = useAppStore((s) => s.fetchLibrary); - const deleteResource = useAppStore((s) => s.deleteResource); const updates = useMarketplaceStore((s) => s.updates); const checkUpdates = useMarketplaceStore((s) => s.checkUpdates); @@ -63,7 +52,6 @@ export default function MarketplacePage() { const [updateDialogOpen, setUpdateDialogOpen] = useState(false); const [updateTarget, setUpdateTarget] = useState<{ member: Member; update: any } | null>(null); - // Fetch explore items when filters change useEffect(() => { if (tab === "explore") fetchItems(); @@ -81,31 +69,12 @@ export default function MarketplacePage() { setFilter("type", type === "all" ? null : type); }; - // Load library on installed tab open - useEffect(() => { - if (tab === "installed") { - fetchLibrary("skill"); - fetchLibrary("agent"); - } - }, [tab, fetchLibrary]); - // Installed members with marketplace source info const installedMembers = memberList.filter((m) => !m.builtin); - const filteredMembers = installedMembers.filter((m) => - !installedSearch || m.name.toLowerCase().includes(installedSearch.toLowerCase()) - ); - const filteredSkills = librarySkills.filter((s) => - !installedSearch || s.name.toLowerCase().includes(installedSearch.toLowerCase()) - ); - const filteredAgents = libraryAgents.filter((a) => - !installedSearch || a.name.toLowerCase().includes(installedSearch.toLowerCase()) - ); - - const installedSubTabs: { id: InstalledSubTab; label: string; icon: React.ElementType; count: number }[] = [ - { id: "member", label: "成员", icon: Package, count: installedMembers.length }, - { id: "skill", label: "Skill", icon: Zap, count: librarySkills.length }, - { id: "agent", label: "Agent", icon: Users, count: libraryAgents.length }, - ]; + const filteredInstalled = 
installedMembers.filter((m) => { + if (installedSearch && !m.name.toLowerCase().includes(installedSearch.toLowerCase())) return false; + return true; + }); const handleCheckUpdates = useCallback(async () => { // source field comes from meta.json; members without it cannot be checked @@ -317,139 +286,40 @@ export default function MarketplacePage() { />
- {/* Sub-tabs */} -
- {installedSubTabs.map((t) => ( - - ))} -
- - {/* Member list */} - {installedSubTab === "member" && ( - <> -
- {filteredMembers.map((member) => { - const update = updates.find((u) => u.marketplace_item_id === member.id); - return ( -
navigate(`/members/${member.id}`)}> -
-
- -
-
-

{member.name}

-

{member.description}

-

v{member.version}

-
-
- {update && ( - - )} + {/* Grid */} +
+ {filteredInstalled.map((member) => { + const update = updates.find((u) => u.marketplace_item_id === member.id); + return ( +
navigate(`/members/${member.id}`)}> +
+
+
- ); - })} -
- {filteredMembers.length === 0 && ( -
暂无已安装的成员
- )} - - )} - - {/* Skill list */} - {installedSubTab === "skill" && ( - <> -
- {filteredSkills.map((skill) => ( -
navigate(`/library/skill/${skill.id}`)} - className="surface-interactive p-4 cursor-pointer group relative" - > -
-
- -
-
-

{skill.name}

-

{skill.desc || "暂无描述"}

-
+
+

{member.name}

+

{member.description}

+

v{member.version}

-
- ))} -
- {filteredSkills.length === 0 && ( -
暂无已安装的 Skill
- )} - - )} - - {/* Agent list */} - {installedSubTab === "agent" && ( - <> -
- {filteredAgents.map((agent) => ( -
navigate(`/library/agent/${agent.id}`)} - className="surface-interactive p-4 cursor-pointer group relative" - > -
-
- -
-
-

{agent.name}

-

{agent.desc || "暂无描述"}

-
-
+ {update && ( -
- ))} -
- {filteredAgents.length === 0 && ( -
暂无已安装的 Agent
- )} - + )} +
+ ); + })} +
+ {filteredInstalled.length === 0 && ( +
暂无已安装的成员
)} )} @@ -467,7 +337,6 @@ export default function MarketplacePage() { memberName={updateTarget.member.name} /> )} -
); } diff --git a/frontend/app/src/pages/RootLayout.tsx b/frontend/app/src/pages/RootLayout.tsx index d285056f0..0192ea51c 100644 --- a/frontend/app/src/pages/RootLayout.tsx +++ b/frontend/app/src/pages/RootLayout.tsx @@ -1,5 +1,5 @@ import { NavLink, Outlet, useLocation, useNavigate } from "react-router-dom"; -import { MessageSquare, MessagesSquare, Users, ListTodo, Store, Layers, Plug, Settings, Plus, ChevronLeft, ChevronRight, LogOut, Camera, Eye, EyeOff } from "lucide-react"; +import { MessageSquare, MessagesSquare, Users, ListTodo, Store, Layers, Plug, Settings, Plus, ChevronLeft, ChevronRight, LogOut, Camera } from "lucide-react"; import { useState, useEffect, useCallback, useRef } from "react"; import { uploadMemberAvatar } from "@/api/client"; import MemberAvatar from "@/components/MemberAvatar"; @@ -29,9 +29,7 @@ const mobileNavItems = [ // @@@auth-guard — wrapper that shows LoginForm when not authenticated export default function RootLayout() { const token = useAuthStore(s => s.token); - const setupInfo = useAuthStore(s => s.setupInfo); if (!token) return ; - if (setupInfo) return ; return ; } @@ -79,12 +77,19 @@ function AuthenticatedLayout() { useEffect(() => { const handleKeyDown = (e: KeyboardEvent) => { if (e.key === "Escape" && showCreate) setShowCreate(false); - if ((e.metaKey || e.ctrlKey) && e.key === "b") { e.preventDefault(); setExpanded((prev) => !prev); } }; document.addEventListener("keydown", handleKeyDown); return () => document.removeEventListener("keydown", handleKeyDown); }, [showCreate]); + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if ((e.metaKey || e.ctrlKey) && e.key === "b") { e.preventDefault(); setExpanded((prev) => !prev); } + }; + document.addEventListener("keydown", handleKeyDown); + return () => document.removeEventListener("keydown", handleKeyDown); + }, []); + const handleCreateAction = useCallback(async (action: string) => { setShowCreate(false); switch (action) { @@ -364,291 +369,73 @@ function 
CreateDropdown({ ); } -// ── Auth form states ────────────────────────────────────────────────────── -type AuthStep = - | { type: "login" } - | { type: "reg_email" } - | { type: "reg_otp"; email: string; password: string; inviteCode: string }; - -function AuthCard({ children }: { children: React.ReactNode }) { - return ( -
-
{children}
-
- ); -} - -function AuthHeader({ title, subtitle }: { title: string; subtitle?: string }) { - return ( -
- Mycel -

{title}

- {subtitle &&

{subtitle}

} -
- ); -} - function LoginForm() { - const [step, setStep] = useState({ type: "login" }); + const [mode, setMode] = useState<"login" | "register">("login"); + const [username, setUsername] = useState(""); + const [password, setPassword] = useState(""); const [error, setError] = useState(null); const [loading, setLoading] = useState(false); - const login = useAuthStore(s => s.login); - const sendOtp = useAuthStore(s => s.sendOtp); - const verifyOtp = useAuthStore(s => s.verifyOtp); - const completeRegister = useAuthStore(s => s.completeRegister); - - function reset(t: AuthStep) { setStep(t); setError(null); } - - // ── Step: Login ── - if (step.type === "login") { - return { - await login(identifier, password); - }} - onSwitch={() => reset({ type: "reg_email" })} - error={error} setError={setError} - loading={loading} setLoading={setLoading} - />; - } - - // ── Step: Enter email + password + invite code ── - if (step.type === "reg_email") { - return { - await sendOtp(email, password, inviteCode); - setStep({ type: "reg_otp", email, password, inviteCode }); - }} - onBack={() => reset({ type: "login" })} - error={error} setError={setError} - loading={loading} setLoading={setLoading} - />; - } - - // ── Step: Enter OTP ── - const { email, password, inviteCode } = step; - return { - const { tempToken } = await verifyOtp(email, token); - await completeRegister(tempToken, inviteCode); - // RootLayout will detect setupInfo and render SetupNameStep automatically - }} - onResend={async () => { - await sendOtp(email, password, inviteCode); - }} - onBack={() => reset({ type: "reg_email" })} - error={error} setError={setError} - loading={loading} setLoading={setLoading} - />; -} - -// ── Sub-steps ──────────────────────────────────────────────────────────── - -const inputCls = "w-full px-4 py-2.5 rounded-lg border border-border bg-card text-sm text-foreground focus:outline-none focus:ring-2 focus:ring-primary/50"; -const btnCls = "w-full py-2.5 rounded-lg bg-primary 
text-primary-foreground text-sm font-medium hover:opacity-90 disabled:opacity-50"; - -function LoginStep({ onSubmit, onSwitch, error, setError, loading, setLoading }: { - onSubmit: (id: string, pw: string) => Promise; - onSwitch: () => void; - error: string | null; setError: (e: string | null) => void; - loading: boolean; setLoading: (v: boolean) => void; -}) { - const [identifier, setIdentifier] = useState(""); - const [password, setPassword] = useState(""); - async function handle(e: React.FormEvent) { - e.preventDefault(); setError(null); setLoading(true); - try { await onSubmit(identifier, password); } - catch (err) { setError(err instanceof Error ? err.message : "登录失败"); } - finally { setLoading(false); } - } - return ( - - -
- setIdentifier(e.target.value)} className={inputCls} required autoComplete="username" /> - setPassword(e.target.value)} className={inputCls} required autoComplete="current-password" /> - {error &&

{error}

} - -
-

- 没有账号? -

-
- ); -} - -function RegEmailStep({ onSubmit, onBack, error, setError, loading, setLoading }: { - onSubmit: (email: string, password: string, inviteCode: string) => Promise; - onBack: () => void; - error: string | null; setError: (e: string | null) => void; - loading: boolean; setLoading: (v: boolean) => void; -}) { - const [email, setEmail] = useState(""); - const [password, setPassword] = useState(""); - const [confirm, setConfirm] = useState(""); - const [inviteCode, setInviteCode] = useState(""); - async function handle(e: React.FormEvent) { - e.preventDefault(); - if (password !== confirm) { setError("两次输入的密码不一致"); return; } - setError(null); setLoading(true); - try { await onSubmit(email, password, inviteCode); } - catch (err) { setError(err instanceof Error ? err.message : "发送失败"); } - finally { setLoading(false); } - } - return ( - - -
- setEmail(e.target.value)} className={inputCls} required autoComplete="email" autoFocus /> - - - setInviteCode(e.target.value)} className={inputCls} autoComplete="off" required /> - {error &&

{error}

} - - -

- 已有账号? -

-
- ); -} - -function RegOtpStep({ email, onSubmit, onResend, onBack, error, setError, loading, setLoading }: { - email: string; - onSubmit: (token: string) => Promise; - onResend: () => Promise; - onBack: () => void; - error: string | null; setError: (e: string | null) => void; - loading: boolean; setLoading: (v: boolean) => void; -}) { - const [otp, setOtp] = useState(""); - const [resending, setResending] = useState(false); - const [resendDone, setResendDone] = useState(false); - async function handle(e: React.FormEvent) { - e.preventDefault(); setError(null); setLoading(true); - try { await onSubmit(otp.trim()); } - catch (err) { setError(err instanceof Error ? err.message : "验证失败"); } - finally { setLoading(false); } - } - async function handleResend() { - setError(null); setResending(true); setResendDone(false); - try { await onResend(); setResendDone(true); } - catch (err) { setError(err instanceof Error ? err.message : "发送失败"); } - finally { setResending(false); } - } - return ( - - -
- setOtp(e.target.value.replace(/\D/g, ""))} - maxLength={6} autoComplete="one-time-code" autoFocus - className={`${inputCls} text-center tracking-widest text-lg font-mono`} - required - /> - {error &&

{error}

} - {resendDone && !error &&

验证码已重新发送

} - -
-

- 没收到? - · - -

-
- ); -} - -function PasswordInput({ value, onChange, placeholder, autoFocus, autoComplete }: { - value: string; - onChange: (v: string) => void; - placeholder: string; - autoFocus?: boolean; - autoComplete?: string; -}) { - const [visible, setVisible] = useState(false); - return ( -
- onChange(e.target.value)} - className={`${inputCls} pr-10`} - required - autoComplete={autoComplete} - autoFocus={autoFocus} - minLength={6} - /> - -
- ); -} - - -function SetupNameStep({ userId, defaultName }: { userId: string; defaultName: string }) { - const [name, setName] = useState(defaultName); - const [loading, setLoading] = useState(false); - const token = useAuthStore(s => s.token); - const clearSetupInfo = useAuthStore(s => s.clearSetupInfo); - - function done() { - clearSetupInfo(); - window.location.href = "/threads"; - } + const register = useAuthStore(s => s.register); async function handleSubmit(e: React.FormEvent) { e.preventDefault(); + setError(null); setLoading(true); try { - if (name.trim() && name.trim() !== defaultName) { - await fetch(`/api/panel/members/${userId}`, { - method: "PUT", - headers: { "Content-Type": "application/json", "Authorization": `Bearer ${token}` }, - body: JSON.stringify({ name: name.trim() }), - }); - useAuthStore.setState(s => ({ user: s.user ? { ...s.user, name: name.trim() } : s.user })); - } + if (mode === "login") await login(username, password); + else await register(username, password); + } catch (err) { + setError(err instanceof Error ? err.message : "Authentication failed"); } finally { - done(); + setLoading(false); } } return ( - - -
- setName(e.target.value)} - className={inputCls} - autoFocus - maxLength={32} - /> - -
-

- -

-
+
+
+
+ Mycel +

Mycel

+

+ {mode === "login" ? "登录你的账号" : "创建新账号"} +

+
+
+ setUsername(e.target.value)} + className="w-full px-4 py-2.5 rounded-lg border border-border bg-card text-sm text-foreground focus:outline-none focus:ring-2 focus:ring-primary/50" + required + /> + setPassword(e.target.value)} + className="w-full px-4 py-2.5 rounded-lg border border-border bg-card text-sm text-foreground focus:outline-none focus:ring-2 focus:ring-primary/50" + required + /> + {error &&

{error}

} + +
+

+ {mode === "login" ? ( + <>没有账号? + ) : ( + <>已有账号? + )} +

+
+
); } diff --git a/frontend/app/src/router.tsx b/frontend/app/src/router.tsx index 024478143..a14d8bcd6 100644 --- a/frontend/app/src/router.tsx +++ b/frontend/app/src/router.tsx @@ -13,10 +13,8 @@ import AgentDetailPage from './pages/AgentDetailPage'; import TasksPage from './pages/TasksPage'; import MarketplacePage from './pages/MarketplacePage'; import MarketplaceDetailPage from './pages/MarketplaceDetailPage'; -import LibraryItemDetailPage from './pages/LibraryItemDetailPage'; import ResourcesPage from './pages/ResourcesPage'; import ConnectionsPage from './pages/ConnectionsPage'; -import InviteCodesPage from './pages/InviteCodesPage'; export const router = createBrowserRouter([ // Old /chat/* URLs → redirect to /threads @@ -92,10 +90,6 @@ export const router = createBrowserRouter([ path: 'marketplace/:id', element: , }, - { - path: 'library/:type/:id', - element: , - }, { path: 'library', element: , @@ -104,10 +98,6 @@ export const router = createBrowserRouter([ path: 'connections', element: , }, - { - path: 'invite-codes', - element: , - }, { path: 'settings', element: , diff --git a/frontend/app/src/store/auth-store.ts b/frontend/app/src/store/auth-store.ts index fb0d7b1d8..a1f9cf8ba 100644 --- a/frontend/app/src/store/auth-store.ts +++ b/frontend/app/src/store/auth-store.ts @@ -10,15 +10,6 @@ import { persist } from "zustand/middleware"; const DEV_SKIP_AUTH = import.meta.env.VITE_DEV_SKIP_AUTH === "true"; -// Allow overriding the API origin at runtime via window.__MYCEL_CONFIG__.apiBase -// (injected by docker-entrypoint.sh), falling back to the Vite build-time variable. -// Relative URLs are used when neither is set (same-origin / local dev). -const API_BASE = ( - (window as { __MYCEL_CONFIG__?: { apiBase?: string } }).__MYCEL_CONFIG__?.apiBase - ?? import.meta.env.VITE_API_BASE - ?? 
"" -).replace(/\/$/, ""); - export interface AuthIdentity { id: string; name: string; @@ -31,33 +22,28 @@ interface AuthState { user: AuthIdentity | null; agent: AuthIdentity | null; entityId: string | null; - setupInfo: { userId: string; defaultName: string } | null; - login: (identifier: string, password: string) => Promise; - sendOtp: (email: string, password: string, inviteCode: string) => Promise; - verifyOtp: (email: string, token: string) => Promise<{ tempToken: string }>; - completeRegister: (tempToken: string, inviteCode: string) => Promise; - clearSetupInfo: () => void; + login: (username: string, password: string) => Promise; + register: (username: string, password: string) => Promise; logout: () => void; } -async function apiPost(endpoint: string, body: Record) { - const res = await fetch(`${API_BASE}/api/auth/${endpoint}`, { +async function authCall(endpoint: string, username: string, password: string) { + const res = await fetch(`/api/auth/${endpoint}`, { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify(body), + body: JSON.stringify({ username, password }), }); if (!res.ok) { - const text = await res.text(); - let message = text || res.statusText; + const body = await res.text(); + // Parse FastAPI {"detail": "..."} error format try { - const parsed = JSON.parse(text); - const detail = parsed.detail; - if (typeof detail === "string") message = detail; - else if (Array.isArray(detail)) message = detail.map((d: { msg: string; loc?: string[] }) => `${d.loc?.at(-1) ?? 
"?"}: ${d.msg}`).join("; "); - else if (detail != null) message = JSON.stringify(detail); - } catch { /* not JSON, use raw text */ } - throw new Error(message); + const parsed = JSON.parse(body); + throw new Error(parsed.detail || body); + } catch (e) { + if (e instanceof Error && e.message !== body) throw e; + throw new Error(body || res.statusText); + } } return res.json(); } @@ -70,49 +56,34 @@ export const useAuthStore = create()( token: DEV_SKIP_AUTH ? "dev-skip-auth" : null, user: DEV_SKIP_AUTH ? DEV_MOCK_USER : null, agent: null, - entityId: DEV_SKIP_AUTH ? "dev-user" : null, - setupInfo: null, + entityId: DEV_SKIP_AUTH ? DEV_MOCK_USER.id : null, - login: async (identifier, password) => { - const data = await apiPost("login", { identifier, password }); + login: async (username, password) => { + const data = await authCall("login", username, password); set({ token: data.token, user: data.user, agent: data.agent, entityId: data.user?.id ?? null, }); + // Full reload so all components initialize from fresh auth state window.location.href = "/threads"; }, - sendOtp: async (email, password, inviteCode) => { - await apiPost("send-otp", { email, password, invite_code: inviteCode }); - }, - - verifyOtp: async (email, token) => { - const data = await apiPost("verify-otp", { email, token }); - return { tempToken: data.temp_token }; - }, - - completeRegister: async (tempToken, inviteCode) => { - const data = await apiPost("complete-register", { - temp_token: tempToken, - invite_code: inviteCode, - }); + register: async (username, password) => { + const data = await authCall("register", username, password); set({ token: data.token, user: data.user, - agent: data.agent ?? null, + agent: data.agent, entityId: data.user?.id ?? 
null, - setupInfo: { userId: data.user.id, defaultName: data.user.name }, }); - }, - - clearSetupInfo: () => { - set({ setupInfo: null }); + // Full reload so all components initialize from fresh auth state + window.location.href = "/threads"; }, logout: () => { - set({ token: null, user: null, agent: null, entityId: null, setupInfo: null }); + set({ token: null, user: null, agent: null, entityId: null }); }, }), { @@ -137,9 +108,7 @@ export async function authFetch(url: string, init?: RequestInit): Promise str: + """Current UTC time as ISO 8601 string.""" + return datetime.now(tz=UTC).isoformat() + + +def ts_to_iso(ts: float) -> str: + """Unix float timestamp → ISO 8601 string.""" + return datetime.fromtimestamp(ts, tz=UTC).isoformat() diff --git a/messaging/contracts.py b/messaging/contracts.py new file mode 100644 index 000000000..553265d33 --- /dev/null +++ b/messaging/contracts.py @@ -0,0 +1,161 @@ +"""messaging/contracts.py — canonical types for the messaging module. + +All types are Pydantic v2, strict=True, frozen=True. +User is the first-class social identity (wraps entity_id). +""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any, Literal, Protocol + +from pydantic import BaseModel, ConfigDict + +# --------------------------------------------------------------------------- +# User — social identity first-class citizen +# --------------------------------------------------------------------------- + + +class User(BaseModel): + model_config = ConfigDict(strict=True, frozen=True) + + id: str # entity_id + name: str + avatar_url: str | None = None + type: Literal["human", "agent"] + owner_id: str | None = None # owner user_id for agents; None for humans + + +class UserRepo(Protocol): + """Resolve a User from entity_id. Reads from entity + member tables.""" + + def get_user(self, user_id: str) -> User | None: ... + def list_users(self) -> list[User]: ... 
+ + +# --------------------------------------------------------------------------- +# AI metadata +# --------------------------------------------------------------------------- + + +class AiMetadata(BaseModel): + model_config = ConfigDict(strict=True, frozen=True) + + tool_calls: dict[str, int] = {} + elapsed_seconds: float | None = None + + +# --------------------------------------------------------------------------- +# Message +# --------------------------------------------------------------------------- + +MessageType = Literal["human", "ai", "ai_process", "system", "notification"] +ContentType = Literal["text", "markdown"] +SignalType = Literal["open", "yield", "close"] + + +class MessageRow(BaseModel): + model_config = ConfigDict(frozen=True) + + id: str + chat_id: str + sender_id: str # user_id (entity_id) + content: str + content_type: ContentType = "text" + message_type: MessageType = "human" + signal: SignalType | None = None + mentions: list[str] = [] + reply_to: str | None = None + ai_metadata: AiMetadata | None = None + created_at: datetime + delivered_at: datetime | None = None + edited_at: datetime | None = None + retracted_at: datetime | None = None + deleted_at: datetime | None = None + deleted_for: list[str] = [] + + +# --------------------------------------------------------------------------- +# Chat + Member +# --------------------------------------------------------------------------- + +ChatType = Literal["direct", "group"] +ChatStatus = Literal["active", "archived", "deleted"] +MemberRole = Literal["member", "admin"] + + +class ChatMemberRow(BaseModel): + model_config = ConfigDict(frozen=True) + + chat_id: str + user_id: str + role: MemberRole = "member" + joined_at: datetime + muted: bool = False + mute_until: datetime | None = None + last_read_at: datetime | None = None + + +class ChatRow(BaseModel): + model_config = ConfigDict(frozen=True) + + id: str + title: str | None = None + type: ChatType = "direct" + status: ChatStatus = "active" + 
created_at: datetime + updated_at: datetime | None = None + + +# --------------------------------------------------------------------------- +# Contact +# --------------------------------------------------------------------------- + +ContactRelation = Literal["normal", "blocked", "muted"] + + +class ContactRow(BaseModel): + model_config = ConfigDict(frozen=True) + + owner_user_id: str + target_user_id: str + relation: ContactRelation = "normal" + created_at: datetime + updated_at: datetime | None = None + + +# --------------------------------------------------------------------------- +# Relationship (Hire/Visit state machine) +# --------------------------------------------------------------------------- + +RelationshipState = Literal["none", "pending_a_to_b", "pending_b_to_a", "visit", "hire"] +RelationshipDirection = Literal["a_to_b", "b_to_a"] +RelationshipEvent = Literal["request", "approve", "reject", "upgrade", "downgrade", "revoke"] + + +class RelationshipRow(BaseModel): + model_config = ConfigDict(frozen=True) + + id: str + principal_a: str + principal_b: str + state: RelationshipState = "none" + direction: RelationshipDirection | None = None + hire_granted_at: datetime | None = None + hire_revoked_at: datetime | None = None + hire_snapshot: dict[str, Any] | None = None + created_at: datetime + updated_at: datetime + + +# --------------------------------------------------------------------------- +# Delivery +# --------------------------------------------------------------------------- + +DeliveryAction = Literal["deliver", "notify", "drop"] + + +class MessageSendStatus(BaseModel): + model_config = ConfigDict(strict=True, frozen=True) + + status: Literal["sending", "sent", "delivered", "read", "retracted", "deleted"] diff --git a/messaging/delivery/__init__.py b/messaging/delivery/__init__.py new file mode 100644 index 000000000..7d2dab521 --- /dev/null +++ b/messaging/delivery/__init__.py @@ -0,0 +1 @@ +# messaging/delivery/ diff --git 
a/messaging/delivery/actions.py b/messaging/delivery/actions.py new file mode 100644 index 000000000..254a9a923 --- /dev/null +++ b/messaging/delivery/actions.py @@ -0,0 +1,11 @@ +"""Delivery action enum for messaging module.""" + +from __future__ import annotations + +from enum import StrEnum + + +class DeliveryAction(StrEnum): + DELIVER = "deliver" # inject into agent context, wake agent + NOTIFY = "notify" # store + unread count, no delivery + DROP = "drop" # silent: stored but invisible to recipient diff --git a/messaging/delivery/resolver.py b/messaging/delivery/resolver.py new file mode 100644 index 000000000..1e7dcbd2f --- /dev/null +++ b/messaging/delivery/resolver.py @@ -0,0 +1,128 @@ +"""HireVisitDeliveryResolver — delivery action based on relationship state. + +Priority chain (highest wins): +1. blocked (contact relation) → DROP +2. HIRE relationship → DELIVER (direct access) +3. @mention override → DELIVER +4. muted contact → NOTIFY +5. muted chat → NOTIFY +6. VISIT relationship → NOTIFY (queue, not direct) +7. stranger (no relationship) → NOTIFY (anti-spam default) +8. Default → DELIVER (same-owner entities, known contacts) +""" + +from __future__ import annotations + +import logging +import time +from typing import Any + +from messaging.delivery.actions import DeliveryAction + +logger = logging.getLogger(__name__) + + +class HireVisitDeliveryResolver: + """Evaluates delivery action for a chat message recipient. + + Args: + contact_repo: Provides get(owner, target) → ContactRow-like dict. + chat_member_repo: Provides list_members(chat_id) → list of member dicts. + relationship_repo: Provides get(user_a, user_b) → relationship dict. 
+ """ + + def __init__( + self, + contact_repo: Any, + chat_member_repo: Any, + relationship_repo: Any | None = None, + ) -> None: + self._contacts = contact_repo + self._chat_members = chat_member_repo + self._relationships = relationship_repo + + def resolve( + self, + recipient_id: str, + chat_id: str, + sender_id: str, + *, + is_mentioned: bool = False, + ) -> DeliveryAction: + # 1. Contact-level block — always DROP + contact = self._get_contact(recipient_id, sender_id) + if contact and contact.get("relation") == "blocked": + logger.debug("[resolver] DROP: %s blocked %s", recipient_id[:15], sender_id[:15]) + return DeliveryAction.DROP + + # Fetch relationship once for checks 2, 6, 7 + rel = self._relationships.get(recipient_id, sender_id) if self._relationships else None + rel_state = rel.get("state") if rel else "none" + + # 2. HIRE → DELIVER + if rel_state == "hire": + logger.debug("[resolver] DELIVER: HIRE relationship %s←%s", recipient_id[:15], sender_id[:15]) + return DeliveryAction.DELIVER + + # 3. @mention override — skip mute checks (not block) + if is_mentioned: + return DeliveryAction.DELIVER + + # 4. Contact-level mute + if contact and contact.get("relation") == "muted": + logger.debug("[resolver] NOTIFY: %s muted %s", recipient_id[:15], sender_id[:15]) + return DeliveryAction.NOTIFY + + # 5. Chat-level mute + if self._is_chat_muted(recipient_id, chat_id): + logger.debug("[resolver] NOTIFY: %s muted chat %s", recipient_id[:15], chat_id[:8]) + return DeliveryAction.NOTIFY + + # 6. VISIT → NOTIFY + if rel_state == "visit": + logger.debug("[resolver] NOTIFY: VISIT relationship %s←%s", recipient_id[:15], sender_id[:15]) + return DeliveryAction.NOTIFY + + # 7. Stranger (none or no relationship) → NOTIFY (anti-spam) + if self._relationships and rel_state == "none": + logger.debug("[resolver] NOTIFY: stranger %s←%s", recipient_id[:15], sender_id[:15]) + return DeliveryAction.NOTIFY + + # 8. 
Default → DELIVER + return DeliveryAction.DELIVER + + def _get_contact(self, owner_id: str, target_id: str): + """Fetch contact row — handles both old and new field names.""" + try: + # New contacts table (owner_user_id / target_user_id) + if hasattr(self._contacts, "get"): + return self._contacts.get(owner_id, target_id) + except Exception: + pass + return None + + def _is_chat_muted(self, user_id: str, chat_id: str) -> bool: + """Check if user has muted this specific chat.""" + try: + members = self._chat_members.list_members(chat_id) + except AttributeError: + # Fallback for old ChatEntityRepo interface + try: + members = self._chat_members.list_entities(chat_id) + except Exception: + return False + + for m in members: + uid = m.get("user_id") or getattr(m, "user_id", None) + if uid != user_id: + continue + muted = m.get("muted", False) if isinstance(m, dict) else getattr(m, "muted", False) + if not muted: + return False + mute_until = m.get("mute_until") if isinstance(m, dict) else getattr(m, "mute_until", None) + if mute_until is not None: + # Handle both timestamp float and ISO string + if isinstance(mute_until, (int, float)) and mute_until < time.time(): + return False + return True + return False diff --git a/messaging/realtime/__init__.py b/messaging/realtime/__init__.py new file mode 100644 index 000000000..3aa889c8c --- /dev/null +++ b/messaging/realtime/__init__.py @@ -0,0 +1 @@ +# messaging/realtime/ diff --git a/messaging/realtime/bridge.py b/messaging/realtime/bridge.py new file mode 100644 index 000000000..3fa994c13 --- /dev/null +++ b/messaging/realtime/bridge.py @@ -0,0 +1,59 @@ +"""SupabaseRealtimeBridge — event bus backed by Supabase Broadcast. + +Replaces ChatEventBus for typing indicators and process-level pub/sub. +For message persistence, Supabase Postgres Changes handles delivery directly +to the frontend via @supabase/supabase-js subscriptions. + +This bridge: +1. Implements the same publish/subscribe interface as ChatEventBus +2. 
Routes typing events through Supabase Broadcast channels +3. Falls back to in-process asyncio.Queue for local subscribers (SSE compat) +""" + +from __future__ import annotations + +import asyncio +import logging +from typing import Any + +logger = logging.getLogger(__name__) + + +class SupabaseRealtimeBridge: + """Hybrid event bus: local asyncio.Queue + Supabase Broadcast for typing.""" + + def __init__(self, supabase_client: Any | None = None) -> None: + self._supabase = supabase_client + # Local subscribers for SSE fallback + self._subscribers: dict[str, list[asyncio.Queue]] = {} + + def subscribe(self, chat_id: str) -> asyncio.Queue: + """Subscribe to events for a chat (SSE / local consumer).""" + queue: asyncio.Queue = asyncio.Queue(maxsize=256) + self._subscribers.setdefault(chat_id, []).append(queue) + return queue + + def unsubscribe(self, chat_id: str, queue: asyncio.Queue) -> None: + subs = self._subscribers.get(chat_id, []) + if queue in subs: + subs.remove(queue) + if not subs: + self._subscribers.pop(chat_id, None) + + def publish(self, chat_id: str, event: dict) -> None: + """Publish event to local subscribers and Supabase Broadcast.""" + # Local delivery (SSE consumers) + for queue in self._subscribers.get(chat_id, []): + try: + queue.put_nowait(event) + except asyncio.QueueFull: + logger.warning("[realtime] queue full for chat %s", chat_id[:8]) + + # Supabase Broadcast (typing indicators, not messages — messages go via Postgres Changes) + event_type = event.get("event", "") + if self._supabase and event_type in ("typing_start", "typing_stop"): + try: + channel = self._supabase.channel(f"chat:{chat_id}") + channel.send_broadcast(event_type, event.get("data", {})) + except Exception as e: + logger.debug("[realtime] broadcast send failed: %s", e) diff --git a/messaging/realtime/typing.py b/messaging/realtime/typing.py new file mode 100644 index 000000000..cc8082d43 --- /dev/null +++ b/messaging/realtime/typing.py @@ -0,0 +1,52 @@ +"""TypingTracker — 
Broadcast-backed typing indicator. + +Same interface as backend/web/services/typing_tracker.py, +but routes through SupabaseRealtimeBridge (Broadcast) instead of ChatEventBus. +""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from messaging.realtime.bridge import SupabaseRealtimeBridge + +logger = logging.getLogger(__name__) + + +@dataclass +class _ChatEntry: + chat_id: str + user_id: str + + +class TypingTracker: + """Tracks which chat triggered each brain thread run, broadcasts typing events.""" + + def __init__(self, bridge: SupabaseRealtimeBridge) -> None: + self._bridge = bridge + self._active: dict[str, _ChatEntry] = {} + + def start_chat(self, thread_id: str, chat_id: str, user_id: str) -> None: + self._active[thread_id] = _ChatEntry(chat_id, user_id) + self._bridge.publish( + chat_id, + { + "event": "typing_start", + "data": {"user_id": user_id}, + }, + ) + + def stop(self, thread_id: str) -> None: + entry = self._active.pop(thread_id, None) + if not entry: + return + self._bridge.publish( + entry.chat_id, + { + "event": "typing_stop", + "data": {"user_id": entry.user_id}, + }, + ) diff --git a/messaging/relationships/__init__.py b/messaging/relationships/__init__.py new file mode 100644 index 000000000..ec3a51edc --- /dev/null +++ b/messaging/relationships/__init__.py @@ -0,0 +1 @@ +# messaging/relationships/ diff --git a/messaging/relationships/router.py b/messaging/relationships/router.py new file mode 100644 index 000000000..6ff2c9293 --- /dev/null +++ b/messaging/relationships/router.py @@ -0,0 +1,174 @@ +"""Relationship API router — /api/relationships endpoints.""" + +from __future__ import annotations + +import logging +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel + +from backend.web.core.dependencies import get_app, get_current_user_id +from messaging.contracts import 
RelationshipRow +from messaging.relationships.state_machine import TransitionError + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/api/relationships", tags=["relationships"]) + + +class RelationshipRequestBody(BaseModel): + target_user_id: str + + +class RelationshipActionBody(BaseModel): + hire_snapshot: dict[str, Any] | None = None + + +def _get_rel_service(app: Any): + svc = getattr(app.state, "relationship_service", None) + if svc is None: + raise HTTPException(503, "Relationship service unavailable") + return svc + + +def _get_existing(svc, relationship_id: str) -> dict: + existing = svc.get_by_id(relationship_id) + if not existing: + raise HTTPException(404, "Relationship not found") + return existing + + +def _resolve_parties(existing: dict, actor_id: str) -> tuple[str, str]: + """Return (requester_id, other_id) from a relationship row and actor.""" + requester_id = existing["principal_a"] if existing["state"] == "pending_a_to_b" else existing["principal_b"] + other_id = existing["principal_b"] if actor_id == existing["principal_a"] else existing["principal_a"] + return requester_id, other_id + + +def _row_to_dict(row: RelationshipRow, viewer_id: str) -> dict: + other_id = row.principal_b if viewer_id == row.principal_a else row.principal_a + # Determine who is the requester based on state direction + if row.state == "pending_a_to_b": + is_requester = viewer_id == row.principal_a + elif row.state == "pending_b_to_a": + is_requester = viewer_id == row.principal_b + else: + is_requester = False + return { + "id": row.id, + "other_user_id": other_id, + "state": row.state, + "direction": row.direction, + "is_requester": is_requester, + "hire_granted_at": row.hire_granted_at.isoformat() if row.hire_granted_at else None, + "hire_revoked_at": row.hire_revoked_at.isoformat() if row.hire_revoked_at else None, + "created_at": row.created_at.isoformat(), + "updated_at": row.updated_at.isoformat(), + } + + +@router.get("") +async def 
list_relationships( + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + svc = _get_rel_service(app) + rows = svc.list_for_user(user_id) + return [_row_to_dict(r, user_id) for r in rows] + + +@router.post("/request") +async def request_relationship( + body: RelationshipRequestBody, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + svc = _get_rel_service(app) + if user_id == body.target_user_id: + raise HTTPException(400, "Cannot request relationship with yourself") + try: + row = svc.request(user_id, body.target_user_id) + return _row_to_dict(row, user_id) + except TransitionError as e: + raise HTTPException(409, str(e)) + + +@router.post("/{relationship_id}/approve") +async def approve_relationship( + relationship_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + svc = _get_rel_service(app) + existing = _get_existing(svc, relationship_id) + requester_id, _ = _resolve_parties(existing, user_id) + if user_id == requester_id: + raise HTTPException(409, "Cannot approve your own request") + try: + return _row_to_dict(svc.approve(user_id, requester_id), user_id) + except TransitionError as e: + raise HTTPException(409, str(e)) + + +@router.post("/{relationship_id}/reject") +async def reject_relationship( + relationship_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + svc = _get_rel_service(app) + existing = _get_existing(svc, relationship_id) + requester_id, _ = _resolve_parties(existing, user_id) + if user_id == requester_id: + raise HTTPException(409, "Cannot reject your own request") + try: + return _row_to_dict(svc.reject(user_id, requester_id), user_id) + except TransitionError as e: + raise HTTPException(409, str(e)) + + +@router.post("/{relationship_id}/upgrade") +async def upgrade_relationship( + relationship_id: str, + body: 
RelationshipActionBody, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + svc = _get_rel_service(app) + existing = _get_existing(svc, relationship_id) + _, other_id = _resolve_parties(existing, user_id) + try: + return _row_to_dict(svc.upgrade(user_id, other_id, snapshot=body.hire_snapshot), user_id) + except TransitionError as e: + raise HTTPException(409, str(e)) + + +@router.post("/{relationship_id}/revoke") +async def revoke_relationship( + relationship_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + svc = _get_rel_service(app) + existing = _get_existing(svc, relationship_id) + _, other_id = _resolve_parties(existing, user_id) + try: + return _row_to_dict(svc.revoke(user_id, other_id), user_id) + except TransitionError as e: + raise HTTPException(409, str(e)) + + +@router.post("/{relationship_id}/downgrade") +async def downgrade_relationship( + relationship_id: str, + user_id: Annotated[str, Depends(get_current_user_id)], + app: Annotated[Any, Depends(get_app)], +): + svc = _get_rel_service(app) + existing = _get_existing(svc, relationship_id) + _, other_id = _resolve_parties(existing, user_id) + try: + return _row_to_dict(svc.downgrade(user_id, other_id), user_id) + except TransitionError as e: + raise HTTPException(409, str(e)) diff --git a/messaging/relationships/service.py b/messaging/relationships/service.py new file mode 100644 index 000000000..14d017f6d --- /dev/null +++ b/messaging/relationships/service.py @@ -0,0 +1,116 @@ +"""RelationshipService — Hire/Visit lifecycle management.""" + +from __future__ import annotations + +import logging +from typing import Any + +from messaging._utils import now_iso +from messaging.contracts import RelationshipEvent, RelationshipRow, RelationshipState +from messaging.relationships.state_machine import transition + +logger = logging.getLogger(__name__) + + +class RelationshipService: + """Manages 
Hire/Visit relationships between users.""" + + def __init__(self, relationship_repo: Any, entity_repo: Any = None) -> None: + self._repo = relationship_repo + self._entity_repo = entity_repo + + def apply_event( + self, + actor_id: str, + target_id: str, + event: RelationshipEvent, + *, + hire_snapshot: dict[str, Any] | None = None, + ) -> RelationshipRow: + """Apply an event to the relationship between actor and target. + + Returns the updated RelationshipRow. + Raises TransitionError on invalid transition. + """ + # Ensure canonical ordering + if actor_id < target_id: + pa, pb = actor_id, target_id + requester_is_a = True + else: + pa, pb = target_id, actor_id + requester_is_a = False + + existing = self._repo.get(actor_id, target_id) + if existing is None: + current_state: RelationshipState = "none" + current_direction = None + else: + current_state = existing["state"] + current_direction = existing.get("direction") + + new_state, new_direction = transition(current_state, current_direction, event, requester_is_a=requester_is_a) + logger.info( + "[relationship] %s + %s → %s (actor=%s event=%s)", + current_state, + event, + new_state, + actor_id[:15], + event, + ) + + fields: dict[str, Any] = {"state": new_state, "direction": new_direction} + if new_state == "hire" and current_state != "hire": + fields["hire_granted_at"] = now_iso() + if hire_snapshot: + fields["hire_snapshot"] = hire_snapshot + if new_state == "none" and current_state in ("hire", "visit"): + fields["hire_revoked_at"] = now_iso() + if current_state == "hire" and self._entity_repo is not None: + other_id = pb if actor_id == pa else pa + e = self._entity_repo.get_by_id(other_id) + fields["hire_snapshot"] = { + "entity_id": other_id, + "name": e.name if e else other_id, + "thread_id": getattr(e, "thread_id", None), + "snapshot_at": now_iso(), + } + + row = self._repo.upsert(actor_id, target_id, **fields) + return RelationshipRow.model_validate(row) + + def request(self, requester_id: str, target_id: 
str) -> RelationshipRow: + return self.apply_event(requester_id, target_id, "request") + + def approve(self, approver_id: str, requester_id: str) -> RelationshipRow: + return self.apply_event(approver_id, requester_id, "approve") + + def reject(self, approver_id: str, requester_id: str) -> RelationshipRow: + return self.apply_event(approver_id, requester_id, "reject") + + def upgrade(self, owner_id: str, agent_id: str, snapshot: dict[str, Any] | None = None) -> RelationshipRow: + return self.apply_event(owner_id, agent_id, "upgrade", hire_snapshot=snapshot) + + def downgrade(self, owner_id: str, agent_id: str) -> RelationshipRow: + return self.apply_event(owner_id, agent_id, "downgrade") + + def revoke(self, revoker_id: str, other_id: str) -> RelationshipRow: + return self.apply_event(revoker_id, other_id, "revoke") + + def list_for_user(self, user_id: str) -> list[RelationshipRow]: + rows = self._repo.list_for_user(user_id) + result = [] + for r in rows: + try: + result.append(RelationshipRow.model_validate(r)) + except Exception: + logger.warning("[relationship] invalid row: %s", r) + return result + + def get_by_id(self, relationship_id: str) -> dict | None: + return self._repo.get_by_id(relationship_id) + + def get_state(self, user_a: str, user_b: str) -> RelationshipState: + existing = self._repo.get(user_a, user_b) + if not existing: + return "none" + return existing.get("state", "none") diff --git a/messaging/relationships/state_machine.py b/messaging/relationships/state_machine.py new file mode 100644 index 000000000..7cdb65ee4 --- /dev/null +++ b/messaging/relationships/state_machine.py @@ -0,0 +1,101 @@ +"""Hire/Visit relationship state machine — pure functions, no I/O. 
+ +State transitions: + NONE + request → PENDING (direction set) + PENDING_A_TO_B + approve → VISIT + PENDING_A_TO_B + reject → NONE + PENDING_B_TO_A + approve → VISIT + PENDING_B_TO_A + reject → NONE + VISIT + upgrade → HIRE + HIRE + downgrade → VISIT + HIRE | VISIT + revoke → NONE +""" + +from __future__ import annotations + +from messaging.contracts import ( + RelationshipDirection, + RelationshipEvent, + RelationshipState, +) + + +class TransitionError(ValueError): + """Invalid state machine transition.""" + + +def transition( + current_state: RelationshipState, + current_direction: RelationshipDirection | None, + event: RelationshipEvent, + *, + requester_is_a: bool, +) -> tuple[RelationshipState, RelationshipDirection | None]: + """Apply an event and return (new_state, new_direction). + + Args: + current_state: The current relationship state. + current_direction: Current direction (only relevant for pending states). + event: The event to apply. + requester_is_a: True if the actor is principal_a (lexicographically smaller id). + + Returns: + (new_state, new_direction) + + Raises: + TransitionError: If the transition is not valid in the current state. 
+ """ + match (current_state, event): + case ("none", "request"): + direction: RelationshipDirection = "a_to_b" if requester_is_a else "b_to_a" + return ("pending_a_to_b" if requester_is_a else "pending_b_to_a", direction) + + case ("pending_a_to_b", "approve") if not requester_is_a: + # b approves a's request + return ("visit", None) + + case ("pending_b_to_a", "approve") if requester_is_a: + # a approves b's request + return ("visit", None) + + case ("pending_a_to_b", "reject") if not requester_is_a: + return ("none", None) + + case ("pending_b_to_a", "reject") if requester_is_a: + return ("none", None) + + # Requester can cancel their own pending request + case ("pending_a_to_b", "revoke") if requester_is_a: + return ("none", None) + + case ("pending_b_to_a", "revoke") if not requester_is_a: + return ("none", None) + + case (("visit" | "hire"), "revoke"): + return ("none", None) + + case ("visit", "upgrade"): + return ("hire", None) + + case ("hire", "downgrade"): + return ("visit", None) + + case _: + raise TransitionError(f"Invalid transition: state={current_state!r} event={event!r} requester_is_a={requester_is_a}") + + +def resolve_direction( + relationship: dict, + actor_id: str, +) -> bool: + """Return True if actor_id is principal_a (used to compute requester_is_a).""" + return actor_id == relationship.get("principal_a") + + +def get_pending_direction(state: RelationshipState, principal_a: str, principal_b: str) -> tuple[str, str] | None: + """Return (requester_id, approver_id) for a pending state, or None.""" + if state == "pending_a_to_b": + return (principal_a, principal_b) + if state == "pending_b_to_a": + return (principal_b, principal_a) + return None diff --git a/messaging/service.py b/messaging/service.py new file mode 100644 index 000000000..1f4fe9657 --- /dev/null +++ b/messaging/service.py @@ -0,0 +1,266 @@ +"""MessagingService — core business logic for the messaging module. 
+ +Wraps Supabase messaging repos with business rules: +- create_chat, find_or_create_chat +- send (with delivery routing) +- retract, delete_for, mark_read +- list_messages, list_chats +""" + +from __future__ import annotations + +import logging +import uuid +from collections.abc import Callable +from typing import Any + +from backend.web.utils.serializers import avatar_url +from messaging._utils import now_iso +from messaging.contracts import ContentType, MessageType + +logger = logging.getLogger(__name__) + + +class MessagingService: + """Core messaging operations backed by Supabase repos.""" + + def __init__( + self, + chat_repo: Any, # storage.providers.sqlite.chat_repo.SQLiteChatRepo (for chat creation) + chat_member_repo: Any, # SupabaseChatMemberRepo or compatible + messages_repo: Any, # SupabaseMessagesRepo + message_read_repo: Any, # SupabaseMessageReadRepo + entity_repo: Any, # EntityRepo (for sender lookup) + member_repo: Any, # MemberRepo (for avatar) + delivery_resolver: Any | None = None, + delivery_fn: Callable | None = None, + event_bus: Any | None = None, # ChatEventBus or SupabaseRealtimeBridge (optional) + ) -> None: + self._chats = chat_repo + self._members_repo = chat_member_repo + self._messages = messages_repo + self._reads = message_read_repo + self._entities = entity_repo + self._member_repo = member_repo + self._delivery_resolver = delivery_resolver + self._delivery_fn = delivery_fn + self._event_bus = event_bus + + def set_delivery_fn(self, fn: Callable) -> None: + self._delivery_fn = fn + + # ------------------------------------------------------------------ + # Chat lifecycle + # ------------------------------------------------------------------ + + def find_or_create_chat(self, user_ids: list[str], title: str | None = None) -> dict[str, Any]: + if len(user_ids) != 2: + raise ValueError("Use create_group_chat() for 3+ users") + existing_id = self._members_repo.find_chat_between(user_ids[0], user_ids[1]) + if existing_id: + chat = 
self._chats.get_by_id(existing_id) + return {"id": chat.id, "title": chat.title, "status": chat.status, "created_at": chat.created_at} + + return self._create_chat(user_ids, chat_type="direct", title=title) + + def create_group_chat(self, user_ids: list[str], title: str | None = None) -> dict[str, Any]: + if len(user_ids) < 3: + raise ValueError("Group chat requires 3+ users") + return self._create_chat(user_ids, chat_type="group", title=title) + + def _create_chat(self, user_ids: list[str], *, chat_type: str, title: str | None) -> dict[str, Any]: + import time + + from storage.contracts import ChatRow + + chat_id = str(uuid.uuid4()) + now = time.time() + self._chats.create(ChatRow(id=chat_id, title=title, status="active", created_at=now)) + for uid in user_ids: + self._members_repo.add_member(chat_id, uid) + return {"id": chat_id, "title": title, "status": "active", "created_at": now} + + # ------------------------------------------------------------------ + # Sending + # ------------------------------------------------------------------ + + def send( + self, + chat_id: str, + sender_id: str, + content: str, + *, + message_type: MessageType = "human", + content_type: ContentType = "text", + mentions: list[str] | None = None, + signal: str | None = None, + reply_to: str | None = None, + ai_metadata: dict[str, Any] | None = None, + ) -> dict[str, Any]: + msg_id = str(uuid.uuid4()) + + row: dict[str, Any] = { + "id": msg_id, + "chat_id": chat_id, + "sender_id": sender_id, + "content": content, + "content_type": content_type, + "message_type": message_type, + "mentions": mentions or [], + "created_at": now_iso(), + } + if signal in ("open", "yield", "close"): + row["signal"] = signal + if reply_to: + row["reply_to"] = reply_to + if ai_metadata: + row["ai_metadata"] = ai_metadata + + created = self._messages.create(row) + logger.debug("[messaging] send chat=%s sender=%s msg=%s type=%s", chat_id[:8], sender_id[:15], msg_id[:8], message_type) + + # Publish to event bus 
(SSE / Realtime bridge) + sender = self._entities.get_by_id(sender_id) + sender_name = sender.name if sender else "unknown" + if self._event_bus: + self._event_bus.publish( + chat_id, + { + "event": "message", + "data": {**created, "sender_name": sender_name}, + }, + ) + + # Deliver to agent recipients + if message_type in ("human", "ai"): + self._deliver_to_agents(chat_id, sender_id, content, mentions or [], signal=signal) + + return created + + def _deliver_to_agents( + self, + chat_id: str, + sender_id: str, + content: str, + mentions: list[str], + signal: str | None = None, + ) -> None: + mention_set = set(mentions) + members = self._members_repo.list_members(chat_id) + sender_entity = self._entities.get_by_id(sender_id) + sender_name = sender_entity.name if sender_entity else "unknown" + sender_avatar_url = None + if sender_entity: + m = self._member_repo.get_by_id(sender_entity.member_id) if self._member_repo else None + sender_avatar_url = avatar_url(sender_entity.member_id, bool(m.avatar if m else None)) + + for member in members: + uid = member.get("user_id") + if not uid or uid == sender_id: + continue + entity = self._entities.get_by_id(uid) + if not entity or entity.type != "agent" or not entity.thread_id: + continue + + from messaging.delivery.actions import DeliveryAction + + if self._delivery_resolver: + is_mentioned = uid in mention_set + action = self._delivery_resolver.resolve(uid, chat_id, sender_id, is_mentioned=is_mentioned) + if action != DeliveryAction.DELIVER: + logger.info("[messaging] POLICY %s for %s", action.value, uid[:15]) + continue + + if self._delivery_fn: + try: + self._delivery_fn(entity, content, sender_name, chat_id, sender_id, sender_avatar_url, signal=signal) + except Exception: + logger.exception("[messaging] delivery failed for entity %s", uid) + + # ------------------------------------------------------------------ + # Lifecycle operations + # ------------------------------------------------------------------ + + def 
retract(self, message_id: str, sender_id: str) -> bool: + return self._messages.retract(message_id, sender_id) + + def delete_for(self, message_id: str, user_id: str) -> None: + self._messages.delete_for(message_id, user_id) + + def mark_read(self, chat_id: str, user_id: str) -> None: + """Mark all messages in a chat as read for user.""" + self._members_repo.update_last_read(chat_id, user_id) + # Also write per-message reads for recent messages + msgs = self._messages.list_by_chat(chat_id, limit=50, viewer_id=user_id) + msg_ids = [m["id"] for m in msgs if m.get("sender_id") != user_id] + if msg_ids: + self._reads.mark_chat_read(chat_id, user_id, msg_ids) + + def mark_message_read(self, message_id: str, user_id: str) -> None: + self._reads.mark_read(message_id, user_id) + + # ------------------------------------------------------------------ + # Queries + # ------------------------------------------------------------------ + + def list_messages( + self, chat_id: str, *, limit: int = 50, before: str | None = None, viewer_id: str | None = None + ) -> list[dict[str, Any]]: + return self._messages.list_by_chat(chat_id, limit=limit, before=before, viewer_id=viewer_id) + + def list_unread(self, chat_id: str, user_id: str) -> list[dict[str, Any]]: + return self._messages.list_unread(chat_id, user_id) + + def count_unread(self, chat_id: str, user_id: str) -> int: + return self._messages.count_unread(chat_id, user_id) + + def search_messages(self, query: str, *, chat_id: str | None = None) -> list[dict[str, Any]]: + return self._messages.search(query, chat_id=chat_id) + + def list_chats_for_user(self, user_id: str) -> list[dict[str, Any]]: + """List all active chats for user with summary info.""" + chat_ids = self._members_repo.list_chats_for_user(user_id) + result = [] + for cid in chat_ids: + chat = self._chats.get_by_id(cid) + if not chat or chat.status != "active": + continue + members = self._members_repo.list_members(cid) + entities_info = [] + for m in members: + uid 
= m.get("user_id") + e = self._entities.get_by_id(uid) if uid else None + if e: + mem = self._member_repo.get_by_id(e.member_id) if self._member_repo else None + entities_info.append( + { + "id": e.id, + "name": e.name, + "type": e.type, + "avatar_url": avatar_url(e.member_id, bool(mem.avatar if mem else None)), + } + ) + msgs = self._messages.list_by_chat(cid, limit=1) + last_msg = None + if msgs: + m = msgs[-1] + sender = self._entities.get_by_id(m.get("sender_id", "")) + last_msg = { + "content": m.get("content", ""), + "sender_name": sender.name if sender else "unknown", + "created_at": m.get("created_at"), + } + unread = self.count_unread(cid, user_id) + result.append( + { + "id": cid, + "title": chat.title, + "status": chat.status, + "created_at": chat.created_at, + "entities": entities_info, + "last_message": last_msg, + "unread_count": unread, + "has_mention": False, # TODO: implement mention tracking + } + ) + return result diff --git a/messaging/tools/__init__.py b/messaging/tools/__init__.py new file mode 100644 index 000000000..4437f69ee --- /dev/null +++ b/messaging/tools/__init__.py @@ -0,0 +1 @@ +# messaging/tools/ diff --git a/messaging/tools/chat_tool_service.py b/messaging/tools/chat_tool_service.py new file mode 100644 index 000000000..d06b626f1 --- /dev/null +++ b/messaging/tools/chat_tool_service.py @@ -0,0 +1,439 @@ +"""Chat tool service (messaging module version). + +Provides 5 tools: chats, chat_read, chat_send, chat_search, directory. +directory includes privacy filter: only shows entities with existing relationships. 
+""" + +from __future__ import annotations + +import logging +import re +import time +from datetime import UTC, datetime +from typing import Any + +from core.runtime.registry import ToolEntry, ToolMode, ToolRegistry + +logger = logging.getLogger(__name__) + +_RELATIVE_RE = re.compile(r"^-(\d+)([hdm])$") + + +def _parse_range(range_str: str) -> dict: + parts = range_str.split(":", 1) + if len(parts) != 2: + raise ValueError(f"Invalid range format '{range_str}'. Use 'start:end' (e.g. '-10:-1', '-1h:').") + left, right = parts[0].strip(), parts[1].strip() + left_is_neg_int = bool(re.match(r"^-\d+$", left)) if left else True + right_is_neg_int = bool(re.match(r"^-\d+$", right)) if right else True + left_is_pos_int = bool(re.match(r"^\d+$", left)) if left else False + right_is_pos_int = bool(re.match(r"^\d+$", right)) if right else False + if left_is_pos_int or right_is_pos_int: + raise ValueError("Positive indices not allowed. Use negative indices like '-10:-1'.") + if left_is_neg_int and right_is_neg_int and not _RELATIVE_RE.match(left or "") and not _RELATIVE_RE.match(right or ""): + start = int(left) if left else None + end = int(right) if right else None + if start is not None and end is not None: + if start >= end: + raise ValueError(f"Start ({start}) must be less than end ({end}). E.g. '-10:-1'.") + limit = end - start + skip_last = -end + elif start is not None: + limit = -start + skip_last = 0 + else: + limit = -end if end else 20 + skip_last = 0 + return {"type": "index", "limit": limit, "skip_last": skip_last} + now = time.time() + after_ts = _parse_time_endpoint(left, now) if left else None + before_ts = _parse_time_endpoint(right, now) if right else None + if after_ts is None and before_ts is None: + raise ValueError(f"Invalid range '{range_str}'. 
Use '-10:-1', '-1h:', or '2026-03-20:'.") + return {"type": "time", "after": after_ts, "before": before_ts} + + +def _parse_time_endpoint(s: str, now: float) -> float | None: + m = _RELATIVE_RE.match(s) + if m: + n, unit = int(m.group(1)), m.group(2) + return now - n * {"h": 3600, "d": 86400, "m": 60}[unit] + try: + dt = datetime.strptime(s, "%Y-%m-%d").replace(tzinfo=UTC) + return dt.timestamp() + except ValueError: + pass + raise ValueError(f"Cannot parse time '{s}'. Use '-2h', '-1d', '-30m', or '2026-03-20'.") + + +def _float_ts(ts: Any) -> float | None: + """Convert ISO string or float timestamp to float.""" + if ts is None: + return None + if isinstance(ts, (int, float)): + return float(ts) + try: + dt = datetime.fromisoformat(str(ts).replace("Z", "+00:00")) + return dt.timestamp() + except (ValueError, TypeError): + return None + + +class ChatToolService: + """Registers 5 chat tools into ToolRegistry (messaging module version).""" + + def __init__( + self, + registry: ToolRegistry, + user_id: str, + owner_id: str, + *, + entity_repo: Any = None, + messaging_service: Any = None, # MessagingService (new) + chat_member_repo: Any = None, # SupabaseChatMemberRepo + messages_repo: Any = None, # SupabaseMessagesRepo + member_repo: Any = None, + relationship_repo: Any = None, # for directory privacy filter + ) -> None: + self._user_id = user_id + self._owner_id = owner_id + self._entities = entity_repo + self._messaging = messaging_service + self._chat_members = chat_member_repo + self._messages = messages_repo + self._member_repo = member_repo + self._relationships = relationship_repo + self._register(registry) + + def _register(self, registry: ToolRegistry) -> None: + self._register_chats(registry) + self._register_chat_read(registry) + self._register_chat_send(registry) + self._register_chat_search(registry) + self._register_directory(registry) + + def _format_msgs(self, msgs: list[dict], eid: str) -> str: + lines = [] + for m in msgs: + sender = 
self._entities.get_by_id(m.get("sender_id", "")) + name = sender.name if sender else "unknown" + tag = "you" if m.get("sender_id") == eid else name + content = m.get("content", "") + if m.get("retracted_at"): + content = "[已撤回]" + lines.append(f"[{tag}]: {content}") + return "\n".join(lines) + + def _fetch_by_range(self, chat_id: str, parsed: dict) -> list[dict]: + if parsed["type"] == "index": + limit = parsed["limit"] + skip_last = parsed["skip_last"] + fetch_count = limit + skip_last + msgs = self._messages.list_by_chat(chat_id, limit=fetch_count, viewer_id=self._user_id) + if skip_last > 0: + msgs = msgs[: len(msgs) - skip_last] if len(msgs) > skip_last else [] + return msgs + else: + after_iso = datetime.fromtimestamp(parsed["after"], tz=UTC).isoformat() if parsed.get("after") else None + before_iso = datetime.fromtimestamp(parsed["before"], tz=UTC).isoformat() if parsed.get("before") else None + return self._messages.list_by_time_range(chat_id, after=after_iso, before=before_iso) + + def _register_chats(self, registry: ToolRegistry) -> None: + eid = self._user_id + + def handle(unread_only: bool = False, limit: int = 20) -> str: + chats = self._messaging.list_chats_for_user(eid) + if unread_only: + chats = [c for c in chats if c.get("unread_count", 0) > 0] + chats = chats[:limit] + if not chats: + return "No chats found." 
+ lines = [] + for c in chats: + others = [e for e in c.get("entities", []) if e["id"] != eid] + name = ", ".join(e["name"] for e in others) or "Unknown" + unread = c.get("unread_count", 0) + last = c.get("last_message") + last_preview = f' — last: "{last["content"][:50]}"' if last else "" + unread_str = f" ({unread} unread)" if unread > 0 else "" + is_group = len(others) >= 2 + if is_group: + id_str = f" [chat_id: {c['id']}]" + else: + other_id = others[0]["id"] if others else "" + id_str = f" [id: {other_id}]" if other_id else "" + lines.append(f"- {name}{id_str}{unread_str}{last_preview}") + return "\n".join(lines) + + registry.register( + ToolEntry( + name="chats", + mode=ToolMode.INLINE, + schema={ + "name": "chats", + "description": "List your chats. Returns chat summaries with user_ids of participants.", + "parameters": { + "type": "object", + "properties": { + "unread_only": { + "type": "boolean", + "description": "Only show chats with unread messages", + "default": False, + }, + "limit": {"type": "integer", "description": "Max number of chats to return", "default": 20}, + }, + }, + }, + handler=handle, + source="chat", + ) + ) + + def _register_chat_read(self, registry: ToolRegistry) -> None: + eid = self._user_id + + def handle(entity_id: str | None = None, chat_id: str | None = None, range: str | None = None) -> str: + if chat_id: + pass + elif entity_id: + chat_id = self._chat_members.find_chat_between(eid, entity_id) + if not chat_id: + target = self._entities.get_by_id(entity_id) + name = target.name if target else entity_id + return f"No chat history with {name}." + else: + return "Provide entity_id or chat_id." + + if range: + try: + parsed = _parse_range(range) + except ValueError as e: + return str(e) + msgs = self._fetch_by_range(chat_id, parsed) + if not msgs: + return "No messages in that range." 
+ self._messaging.mark_read(chat_id, eid) + return self._format_msgs(msgs, eid) + + msgs = self._messaging.list_unread(chat_id, eid) + if msgs: + self._messaging.mark_read(chat_id, eid) + return self._format_msgs(msgs, eid) + + return ( + "No unread messages. To read history, call again with range:\n" + " range='-10:-1' (last 10 messages)\n" + " range='-5:' (last 5 messages)\n" + " range='-1h:' (last hour)\n" + " range='-2d:-1d' (yesterday)\n" + " range='2026-03-20:2026-03-22' (date range)" + ) + + registry.register( + ToolEntry( + name="chat_read", + mode=ToolMode.INLINE, + schema={ + "name": "chat_read", + "description": ( + "Read chat messages. Returns unread messages by default.\n" + "If nothing unread, use range to read history:\n" + " Negative index: '-10:-1' (last 10), '-5:' (last 5)\n" + " Time interval: '-1h:', '-2d:-1d', '2026-03-20:2026-03-22'\n" + "Positive indices are NOT allowed." + ), + "parameters": { + "type": "object", + "properties": { + "entity_id": {"type": "string", "description": "Entity_id for 1:1 chat history"}, + "chat_id": {"type": "string", "description": "Chat_id for group chat history"}, + "range": { + "type": "string", + "description": "History range. 
Negative index '-X:-Y' or time '-1h:', '2026-03-20:'.", + }, + }, + }, + }, + handler=handle, + source="chat", + ) + ) + + def _register_chat_send(self, registry: ToolRegistry) -> None: + eid = self._user_id + + def handle( + content: str, + entity_id: str | None = None, + chat_id: str | None = None, + signal: str = "open", + mentions: list[str] | None = None, + ) -> str: + resolved_chat_id = chat_id + target_name = "chat" + + if chat_id: + if not self._chat_members.is_member(chat_id, eid): + raise RuntimeError(f"You are not a member of chat {chat_id}") + elif entity_id: + if entity_id == eid: + raise RuntimeError("Cannot send a message to yourself.") + target = self._entities.get_by_id(entity_id) + if not target: + raise RuntimeError(f"Entity not found: {entity_id}") + target_name = target.name + chat = self._messaging.find_or_create_chat([eid, entity_id]) + resolved_chat_id = chat["id"] + else: + raise RuntimeError("Provide entity_id (for 1:1) or chat_id (for group)") + + unread = self._messaging.count_unread(resolved_chat_id, eid) + if unread > 0: + raise RuntimeError(f"You have {unread} unread message(s). Call chat_read(chat_id='{resolved_chat_id}') first.") + + effective_signal = signal if signal in ("yield", "close") else None + if effective_signal: + content = f"{content}\n[signal: {effective_signal}]" + + self._messaging.send(resolved_chat_id, eid, content, mentions=mentions, signal=effective_signal) + return f"Message sent to {target_name}." + + registry.register( + ToolEntry( + name="chat_send", + mode=ToolMode.INLINE, + schema={ + "name": "chat_send", + "description": ( + "Send a message. 
Use entity_id for 1:1 chats, chat_id for group chats.\n\n" + "You MUST call chat_read() first if you have unread messages — sending will fail otherwise.\n\n" + "Signal protocol:\n" + " (no tag) = I expect a reply from you\n" + " ::yield = I'm done with my turn; reply only if you want to\n" + " ::close = conversation over, do NOT reply" + ), + "parameters": { + "type": "object", + "properties": { + "content": {"type": "string", "description": "Message content"}, + "entity_id": {"type": "string", "description": "Target entity_id (for 1:1 chat)"}, + "chat_id": {"type": "string", "description": "Target chat_id (for group chat)"}, + "signal": {"type": "string", "enum": ["open", "yield", "close"], "default": "open"}, + "mentions": { + "type": "array", + "items": {"type": "string"}, + "description": "Entity IDs to @mention", + }, + }, + "required": ["content"], + }, + }, + handler=handle, + source="chat", + ) + ) + + def _register_chat_search(self, registry: ToolRegistry) -> None: + eid = self._user_id + + def handle(query: str, entity_id: str | None = None) -> str: + chat_id = None + if entity_id: + chat_id = self._chat_members.find_chat_between(eid, entity_id) + results = self._messaging.search_messages(query, chat_id=chat_id) + if not results: + return f"No messages matching '{query}'." + lines = [] + for m in results: + sender = self._entities.get_by_id(m.get("sender_id", "")) + name = sender.name if sender else "unknown" + lines.append(f"[{name}] {m.get('content', '')[:100]}") + return "\n".join(lines) + + registry.register( + ToolEntry( + name="chat_search", + mode=ToolMode.INLINE, + schema={ + "name": "chat_search", + "description": "Search messages. 
Optionally filter by entity_id.", + "parameters": { + "type": "object", + "properties": { + "query": {"type": "string", "description": "Search query"}, + "entity_id": { + "type": "string", + "description": "Optional: only search in chat with this entity", + }, + }, + "required": ["query"], + }, + }, + handler=handle, + source="chat", + ) + ) + + def _register_directory(self, registry: ToolRegistry) -> None: + eid = self._user_id + + def handle(search: str | None = None, type: str | None = None) -> str: + all_entities = self._entities.list_all() + entities = [e for e in all_entities if e.id != eid] + if type: + entities = [e for e in entities if e.type == type] + if search: + q = search.lower() + entities = [e for e in entities if q in e.name.lower()] + + # Privacy filter: only show entities with a relationship (VISIT or HIRE) + # or entities owned by the same user (owner_id) + if self._relationships: + + def _is_visible(e) -> bool: + # Same owner → always visible + if hasattr(e, "member_id"): + mem = self._member_repo.get_by_id(e.member_id) if self._member_repo else None + if mem and getattr(mem, "owner_user_id", None) == getattr( + self._entities.get_by_id(self._owner_id), "member_id", None + ): + return True + rel = self._relationships.get(eid, e.id) + if rel and rel.get("state") in ("visit", "hire"): + return True + return False + + entities = [e for e in entities if _is_visible(e)] + + if not entities: + return "No entities found." 
+ lines = [] + for e in entities: + member = self._member_repo.get_by_id(e.member_id) if self._member_repo else None + owner_info = "" + if e.type == "agent" and member and getattr(member, "owner_user_id", None): + owner_member = self._member_repo.get_by_id(member.owner_user_id) + if owner_member: + owner_info = f" (owner: {owner_member.name})" + lines.append(f"- {e.name} [{e.type}] entity_id={e.id}{owner_info}") + return "\n".join(lines) + + registry.register( + ToolEntry( + name="directory", + mode=ToolMode.INLINE, + schema={ + "name": "directory", + "description": "Browse the entity directory. Shows entities with Visit/Hire relationships. Returns user_ids for chat_send.", # noqa: E501 + "parameters": { + "type": "object", + "properties": { + "search": {"type": "string", "description": "Search by name"}, + "type": {"type": "string", "description": "Filter by type: 'human' or 'agent'"}, + }, + }, + }, + handler=handle, + source="chat", + ) + ) diff --git a/sandbox/__init__.py b/sandbox/__init__.py index 995e0798e..937f81d98 100644 --- a/sandbox/__init__.py +++ b/sandbox/__init__.py @@ -16,12 +16,12 @@ import os from pathlib import Path -from sandbox.base import LocalSandbox, RemoteSandbox, Sandbox -from sandbox.config import SandboxConfig, resolve_sandbox_name -from sandbox.thread_context import get_current_thread_id, set_current_thread_id - logger = logging.getLogger(__name__) +from sandbox.base import LocalSandbox, RemoteSandbox, Sandbox # noqa: E402 +from sandbox.config import SandboxConfig, resolve_sandbox_name # noqa: E402 +from sandbox.thread_context import get_current_thread_id, set_current_thread_id # noqa: E402 + def create_sandbox( config: SandboxConfig, diff --git a/sandbox/manager.py b/sandbox/manager.py index 29f380b0a..c8d161159 100644 --- a/sandbox/manager.py +++ b/sandbox/manager.py @@ -10,20 +10,20 @@ from pathlib import Path from typing import Any -from sandbox.capability import SandboxCapability -from sandbox.chat_session import 
ChatSessionManager, ChatSessionPolicy -from sandbox.lease import lease_from_row -from sandbox.provider import SandboxProvider -from sandbox.recipes import bootstrap_recipe -from sandbox.terminal import TerminalState, terminal_from_row -from storage.providers.sqlite.chat_session_repo import SQLiteChatSessionRepo -from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path -from storage.providers.sqlite.lease_repo import SQLiteLeaseRepo -from storage.providers.sqlite.terminal_repo import SQLiteTerminalRepo -from storage.providers.sqlite.thread_repo import SQLiteThreadRepo - logger = logging.getLogger(__name__) +from sandbox.capability import SandboxCapability # noqa: E402 +from sandbox.chat_session import ChatSessionManager, ChatSessionPolicy # noqa: E402 +from sandbox.lease import lease_from_row # noqa: E402 +from sandbox.provider import SandboxProvider # noqa: E402 +from sandbox.recipes import bootstrap_recipe # noqa: E402 +from sandbox.terminal import TerminalState, terminal_from_row # noqa: E402 +from storage.providers.sqlite.chat_session_repo import SQLiteChatSessionRepo # noqa: E402 +from storage.providers.sqlite.kernel import SQLiteDBRole, resolve_role_db_path # noqa: E402 +from storage.providers.sqlite.lease_repo import SQLiteLeaseRepo # noqa: E402 +from storage.providers.sqlite.terminal_repo import SQLiteTerminalRepo # noqa: E402 +from storage.providers.sqlite.thread_repo import SQLiteThreadRepo # noqa: E402 + def resolve_provider_cwd(provider) -> str: """Get the default working directory for a provider.""" @@ -209,7 +209,7 @@ def _get_active_terminal(self, thread_id: str): if row: return terminal_from_row(row, self.terminal_store.db_path) thread_terminals = self.terminal_store.list_by_thread(thread_id) - # @@@thread-pointer-consistency - If terminals exist but no active pointer, DB is inconsistent and must fail loudly. + # @@@thread-pointer-consistency - If terminals exist but no active pointer, DB is inconsistent and must fail loudly. 
# noqa: E501 if thread_terminals: raise RuntimeError(f"Thread {thread_id} has terminals but no active terminal pointer") return None @@ -523,7 +523,9 @@ def enforce_idle_timeouts(self) -> int: try: paused = lease.pause_instance(self.provider, source="idle_reaper") except Exception as exc: - print(f"[idle-reaper] failed to pause expired lease {lease.lease_id} for thread {thread_id}: {exc}") + print( + f"[idle-reaper] failed to pause expired lease {lease.lease_id} for thread {thread_id}: {exc}" # noqa: E501 + ) continue if not paused: print(f"[idle-reaper] failed to pause expired lease {lease.lease_id} for thread {thread_id}") diff --git a/sandbox/providers/daytona.py b/sandbox/providers/daytona.py index def0f865f..04fdd4adc 100644 --- a/sandbox/providers/daytona.py +++ b/sandbox/providers/daytona.py @@ -174,7 +174,7 @@ def create_session(self, context_id: str | None = None, thread_id: str | None = mount_mounts.append(mount) if mount_mounts: - # @@@daytona-bindmount-http-create - SDK currently lacks bind_mounts field, so self-host bind mounts use direct API create. + # @@@daytona-bindmount-http-create - SDK currently lacks bind_mounts field, so self-host bind mounts use direct API create. 
# noqa: E501 sandbox_id = self._create_via_http(bind_mounts=mount_mounts) self._wait_until_started(sandbox_id) sb = self.client.find_one(sandbox_id) @@ -454,7 +454,9 @@ def _wait_until_started(self, sandbox_id: str, timeout_seconds: int = 120) -> No while time.time() < deadline: response = client.get(f"{self.api_url.rstrip('/')}/sandbox/{sandbox_id}", headers=self._api_auth_headers()) if response.status_code != 200: - raise RuntimeError(f"Daytona get sandbox failed while waiting for started ({response.status_code}): {response.text}") + raise RuntimeError( + f"Daytona get sandbox failed while waiting for started ({response.status_code}): {response.text}" # noqa: E501 + ) body = response.json() state = str(body.get("state") or "") if state == "started": @@ -519,7 +521,7 @@ def _sanitize_terminal_snapshot(self) -> tuple[str, dict[str, str]]: if cleaned_cwd != state.cwd or cleaned_env != state.env_delta: from sandbox.terminal import TerminalState - # @@@daytona-state-sanitize - Legacy prompt noise can corrupt persisted cwd/env_delta and break PTY creation. + # @@@daytona-state-sanitize - Legacy prompt noise can corrupt persisted cwd/env_delta and break PTY creation. # noqa: E501 # Normalize once here so new abstract terminals inherit only valid state. self.update_terminal_state(TerminalState(cwd=cleaned_cwd, env_delta=cleaned_env)) return cleaned_cwd, cleaned_env @@ -708,7 +710,7 @@ async def _snapshot_state_async(self, generation: int, timeout: float | None) -> except Exception as exc: message = str(exc) if self._looks_like_infra_error(message): - # @@@daytona-snapshot-retry - Snapshot can fail due to stale PTY websocket even if sandbox is running. + # @@@daytona-snapshot-retry - Snapshot can fail due to stale PTY websocket even if sandbox is running. # noqa: E501 # Refresh infra truth once, re-create PTY, and retry exactly once. 
try: self._recover_infra() diff --git a/sandbox/providers/docker.py b/sandbox/providers/docker.py index 6fbf436fc..3408c3ddb 100644 --- a/sandbox/providers/docker.py +++ b/sandbox/providers/docker.py @@ -16,9 +16,11 @@ from pathlib import Path from typing import TYPE_CHECKING -from sandbox.config import MountSpec -from sandbox.interfaces.executor import ExecuteResult -from sandbox.provider import ( +logger = logging.getLogger(__name__) + +from sandbox.config import MountSpec # noqa: E402 +from sandbox.interfaces.executor import ExecuteResult # noqa: E402 +from sandbox.provider import ( # noqa: E402 Metrics, MountCapability, ProviderCapability, @@ -27,7 +29,7 @@ SessionInfo, build_resource_capabilities, ) -from sandbox.runtime import ( +from sandbox.runtime import ( # noqa: E402 _build_export_block, _build_state_snapshot_cmd, _compute_env_delta, @@ -42,8 +44,6 @@ from sandbox.runtime import PhysicalTerminalRuntime from sandbox.terminal import AbstractTerminal -logger = logging.getLogger(__name__) - class DockerProvider(SandboxProvider): """ diff --git a/sandbox/providers/e2b.py b/sandbox/providers/e2b.py index 5827b124b..dc093708a 100644 --- a/sandbox/providers/e2b.py +++ b/sandbox/providers/e2b.py @@ -15,7 +15,9 @@ import os from typing import TYPE_CHECKING, Any -from sandbox.provider import ( +logger = logging.getLogger(__name__) + +from sandbox.provider import ( # noqa: E402 Metrics, ProviderCapability, ProviderExecResult, @@ -29,8 +31,6 @@ from sandbox.runtime import PhysicalTerminalRuntime from sandbox.terminal import AbstractTerminal -logger = logging.getLogger(__name__) - class E2BProvider(SandboxProvider): """E2B cloud sandbox provider.""" diff --git a/sandbox/runtime.py b/sandbox/runtime.py index 87cecd024..7a5fd2c1f 100644 --- a/sandbox/runtime.py +++ b/sandbox/runtime.py @@ -78,7 +78,7 @@ def _extract_state_from_output( matches = list(pattern.finditer(raw_output)) if not matches: # @@@markerless-empty-output-fallback - Some lightweight providers/tests 
return empty stdout on successful exec. - # Keep previous terminal snapshot only for truly-empty output; any non-empty markerless output still fails loudly. + # Keep previous terminal snapshot only for truly-empty output; any non-empty markerless output still fails loudly. # noqa: E501 if not _sanitize_shell_output(raw_output).strip(): return cwd_fallback, dict(env_fallback), "" raise RuntimeError("Failed to parse terminal state: state markers not found") @@ -607,7 +607,7 @@ async def get_command(self, command_id: str) -> AsyncCommand | None: cmd = self._commands.get(command_id) if cmd: if not cmd.done and command_id not in self._tasks: - # @@@cross-runtime-status-source - If this runtime didn't start the task, trust DB row instead of stale memory. + # @@@cross-runtime-status-source - If this runtime didn't start the task, trust DB row instead of stale memory. # noqa: E501 refreshed = self._load_command_from_db(command_id) return refreshed or cmd return cmd diff --git a/sandbox/sync/retry.py b/sandbox/sync/retry.py index 209858a42..b683d9803 100644 --- a/sandbox/sync/retry.py +++ b/sandbox/sync/retry.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -class RetryWithBackoff: +class retry_with_backoff: # noqa: N801 """Decorator: retry on transient errors with exponential backoff.""" TRANSIENT = (OSError, ConnectionError, TimeoutError) diff --git a/sandbox/sync/state.py b/sandbox/sync/state.py index 4c1836ad2..dc3670b56 100644 --- a/sandbox/sync/state.py +++ b/sandbox/sync/state.py @@ -1,7 +1,7 @@ import hashlib from pathlib import Path -from backend.web.core.storage_factory import make_sync_file_repo +from storage.providers.sqlite.sync_file_repo import SQLiteSyncFileRepo def _calculate_checksum(file_path: Path) -> str: @@ -15,7 +15,7 @@ def _calculate_checksum(file_path: Path) -> str: class SyncState: def __init__(self): - self._repo = make_sync_file_repo() + self._repo = SQLiteSyncFileRepo() def close(self) -> None: self._repo.close() diff --git 
a/sandbox/sync/strategy.py b/sandbox/sync/strategy.py index 593691ccc..aaad60f7c 100644 --- a/sandbox/sync/strategy.py +++ b/sandbox/sync/strategy.py @@ -6,7 +6,7 @@ from abc import ABC, abstractmethod from pathlib import Path -from sandbox.sync.retry import RetryWithBackoff +from sandbox.sync.retry import retry_with_backoff logger = logging.getLogger(__name__) @@ -106,7 +106,7 @@ def _batch_upload_tar(session_id: str, provider, workspace: Path, workspace_root if len(b64) < 100_000: cmd = f"mkdir -p {workspace_root} && printf '%s' '{b64}' | base64 -d | tar xzmf - -C {workspace_root}" else: - cmd = f"mkdir -p {workspace_root} && base64 -d <<'__TAR_EOF__' | tar xzmf - -C {workspace_root}\n{b64}\n__TAR_EOF__" + cmd = f"mkdir -p {workspace_root} && base64 -d <<'__TAR_EOF__' | tar xzmf - -C {workspace_root}\n{b64}\n__TAR_EOF__" # noqa: E501 result = provider.execute(session_id, cmd, timeout_ms=60000) exit_code = getattr(result, "exit_code", None) @@ -188,7 +188,7 @@ class IncrementalSyncStrategy(SyncStrategy): def __init__(self, state): self.state = state - @RetryWithBackoff(max_retries=3, backoff_factor=1) + @retry_with_backoff(max_retries=3, backoff_factor=1) def upload( self, source_path: Path, diff --git a/storage/contracts.py b/storage/contracts.py index fef514943..b1aea36ee 100644 --- a/storage/contracts.py +++ b/storage/contracts.py @@ -121,8 +121,6 @@ class MemberRow(BaseModel): next_entity_seq: int = 0 created_at: float updated_at: float | None = None - email: str | None = None - mycel_id: int | None = None class AccountRow(BaseModel): @@ -154,7 +152,7 @@ class ChatRow(BaseModel): class ChatEntityRow(BaseModel): chat_id: str - user_id: str # social identity: user_id for humans, member_id for agents + user_id: str joined_at: float last_read_at: float | None = None muted: bool = False @@ -164,7 +162,7 @@ class ChatEntityRow(BaseModel): class ChatMessageRow(BaseModel): id: str chat_id: str - sender_id: str # social identity: user_id for humans, member_id for 
agents + sender_id: str content: str mentioned_ids: list[str] = [] created_at: float @@ -187,10 +185,10 @@ class DeliveryAction(StrEnum): class ContactRow(BaseModel): - """Directional relationship between two social identities. A→B independent of B→A.""" + """Directional relationship between two entities. A→B independent of B→A.""" - owner_id: str # social identity: user_id for humans, member_id for agents - target_id: str # social identity: user_id for humans, member_id for agents + owner_id: str + target_id: str relation: ContactRelation created_at: float updated_at: float | None = None @@ -272,7 +270,7 @@ def mark_reverted(self, operation_ids: list[str]) -> None: ... def delete_thread_operations(self, thread_id: str) -> int: ... -# @@@summary-row-contract - standardize summary row payload as dict to keep provider parity explicit for static type checks. +# @@@summary-row-contract - standardize summary row payload as dict to keep provider parity explicit for static type checks. # noqa: E501 type SummaryRow = dict[str, Any] @@ -301,7 +299,7 @@ class QueueItem(BaseModel): content: str notification_type: NotificationType source: str | None = None # "owner" | "external" | "system" - sender_id: str | None = None # social identity: user_id for humans, member_id for agents + sender_id: str | None = None sender_name: str | None = None sender_avatar_url: str | None = None is_steer: bool = False @@ -356,8 +354,6 @@ def close(self) -> None: ... def create(self, row: MemberRow) -> None: ... def get_by_id(self, member_id: str) -> MemberRow | None: ... def get_by_name(self, name: str) -> MemberRow | None: ... - def get_by_email(self, email: str) -> MemberRow | None: ... - def get_by_mycel_id(self, mycel_id: int) -> MemberRow | None: ... def list_all(self) -> list[MemberRow]: ... def list_by_owner_user_id(self, owner_user_id: str) -> list[MemberRow]: ... def update(self, member_id: str, **fields: Any) -> None: ... 
@@ -377,12 +373,13 @@ def delete(self, account_id: str) -> None: ... class EntityRepo(Protocol): def close(self) -> None: ... def create(self, row: EntityRow) -> None: ... - def get_by_id(self, id: str) -> EntityRow | None: ... + def get_by_id(self, entity_id: str) -> EntityRow | None: ... def get_by_member_id(self, member_id: str) -> list[EntityRow]: ... + def get_by_thread_id(self, thread_id: str) -> EntityRow | None: ... def list_all(self) -> list[EntityRow]: ... def list_by_type(self, entity_type: str) -> list[EntityRow]: ... - def update(self, id: str, **fields: Any) -> None: ... - def delete(self, id: str) -> None: ... + def update(self, entity_id: str, **fields: Any) -> None: ... + def delete(self, entity_id: str) -> None: ... class ChatRepo(Protocol): @@ -394,10 +391,10 @@ def delete(self, chat_id: str) -> None: ... class ChatEntityRepo(Protocol): def close(self) -> None: ... - def add_participant(self, chat_id: str, user_id: str, joined_at: float) -> None: ... - def list_participants(self, chat_id: str) -> list[ChatEntityRow]: ... + def add_member(self, chat_id: str, user_id: str, joined_at: float) -> None: ... + def list_members(self, chat_id: str) -> list[ChatEntityRow]: ... def list_chats_for_user(self, user_id: str) -> list[str]: ... - def is_participant_in_chat(self, chat_id: str, user_id: str) -> bool: ... + def is_member_in_chat(self, chat_id: str, user_id: str) -> bool: ... def update_last_read(self, chat_id: str, user_id: str, last_read_at: float) -> None: ... def update_mute(self, chat_id: str, user_id: str, muted: bool, mute_until: float | None = None) -> None: ... def find_chat_between(self, user_a: str, user_b: str) -> str | None: ... @@ -442,13 +439,3 @@ class DeliveryResolver(Protocol): """ def resolve(self, recipient_id: str, chat_id: str, sender_id: str, *, is_mentioned: bool = False) -> DeliveryAction: ... - - -class InviteCodeRepo(Protocol): - def close(self) -> None: ... 
- def generate(self, *, created_by: str | None = None, expires_days: int | None = 7) -> dict: ... - def get(self, code: str) -> dict | None: ... - def list_all(self) -> list[dict]: ... - def use(self, code: str, user_id: str) -> dict | None: ... - def is_valid(self, code: str) -> bool: ... - def revoke(self, code: str) -> bool: ... diff --git a/storage/providers/sqlite/chat_repo.py b/storage/providers/sqlite/chat_repo.py index f761c6e5a..83a6b6e0a 100644 --- a/storage/providers/sqlite/chat_repo.py +++ b/storage/providers/sqlite/chat_repo.py @@ -83,7 +83,7 @@ def close(self) -> None: if self._own_conn: self._conn.close() - def add_participant(self, chat_id: str, user_id: str, joined_at: float) -> None: + def add_member(self, chat_id: str, user_id: str, joined_at: float) -> None: with self._lock: self._conn.execute( "INSERT OR IGNORE INTO chat_entities (chat_id, user_id, joined_at) VALUES (?, ?, ?)", @@ -91,7 +91,7 @@ def add_participant(self, chat_id: str, user_id: str, joined_at: float) -> None: ) self._conn.commit() - def list_participants(self, chat_id: str) -> list[ChatEntityRow]: + def list_members(self, chat_id: str) -> list[ChatEntityRow]: with self._lock: rows = self._conn.execute( "SELECT chat_id, user_id, joined_at, last_read_at, muted, mute_until FROM chat_entities WHERE chat_id = ?", @@ -117,7 +117,7 @@ def list_chats_for_user(self, user_id: str) -> list[str]: ).fetchall() return [r[0] for r in rows] - def is_participant_in_chat(self, chat_id: str, user_id: str) -> bool: + def is_member_in_chat(self, chat_id: str, user_id: str) -> bool: with self._lock: row = self._conn.execute( "SELECT 1 FROM chat_entities WHERE chat_id = ? AND user_id = ? LIMIT 1", @@ -144,8 +144,8 @@ def _do(): _retry_on_locked(_do) - # @@@find-chat-between — find the 1:1 chat (exactly 2 members) between two social identities. - # Must NOT return group chats that happen to contain both. + # @@@find-chat-between — find the 1:1 chat (exactly 2 members) between two users. 
+ # Must NOT return group chats that happen to contain both users. def find_chat_between(self, user_a: str, user_b: str) -> str | None: with self._lock: row = self._conn.execute( @@ -181,13 +181,14 @@ def _ensure_table(self) -> None: self._conn.execute("ALTER TABLE chat_entities ADD COLUMN mute_until REAL") except sqlite3.OperationalError: pass - # @@@chat-entity-index — speeds up find_chat_between and list_chats_for_user - self._conn.execute("CREATE INDEX IF NOT EXISTS idx_chat_entities_user ON chat_entities(user_id, chat_id)") - # @@@entity-id-to-user-id-migration — rename column for existing databases + # @@@rm-entity-id — rename entity_id column to user_id if old schema try: self._conn.execute("ALTER TABLE chat_entities RENAME COLUMN entity_id TO user_id") - except sqlite3.OperationalError: - pass # column already named user_id, or table is new + self._conn.commit() + except Exception: + pass # column already named user_id or doesn't exist + # @@@chat-entity-index — speeds up find_chat_between and list_chats_for_user + self._conn.execute("CREATE INDEX IF NOT EXISTS idx_chat_entities_user ON chat_entities(user_id, chat_id)") self._conn.commit() @@ -337,7 +338,7 @@ def has_unread_mention(self, chat_id: str, user_id: str) -> bool: ).fetchone() else: row = self._conn.execute( - "SELECT COUNT(*) FROM chat_messages WHERE chat_id = ? AND mentions LIKE ? AND sender_id != ? AND created_at > ?", + "SELECT COUNT(*) FROM chat_messages WHERE chat_id = ? AND mentions LIKE ? AND sender_id != ? 
AND created_at > ?", # noqa: E501 (chat_id, mention_pattern, user_id, last_read), ).fetchone() return int(row[0]) > 0 if row else False @@ -375,13 +376,4 @@ def _ensure_table(self) -> None: self._conn.execute("ALTER TABLE chat_messages ADD COLUMN mentions TEXT") except sqlite3.OperationalError: pass - # @@@sender-entity-id-to-sender-id-migration — rename columns for existing databases - try: - self._conn.execute("ALTER TABLE chat_messages RENAME COLUMN sender_entity_id TO sender_id") - except sqlite3.OperationalError: - pass # column already named sender_id, or table is new - try: - self._conn.execute("ALTER TABLE chat_messages RENAME COLUMN mentioned_entity_ids TO mentions") - except sqlite3.OperationalError: - pass self._conn.commit() diff --git a/storage/providers/sqlite/chat_session_repo.py b/storage/providers/sqlite/chat_session_repo.py index 9602beaa0..cc3f5de1f 100644 --- a/storage/providers/sqlite/chat_session_repo.py +++ b/storage/providers/sqlite/chat_session_repo.py @@ -416,7 +416,7 @@ def delete_by_thread(self, thread_id: str) -> None: command_ids, ) self._conn.execute( - "DELETE FROM terminal_commands WHERE terminal_id IN (SELECT terminal_id FROM abstract_terminals WHERE thread_id = ?)", + "DELETE FROM terminal_commands WHERE terminal_id IN (SELECT terminal_id FROM abstract_terminals WHERE thread_id = ?)", # noqa: E501 (thread_id,), ) self._conn.execute("DELETE FROM chat_sessions WHERE thread_id = ?", (thread_id,)) diff --git a/storage/providers/sqlite/contact_repo.py b/storage/providers/sqlite/contact_repo.py index dea542e38..6d99ed104 100644 --- a/storage/providers/sqlite/contact_repo.py +++ b/storage/providers/sqlite/contact_repo.py @@ -90,21 +90,12 @@ def _ensure_table(self) -> None: with self._lock: self._conn.execute(""" CREATE TABLE IF NOT EXISTS contacts ( - owner_id TEXT NOT NULL, - target_id TEXT NOT NULL, + owner_id TEXT NOT NULL, + target_id TEXT NOT NULL, relation TEXT NOT NULL DEFAULT 'normal', created_at REAL NOT NULL, updated_at REAL, 
PRIMARY KEY (owner_id, target_id) ) """) - # @@@entity-id-to-user-id-migration — rename columns for existing databases - try: - self._conn.execute("ALTER TABLE contacts RENAME COLUMN owner_entity_id TO owner_id") - except sqlite3.OperationalError: - pass - try: - self._conn.execute("ALTER TABLE contacts RENAME COLUMN target_entity_id TO target_id") - except sqlite3.OperationalError: - pass self._conn.commit() diff --git a/storage/providers/sqlite/entity_repo.py b/storage/providers/sqlite/entity_repo.py index 4f89ef3e3..aea68d642 100644 --- a/storage/providers/sqlite/entity_repo.py +++ b/storage/providers/sqlite/entity_repo.py @@ -35,9 +35,9 @@ def create(self, row: EntityRow) -> None: ) self._conn.commit() - def get_by_id(self, id: str) -> EntityRow | None: + def get_by_id(self, entity_id: str) -> EntityRow | None: with self._lock: - row = self._conn.execute("SELECT * FROM entities WHERE id = ?", (id,)).fetchone() + row = self._conn.execute("SELECT * FROM entities WHERE id = ?", (entity_id,)).fetchone() return self._to_row(row) if row else None def get_by_member_id(self, member_id: str) -> list[EntityRow]: @@ -45,6 +45,11 @@ def get_by_member_id(self, member_id: str) -> list[EntityRow]: rows = self._conn.execute("SELECT * FROM entities WHERE member_id = ?", (member_id,)).fetchall() return [self._to_row(r) for r in rows] + def get_by_thread_id(self, thread_id: str) -> EntityRow | None: + with self._lock: + row = self._conn.execute("SELECT * FROM entities WHERE thread_id = ?", (thread_id,)).fetchone() + return self._to_row(row) if row else None + def list_all(self) -> list[EntityRow]: with self._lock: rows = self._conn.execute("SELECT * FROM entities ORDER BY created_at").fetchall() @@ -58,7 +63,7 @@ def list_by_type(self, entity_type: str) -> list[EntityRow]: ).fetchall() return [self._to_row(r) for r in rows] - def update(self, id: str, **fields: str | None) -> None: + def update(self, entity_id: str, **fields: str | None) -> None: allowed = {"name", "avatar", 
"thread_id"} updates = {k: v for k, v in fields.items() if k in allowed} if not updates: @@ -67,13 +72,13 @@ def update(self, id: str, **fields: str | None) -> None: with self._lock: self._conn.execute( f"UPDATE entities SET {set_clause} WHERE id = ?", - (*updates.values(), id), + (*updates.values(), entity_id), ) self._conn.commit() - def delete(self, id: str) -> None: + def delete(self, entity_id: str) -> None: with self._lock: - self._conn.execute("DELETE FROM entities WHERE id = ?", (id,)) + self._conn.execute("DELETE FROM entities WHERE id = ?", (entity_id,)) self._conn.commit() def _to_row(self, r: tuple) -> EntityRow: @@ -102,4 +107,5 @@ def _ensure_table(self) -> None: """ ) self._conn.execute("CREATE INDEX IF NOT EXISTS idx_entities_member ON entities(member_id)") + self._conn.execute("CREATE INDEX IF NOT EXISTS idx_entities_thread ON entities(thread_id)") self._conn.commit() diff --git a/storage/providers/sqlite/lease_repo.py b/storage/providers/sqlite/lease_repo.py index f0ab745c9..c94a626cd 100644 --- a/storage/providers/sqlite/lease_repo.py +++ b/storage/providers/sqlite/lease_repo.py @@ -391,7 +391,7 @@ def _ensure_tables(self) -> None: missing_instances = REQUIRED_INSTANCE_COLUMNS - instance_cols if missing_instances: raise RuntimeError( - f"sandbox_instances schema mismatch: missing {sorted(missing_instances)}. Purge ~/.leon/sandbox.db and retry." + f"sandbox_instances schema mismatch: missing {sorted(missing_instances)}. Purge ~/.leon/sandbox.db and retry." 
# noqa: E501 ) missing_events = REQUIRED_EVENT_COLUMNS - event_cols if missing_events: diff --git a/storage/providers/sqlite/member_repo.py b/storage/providers/sqlite/member_repo.py index 1e026e627..eddf01719 100644 --- a/storage/providers/sqlite/member_repo.py +++ b/storage/providers/sqlite/member_repo.py @@ -40,7 +40,7 @@ def close(self) -> None: def create(self, row: MemberRow) -> None: with self._lock: self._conn.execute( - "INSERT INTO members (id, name, type, avatar, description, config_dir, owner_user_id, created_at, updated_at)" + "INSERT INTO members (id, name, type, avatar, description, config_dir, owner_user_id, created_at, updated_at)" # noqa: E501 " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", ( row.id, @@ -66,16 +66,6 @@ def get_by_name(self, name: str) -> MemberRow | None: row = self._conn.execute("SELECT * FROM members WHERE name = ?", (name,)).fetchone() return self._to_row(row) if row else None - def get_by_email(self, email: str) -> MemberRow | None: - with self._lock: - row = self._conn.execute("SELECT * FROM members WHERE email = ?", (email,)).fetchone() - return self._to_row(row) if row else None - - def get_by_mycel_id(self, mycel_id: int) -> MemberRow | None: - with self._lock: - row = self._conn.execute("SELECT * FROM members WHERE mycel_id = ?", (mycel_id,)).fetchone() - return self._to_row(row) if row else None - def list_all(self) -> list[MemberRow]: with self._lock: rows = self._conn.execute("SELECT * FROM members ORDER BY created_at").fetchall() diff --git a/storage/providers/sqlite/queue_repo.py b/storage/providers/sqlite/queue_repo.py index 09e4b349e..0a82e7232 100644 --- a/storage/providers/sqlite/queue_repo.py +++ b/storage/providers/sqlite/queue_repo.py @@ -128,7 +128,7 @@ def _ensure_table(self) -> None: " content TEXT NOT NULL," " notification_type TEXT NOT NULL DEFAULT 'steer'," " source TEXT," - " sender_id TEXT," + " sender_id TEXT," " sender_name TEXT," " created_at TEXT DEFAULT (datetime('now'))" ")" @@ -145,9 +145,4 @@ def 
_ensure_table(self) -> None: self._conn.execute(f"ALTER TABLE message_queue ADD COLUMN {col} {col_type}") except sqlite3.OperationalError: pass - # @@@entity-id-to-user-id-migration — rename column for existing databases - try: - self._conn.execute("ALTER TABLE message_queue RENAME COLUMN sender_entity_id TO sender_id") - except sqlite3.OperationalError: - pass self._conn.commit() diff --git a/storage/providers/sqlite/sandbox_monitor_repo.py b/storage/providers/sqlite/sandbox_monitor_repo.py index d3ed18004..4efc2266b 100644 --- a/storage/providers/sqlite/sandbox_monitor_repo.py +++ b/storage/providers/sqlite/sandbox_monitor_repo.py @@ -461,3 +461,15 @@ def _table_exists(self, table_name: str) -> bool: (table_name,), ).fetchone() return row is not None + + def query_event(self, event_id: str) -> dict | None: # noqa: F811 + row = self._conn.execute( + """ + SELECT le.*, sl.provider_name + FROM lease_events le + LEFT JOIN sandbox_leases sl ON le.lease_id = sl.lease_id + WHERE le.event_id = ? + """, + (event_id,), + ).fetchone() + return _row_to_dict(row) if row else None diff --git a/storage/providers/sqlite/thread_launch_pref_repo.py b/storage/providers/sqlite/thread_launch_pref_repo.py index 66678632c..4f72a273d 100644 --- a/storage/providers/sqlite/thread_launch_pref_repo.py +++ b/storage/providers/sqlite/thread_launch_pref_repo.py @@ -81,7 +81,7 @@ def _save( (owner_user_id, member_id), ) self._conn.execute( - f"UPDATE thread_launch_prefs SET {json_col} = ?, {ts_col} = ? WHERE owner_user_id = ? AND member_id = ?", + f"UPDATE thread_launch_prefs SET {json_col} = ?, {ts_col} = ? WHERE owner_user_id = ? 
AND member_id = ?", # noqa: E501 (payload, now, owner_user_id, member_id), ) self._conn.commit() diff --git a/storage/providers/sqlite/thread_repo.py b/storage/providers/sqlite/thread_repo.py index a7fd5779f..5f97e4dab 100644 --- a/storage/providers/sqlite/thread_repo.py +++ b/storage/providers/sqlite/thread_repo.py @@ -55,7 +55,7 @@ def create( _validate_thread_identity(is_main=is_main, branch_index=branch_index) with self._lock: self._conn.execute( - "INSERT INTO threads (id, member_id, sandbox_type, cwd, model, observation_provider, is_main, branch_index, created_at)" + "INSERT INTO threads (id, member_id, sandbox_type, cwd, model, observation_provider, is_main, branch_index, created_at)" # noqa: E501 " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", ( thread_id, @@ -122,7 +122,7 @@ def list_by_member(self, member_id: str) -> list[dict[str, Any]]: def list_by_owner_user_id(self, owner_user_id: str) -> list[dict[str, Any]]: """Return all threads owned by this user (via members.owner_user_id JOIN). - Also JOINs entities (entity.id == member_id) for entity_name. + Also JOINs entities (thread_id == entity_id) for entity_name. """ cols = ", ".join(f"t.{c}" for c in self._COLS) with self._lock: @@ -130,7 +130,7 @@ def list_by_owner_user_id(self, owner_user_id: str) -> list[dict[str, Any]]: f"SELECT {cols}, m.name as member_name, m.avatar as member_avatar," " e.name as entity_name FROM threads t" " JOIN members m ON t.member_id = m.id" - " LEFT JOIN entities e ON e.id = t.member_id" + " LEFT JOIN entities e ON e.thread_id = t.id" " WHERE m.owner_user_id = ?" 
" ORDER BY t.is_main DESC, t.created_at", (owner_user_id,), @@ -191,7 +191,9 @@ def _ensure_table(self) -> None: cols = {row[1] for row in self._conn.execute("PRAGMA table_info(threads)").fetchall()} if "branch_index" not in cols: raise RuntimeError("threads table missing branch_index; reset ~/.leon/leon.db for the new schema") - self._conn.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_threads_single_main_per_member ON threads(member_id) WHERE is_main = 1") + self._conn.execute( + "CREATE UNIQUE INDEX IF NOT EXISTS idx_threads_single_main_per_member ON threads(member_id) WHERE is_main = 1" # noqa: E501 + ) self._conn.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_threads_member_branch ON threads(member_id, branch_index)") self._conn.execute("CREATE INDEX IF NOT EXISTS idx_threads_member_created ON threads(member_id, branch_index, created_at)") self._conn.commit() diff --git a/storage/providers/supabase/__init__.py b/storage/providers/supabase/__init__.py index 87c3e19d1..d00874958 100644 --- a/storage/providers/supabase/__init__.py +++ b/storage/providers/supabase/__init__.py @@ -1,63 +1,15 @@ """Supabase storage provider implementations.""" -from .agent_registry_repo import SupabaseAgentRegistryRepo -from .chat_repo import SupabaseChatEntityRepo, SupabaseChatMessageRepo, SupabaseChatRepo -from .chat_session_repo import SupabaseChatSessionRepo from .checkpoint_repo import SupabaseCheckpointRepo -from .contact_repo import SupabaseContactRepo -from .cron_job_repo import SupabaseCronJobRepo -from .entity_repo import SupabaseEntityRepo from .eval_repo import SupabaseEvalRepo from .file_operation_repo import SupabaseFileOperationRepo -from .invite_code_repo import SupabaseInviteCodeRepo -from .lease_repo import SupabaseLeaseRepo -from .member_repo import SupabaseAccountRepo, SupabaseMemberRepo -from .panel_task_repo import SupabasePanelTaskRepo -from .provider_event_repo import SupabaseProviderEventRepo -from .queue_repo import SupabaseQueueRepo -from .recipe_repo 
import SupabaseRecipeRepo -from .resource_snapshot_repo import list_snapshots_by_lease_ids, upsert_lease_resource_snapshot from .run_event_repo import SupabaseRunEventRepo -from .sandbox_monitor_repo import SupabaseSandboxMonitorRepo -from .sandbox_volume_repo import SupabaseSandboxVolumeRepo from .summary_repo import SupabaseSummaryRepo -from .sync_file_repo import SupabaseSyncFileRepo -from .terminal_repo import SupabaseTerminalRepo -from .thread_launch_pref_repo import SupabaseThreadLaunchPrefRepo -from .thread_repo import SupabaseThreadRepo -from .tool_task_repo import SupabaseToolTaskRepo -from .user_settings_repo import SupabaseUserSettingsRepo __all__ = [ - "SupabaseAccountRepo", - "SupabaseAgentRegistryRepo", - "SupabaseChatEntityRepo", - "SupabaseChatMessageRepo", - "SupabaseChatRepo", - "SupabaseChatSessionRepo", "SupabaseCheckpointRepo", - "SupabaseContactRepo", - "SupabaseCronJobRepo", - "SupabaseEntityRepo", - "SupabaseEvalRepo", - "SupabaseFileOperationRepo", - "SupabaseInviteCodeRepo", - "SupabaseLeaseRepo", - "SupabaseMemberRepo", - "SupabasePanelTaskRepo", - "SupabaseProviderEventRepo", - "SupabaseQueueRepo", - "SupabaseRecipeRepo", "SupabaseRunEventRepo", - "SupabaseSandboxMonitorRepo", - "SupabaseSandboxVolumeRepo", + "SupabaseFileOperationRepo", "SupabaseSummaryRepo", - "SupabaseSyncFileRepo", - "SupabaseTerminalRepo", - "SupabaseThreadLaunchPrefRepo", - "SupabaseThreadRepo", - "SupabaseToolTaskRepo", - "SupabaseUserSettingsRepo", - "list_snapshots_by_lease_ids", - "upsert_lease_resource_snapshot", + "SupabaseEvalRepo", ] diff --git a/storage/providers/supabase/contact_repo.py b/storage/providers/supabase/contact_repo.py index 8ac1ba681..4fc7708e5 100644 --- a/storage/providers/supabase/contact_repo.py +++ b/storage/providers/supabase/contact_repo.py @@ -1,66 +1,60 @@ -"""Supabase repository for directional contact relationships.""" +"""Supabase-backed ContactRepo — block/mute contacts for multi-user deployment.""" from __future__ import 
annotations +import logging +import time from typing import Any from storage.contracts import ContactRow -from storage.providers.supabase import _query as q -_REPO = "contact repo" -_TABLE = "contacts" +logger = logging.getLogger(__name__) class SupabaseContactRepo: - """Directional contact relationship CRUD backed by Supabase.""" + """ContactRepo backed by Supabase `contacts` table. + + Schema: owner_id TEXT, target_id TEXT, relation TEXT, created_at FLOAT, updated_at FLOAT + PK: (owner_id, target_id) + """ def __init__(self, client: Any) -> None: - self._client = q.validate_client(client, _REPO) + self._client = client def close(self) -> None: - return None + pass def upsert(self, row: ContactRow) -> None: - self._t().upsert( + self._client.table("contacts").upsert( { "owner_id": row.owner_id, "target_id": row.target_id, "relation": row.relation, "created_at": row.created_at, - "updated_at": row.updated_at, + "updated_at": row.updated_at or time.time(), }, on_conflict="owner_id,target_id", ).execute() def get(self, owner_id: str, target_id: str) -> ContactRow | None: - response = self._t().select("*").eq("owner_id", owner_id).eq("target_id", target_id).execute() - rows = q.rows(response, _REPO, "get") - if not rows: + res = self._client.table("contacts").select("*").eq("owner_id", owner_id).eq("target_id", target_id).maybe_single().execute() + if not res.data: return None - return self._to_row(rows[0]) + return self._to_row(res.data) def list_for_user(self, owner_id: str) -> list[ContactRow]: - query = q.order( - self._t().select("*").eq("owner_id", owner_id), - "created_at", - desc=False, - repo=_REPO, - operation="list_for_user", - ) - raw = q.rows(query.execute(), _REPO, "list_for_user") - return [self._to_row(r) for r in raw] + res = self._client.table("contacts").select("*").eq("owner_id", owner_id).execute() + return [self._to_row(r) for r in (res.data or [])] def delete(self, owner_id: str, target_id: str) -> None: - self._t().delete().eq("owner_id", 
owner_id).eq("target_id", target_id).execute() + self._client.table("contacts").delete().eq("owner_id", owner_id).eq("target_id", target_id).execute() - def _to_row(self, r: dict[str, Any]) -> ContactRow: + @staticmethod + def _to_row(r: dict) -> ContactRow: return ContactRow( owner_id=r["owner_id"], target_id=r["target_id"], relation=r["relation"], - created_at=float(r["created_at"]), - updated_at=float(r["updated_at"]) if r.get("updated_at") is not None else None, + created_at=r.get("created_at") or time.time(), + updated_at=r.get("updated_at"), ) - - def _t(self) -> Any: - return self._client.table(_TABLE) diff --git a/storage/providers/supabase/messaging_repo.py b/storage/providers/supabase/messaging_repo.py new file mode 100644 index 000000000..d672d2e47 --- /dev/null +++ b/storage/providers/supabase/messaging_repo.py @@ -0,0 +1,249 @@ +"""Supabase implementations for messaging v2 repos. + +Covers: chats, chat_members, messages, message_reads, message_deliveries. +All IDs are TEXT (UUID strings) for consistency with existing SQLite schema. 
+""" + +from __future__ import annotations + +import logging +from datetime import UTC, datetime, timedelta +from typing import Any + +from messaging._utils import now_iso + +logger = logging.getLogger(__name__) + + +class SupabaseChatMemberRepo: + """chat_members table — replaces SQLiteChatEntityRepo for Supabase backend.""" + + def __init__(self, client: Any) -> None: + self._client = client + + def close(self) -> None: + pass + + def add_member(self, chat_id: str, user_id: str) -> None: + self._client.table("chat_members").upsert( + {"chat_id": chat_id, "user_id": user_id, "role": "member", "joined_at": now_iso()}, + on_conflict="chat_id,user_id", + ).execute() + + def list_members(self, chat_id: str) -> list[dict[str, Any]]: + res = self._client.table("chat_members").select("*").eq("chat_id", chat_id).execute() + return res.data or [] + + def list_chats_for_user(self, user_id: str) -> list[str]: + res = self._client.table("chat_members").select("chat_id").eq("user_id", user_id).execute() + return [r["chat_id"] for r in (res.data or [])] + + def is_member(self, chat_id: str, user_id: str) -> bool: + res = self._client.table("chat_members").select("user_id").eq("chat_id", chat_id).eq("user_id", user_id).limit(1).execute() + return bool(res.data) + + def find_chat_between(self, user_a: str, user_b: str) -> str | None: + """Find the 1:1 chat between two users (exactly 2 members).""" + # Fetch all chats for user_a, then find which has user_b as only other member + chats_a = set(self.list_chats_for_user(user_a)) + chats_b = set(self.list_chats_for_user(user_b)) + common = chats_a & chats_b + for chat_id in common: + members = self.list_members(chat_id) + if len(members) == 2: + return chat_id + return None + + def update_last_read(self, chat_id: str, user_id: str) -> None: + self._client.table("chat_members").update({"last_read_at": now_iso()}).eq("chat_id", chat_id).eq("user_id", user_id).execute() + + def update_mute(self, chat_id: str, user_id: str, muted: bool, 
mute_until: str | None = None) -> None: + self._client.table("chat_members").update({"muted": muted, "mute_until": mute_until}).eq("chat_id", chat_id).eq( + "user_id", user_id + ).execute() + + +class SupabaseMessagesRepo: + """messages table — rich message model for Supabase backend.""" + + def __init__(self, client: Any) -> None: + self._client = client + + def close(self) -> None: + pass + + def create(self, row: dict[str, Any]) -> dict[str, Any]: + """Insert a new message. Returns the created row.""" + res = self._client.table("messages").insert(row).execute() + return res.data[0] if res.data else row + + def get_by_id(self, message_id: str) -> dict[str, Any] | None: + res = self._client.table("messages").select("*").eq("id", message_id).limit(1).execute() + return res.data[0] if res.data else None + + def list_by_chat( + self, chat_id: str, *, limit: int = 50, before: str | None = None, viewer_id: str | None = None + ) -> list[dict[str, Any]]: + q = self._client.table("messages").select("*").eq("chat_id", chat_id).is_("deleted_at", "null") + if before: + q = q.lt("created_at", before) + res = q.order("created_at", desc=True).limit(limit).execute() + rows = list(reversed(res.data or [])) + # Filter soft-deleted for viewer + if viewer_id: + rows = [r for r in rows if viewer_id not in (r.get("deleted_for") or [])] + return rows + + def list_unread(self, chat_id: str, user_id: str) -> list[dict[str, Any]]: + """Messages after user's last_read_at, excluding own, not deleted.""" + # Get last_read_at from chat_members + member_res = ( + self._client.table("chat_members").select("last_read_at").eq("chat_id", chat_id).eq("user_id", user_id).limit(1).execute() + ) + last_read = None + if member_res.data: + last_read = member_res.data[0].get("last_read_at") + + q = self._client.table("messages").select("*").eq("chat_id", chat_id).neq("sender_id", user_id).is_("deleted_at", "null") + if last_read: + q = q.gt("created_at", last_read) + res = q.order("created_at", 
desc=False).execute() + rows = res.data or [] + return [r for r in rows if user_id not in (r.get("deleted_for") or [])] + + def count_unread(self, chat_id: str, user_id: str) -> int: + """Count unread messages using a COUNT query to avoid materializing rows.""" + member_res = ( + self._client.table("chat_members").select("last_read_at").eq("chat_id", chat_id).eq("user_id", user_id).limit(1).execute() + ) + last_read = None + if member_res.data: + last_read = member_res.data[0].get("last_read_at") + + q = ( + self._client.table("messages") + .select("id", count="exact") + .eq("chat_id", chat_id) + .neq("sender_id", user_id) + .is_("deleted_at", "null") + ) + if last_read: + q = q.gt("created_at", last_read) + res = q.execute() + return res.count or 0 + + def retract(self, message_id: str, sender_id: str) -> bool: + """Retract a message within 2-minute window.""" + + msg = self.get_by_id(message_id) + if not msg or msg.get("sender_id") != sender_id: + return False + created = msg.get("created_at") + if created: + try: + created_dt = datetime.fromisoformat(created.replace("Z", "+00:00")) + if datetime.now(tz=UTC) - created_dt > timedelta(minutes=2): + return False + except (ValueError, AttributeError): + pass + self._client.table("messages").update({"retracted_at": now_iso(), "content": "[已撤回]"}).eq("id", message_id).execute() + return True + + def delete_for(self, message_id: str, user_id: str) -> None: + """Soft-delete for a specific user.""" + msg = self.get_by_id(message_id) + if not msg: + return + deleted_for = list(msg.get("deleted_for") or []) + if user_id not in deleted_for: + deleted_for.append(user_id) + self._client.table("messages").update({"deleted_for": deleted_for}).eq("id", message_id).execute() + + def search(self, query: str, *, chat_id: str | None = None, limit: int = 50) -> list[dict[str, Any]]: + q = self._client.table("messages").select("*").ilike("content", f"%{query}%").is_("deleted_at", "null") + if chat_id: + q = q.eq("chat_id", chat_id) + 
res = q.order("created_at", desc=False).limit(limit).execute() + return res.data or [] + + def list_by_time_range( + self, chat_id: str, *, after: str | None = None, before: str | None = None, limit: int = 100 + ) -> list[dict[str, Any]]: + q = self._client.table("messages").select("*").eq("chat_id", chat_id).is_("deleted_at", "null") + if after: + q = q.gte("created_at", after) + if before: + q = q.lte("created_at", before) + res = q.order("created_at", desc=False).limit(limit).execute() + return res.data or [] + + +class SupabaseMessageReadRepo: + """message_reads table — per-message read receipts.""" + + def __init__(self, client: Any) -> None: + self._client = client + + def close(self) -> None: + pass + + def mark_read(self, message_id: str, user_id: str) -> None: + self._client.table("message_reads").upsert( + {"message_id": message_id, "user_id": user_id, "read_at": now_iso()}, + on_conflict="message_id,user_id", + ).execute() + + def mark_chat_read(self, chat_id: str, user_id: str, message_ids: list[str]) -> None: + """Bulk mark messages as read.""" + rows = [{"message_id": mid, "user_id": user_id, "read_at": now_iso()} for mid in message_ids] + if rows: + self._client.table("message_reads").upsert(rows, on_conflict="message_id,user_id").execute() + + def get_read_count(self, message_id: str) -> int: + res = self._client.table("message_reads").select("user_id", count="exact").eq("message_id", message_id).execute() + return res.count or 0 + + def has_read(self, message_id: str, user_id: str) -> bool: + res = self._client.table("message_reads").select("user_id").eq("message_id", message_id).eq("user_id", user_id).limit(1).execute() + return bool(res.data) + + +class SupabaseRelationshipRepo: + """relationships table — Hire/Visit state machine persistence.""" + + def __init__(self, client: Any) -> None: + self._client = client + + def close(self) -> None: + pass + + def _ordered(self, a: str, b: str) -> tuple[str, str]: + return (a, b) if a < b else (b, a) + + 
def get(self, user_a: str, user_b: str) -> dict[str, Any] | None: + pa, pb = self._ordered(user_a, user_b) + res = self._client.table("relationships").select("*").eq("principal_a", pa).eq("principal_b", pb).limit(1).execute() + return res.data[0] if res.data else None + + def get_by_id(self, relationship_id: str) -> dict[str, Any] | None: + res = self._client.table("relationships").select("*").eq("id", relationship_id).limit(1).execute() + return res.data[0] if res.data else None + + def upsert(self, user_a: str, user_b: str, **fields: Any) -> dict[str, Any]: + pa, pb = self._ordered(user_a, user_b) + existing = self.get(user_a, user_b) + now = now_iso() + if existing: + res = self._client.table("relationships").update({"updated_at": now, **fields}).eq("id", existing["id"]).execute() + return res.data[0] if res.data else {**existing, "updated_at": now, **fields} + else: + import uuid + + row = {"id": str(uuid.uuid4()), "principal_a": pa, "principal_b": pb, "updated_at": now, **fields} + res = self._client.table("relationships").insert(row).execute() + return res.data[0] if res.data else row + + def list_for_user(self, user_id: str) -> list[dict[str, Any]]: + # Single query with OR filter + res = self._client.table("relationships").select("*").or_(f"principal_a.eq.{user_id},principal_b.eq.{user_id}").execute() + return res.data or [] diff --git a/storage/providers/supabase/sandbox_volume_repo.py b/storage/providers/supabase/sandbox_volume_repo.py index 1db863ae0..be05f9cc4 100644 --- a/storage/providers/supabase/sandbox_volume_repo.py +++ b/storage/providers/supabase/sandbox_volume_repo.py @@ -1,39 +1,28 @@ -"""Supabase sandbox volume repository.""" +"""Supabase stub for sandbox volume repository.""" from __future__ import annotations from typing import Any -from ._query import rows, validate_client - class SupabaseSandboxVolumeRepo: - _TABLE = "sandbox_volumes" - def __init__(self, client: Any) -> None: - self._client = validate_client(client, 
"SupabaseSandboxVolumeRepo") + raise NotImplementedError("SupabaseSandboxVolumeRepo is not yet implemented") def close(self) -> None: - pass + raise NotImplementedError def create(self, volume_id: str, source_json: str, name: str | None, created_at: str) -> None: - self._client.table(self._TABLE).insert( - {"volume_id": volume_id, "source": source_json, "name": name, "created_at": created_at} - ).execute() + raise NotImplementedError def get(self, volume_id: str) -> dict[str, Any] | None: - resp = self._client.table(self._TABLE).select("*").eq("volume_id", volume_id).execute() - data = rows(resp, "SupabaseSandboxVolumeRepo", "get") - return data[0] if data else None + raise NotImplementedError def update_source(self, volume_id: str, source_json: str) -> None: - self._client.table(self._TABLE).update({"source": source_json}).eq("volume_id", volume_id).execute() + raise NotImplementedError def list_all(self) -> list[dict[str, Any]]: - resp = self._client.table(self._TABLE).select("*").order("created_at", desc=True).execute() - return rows(resp, "SupabaseSandboxVolumeRepo", "list_all") + raise NotImplementedError def delete(self, volume_id: str) -> bool: - resp = self._client.table(self._TABLE).delete().eq("volume_id", volume_id).execute() - data = rows(resp, "SupabaseSandboxVolumeRepo", "delete") - return len(data) > 0 + raise NotImplementedError diff --git a/storage/providers/supabase/thread_launch_pref_repo.py b/storage/providers/supabase/thread_launch_pref_repo.py index 693da056d..13036e9d8 100644 --- a/storage/providers/supabase/thread_launch_pref_repo.py +++ b/storage/providers/supabase/thread_launch_pref_repo.py @@ -24,7 +24,9 @@ def close(self) -> None: def get(self, owner_user_id: str, member_id: str) -> dict[str, Any] | None: response = ( self._t() - .select("owner_user_id, member_id, last_confirmed_json, last_successful_json, last_confirmed_at, last_successful_at") + .select( + "owner_user_id, member_id, last_confirmed_json, last_successful_json, 
last_confirmed_at, last_successful_at" # noqa: E501 + ) .eq("owner_user_id", owner_user_id) .eq("member_id", member_id) .execute() diff --git a/storage/providers/supabase/user_settings_repo.py b/storage/providers/supabase/user_settings_repo.py index 633c0041c..55207e0ee 100644 --- a/storage/providers/supabase/user_settings_repo.py +++ b/storage/providers/supabase/user_settings_repo.py @@ -28,7 +28,12 @@ def get(self, user_id: str) -> dict[str, Any]: "get", ) if not rows: - return {"user_id": user_id, "default_workspace": None, "recent_workspaces": [], "default_model": "leon:large"} + return { + "user_id": user_id, + "default_workspace": None, + "recent_workspaces": [], + "default_model": "leon:large", + } row = dict(rows[0]) if isinstance(row.get("recent_workspaces"), str): import json diff --git a/tests/middleware/memory/test_summary_store_performance.py b/tests/middleware/memory/test_summary_store_performance.py index ce3b0c3bb..7260c00ba 100644 --- a/tests/middleware/memory/test_summary_store_performance.py +++ b/tests/middleware/memory/test_summary_store_performance.py @@ -17,7 +17,8 @@ import pytest _SKIP_WINDOWS = pytest.mark.skipif( - sys.platform == "win32", reason="SQLite connection-per-call is slow on Windows; performance tests not meaningful there" + sys.platform == "win32", + reason="SQLite connection-per-call is slow on Windows; performance tests not meaningful there", ) from core.runtime.middleware.memory.summary_store import SummaryStore @@ -164,7 +165,9 @@ def write_summaries(thread_idx: int): min_write_time = min(all_times) print(f"[Performance Test] Concurrent writes completed in {total_time:.2f}s") - print(f"[Performance Test] Write times: avg={avg_write_time:.2f}ms, min={min_write_time:.2f}ms, max={max_write_time:.2f}ms") + print( + f"[Performance Test] Write times: avg={avg_write_time:.2f}ms, min={min_write_time:.2f}ms, max={max_write_time:.2f}ms" # noqa: E501 + ) # Assert performance requirements assert avg_write_time < 100, f"Average write 
time {avg_write_time:.2f}ms exceeds 100ms threshold" diff --git a/tests/test_file_operation_repo.py b/tests/test_file_operation_repo.py index b7c5f1526..d08ddfcfa 100644 --- a/tests/test_file_operation_repo.py +++ b/tests/test_file_operation_repo.py @@ -56,7 +56,8 @@ def test_delete_thread_operations(tmp_path): @pytest.mark.skipif( - sys.platform == "win32", reason="time.time() resolution on Windows can produce identical timestamps; ordering becomes non-deterministic" + sys.platform == "win32", + reason="time.time() resolution on Windows can produce identical timestamps; ordering becomes non-deterministic", ) def test_supabase_file_operation_repo_record_and_query(): tables: dict[str, list[dict]] = {"file_operations": []} diff --git a/tests/test_runtime.py b/tests/test_runtime.py index ef168ebbe..6cfb751e7 100644 --- a/tests/test_runtime.py +++ b/tests/test_runtime.py @@ -672,7 +672,7 @@ def test_normalize_pty_result_strips_prompt_echo_and_tail_prompt(): "api-existing-thread-after-fix\n" "% =pprintf '\\n__LEON_PTY_END_71d24aee__ %s\\n' $?>\n" # noqa: E501 "\n" - "% \n" + "% \n" # noqa: E501 ) cleaned = _normalize_pty_result(output, "echo api-existing-thread-after-fix") assert cleaned == "api-existing-thread-after-fix" diff --git a/tests/test_sandbox_e2e.py b/tests/test_sandbox_e2e.py index f1dd64383..7569d284c 100644 --- a/tests/test_sandbox_e2e.py +++ b/tests/test_sandbox_e2e.py @@ -144,7 +144,7 @@ def test_file_operations(self): extracted = _invoke_and_extract( agent, - "Write the text 'hello from test' to /workspace/test_e2e.txt, then read it back and tell me the content.", + "Write the text 'hello from test' to /workspace/test_e2e.txt, then read it back and tell me the content.", # noqa: E501 thread_id, ) @@ -215,7 +215,7 @@ def test_file_operations(self): extracted = _invoke_and_extract( agent, - "Write the text 'e2b test content' to /home/user/test_e2e.txt, then read it back and tell me the content.", + "Write the text 'e2b test content' to 
/home/user/test_e2e.txt, then read it back and tell me the content.", # noqa: E501 thread_id, ) diff --git a/tests/test_terminal_persistence.py b/tests/test_terminal_persistence.py index db57f3a0d..380bf8dd6 100644 --- a/tests/test_terminal_persistence.py +++ b/tests/test_terminal_persistence.py @@ -13,7 +13,8 @@ # TODO(windows-compat): BashExecutor/ZshExecutor require Unix shell semantics. # Tracked in: https://github.com/OpenDCAI/Mycel/issues — Windows shell support needed. @pytest.mark.skipif( - sys.platform == "win32" or shutil.which("bash") is None, reason="bash not available or not Unix-compatible on this platform" + sys.platform == "win32" or shutil.which("bash") is None, + reason="bash not available or not Unix-compatible on this platform", ) def test_bash_env_persistence(): """Test that environment variables persist across commands in bash.""" @@ -34,7 +35,8 @@ async def run(): @pytest.mark.skipif( - sys.platform == "win32" or shutil.which("bash") is None, reason="bash not available or not Unix-compatible on this platform" + sys.platform == "win32" or shutil.which("bash") is None, + reason="bash not available or not Unix-compatible on this platform", ) def test_bash_cwd_persistence(): """Test that working directory persists across commands in bash.""" @@ -59,7 +61,8 @@ async def run(): @pytest.mark.skipif( - sys.platform == "win32" or shutil.which("zsh") is None, reason="zsh not available or not Unix-compatible on this platform" + sys.platform == "win32" or shutil.which("zsh") is None, + reason="zsh not available or not Unix-compatible on this platform", ) def test_zsh_env_persistence(): """Test that environment variables persist across commands in zsh.""" @@ -80,7 +83,8 @@ async def run(): @pytest.mark.skipif( - sys.platform == "win32" or shutil.which("zsh") is None, reason="zsh not available or not Unix-compatible on this platform" + sys.platform == "win32" or shutil.which("zsh") is None, + reason="zsh not available or not Unix-compatible on this 
platform", ) def test_zsh_cwd_persistence(): """Test that working directory persists across commands in zsh.""" diff --git a/tests/test_thread_config_repo.py b/tests/test_thread_config_repo.py index 007d30c40..9a822c717 100644 --- a/tests/test_thread_config_repo.py +++ b/tests/test_thread_config_repo.py @@ -15,7 +15,9 @@ def test_migrate_thread_metadata_table(tmp_path): db_path = tmp_path / "leon.db" with sqlite3.connect(str(db_path)) as conn: - conn.execute("CREATE TABLE thread_metadata (thread_id TEXT PRIMARY KEY, sandbox_type TEXT NOT NULL, cwd TEXT, model TEXT)") + conn.execute( + "CREATE TABLE thread_metadata (thread_id TEXT PRIMARY KEY, sandbox_type TEXT NOT NULL, cwd TEXT, model TEXT)" # noqa: E501 + ) conn.execute( "INSERT INTO thread_metadata (thread_id, sandbox_type, cwd, model) VALUES (?, ?, ?, ?)", ("t-1", "local", "/tmp/ws", "m-1"), diff --git a/uv.lock b/uv.lock index 721e5c891..3285d04c7 100644 --- a/uv.lock +++ b/uv.lock @@ -769,7 +769,7 @@ wheels = [ [[package]] name = "fastapi" -version = "0.129.0" +version = "0.135.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, @@ -778,9 +778,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/47/75f6bea02e797abff1bca968d5997793898032d9923c1935ae2efdece642/fastapi-0.129.0.tar.gz", hash = "sha256:61315cebd2e65df5f97ec298c888f9de30430dd0612d59d6480beafbc10655af", size = 375450, upload-time = "2026-02-12T13:54:52.541Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/e6/7adb4c5fa231e82c35b8f5741a9f2d055f520c29af5546fd70d3e8e1cd2e/fastapi-0.135.3.tar.gz", hash = "sha256:bd6d7caf1a2bdd8d676843cdcd2287729572a1ef524fc4d65c17ae002a1be654", size = 396524, upload-time = "2026-04-01T16:23:58.188Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/dd/d0ee25348ac58245ee9f90b6f3cbb666bf01f69be7e0911f9851bddbda16/fastapi-0.129.0-py3-none-any.whl", hash 
= "sha256:b4946880e48f462692b31c083be0432275cbfb6e2274566b1be91479cc1a84ec", size = 102950, upload-time = "2026-02-12T13:54:54.528Z" }, + { url = "https://files.pythonhosted.org/packages/84/a4/5caa2de7f917a04ada20018eccf60d6cc6145b0199d55ca3711b0fc08312/fastapi-0.135.3-py3-none-any.whl", hash = "sha256:9b0f590c813acd13d0ab43dd8494138eb58e484bfac405db1f3187cfc5810d98", size = 117734, upload-time = "2026-04-01T16:23:59.328Z" }, ] [[package]] @@ -3151,27 +3151,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/14/b0/73cf7550861e2b4824950b8b52eebdcc5adc792a00c514406556c5b80817/ruff-0.15.8.tar.gz", hash = "sha256:995f11f63597ee362130d1d5a327a87cb6f3f5eae3094c620bcc632329a4d26e", size = 4610921, upload-time = "2026-03-26T18:39:38.675Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/92/c445b0cd6da6e7ae51e954939cb69f97e008dbe750cfca89b8cedc081be7/ruff-0.15.8-py3-none-linux_armv6l.whl", hash = "sha256:cbe05adeba76d58162762d6b239c9056f1a15a55bd4b346cfd21e26cd6ad7bc7", size = 10527394, upload-time = "2026-03-26T18:39:41.566Z" }, - { url = "https://files.pythonhosted.org/packages/eb/92/f1c662784d149ad1414cae450b082cf736430c12ca78367f20f5ed569d65/ruff-0.15.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d3e3d0b6ba8dca1b7ef9ab80a28e840a20070c4b62e56d675c24f366ef330570", size = 10905693, upload-time = "2026-03-26T18:39:30.364Z" }, - { url = "https://files.pythonhosted.org/packages/ca/f2/7a631a8af6d88bcef997eb1bf87cc3da158294c57044aafd3e17030613de/ruff-0.15.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ee3ae5c65a42f273f126686353f2e08ff29927b7b7e203b711514370d500de3", size = 10323044, upload-time = "2026-03-26T18:39:33.37Z" }, - { url = "https://files.pythonhosted.org/packages/67/18/1bf38e20914a05e72ef3b9569b1d5c70a7ef26cd188d69e9ca8ef588d5bf/ruff-0.15.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:fdce027ada77baa448077ccc6ebb2fa9c3c62fd110d8659d601cf2f475858d94", size = 10629135, upload-time = "2026-03-26T18:39:44.142Z" }, - { url = "https://files.pythonhosted.org/packages/d2/e9/138c150ff9af60556121623d41aba18b7b57d95ac032e177b6a53789d279/ruff-0.15.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12e617fc01a95e5821648a6df341d80456bd627bfab8a829f7cfc26a14a4b4a3", size = 10348041, upload-time = "2026-03-26T18:39:52.178Z" }, - { url = "https://files.pythonhosted.org/packages/02/f1/5bfb9298d9c323f842c5ddeb85f1f10ef51516ac7a34ba446c9347d898df/ruff-0.15.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:432701303b26416d22ba696c39f2c6f12499b89093b61360abc34bcc9bf07762", size = 11121987, upload-time = "2026-03-26T18:39:55.195Z" }, - { url = "https://files.pythonhosted.org/packages/10/11/6da2e538704e753c04e8d86b1fc55712fdbdcc266af1a1ece7a51fff0d10/ruff-0.15.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d910ae974b7a06a33a057cb87d2a10792a3b2b3b35e33d2699fdf63ec8f6b17a", size = 11951057, upload-time = "2026-03-26T18:39:19.18Z" }, - { url = "https://files.pythonhosted.org/packages/83/f0/c9208c5fd5101bf87002fed774ff25a96eea313d305f1e5d5744698dc314/ruff-0.15.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2033f963c43949d51e6fdccd3946633c6b37c484f5f98c3035f49c27395a8ab8", size = 11464613, upload-time = "2026-03-26T18:40:06.301Z" }, - { url = "https://files.pythonhosted.org/packages/f8/22/d7f2fabdba4fae9f3b570e5605d5eb4500dcb7b770d3217dca4428484b17/ruff-0.15.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f29b989a55572fb885b77464cf24af05500806ab4edf9a0fd8977f9759d85b1", size = 11257557, upload-time = "2026-03-26T18:39:57.972Z" }, - { url = "https://files.pythonhosted.org/packages/71/8c/382a9620038cf6906446b23ce8632ab8c0811b8f9d3e764f58bedd0c9a6f/ruff-0.15.8-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:ac51d486bf457cdc985a412fb1801b2dfd1bd8838372fc55de64b1510eff4bec", size = 11169440, upload-time = "2026-03-26T18:39:22.205Z" }, - { url = "https://files.pythonhosted.org/packages/4d/0d/0994c802a7eaaf99380085e4e40c845f8e32a562e20a38ec06174b52ef24/ruff-0.15.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c9861eb959edab053c10ad62c278835ee69ca527b6dcd72b47d5c1e5648964f6", size = 10605963, upload-time = "2026-03-26T18:39:46.682Z" }, - { url = "https://files.pythonhosted.org/packages/19/aa/d624b86f5b0aad7cef6bbf9cd47a6a02dfdc4f72c92a337d724e39c9d14b/ruff-0.15.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8d9a5b8ea13f26ae90838afc33f91b547e61b794865374f114f349e9036835fb", size = 10357484, upload-time = "2026-03-26T18:39:49.176Z" }, - { url = "https://files.pythonhosted.org/packages/35/c3/e0b7835d23001f7d999f3895c6b569927c4d39912286897f625736e1fd04/ruff-0.15.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c2a33a529fb3cbc23a7124b5c6ff121e4d6228029cba374777bd7649cc8598b8", size = 10830426, upload-time = "2026-03-26T18:40:03.702Z" }, - { url = "https://files.pythonhosted.org/packages/f0/51/ab20b322f637b369383adc341d761eaaa0f0203d6b9a7421cd6e783d81b9/ruff-0.15.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:75e5cd06b1cf3f47a3996cfc999226b19aa92e7cce682dcd62f80d7035f98f49", size = 11345125, upload-time = "2026-03-26T18:39:27.799Z" }, - { url = "https://files.pythonhosted.org/packages/37/e6/90b2b33419f59d0f2c4c8a48a4b74b460709a557e8e0064cf33ad894f983/ruff-0.15.8-py3-none-win32.whl", hash = "sha256:bc1f0a51254ba21767bfa9a8b5013ca8149dcf38092e6a9eb704d876de94dc34", size = 10571959, upload-time = "2026-03-26T18:39:36.117Z" }, - { url = "https://files.pythonhosted.org/packages/1f/a2/ef467cb77099062317154c63f234b8a7baf7cb690b99af760c5b68b9ee7f/ruff-0.15.8-py3-none-win_amd64.whl", hash = "sha256:04f79eff02a72db209d47d665ba7ebcad609d8918a134f86cb13dd132159fc89", size = 11743893, upload-time = "2026-03-26T18:39:25.01Z" }, - { url = 
"https://files.pythonhosted.org/packages/15/e2/77be4fff062fa78d9b2a4dea85d14785dac5f1d0c1fb58ed52331f0ebe28/ruff-0.15.8-py3-none-win_arm64.whl", hash = "sha256:cf891fa8e3bb430c0e7fac93851a5978fc99c8fa2c053b57b118972866f8e5f2", size = 11048175, upload-time = "2026-03-26T18:40:01.06Z" }, +version = "0.15.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/97/e9f1ca355108ef7194e38c812ef40ba98c7208f47b13ad78d023caa583da/ruff-0.15.9.tar.gz", hash = "sha256:29cbb1255a9797903f6dde5ba0188c707907ff44a9006eb273b5a17bfa0739a2", size = 4617361, upload-time = "2026-04-02T18:17:20.829Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/1f/9cdfd0ac4b9d1e5a6cf09bedabdf0b56306ab5e333c85c87281273e7b041/ruff-0.15.9-py3-none-linux_armv6l.whl", hash = "sha256:6efbe303983441c51975c243e26dff328aca11f94b70992f35b093c2e71801e1", size = 10511206, upload-time = "2026-04-02T18:16:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/3d/f6/32bfe3e9c136b35f02e489778d94384118bb80fd92c6d92e7ccd97db12ce/ruff-0.15.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4965bac6ac9ea86772f4e23587746f0b7a395eccabb823eb8bfacc3fa06069f7", size = 10923307, upload-time = "2026-04-02T18:17:08.645Z" }, + { url = "https://files.pythonhosted.org/packages/ca/25/de55f52ab5535d12e7aaba1de37a84be6179fb20bddcbe71ec091b4a3243/ruff-0.15.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf05aad70ca5b5a0a4b0e080df3a6b699803916d88f006efd1f5b46302daab8", size = 10316722, upload-time = "2026-04-02T18:16:44.206Z" }, + { url = "https://files.pythonhosted.org/packages/48/11/690d75f3fd6278fe55fff7c9eb429c92d207e14b25d1cae4064a32677029/ruff-0.15.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9439a342adb8725f32f92732e2bafb6d5246bd7a5021101166b223d312e8fc59", size = 10623674, upload-time = "2026-04-02T18:16:50.951Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/ec/176f6987be248fc5404199255522f57af1b4a5a1b57727e942479fec98ad/ruff-0.15.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c5e6faf9d97c8edc43877c3f406f47446fc48c40e1442d58cfcdaba2acea745", size = 10351516, upload-time = "2026-04-02T18:16:57.206Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fc/51cffbd2b3f240accc380171d51446a32aa2ea43a40d4a45ada67368fbd2/ruff-0.15.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b34a9766aeec27a222373d0b055722900fbc0582b24f39661aa96f3fe6ad901", size = 11150202, upload-time = "2026-04-02T18:17:06.452Z" }, + { url = "https://files.pythonhosted.org/packages/d6/d4/25292a6dfc125f6b6528fe6af31f5e996e19bf73ca8e3ce6eb7fa5b95885/ruff-0.15.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89dd695bc72ae76ff484ae54b7e8b0f6b50f49046e198355e44ea656e521fef9", size = 11988891, upload-time = "2026-04-02T18:17:18.575Z" }, + { url = "https://files.pythonhosted.org/packages/13/e1/1eebcb885c10e19f969dcb93d8413dfee8172578709d7ee933640f5e7147/ruff-0.15.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce187224ef1de1bd225bc9a152ac7102a6171107f026e81f317e4257052916d5", size = 11480576, upload-time = "2026-04-02T18:16:52.986Z" }, + { url = "https://files.pythonhosted.org/packages/ff/6b/a1548ac378a78332a4c3dcf4a134c2475a36d2a22ddfa272acd574140b50/ruff-0.15.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0c7c341f68adb01c488c3b7d4b49aa8ea97409eae6462d860a79cf55f431b6", size = 11254525, upload-time = "2026-04-02T18:17:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/42/aa/4bb3af8e61acd9b1281db2ab77e8b2c3c5e5599bf2a29d4a942f1c62b8d6/ruff-0.15.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:55cc15eee27dc0eebdfcb0d185a6153420efbedc15eb1d38fe5e685657b0f840", size = 11204072, upload-time = "2026-04-02T18:17:13.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/48/d550dc2aa6e423ea0bcc1d0ff0699325ffe8a811e2dba156bd80750b86dc/ruff-0.15.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a6537f6eed5cda688c81073d46ffdfb962a5f29ecb6f7e770b2dc920598997ed", size = 10594998, upload-time = "2026-04-02T18:16:46.369Z" }, + { url = "https://files.pythonhosted.org/packages/63/47/321167e17f5344ed5ec6b0aa2cff64efef5f9e985af8f5622cfa6536043f/ruff-0.15.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6d3fcbca7388b066139c523bda744c822258ebdcfbba7d24410c3f454cc9af71", size = 10359769, upload-time = "2026-04-02T18:17:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/67/5e/074f00b9785d1d2c6f8c22a21e023d0c2c1817838cfca4c8243200a1fa87/ruff-0.15.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:058d8e99e1bfe79d8a0def0b481c56059ee6716214f7e425d8e737e412d69677", size = 10850236, upload-time = "2026-04-02T18:16:48.749Z" }, + { url = "https://files.pythonhosted.org/packages/76/37/804c4135a2a2caf042925d30d5f68181bdbd4461fd0d7739da28305df593/ruff-0.15.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8e1ddb11dbd61d5983fa2d7d6370ef3eb210951e443cace19594c01c72abab4c", size = 11358343, upload-time = "2026-04-02T18:16:55.068Z" }, + { url = "https://files.pythonhosted.org/packages/88/3d/1364fcde8656962782aa9ea93c92d98682b1ecec2f184e625a965ad3b4a6/ruff-0.15.9-py3-none-win32.whl", hash = "sha256:bde6ff36eaf72b700f32b7196088970bf8fdb2b917b7accd8c371bfc0fd573ec", size = 10583382, upload-time = "2026-04-02T18:17:04.261Z" }, + { url = "https://files.pythonhosted.org/packages/4c/56/5c7084299bd2cacaa07ae63a91c6f4ba66edc08bf28f356b24f6b717c799/ruff-0.15.9-py3-none-win_amd64.whl", hash = "sha256:45a70921b80e1c10cf0b734ef09421f71b5aa11d27404edc89d7e8a69505e43d", size = 11744969, upload-time = "2026-04-02T18:16:59.611Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/76704c4f312257d6dbaae3c959add2a622f63fcca9d864659ce6d8d97d3d/ruff-0.15.9-py3-none-win_arm64.whl", hash = 
"sha256:0694e601c028fd97dc5c6ee244675bc241aeefced7ef80cd9c6935a871078f53", size = 11005870, upload-time = "2026-04-02T18:17:15.773Z" }, ] [[package]]