diff --git a/pyrightconfig.json b/pyrightconfig.json index b503883..853ab99 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -1,6 +1,7 @@ { "include": [ - "src/scc_cli/services" + "src", + "tests" ], "exclude": [ "**/__pycache__", diff --git a/src/scc_cli/adapters/config_normalizer.py b/src/scc_cli/adapters/config_normalizer.py new file mode 100644 index 0000000..a60ec7b --- /dev/null +++ b/src/scc_cli/adapters/config_normalizer.py @@ -0,0 +1,218 @@ +"""Config normalization - converts raw dicts to typed config models. + +Parse and validate configuration at load edges, then pass normalized +models inward to the application layer. This reduces stringly-typed +access and schema drift risk. +""" + +from __future__ import annotations + +from typing import Any + +from scc_cli.ports.config_models import ( + DefaultsConfig, + DelegationConfig, + MarketplaceConfig, + MCPServerConfig, + NormalizedOrgConfig, + NormalizedProjectConfig, + NormalizedTeamConfig, + NormalizedUserConfig, + OrganizationInfo, + OrganizationSource, + ProjectsDelegation, + SecurityConfig, + SessionSettings, + TeamDelegation, + TeamsDelegation, +) + + +def normalize_user_config(raw: dict[str, Any]) -> NormalizedUserConfig: + """Normalize a raw user config dict to typed model. + + Args: + raw: Raw user config dict from JSON. + + Returns: + NormalizedUserConfig with typed fields. 
+ """ + org_source = None + raw_source = raw.get("organization_source") + if raw_source and isinstance(raw_source, dict): + org_source = OrganizationSource( + url=raw_source.get("url", ""), + auth=raw_source.get("auth"), + auth_header=raw_source.get("auth_header"), + ) + + workspace_map = raw.get("workspace_team_map", {}) + if not isinstance(workspace_map, dict): + workspace_map = {} + + return NormalizedUserConfig( + selected_profile=raw.get("selected_profile"), + standalone=bool(raw.get("standalone", False)), + organization_source=org_source, + workspace_team_map=workspace_map, + onboarding_seen=bool(raw.get("onboarding_seen", False)), + ) + + +def _normalize_session_settings(raw: dict[str, Any] | None) -> SessionSettings: + """Normalize session settings from raw dict.""" + if not raw: + return SessionSettings() + return SessionSettings( + timeout_hours=raw.get("timeout_hours"), + auto_resume=bool(raw.get("auto_resume", False)), + ) + + +def _normalize_mcp_server(raw: dict[str, Any]) -> MCPServerConfig: + """Normalize a single MCP server config.""" + return MCPServerConfig( + name=raw.get("name", ""), + type=raw.get("type", "sse"), + url=raw.get("url"), + command=raw.get("command"), + args=list(raw.get("args", [])), + env=dict(raw.get("env", {})), + headers=dict(raw.get("headers", {})), + ) + + +def _normalize_team_config(name: str, raw: dict[str, Any]) -> NormalizedTeamConfig: + """Normalize a single team/profile config.""" + mcp_servers = tuple(_normalize_mcp_server(s) for s in raw.get("additional_mcp_servers", [])) + + delegation_raw = raw.get("delegation", {}) + delegation = TeamDelegation( + allow_project_overrides=bool(delegation_raw.get("allow_project_overrides", False)), + ) + + return NormalizedTeamConfig( + name=name, + description=raw.get("description", ""), + plugin=raw.get("plugin"), + marketplace=raw.get("marketplace"), + additional_plugins=tuple(raw.get("additional_plugins", [])), + additional_mcp_servers=mcp_servers, + 
session=_normalize_session_settings(raw.get("session")), + delegation=delegation, + ) + + +def _normalize_security(raw: dict[str, Any] | None) -> SecurityConfig: + """Normalize security config.""" + if not raw: + return SecurityConfig() + return SecurityConfig( + blocked_plugins=tuple(raw.get("blocked_plugins", [])), + blocked_mcp_servers=tuple(raw.get("blocked_mcp_servers", [])), + allow_stdio_mcp=bool(raw.get("allow_stdio_mcp", False)), + allowed_stdio_prefixes=tuple(raw.get("allowed_stdio_prefixes", [])), + ) + + +def _normalize_defaults(raw: dict[str, Any] | None) -> DefaultsConfig: + """Normalize defaults config.""" + if not raw: + return DefaultsConfig() + + allowed_plugins = raw.get("allowed_plugins") + allowed_mcp = raw.get("allowed_mcp_servers") + + return DefaultsConfig( + enabled_plugins=tuple(raw.get("enabled_plugins", [])), + disabled_plugins=tuple(raw.get("disabled_plugins", [])), + allowed_plugins=tuple(allowed_plugins) if allowed_plugins is not None else None, + allowed_mcp_servers=tuple(allowed_mcp) if allowed_mcp is not None else None, + network_policy=raw.get("network_policy"), + session=_normalize_session_settings(raw.get("session")), + ) + + +def _normalize_delegation(raw: dict[str, Any] | None) -> DelegationConfig: + """Normalize delegation config.""" + if not raw: + return DelegationConfig() + + teams_raw = raw.get("teams", {}) + projects_raw = raw.get("projects", {}) + + return DelegationConfig( + teams=TeamsDelegation( + allow_additional_plugins=tuple(teams_raw.get("allow_additional_plugins", [])), + allow_additional_mcp_servers=tuple(teams_raw.get("allow_additional_mcp_servers", [])), + ), + projects=ProjectsDelegation( + inherit_team_delegation=bool(projects_raw.get("inherit_team_delegation", False)), + ), + ) + + +def _normalize_marketplace(name: str, raw: dict[str, Any]) -> MarketplaceConfig: + """Normalize a single marketplace config.""" + return MarketplaceConfig( + name=name, + source=raw.get("source", ""), + owner=raw.get("owner"), 
+ repo=raw.get("repo"), + branch=raw.get("branch"), + url=raw.get("url"), + host=raw.get("host"), + path=raw.get("path"), + headers=dict(raw.get("headers", {})), + ) + + +def normalize_org_config(raw: dict[str, Any]) -> NormalizedOrgConfig: + """Normalize a raw organization config dict to typed model. + + Args: + raw: Raw org config dict from JSON/cache. + + Returns: + NormalizedOrgConfig with typed fields. + """ + org_raw = raw.get("organization", {}) + org_info = OrganizationInfo(name=org_raw.get("name", "")) + + profiles_raw = raw.get("profiles", {}) + profiles = {name: _normalize_team_config(name, config) for name, config in profiles_raw.items()} + + marketplaces_raw = raw.get("marketplaces", {}) + marketplaces = { + name: _normalize_marketplace(name, config) for name, config in marketplaces_raw.items() + } + + return NormalizedOrgConfig( + organization=org_info, + security=_normalize_security(raw.get("security")), + defaults=_normalize_defaults(raw.get("defaults")), + delegation=_normalize_delegation(raw.get("delegation")), + profiles=profiles, + marketplaces=marketplaces, + ) + + +def normalize_project_config(raw: dict[str, Any] | None) -> NormalizedProjectConfig | None: + """Normalize a raw project config dict to typed model. + + Args: + raw: Raw project config dict from .scc.yaml, or None. + + Returns: + NormalizedProjectConfig with typed fields, or None if no config. 
+ """ + if raw is None: + return None + + mcp_servers = tuple(_normalize_mcp_server(s) for s in raw.get("additional_mcp_servers", [])) + + return NormalizedProjectConfig( + additional_plugins=tuple(raw.get("additional_plugins", [])), + additional_mcp_servers=mcp_servers, + session=_normalize_session_settings(raw.get("session")), + ) diff --git a/src/scc_cli/adapters/local_config_store.py b/src/scc_cli/adapters/local_config_store.py new file mode 100644 index 0000000..4e67a81 --- /dev/null +++ b/src/scc_cli/adapters/local_config_store.py @@ -0,0 +1,59 @@ +"""Local config store adapter - implements ConfigStore using local filesystem.""" + +from __future__ import annotations + +from pathlib import Path + +from scc_cli import config as config_module +from scc_cli.adapters.config_normalizer import ( + normalize_org_config, + normalize_project_config, + normalize_user_config, +) +from scc_cli.ports.config_models import ( + NormalizedOrgConfig, + NormalizedProjectConfig, + NormalizedUserConfig, +) +from scc_cli.ports.config_store import ConfigStore + + +class LocalConfigStore: + """Config store implementation using local filesystem. + + Wraps the existing config module and normalizes results to typed models. 
+ """ + + def load_user_config(self) -> NormalizedUserConfig: + """Load and normalize user configuration.""" + raw = config_module.load_user_config() + return normalize_user_config(raw) + + def load_org_config(self) -> NormalizedOrgConfig | None: + """Load and normalize cached organization configuration.""" + raw = config_module.load_cached_org_config() + if raw is None: + return None + return normalize_org_config(raw) + + def load_project_config(self, workspace_path: Path) -> NormalizedProjectConfig | None: + """Load and normalize project configuration from workspace.""" + raw = config_module.read_project_config(workspace_path) + return normalize_project_config(raw) + + def get_selected_profile(self) -> str | None: + """Get the currently selected profile/team name.""" + return config_module.get_selected_profile() + + def is_standalone_mode(self) -> bool: + """Check if running in standalone (solo) mode.""" + return config_module.is_standalone_mode() + + def is_organization_configured(self) -> bool: + """Check if organization source is configured.""" + return config_module.is_organization_configured() + + +def _assert_implements_protocol() -> None: + """Type check that LocalConfigStore implements ConfigStore.""" + _: ConfigStore = LocalConfigStore() diff --git a/src/scc_cli/adapters/local_dependency_installer.py b/src/scc_cli/adapters/local_dependency_installer.py new file mode 100644 index 0000000..31b9fc8 --- /dev/null +++ b/src/scc_cli/adapters/local_dependency_installer.py @@ -0,0 +1,32 @@ +"""Local dependency installer adapter.""" + +from __future__ import annotations + +from pathlib import Path + +from scc_cli import deps +from scc_cli.ports.dependency_installer import DependencyInstaller, DependencyInstallResult + + +class LocalDependencyInstaller(DependencyInstaller): + """Install dependencies using local package managers.""" + + def install(self, workspace: Path) -> DependencyInstallResult: + """Install dependencies for a workspace. 
+ + Args: + workspace: Workspace directory to inspect and install dependencies. + + Returns: + Result describing whether installation was attempted and succeeded. + """ + package_manager = deps.detect_package_manager(workspace) + if package_manager is None: + return DependencyInstallResult(attempted=False, success=False) + + success = deps.install_dependencies(workspace, package_manager, strict=False) + return DependencyInstallResult( + attempted=True, + success=success, + package_manager=package_manager, + ) diff --git a/src/scc_cli/adapters/local_doctor_runner.py b/src/scc_cli/adapters/local_doctor_runner.py new file mode 100644 index 0000000..51899e0 --- /dev/null +++ b/src/scc_cli/adapters/local_doctor_runner.py @@ -0,0 +1,17 @@ +"""Local adapter for running doctor checks.""" + +from __future__ import annotations + +from pathlib import Path + +from scc_cli.doctor.core import run_doctor +from scc_cli.doctor.types import DoctorResult +from scc_cli.ports.doctor_runner import DoctorRunner + + +class LocalDoctorRunner(DoctorRunner): + """Adapter that executes doctor checks locally.""" + + def run(self, workspace: str | None = None) -> DoctorResult: + workspace_path = Path(workspace) if workspace else None + return run_doctor(workspace_path) diff --git a/src/scc_cli/adapters/local_git_client.py b/src/scc_cli/adapters/local_git_client.py index cb2db0e..f5c1ad0 100644 --- a/src/scc_cli/adapters/local_git_client.py +++ b/src/scc_cli/adapters/local_git_client.py @@ -7,6 +7,7 @@ from scc_cli.ports.git_client import GitClient from scc_cli.services.git import branch as git_branch from scc_cli.services.git import core as git_core +from scc_cli.services.git import worktree as git_worktree class LocalGitClient(GitClient): @@ -35,3 +36,49 @@ def detect_workspace_root(self, start_dir: Path) -> tuple[Path | None, Path]: def get_current_branch(self, path: Path) -> str | None: return git_branch.get_current_branch(path) + + def has_commits(self, path: Path) -> bool: + return 
git_core.has_commits(path) + + def has_remote(self, path: Path) -> bool: + return git_core.has_remote(path) + + def get_default_branch(self, path: Path) -> str: + return git_branch.get_default_branch(path) + + def list_worktrees(self, path: Path) -> list[git_worktree.WorktreeInfo]: + return git_worktree.get_worktrees_data(path) + + def get_worktree_status(self, path: Path) -> tuple[int, int, int, bool]: + return git_worktree.get_worktree_status(str(path)) + + def find_worktree_by_query( + self, + path: Path, + query: str, + ) -> tuple[git_worktree.WorktreeInfo | None, list[git_worktree.WorktreeInfo]]: + return git_worktree.find_worktree_by_query(path, query) + + def find_main_worktree(self, path: Path) -> git_worktree.WorktreeInfo | None: + return git_worktree.find_main_worktree(path) + + def list_branches_without_worktrees(self, path: Path) -> list[str]: + return git_branch.list_branches_without_worktrees(path) + + def fetch_branch(self, path: Path, branch: str) -> None: + git_worktree.fetch_branch(path, branch) + + def add_worktree( + self, + repo_path: Path, + worktree_path: Path, + branch_name: str, + base_branch: str, + ) -> None: + git_worktree.add_worktree(repo_path, worktree_path, branch_name, base_branch) + + def remove_worktree(self, repo_path: Path, worktree_path: Path, *, force: bool) -> None: + git_worktree.remove_worktree(repo_path, worktree_path, force=force) + + def prune_worktrees(self, repo_path: Path) -> None: + git_worktree.prune_worktrees(repo_path) diff --git a/src/scc_cli/adapters/local_platform_probe.py b/src/scc_cli/adapters/local_platform_probe.py new file mode 100644 index 0000000..006dc15 --- /dev/null +++ b/src/scc_cli/adapters/local_platform_probe.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from pathlib import Path + +from scc_cli import platform as platform_module +from scc_cli.ports.platform_probe import PlatformProbe + + +class LocalPlatformProbe(PlatformProbe): + """Platform probe using local system checks.""" + + 
def is_wsl2(self) -> bool: + """Return True when running inside WSL2.""" + return platform_module.is_wsl2() + + def check_path_performance(self, path: Path) -> tuple[bool, str | None]: + """Return whether a path is optimal and an optional warning message.""" + return platform_module.check_path_performance(path) diff --git a/src/scc_cli/adapters/personal_profile_service_local.py b/src/scc_cli/adapters/personal_profile_service_local.py new file mode 100644 index 0000000..895202e --- /dev/null +++ b/src/scc_cli/adapters/personal_profile_service_local.py @@ -0,0 +1,60 @@ +"""Local adapter for personal profile operations.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from scc_cli.core import personal_profiles +from scc_cli.core.personal_profiles import PersonalProfile +from scc_cli.ports.personal_profile_service import PersonalProfileService + + +class LocalPersonalProfileService(PersonalProfileService): + """Filesystem-backed personal profile adapter.""" + + def load_personal_profile_with_status( + self, workspace: Path + ) -> tuple[PersonalProfile | None, bool]: + return personal_profiles.load_personal_profile_with_status(workspace) + + def detect_drift(self, workspace: Path) -> bool: + return personal_profiles.detect_drift(workspace) + + def workspace_has_overrides(self, workspace: Path) -> bool: + return personal_profiles.workspace_has_overrides(workspace) + + def load_workspace_settings_with_status( + self, workspace: Path + ) -> tuple[dict[str, Any] | None, bool]: + return personal_profiles.load_workspace_settings_with_status(workspace) + + def load_workspace_mcp_with_status(self, workspace: Path) -> tuple[dict[str, Any] | None, bool]: + return personal_profiles.load_workspace_mcp_with_status(workspace) + + def merge_personal_settings( + self, + workspace: Path, + existing: dict[str, Any], + personal: dict[str, Any], + ) -> dict[str, Any]: + return personal_profiles.merge_personal_settings(workspace, existing, 
personal) + + def merge_personal_mcp( + self, existing: dict[str, Any], personal: dict[str, Any] + ) -> dict[str, Any]: + return personal_profiles.merge_personal_mcp(existing, personal) + + def write_workspace_settings(self, workspace: Path, data: dict[str, Any]) -> None: + personal_profiles.write_workspace_settings(workspace, data) + + def write_workspace_mcp(self, workspace: Path, data: dict[str, Any]) -> None: + personal_profiles.write_workspace_mcp(workspace, data) + + def save_applied_state( + self, workspace: Path, profile_id: str, fingerprints: dict[str, str] + ) -> None: + personal_profiles.save_applied_state(workspace, profile_id, fingerprints) + + def compute_fingerprints(self, workspace: Path) -> dict[str, str]: + return personal_profiles.compute_fingerprints(workspace) diff --git a/src/scc_cli/adapters/session_store_json.py b/src/scc_cli/adapters/session_store_json.py new file mode 100644 index 0000000..e367021 --- /dev/null +++ b/src/scc_cli/adapters/session_store_json.py @@ -0,0 +1,84 @@ +"""JSON-backed session store adapter.""" + +from __future__ import annotations + +import json +from contextlib import AbstractContextManager +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, cast + +from scc_cli import config +from scc_cli.ports.filesystem import Filesystem +from scc_cli.ports.session_models import SessionRecord +from scc_cli.utils.locks import file_lock, lock_path + + +@dataclass(frozen=True) +class JsonSessionStore: + """Persist session records in the legacy JSON schema. + + Invariants: + - JSON payload remains `{ "sessions": [...] }` with stable field names. + - Legacy migrations run on load ("base" team -> None). + + Args: + filesystem: Filesystem adapter used to read/write session data. + sessions_file: Path to the sessions JSON file. + lock_file: Path to the lock file guarding sessions access. 
+ """ + + filesystem: Filesystem + sessions_file: Path = config.SESSIONS_FILE + lock_file: Path = field(default_factory=lambda: lock_path("sessions")) + + def lock(self) -> AbstractContextManager[None]: + """Return an exclusive lock for session operations. + + Returns: + Context manager enforcing exclusive access to session data. + """ + return file_lock(self.lock_file) + + def load_sessions(self) -> list[SessionRecord]: + """Load sessions from disk. + + Returns: + List of session records, or an empty list on read errors. + """ + if not self.filesystem.exists(self.sessions_file): + return [] + + try: + data = json.loads(self.filesystem.read_text(self.sessions_file)) + sessions = cast(list[dict[str, Any]], data.get("sessions", [])) + return [SessionRecord.from_dict(item) for item in _migrate_legacy_sessions(sessions)] + except (OSError, json.JSONDecodeError, TypeError): + return [] + + def save_sessions(self, sessions: list[SessionRecord]) -> None: + """Persist sessions to disk. + + Args: + sessions: Session records to store. + """ + payload = {"sessions": [record.to_dict() for record in sessions]} + self.filesystem.write_text(self.sessions_file, json.dumps(payload, indent=2)) + + +def _migrate_legacy_sessions(sessions: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Apply legacy migrations to session records. + + Invariants: + - "base" team sentinel becomes None for standalone sessions. + + Args: + sessions: Raw session dictionaries loaded from disk. + + Returns: + Migrated session dictionaries. 
+ """ + for session in sessions: + if session.get("team") == "base": + session["team"] = None + return sessions diff --git a/src/scc_cli/adapters/zip_archive_writer.py b/src/scc_cli/adapters/zip_archive_writer.py new file mode 100644 index 0000000..c07bd41 --- /dev/null +++ b/src/scc_cli/adapters/zip_archive_writer.py @@ -0,0 +1,15 @@ +"""Archive writer adapter using zipfile.""" + +from __future__ import annotations + +import zipfile + +from scc_cli.ports.archive_writer import ArchiveWriter + + +class ZipArchiveWriter(ArchiveWriter): + """Archive writer implementation backed by zipfile.""" + + def write_manifest(self, output_path: str, manifest_json: str) -> None: + with zipfile.ZipFile(output_path, "w", zipfile.ZIP_DEFLATED) as bundle: + bundle.writestr("manifest.json", manifest_json) diff --git a/src/scc_cli/application/compute_effective_config.py b/src/scc_cli/application/compute_effective_config.py index 8d35ded..9380f62 100644 --- a/src/scc_cli/application/compute_effective_config.py +++ b/src/scc_cli/application/compute_effective_config.py @@ -9,6 +9,7 @@ from urllib.parse import urlparse from scc_cli import config as config_module +from scc_cli.core.enums import MCPServerType, RequestSource, TargetType if TYPE_CHECKING: pass @@ -29,9 +30,9 @@ class BlockedItem: """Tracks an item blocked by security pattern.""" item: str - blocked_by: str # The pattern that matched + blocked_by: str source: str # Always "org.security" - target_type: str = "plugin" # "plugin" | "mcp_server" + target_type: str = TargetType.PLUGIN @dataclass @@ -39,9 +40,9 @@ class DelegationDenied: """Tracks an addition denied due to delegation rules.""" item: str - requested_by: str # "team" | "project" + requested_by: str # RequestSource.TEAM or RequestSource.PROJECT reason: str - target_type: str = "plugin" # "plugin" | "mcp_server" + target_type: str = TargetType.PLUGIN @dataclass @@ -55,7 +56,7 @@ class MCPServer: """ name: str - type: str # "sse" | "stdio" | "http" + type: str # 
MCPServerType value url: str | None = None command: str | None = None args: list[str] | None = None @@ -401,7 +402,7 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=plugin, - requested_by="team", + requested_by=RequestSource.TEAM, reason=f"Team '{team_name}' not allowed to add plugins", ) ) @@ -411,7 +412,7 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=plugin, - requested_by="team", + requested_by=RequestSource.TEAM, reason="Plugin not allowed by defaults.allowed_plugins", ) ) @@ -445,7 +446,7 @@ def compute_effective_config( item=server_name or server_url, blocked_by=blocked_by, source="org.security", - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue @@ -454,9 +455,9 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=server_name, - requested_by="team", + requested_by=RequestSource.TEAM, reason=f"Team '{team_name}' not allowed to add MCP servers", - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue @@ -465,14 +466,14 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=server_name or server_url, - requested_by="team", + requested_by=RequestSource.TEAM, reason="MCP server not allowed by defaults.allowed_mcp_servers", - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue - if server_dict.get("type") == "stdio": + if server_dict.get("type") == MCPServerType.STDIO: stdio_result = validate_stdio_server(server_dict, org_config) if stdio_result.blocked: result.blocked_items.append( @@ -480,14 +481,14 @@ def compute_effective_config( item=server_name, blocked_by=stdio_result.reason, source="org.security", - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue mcp_server = MCPServer( name=server_name, - type=server_dict.get("type", "sse"), + type=server_dict.get("type", MCPServerType.SSE), url=server_url or None, 
command=server_dict.get("command"), args=server_dict.get("args"), @@ -530,7 +531,7 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=plugin, - requested_by="project", + requested_by=RequestSource.PROJECT, reason=delegation_reason, ) ) @@ -540,7 +541,7 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=plugin, - requested_by="project", + requested_by=RequestSource.PROJECT, reason="Plugin not allowed by defaults.allowed_plugins", ) ) @@ -572,7 +573,7 @@ def compute_effective_config( item=server_name or server_url, blocked_by=blocked_by, source="org.security", - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue @@ -581,9 +582,9 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=server_name, - requested_by="project", + requested_by=RequestSource.PROJECT, reason=delegation_reason, - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue @@ -592,14 +593,14 @@ def compute_effective_config( result.denied_additions.append( DelegationDenied( item=server_name or server_url, - requested_by="project", + requested_by=RequestSource.PROJECT, reason="MCP server not allowed by defaults.allowed_mcp_servers", - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue - if server_dict.get("type") == "stdio": + if server_dict.get("type") == MCPServerType.STDIO: stdio_result = validate_stdio_server(server_dict, org_config) if stdio_result.blocked: result.blocked_items.append( @@ -607,14 +608,14 @@ def compute_effective_config( item=server_name, blocked_by=stdio_result.reason, source="org.security", - target_type="mcp_server", + target_type=TargetType.MCP_SERVER, ) ) continue mcp_server = MCPServer( name=server_name, - type=server_dict.get("type", "sse"), + type=server_dict.get("type", MCPServerType.SSE), url=server_url or None, command=server_dict.get("command"), args=server_dict.get("args"), diff --git 
a/src/scc_cli/application/dashboard.py b/src/scc_cli/application/dashboard.py index f3ba55e..65e8cde 100644 --- a/src/scc_cli/application/dashboard.py +++ b/src/scc_cli/application/dashboard.py @@ -6,9 +6,11 @@ from dataclasses import dataclass, replace from datetime import datetime from enum import Enum, auto -from typing import Any, TypeAlias +from typing import TypeAlias +from scc_cli.application.sessions import SessionService from scc_cli.docker.core import ContainerInfo +from scc_cli.ports.session_models import SessionFilter, SessionSummary from scc_cli.services.git.worktree import WorktreeInfo @@ -71,7 +73,7 @@ class StatusItem: description: str action: StatusAction | None = None action_tab: DashboardTab | None = None - session: dict[str, Any] | None = None + session: SessionSummary | None = None @dataclass(frozen=True) @@ -99,7 +101,7 @@ class SessionItem: label: str description: str - session: dict[str, Any] + session: SessionSummary @dataclass(frozen=True) @@ -200,7 +202,7 @@ class SessionResumeEvent: """Event for resuming a session.""" return_to: DashboardTab - session: dict[str, Any] + session: SessionSummary @dataclass(frozen=True) @@ -302,7 +304,7 @@ class SessionActionMenuEvent: """Event for the session action menu.""" return_to: DashboardTab - session: dict[str, Any] + session: SessionSummary @dataclass(frozen=True) @@ -644,12 +646,17 @@ def _apply_container_message( return DashboardFlowOutcome(state=next_state) -def load_status_tab_data(refresh_at: datetime | None = None) -> DashboardTabData: +def load_status_tab_data( + refresh_at: datetime | None = None, + *, + session_service: SessionService, + format_last_used: Callable[[str], str] | None = None, +) -> DashboardTabData: """Load Status tab data showing quick actions and context.""" import os from pathlib import Path - from scc_cli import config, sessions + from scc_cli import config from scc_cli.core.personal_profiles import get_profile_status from scc_cli.docker import core as docker_core @@ 
-666,21 +673,18 @@ def load_status_tab_data(refresh_at: datetime | None = None) -> DashboardTabData ) try: - recent_session = sessions.get_most_recent() + recent_result = session_service.list_recent(SessionFilter(limit=1, include_all=True)) + recent_session = recent_result.sessions[0] if recent_result.sessions else None if recent_session: - workspace = recent_session.get("workspace", "") + workspace = recent_session.workspace workspace_name = workspace.split("/")[-1] if workspace else "unknown" - last_used = recent_session.get("last_used") + last_used = recent_session.last_used last_used_display = "" if last_used: - try: - dt = datetime.fromisoformat(last_used) - last_used_display = sessions.format_relative_time(dt) - except ValueError: - last_used_display = last_used + last_used_display = format_last_used(last_used) if format_last_used else last_used desc_parts = [workspace_name] - if recent_session.get("branch"): - desc_parts.append(str(recent_session.get("branch"))) + if recent_session.branch: + desc_parts.append(str(recent_session.branch)) if last_used_display: desc_parts.append(last_used_display) items.append( @@ -717,8 +721,8 @@ def load_status_tab_data(refresh_at: datetime | None = None) -> DashboardTabData ) try: - workspace = Path(os.getcwd()) - profile_status = get_profile_status(workspace) + workspace_path = Path(os.getcwd()) + profile_status = get_profile_status(workspace_path) if profile_status.exists: if profile_status.import_count > 0: @@ -865,29 +869,33 @@ def load_containers_tab_data() -> DashboardTabData: ) -def load_sessions_tab_data() -> DashboardTabData: +def load_sessions_tab_data( + *, + session_service: SessionService, + format_last_used: Callable[[str], str] | None = None, +) -> DashboardTabData: """Load Sessions tab data showing recent Claude sessions.""" - from scc_cli import sessions - items: list[DashboardItem] = [] try: - recent = sessions.list_recent(limit=20) + recent_result = session_service.list_recent(SessionFilter(limit=20, 
include_all=True)) + recent = recent_result.sessions for session in recent: - name = session.get("name", "Unnamed") desc_parts = [] - if session.get("team"): - desc_parts.append(str(session["team"])) - if session.get("branch"): - desc_parts.append(str(session["branch"])) - if session.get("last_used"): - desc_parts.append(str(session["last_used"])) + if session.team: + desc_parts.append(str(session.team)) + if session.branch: + desc_parts.append(str(session.branch)) + if session.last_used: + desc_parts.append( + format_last_used(session.last_used) if format_last_used else session.last_used + ) items.append( SessionItem( - label=name, + label=session.name or "Unnamed", description=" · ".join(desc_parts), session=session, ) @@ -998,12 +1006,23 @@ def load_worktrees_tab_data(verbose: bool = False) -> DashboardTabData: ) -def load_all_tab_data(verbose_worktrees: bool = False) -> Mapping[DashboardTab, DashboardTabData]: +def load_all_tab_data( + *, + session_service: SessionService, + format_last_used: Callable[[str], str] | None = None, + verbose_worktrees: bool = False, +) -> Mapping[DashboardTab, DashboardTabData]: """Load data for all dashboard tabs.""" return { - DashboardTab.STATUS: load_status_tab_data(), + DashboardTab.STATUS: load_status_tab_data( + session_service=session_service, + format_last_used=format_last_used, + ), DashboardTab.CONTAINERS: load_containers_tab_data(), - DashboardTab.SESSIONS: load_sessions_tab_data(), + DashboardTab.SESSIONS: load_sessions_tab_data( + session_service=session_service, + format_last_used=format_last_used, + ), DashboardTab.WORKTREES: load_worktrees_tab_data(verbose=verbose_worktrees), } diff --git a/src/scc_cli/application/interaction_requests.py b/src/scc_cli/application/interaction_requests.py new file mode 100644 index 0000000..670c05a --- /dev/null +++ b/src/scc_cli/application/interaction_requests.py @@ -0,0 +1,139 @@ +"""Interaction request models for use case/UI boundaries.""" + +from __future__ import annotations + 
+from collections.abc import Sequence +from dataclasses import dataclass +from typing import Generic, TypeVar + +BACK_ACTION_ID = "back" +BACK_ACTION_LABEL = "Back" +BACK_ACTION_HOTKEY = "esc" + +CONFIRM_ACTION_ID = "confirm" +CONFIRM_ACTION_LABEL = "Yes" +CONFIRM_ACTION_HOTKEY = "y" + +CANCEL_ACTION_ID = "cancel" +CANCEL_ACTION_LABEL = "No" +CANCEL_ACTION_HOTKEY = "n" + +T = TypeVar("T", covariant=True) + + +@dataclass(frozen=True) +class SelectOption(Generic[T]): + """Selectable option for a SelectRequest. + + Invariants: + - `option_id`, `label`, and `hotkey` remain stable for adapters. + + Args: + option_id: Stable identifier for the option. + label: Display label for UI adapters. + hotkey: Optional hotkey label shown to users. + description: Optional helper text for the option. + value: Optional payload returned when selected. + """ + + option_id: str + label: str + hotkey: str | None = None + description: str | None = None + value: T | None = None + + +@dataclass(frozen=True) +class ConfirmRequest: + """Request a yes/no confirmation at the UI edge. + + Invariants: + - Confirm/cancel/back identifiers, labels, and hotkeys stay stable. + + Args: + request_id: Stable identifier for the confirmation request. + prompt: Prompt text for the confirmation. + confirm_id: Stable identifier for the confirm action. + confirm_label: Display label for the confirm action. + confirm_hotkey: Hotkey label for the confirm action. + cancel_id: Stable identifier for the cancel action. + cancel_label: Display label for the cancel action. + cancel_hotkey: Hotkey label for the cancel action. + allow_back: Whether the UI may return the back action. + back_id: Stable identifier for the back action. + back_label: Display label for the back action. + back_hotkey: Hotkey label for the back action. 
+ """ + + request_id: str + prompt: str + confirm_id: str = CONFIRM_ACTION_ID + confirm_label: str = CONFIRM_ACTION_LABEL + confirm_hotkey: str = CONFIRM_ACTION_HOTKEY + cancel_id: str = CANCEL_ACTION_ID + cancel_label: str = CANCEL_ACTION_LABEL + cancel_hotkey: str = CANCEL_ACTION_HOTKEY + allow_back: bool = False + back_id: str = BACK_ACTION_ID + back_label: str = BACK_ACTION_LABEL + back_hotkey: str = BACK_ACTION_HOTKEY + + +@dataclass(frozen=True) +class SelectRequest(Generic[T]): + """Request that the user selects from a list of options. + + Invariants: + - Option identifiers, labels, and hotkeys stay stable. + - Back action metadata stays stable when enabled. + + Args: + request_id: Stable identifier for the selection request. + title: Title displayed above the selection list. + options: Sequence of selection options. + subtitle: Optional subtitle for context. + allow_back: Whether the UI may return the back action. + back_id: Stable identifier for the back action. + back_label: Display label for the back action. + back_hotkey: Hotkey label for the back action. + """ + + request_id: str + title: str + options: Sequence[SelectOption[T]] + subtitle: str | None = None + allow_back: bool = False + back_id: str = BACK_ACTION_ID + back_label: str = BACK_ACTION_LABEL + back_hotkey: str = BACK_ACTION_HOTKEY + + +@dataclass(frozen=True) +class InputRequest: + """Request text input from the user. + + Invariants: + - Back action metadata stays stable when enabled. + + Args: + request_id: Stable identifier for the input request. + prompt: Prompt text for the input. + default: Optional default value. + placeholder: Optional placeholder for UI adapters. + allow_back: Whether the UI may return the back action. + back_id: Stable identifier for the back action. + back_label: Display label for the back action. + back_hotkey: Hotkey label for the back action. 
+ """ + + request_id: str + prompt: str + default: str | None = None + placeholder: str | None = None + allow_back: bool = False + back_id: str = BACK_ACTION_ID + back_label: str = BACK_ACTION_LABEL + back_hotkey: str = BACK_ACTION_HOTKEY + + +InteractionRequest = ConfirmRequest | SelectRequest[object] | InputRequest diff --git a/src/scc_cli/application/launch/__init__.py b/src/scc_cli/application/launch/__init__.py new file mode 100644 index 0000000..1292f18 --- /dev/null +++ b/src/scc_cli/application/launch/__init__.py @@ -0,0 +1,153 @@ +"""Launch-related application use cases.""" + +from scc_cli.application.launch.apply_personal_profile import ( + ApplyPersonalProfileConfirmation, + ApplyPersonalProfileDependencies, + ApplyPersonalProfileRequest, + ApplyPersonalProfileResult, + apply_personal_profile, +) +from scc_cli.application.launch.finalize_launch import ( + FinalizeLaunchDependencies, + FinalizeLaunchPlan, + FinalizeLaunchResult, + finalize_launch, +) +from scc_cli.application.launch.output_models import ( + LaunchInfoEvent, + LaunchOutputEvent, + LaunchOutputKind, + LaunchOutputViewModel, + LaunchSuccessEvent, + LaunchWarningEvent, +) +from scc_cli.application.launch.prepare_launch_plan import ( + PrepareLaunchPlanDependencies, + PrepareLaunchPlanRequest, + PrepareLaunchPlanResult, + prepare_launch_plan, +) +from scc_cli.application.launch.select_session import ( + SelectSessionDependencies, + SelectSessionRequest, + SelectSessionResult, + SessionSelectionItem, + SessionSelectionMode, + SessionSelectionPrompt, + SessionSelectionWarningOutcome, + select_session, +) +from scc_cli.application.launch.start_wizard import ( + BackRequested, + CancelRequested, + CwdContext, + QuickResumeDismissed, + QuickResumeOption, + QuickResumeSelected, + QuickResumeViewModel, + SessionNameEntered, + StartWizardConfig, + StartWizardContext, + StartWizardOutcome, + StartWizardProgress, + StartWizardPrompt, + StartWizardState, + StartWizardStep, + TeamOption, + TeamRepoOption, 
+ TeamRepoPickerViewModel, + TeamSelected, + TeamSelectionViewModel, + WorkspacePickerViewModel, + WorkspaceSelected, + WorkspaceSource, + WorkspaceSourceChosen, + WorkspaceSourceOption, + WorkspaceSourceViewModel, + WorkspaceSummary, + WorktreeSelected, + apply_start_wizard_event, + build_clone_repo_prompt, + build_confirm_worktree_prompt, + build_cross_team_resume_prompt, + build_custom_workspace_prompt, + build_quick_resume_prompt, + build_session_name_prompt, + build_team_repo_prompt, + build_team_selection_prompt, + build_workspace_picker_prompt, + build_workspace_source_prompt, + build_worktree_name_prompt, + initialize_start_wizard, +) + +__all__ = [ + "ApplyPersonalProfileConfirmation", + "ApplyPersonalProfileDependencies", + "ApplyPersonalProfileRequest", + "ApplyPersonalProfileResult", + "BackRequested", + "CancelRequested", + "CwdContext", + "FinalizeLaunchDependencies", + "FinalizeLaunchPlan", + "FinalizeLaunchResult", + "PrepareLaunchPlanDependencies", + "PrepareLaunchPlanRequest", + "PrepareLaunchPlanResult", + "QuickResumeDismissed", + "QuickResumeSelected", + "SessionNameEntered", + "SelectSessionDependencies", + "SelectSessionRequest", + "SelectSessionResult", + "SessionSelectionItem", + "SessionSelectionMode", + "SessionSelectionPrompt", + "SessionSelectionWarningOutcome", + "QuickResumeOption", + "QuickResumeViewModel", + "StartWizardConfig", + "StartWizardContext", + "StartWizardOutcome", + "StartWizardProgress", + "StartWizardPrompt", + "StartWizardState", + "StartWizardStep", + "TeamOption", + "TeamRepoOption", + "TeamRepoPickerViewModel", + "TeamSelectionViewModel", + "TeamSelected", + "WorkspacePickerViewModel", + "WorkspaceSelected", + "WorkspaceSource", + "WorkspaceSourceChosen", + "WorkspaceSourceOption", + "WorkspaceSourceViewModel", + "WorkspaceSummary", + "WorktreeSelected", + "build_quick_resume_prompt", + "build_team_repo_prompt", + "build_team_selection_prompt", + "build_workspace_picker_prompt", + "build_workspace_source_prompt", + 
"build_confirm_worktree_prompt", + "build_cross_team_resume_prompt", + "build_worktree_name_prompt", + "build_session_name_prompt", + "build_custom_workspace_prompt", + "build_clone_repo_prompt", + "LaunchInfoEvent", + "LaunchOutputEvent", + "LaunchOutputKind", + "LaunchOutputViewModel", + "LaunchSuccessEvent", + "LaunchWarningEvent", + "apply_personal_profile", + "apply_start_wizard_event", + "finalize_launch", + "initialize_start_wizard", + "prepare_launch_plan", + "select_session", +] diff --git a/src/scc_cli/application/launch/apply_personal_profile.py b/src/scc_cli/application/launch/apply_personal_profile.py new file mode 100644 index 0000000..c85a2ed --- /dev/null +++ b/src/scc_cli/application/launch/apply_personal_profile.py @@ -0,0 +1,186 @@ +"""Apply personal profiles to workspace settings.""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path + +from scc_cli.application.interaction_requests import ConfirmRequest +from scc_cli.ports.personal_profile_service import PersonalProfileService + + +@dataclass(frozen=True) +class ApplyPersonalProfileDependencies: + """Dependencies for applying personal profiles. + + Invariants: + - Personal profile operations are delegated to the profile service. + + Args: + profile_service: Port for personal profile operations. + """ + + profile_service: PersonalProfileService + + +@dataclass(frozen=True) +class ApplyPersonalProfileRequest: + """Inputs for applying a personal profile to a workspace. + + Invariants: + - Confirmation prompts are returned as interaction requests. + - Applied profile content mirrors existing merge behavior. + + Args: + workspace_path: Path to the workspace. + interactive_allowed: Whether the UI may prompt for confirmation. + confirm_apply: Optional confirmation response when prompted. 
+ """ + + workspace_path: Path + interactive_allowed: bool + confirm_apply: bool | None = None + + +@dataclass(frozen=True) +class ApplyPersonalProfileConfirmation: + """Confirmation request returned when drift requires user input. + + Invariants: + - Prompt text remains stable for CLI/UI adapters. + + Args: + request: ConfirmRequest describing the prompt. + profile_id: Identifier for the profile being applied. + default_response: Default response value for the confirmation. + message: Optional notice to render before prompting. + """ + + request: ConfirmRequest + profile_id: str + default_response: bool + message: str | None + + +@dataclass(frozen=True) +class ApplyPersonalProfileResult: + """Result of applying (or skipping) a personal profile. + + Invariants: + - Applied state is saved only when settings are written. + - Messages mirror existing CLI output. + + Args: + profile_id: Identifier for the profile, if one exists. + applied: Whether the profile was applied. + message: Optional message to render at the edge. + """ + + profile_id: str | None + applied: bool + message: str | None = None + + +ApplyPersonalProfileOutcome = ApplyPersonalProfileConfirmation | ApplyPersonalProfileResult + + +def apply_personal_profile( + request: ApplyPersonalProfileRequest, + *, + dependencies: ApplyPersonalProfileDependencies, +) -> ApplyPersonalProfileOutcome: + """Apply personal profile data to a workspace without prompting. + + Invariants: + - Drift confirmation mirrors existing CLI flow. + - Invalid JSON conditions skip applying the profile. + + Args: + request: ApplyPersonalProfileRequest inputs. + dependencies: Ports required to load and write profiles. + + Returns: + Confirmation request or result describing applied state. 
+ """ + profile, corrupt = dependencies.profile_service.load_personal_profile_with_status( + request.workspace_path + ) + if corrupt: + return ApplyPersonalProfileResult( + profile_id=None, + applied=False, + message="[yellow]Personal profile is invalid JSON. Skipping.[/yellow]", + ) + if profile is None: + return ApplyPersonalProfileResult(profile_id=None, applied=False) + + drift = dependencies.profile_service.detect_drift(request.workspace_path) + if drift and not dependencies.profile_service.workspace_has_overrides(request.workspace_path): + drift = False + + if drift: + if not request.interactive_allowed: + return ApplyPersonalProfileResult( + profile_id=profile.profile_id, + applied=False, + message=( + "[yellow]Workspace overrides detected; personal profile not applied.[/yellow]" + ), + ) + if request.confirm_apply is None: + return ApplyPersonalProfileConfirmation( + request=ConfirmRequest( + request_id="apply-personal-profile", + prompt="Apply personal profile anyway?", + ), + profile_id=profile.profile_id, + default_response=False, + message="[yellow]Workspace overrides detected.[/yellow]", + ) + if request.confirm_apply is False: + return ApplyPersonalProfileResult(profile_id=profile.profile_id, applied=False) + + existing_settings, settings_invalid = ( + dependencies.profile_service.load_workspace_settings_with_status(request.workspace_path) + ) + existing_mcp, mcp_invalid = dependencies.profile_service.load_workspace_mcp_with_status( + request.workspace_path + ) + if settings_invalid: + return ApplyPersonalProfileResult( + profile_id=profile.profile_id, + applied=False, + message="[yellow]Invalid JSON in .claude/settings.local.json[/yellow]", + ) + if mcp_invalid: + return ApplyPersonalProfileResult( + profile_id=profile.profile_id, + applied=False, + message="[yellow]Invalid JSON in .mcp.json[/yellow]", + ) + + existing_settings = existing_settings or {} + existing_mcp = existing_mcp or {} + + merged_settings = 
dependencies.profile_service.merge_personal_settings( + request.workspace_path, + existing_settings, + profile.settings or {}, + ) + merged_mcp = dependencies.profile_service.merge_personal_mcp(existing_mcp, profile.mcp or {}) + + dependencies.profile_service.write_workspace_settings(request.workspace_path, merged_settings) + if profile.mcp: + dependencies.profile_service.write_workspace_mcp(request.workspace_path, merged_mcp) + + dependencies.profile_service.save_applied_state( + request.workspace_path, + profile.profile_id, + dependencies.profile_service.compute_fingerprints(request.workspace_path), + ) + + return ApplyPersonalProfileResult( + profile_id=profile.profile_id, + applied=True, + message="[green]Applied personal profile.[/green]", + ) diff --git a/src/scc_cli/application/launch/finalize_launch.py b/src/scc_cli/application/launch/finalize_launch.py new file mode 100644 index 0000000..7c5c89b --- /dev/null +++ b/src/scc_cli/application/launch/finalize_launch.py @@ -0,0 +1,39 @@ +"""Finalize launch use case for start flows.""" + +from __future__ import annotations + +from scc_cli.application.start_session import ( + StartSessionDependencies, + StartSessionPlan, + start_session, +) +from scc_cli.ports.models import SandboxHandle + +FinalizeLaunchDependencies = StartSessionDependencies +FinalizeLaunchPlan = StartSessionPlan +FinalizeLaunchResult = SandboxHandle + + +def finalize_launch( + plan: FinalizeLaunchPlan, + *, + dependencies: FinalizeLaunchDependencies, +) -> FinalizeLaunchResult: + """Finalize a prepared launch plan by starting the sandbox runtime. + + Invariants: + - Delegates to the existing start session execution to preserve behavior. + - Does not perform any CLI output or prompting. + + Args: + plan: Prepared launch plan ready to execute. + dependencies: Ports and collaborators required to run the sandbox. + + Returns: + SandboxHandle for the launched session. + + Raises: + SCCError: Propagated from sandbox runtime execution failures. 
+ ValueError: Raised if the plan is missing a sandbox specification. + """ + return start_session(plan, dependencies=dependencies) diff --git a/src/scc_cli/application/launch/output_models.py b/src/scc_cli/application/launch/output_models.py new file mode 100644 index 0000000..2a9d517 --- /dev/null +++ b/src/scc_cli/application/launch/output_models.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +from collections.abc import Sequence +from dataclasses import dataclass +from enum import Enum, auto +from typing import TypeAlias + +from scc_cli.application.sync_marketplace import SyncResult + + +class LaunchOutputKind(Enum): + """Kinds of output events emitted by launch flows.""" + + INFO = auto() + WARNING = auto() + SUCCESS = auto() + + +@dataclass(frozen=True) +class LaunchInfoEvent: + """Informational output from the launch flow. + + Invariants: + - message preserves existing launch messaging text. + + Args: + message: Human-facing informational message. + """ + + message: str + + +@dataclass(frozen=True) +class LaunchWarningEvent: + """Warning output from the launch flow. + + Invariants: + - message preserves existing launch warning text. + + Args: + message: Human-facing warning message. + """ + + message: str + + +@dataclass(frozen=True) +class LaunchSuccessEvent: + """Success output from the launch flow. + + Invariants: + - message preserves existing launch success text. + + Args: + message: Human-facing success message. + """ + + message: str + + +LaunchOutputEvent: TypeAlias = LaunchInfoEvent | LaunchWarningEvent | LaunchSuccessEvent + + +@dataclass(frozen=True) +class LaunchOutputViewModel: + """View model for launch output events. + + Invariants: + - events remain ordered for deterministic rendering. + - sync results match the existing marketplace sync output. + + Args: + events: Ordered output events describing launch progress. + sync_result: Marketplace sync result payload, if available. + sync_error_message: Sync error message, if any. 
+ """ + + events: Sequence[LaunchOutputEvent] + sync_result: SyncResult | None = None + sync_error_message: str | None = None diff --git a/src/scc_cli/application/launch/prepare_launch_plan.py b/src/scc_cli/application/launch/prepare_launch_plan.py new file mode 100644 index 0000000..1df74c5 --- /dev/null +++ b/src/scc_cli/application/launch/prepare_launch_plan.py @@ -0,0 +1,39 @@ +"""Prepare launch plan use case for start flows.""" + +from __future__ import annotations + +from scc_cli.application.start_session import ( + StartSessionDependencies, + StartSessionPlan, + StartSessionRequest, + prepare_start_session, +) + +PrepareLaunchPlanDependencies = StartSessionDependencies +PrepareLaunchPlanRequest = StartSessionRequest +PrepareLaunchPlanResult = StartSessionPlan + + +def prepare_launch_plan( + request: PrepareLaunchPlanRequest, + *, + dependencies: PrepareLaunchPlanDependencies, +) -> PrepareLaunchPlanResult: + """Prepare the launch plan for a start session. + + Invariants: + - Delegates to the existing start session preparation to preserve behavior. + - Maintains deterministic output for the same request inputs. + + Args: + request: Input data needed to compute launch settings and sandbox specs. + dependencies: Ports and collaborators required to build the plan. + + Returns: + Prepared launch plan describing the computed settings and sandbox spec. + + Raises: + SCCError: Propagated from underlying plan preparation when a failure occurs. + ValueError: Propagated if the request is invalid for plan construction. 
+ """ + return prepare_start_session(request, dependencies=dependencies) diff --git a/src/scc_cli/application/launch/select_session.py b/src/scc_cli/application/launch/select_session.py new file mode 100644 index 0000000..4113317 --- /dev/null +++ b/src/scc_cli/application/launch/select_session.py @@ -0,0 +1,201 @@ +"""Session selection use case for launch flows.""" + +from __future__ import annotations + +from dataclasses import dataclass +from enum import Enum + +from scc_cli.application.interaction_requests import SelectOption, SelectRequest +from scc_cli.application.sessions import SessionService +from scc_cli.ports.session_models import SessionFilter, SessionSummary + + +class SessionSelectionMode(str, Enum): + """Selection modes for session retrieval.""" + + SELECT = "select" + RESUME = "resume" + + +@dataclass(frozen=True) +class SelectSessionDependencies: + """Dependencies for the SelectSession use case. + + Invariants: + - Session service must provide stable ordering of recent sessions. + + Args: + session_service: SessionService for loading session summaries. + """ + + session_service: SessionService + + +@dataclass(frozen=True) +class SelectSessionRequest: + """Inputs for selecting or resuming a session. + + Invariants: + - Selection uses the same filtering rules as existing CLI flows. + + Args: + mode: Selection mode (select vs resume). + team: Optional team filter. + include_all: Whether to include sessions from all teams. + limit: Max sessions to load. + selection: Selected item from a prior prompt. + """ + + mode: SessionSelectionMode + team: str | None + include_all: bool + limit: int + selection: SessionSelectionItem | None = None + + +@dataclass(frozen=True) +class SessionSelectionItem: + """Selectable session item for prompts. + + Invariants: + - item_id remains stable for UI adapters. + + Args: + item_id: Stable identifier for the item. + summary: Session summary payload. 
+ """ + + item_id: str + summary: SessionSummary + + +@dataclass(frozen=True) +class SessionSelectionPrompt: + """Prompt metadata returned to UI layers. + + Invariants: + - Request metadata stays stable for UI adapters. + + Args: + request: Selection request describing session options. + """ + + request: SelectRequest[SessionSelectionItem] + + +@dataclass(frozen=True) +class SessionSelectionWarning: + """Warning details returned when selection is unavailable. + + Invariants: + - Titles and messages remain stable for characterization tests. + + Args: + title: Warning title for rendering. + message: Warning message. + suggestion: Optional suggestion for next steps. + """ + + title: str + message: str + suggestion: str | None = None + + +@dataclass(frozen=True) +class SessionSelectionWarningOutcome: + """Warning outcome returned to the command/UI edge. + + Args: + warning: Warning metadata to render. + """ + + warning: SessionSelectionWarning + + +@dataclass(frozen=True) +class SelectSessionResult: + """Selected session result for launch flows. + + Invariants: + - Selected session summary must match a stored session record. + + Args: + session: Selected session summary. + """ + + session: SessionSummary + + +SessionSelectionOutcome = ( + SessionSelectionPrompt | SessionSelectionWarningOutcome | SelectSessionResult +) + + +def select_session( + request: SelectSessionRequest, + *, + dependencies: SelectSessionDependencies, +) -> SessionSelectionOutcome: + """Select or resume a session without performing UI prompts. + + Invariants: + - Session ordering mirrors the persistence layer ordering. + - Empty session lists return warnings instead of raising. + + Args: + request: Selection inputs and optional resolved selection. + dependencies: Use case dependencies. + + Returns: + Session selection prompt, warning, or selected session result. 
+ """ + if request.selection is not None: + return SelectSessionResult(session=request.selection.summary) + + summaries = _load_recent_sessions(request, dependencies.session_service) + if not summaries: + return SessionSelectionWarningOutcome( + SessionSelectionWarning( + title="No Recent Sessions", + message="No recent sessions found.", + ) + ) + + if request.mode is SessionSelectionMode.RESUME: + return SelectSessionResult(session=summaries[0]) + + return SessionSelectionPrompt(request=_build_select_request(summaries)) + + +def _load_recent_sessions( + request: SelectSessionRequest, + session_service: SessionService, +) -> list[SessionSummary]: + session_filter = SessionFilter( + limit=request.limit, + team=request.team, + include_all=request.include_all, + ) + result = session_service.list_recent(session_filter) + return result.sessions + + +def _build_select_request(summaries: list[SessionSummary]) -> SelectRequest[SessionSelectionItem]: + options = [] + for index, summary in enumerate(summaries, start=1): + item = SessionSelectionItem(item_id=f"session:{index}", summary=summary) + options.append( + SelectOption( + option_id=item.item_id, + label=summary.name, + description=summary.workspace, + value=item, + ) + ) + return SelectRequest( + request_id="select-session", + title="Select Session", + subtitle="Recent sessions", + options=options, + allow_back=False, + ) diff --git a/src/scc_cli/application/launch/start_wizard.py b/src/scc_cli/application/launch/start_wizard.py new file mode 100644 index 0000000..85a3af8 --- /dev/null +++ b/src/scc_cli/application/launch/start_wizard.py @@ -0,0 +1,914 @@ +"""State machine for the interactive start wizard.""" + +from __future__ import annotations + +from collections.abc import Sequence +from dataclasses import dataclass +from enum import Enum + +from scc_cli.application.interaction_requests import ( + ConfirmRequest, + InputRequest, + SelectOption, + SelectRequest, +) +from scc_cli.contexts import WorkContext + + 
+class StartWizardStep(str, Enum): + """Explicit steps for the start wizard state machine.""" + + QUICK_RESUME = "quick_resume" + TEAM_SELECTION = "team_selection" + WORKSPACE_SOURCE = "workspace_source" + WORKSPACE_PICKER = "workspace_picker" + WORKTREE_DECISION = "worktree_decision" + SESSION_NAME = "session_name" + COMPLETE = "complete" + CANCELLED = "cancelled" + BACK = "back" + + +class WorkspaceSource(str, Enum): + """Workspace source options for the start wizard.""" + + CURRENT_DIR = "current_dir" + RECENT = "recent" + TEAM_REPOS = "team_repos" + CUSTOM = "custom" + CLONE = "clone" + + +@dataclass(frozen=True) +class StartWizardConfig: + """Configuration flags for the start wizard state machine. + + Invariants: + - quick_resume_enabled determines whether the wizard starts in quick resume. + - team_selection_required determines whether a team step is required. + - allow_back controls whether BACK is a valid terminal state. + + Args: + quick_resume_enabled: Whether to start with quick resume. + team_selection_required: Whether a team selection step is required. + allow_back: Whether the UI can return a BACK outcome. + """ + + quick_resume_enabled: bool + team_selection_required: bool + allow_back: bool + + +@dataclass(frozen=True) +class StartWizardContext: + """Captured selections from the start wizard flow. + + Invariants: + - Values reflect the same selections used by the CLI flow. + + Args: + team: Selected team, if any. + workspace_source: Selected workspace source, if any. + workspace: Selected workspace path, if any. + worktree_name: Selected worktree name, if any. + session_name: Selected session name, if any. + """ + + team: str | None = None + workspace_source: WorkspaceSource | None = None + workspace: str | None = None + worktree_name: str | None = None + session_name: str | None = None + + +@dataclass(frozen=True) +class StartWizardState: + """Current state for the start wizard state machine. 
+ + Invariants: + - step always matches an explicit StartWizardStep. + - config remains constant for the life of the wizard. + + Args: + step: Current wizard step. + context: Captured wizard selections. + config: Wizard configuration flags. + """ + + step: StartWizardStep + context: StartWizardContext + config: StartWizardConfig + + +@dataclass(frozen=True) +class QuickResumeSelected: + """Event for selecting a session via quick resume. + + Args: + workspace: Selected workspace path. + team: Associated team, if any. + session_name: Optional session name from the selection. + """ + + workspace: str + team: str | None + session_name: str | None + + +@dataclass(frozen=True) +class QuickResumeDismissed: + """Event for skipping quick resume to start a new session.""" + + +@dataclass(frozen=True) +class TeamSelected: + """Event for selecting a team.""" + + team: str | None + + +@dataclass(frozen=True) +class WorkspaceSourceChosen: + """Event for selecting a workspace source.""" + + source: WorkspaceSource + + +@dataclass(frozen=True) +class WorkspaceSelected: + """Event for selecting a workspace.""" + + workspace: str + + +@dataclass(frozen=True) +class WorktreeSelected: + """Event for selecting a worktree name.""" + + worktree_name: str | None + + +@dataclass(frozen=True) +class SessionNameEntered: + """Event for entering a session name.""" + + session_name: str | None + + +@dataclass(frozen=True) +class BackRequested: + """Event for requesting a BACK navigation action.""" + + +@dataclass(frozen=True) +class CancelRequested: + """Event for cancelling the wizard.""" + + +StartWizardEvent = ( + QuickResumeSelected + | QuickResumeDismissed + | TeamSelected + | WorkspaceSourceChosen + | WorkspaceSelected + | WorktreeSelected + | SessionNameEntered + | BackRequested + | CancelRequested +) + + +def initialize_start_wizard(config: StartWizardConfig) -> StartWizardState: + """Initialize the start wizard state. 
+ + Invariants: + - Initial step honors quick resume and team selection requirements. + + Args: + config: Wizard configuration flags. + + Returns: + Initial StartWizardState. + """ + + if config.quick_resume_enabled: + step = StartWizardStep.QUICK_RESUME + elif config.team_selection_required: + step = StartWizardStep.TEAM_SELECTION + else: + step = StartWizardStep.WORKSPACE_SOURCE + return StartWizardState(step=step, context=StartWizardContext(), config=config) + + +def apply_start_wizard_event(state: StartWizardState, event: StartWizardEvent) -> StartWizardState: + """Apply an event to the start wizard state machine. + + Invariants: + - Terminal states remain stable once reached. + - Transitions are deterministic and side-effect free. + + Args: + state: Current wizard state. + event: Event emitted by the UI/command layer. + + Returns: + Updated StartWizardState after applying the event. + + Raises: + ValueError: When an event is invalid for the current state. + """ + + if state.step in { + StartWizardStep.COMPLETE, + StartWizardStep.CANCELLED, + StartWizardStep.BACK, + }: + return state + + if isinstance(event, CancelRequested): + return StartWizardState( + step=StartWizardStep.CANCELLED, + context=state.context, + config=state.config, + ) + + if isinstance(event, BackRequested): + return _handle_back_request(state) + + if state.step is StartWizardStep.QUICK_RESUME: + return _handle_quick_resume(state, event) + if state.step is StartWizardStep.TEAM_SELECTION: + return _handle_team_selection(state, event) + if state.step is StartWizardStep.WORKSPACE_SOURCE: + return _handle_workspace_source(state, event) + if state.step is StartWizardStep.WORKSPACE_PICKER: + return _handle_workspace_picker(state, event) + if state.step is StartWizardStep.WORKTREE_DECISION: + return _handle_worktree_decision(state, event) + if state.step is StartWizardStep.SESSION_NAME: + return _handle_session_name(state, event) + + msg = f"Unsupported state: {state.step}" + raise ValueError(msg) + 
+ +def _handle_back_request(state: StartWizardState) -> StartWizardState: + if state.step is StartWizardStep.QUICK_RESUME: + return _terminal_back_or_cancel(state) + if state.step is StartWizardStep.TEAM_SELECTION: + return _terminal_back_or_cancel(state) + if state.step is StartWizardStep.WORKSPACE_SOURCE: + if state.config.team_selection_required: + return StartWizardState( + step=StartWizardStep.TEAM_SELECTION, + context=state.context, + config=state.config, + ) + return _terminal_back_or_cancel(state) + if state.step is StartWizardStep.WORKSPACE_PICKER: + return StartWizardState( + step=StartWizardStep.WORKSPACE_SOURCE, + context=state.context, + config=state.config, + ) + if state.step is StartWizardStep.WORKTREE_DECISION: + return StartWizardState( + step=StartWizardStep.WORKSPACE_PICKER, + context=state.context, + config=state.config, + ) + if state.step is StartWizardStep.SESSION_NAME: + return StartWizardState( + step=StartWizardStep.WORKTREE_DECISION, + context=state.context, + config=state.config, + ) + return state + + +def _handle_quick_resume(state: StartWizardState, event: StartWizardEvent) -> StartWizardState: + if isinstance(event, QuickResumeSelected): + context = StartWizardContext( + team=event.team, + workspace_source=None, + workspace=event.workspace, + session_name=event.session_name, + ) + return StartWizardState( + step=StartWizardStep.COMPLETE, + context=context, + config=state.config, + ) + if isinstance(event, QuickResumeDismissed): + next_step = ( + StartWizardStep.TEAM_SELECTION + if state.config.team_selection_required + else StartWizardStep.WORKSPACE_SOURCE + ) + return StartWizardState(step=next_step, context=state.context, config=state.config) + msg = f"Invalid event for quick resume: {event}" + raise ValueError(msg) + + +def _handle_team_selection(state: StartWizardState, event: StartWizardEvent) -> StartWizardState: + if isinstance(event, TeamSelected): + context = StartWizardContext( + team=event.team, + 
workspace_source=state.context.workspace_source, + workspace=state.context.workspace, + worktree_name=state.context.worktree_name, + session_name=state.context.session_name, + ) + return StartWizardState( + step=StartWizardStep.WORKSPACE_SOURCE, + context=context, + config=state.config, + ) + msg = f"Invalid event for team selection: {event}" + raise ValueError(msg) + + +def _handle_workspace_source(state: StartWizardState, event: StartWizardEvent) -> StartWizardState: + if isinstance(event, WorkspaceSourceChosen): + context = StartWizardContext( + team=state.context.team, + workspace_source=event.source, + workspace=state.context.workspace, + worktree_name=state.context.worktree_name, + session_name=state.context.session_name, + ) + return StartWizardState( + step=StartWizardStep.WORKSPACE_PICKER, + context=context, + config=state.config, + ) + msg = f"Invalid event for workspace source: {event}" + raise ValueError(msg) + + +def _handle_workspace_picker(state: StartWizardState, event: StartWizardEvent) -> StartWizardState: + if isinstance(event, WorkspaceSelected): + context = StartWizardContext( + team=state.context.team, + workspace_source=state.context.workspace_source, + workspace=event.workspace, + worktree_name=state.context.worktree_name, + session_name=state.context.session_name, + ) + return StartWizardState( + step=StartWizardStep.WORKTREE_DECISION, + context=context, + config=state.config, + ) + msg = f"Invalid event for workspace picker: {event}" + raise ValueError(msg) + + +def _handle_worktree_decision(state: StartWizardState, event: StartWizardEvent) -> StartWizardState: + if isinstance(event, WorktreeSelected): + context = StartWizardContext( + team=state.context.team, + workspace_source=state.context.workspace_source, + workspace=state.context.workspace, + worktree_name=event.worktree_name, + session_name=state.context.session_name, + ) + return StartWizardState( + step=StartWizardStep.SESSION_NAME, + context=context, + config=state.config, + ) + 
msg = f"Invalid event for worktree decision: {event}" + raise ValueError(msg) + + +def _handle_session_name(state: StartWizardState, event: StartWizardEvent) -> StartWizardState: + if isinstance(event, SessionNameEntered): + context = StartWizardContext( + team=state.context.team, + workspace_source=state.context.workspace_source, + workspace=state.context.workspace, + worktree_name=state.context.worktree_name, + session_name=event.session_name, + ) + return StartWizardState( + step=StartWizardStep.COMPLETE, + context=context, + config=state.config, + ) + msg = f"Invalid event for session name: {event}" + raise ValueError(msg) + + +def _terminal_back_or_cancel(state: StartWizardState) -> StartWizardState: + step = StartWizardStep.BACK if state.config.allow_back else StartWizardStep.CANCELLED + return StartWizardState(step=step, context=state.context, config=state.config) + + +@dataclass(frozen=True) +class TeamOption: + """Team option for selection prompts.""" + + name: str + description: str = "" + credential_status: str | None = None + + +@dataclass(frozen=True) +class TeamSelectionViewModel: + """View model for team selection prompts.""" + + title: str + subtitle: str | None + current_team: str | None + options: Sequence[TeamOption] + + +@dataclass(frozen=True) +class WorkspaceSourceOption: + """Workspace source option for selection prompts.""" + + source: WorkspaceSource + label: str + description: str + + +@dataclass(frozen=True) +class CwdContext: + """Current working directory context for workspace source selection. + + This dataclass captures the runtime state of the current working directory + so the UI layer can build appropriate presentation options. The command + layer gathers this data (via service functions), filters out suspicious + directories upstream, and the UI layer uses it to build picker options. + + Invariants: + - If cwd_context is None in a view model, cwd is suspicious or unavailable. 
+ - If cwd_context is provided, the directory has passed suspicious checks. + - UI should show "Current directory" option iff cwd_context is not None. + + Args: + path: Absolute path to the current working directory. + name: Display name for the directory (typically the folder name). + is_git: Whether the directory is a git repository. + has_project_markers: Whether the directory has recognizable project markers. + """ + + path: str + name: str + is_git: bool + has_project_markers: bool + + +@dataclass(frozen=True) +class WorkspaceSummary: + """Workspace option summary for picker prompts.""" + + label: str + description: str + workspace: str + + +@dataclass(frozen=True) +class TeamRepoOption: + """Team repository option for selection prompts.""" + + name: str + description: str + url: str | None = None + local_path: str | None = None + + +@dataclass(frozen=True) +class QuickResumeOption: + """Quick resume option for selection prompts.""" + + option_id: str + label: str + description: str + is_new_session: bool = False + is_switch_team: bool = False + is_context: bool = False + context: WorkContext | None = None + + +@dataclass(frozen=True) +class QuickResumeViewModel: + """View model for quick resume selection prompts.""" + + title: str + subtitle: str | None + context_label: str | None + standalone: bool + effective_team: str | None + contexts: Sequence[WorkContext] + current_branch: str | None = None + + +@dataclass(frozen=True) +class WorkspaceSourceViewModel: + """View model for workspace source selection prompts. + + This view model carries data flags that the UI layer uses to build + presentation options. The application layer provides context about + the current directory and team repositories, but does not build + the actual picker options - that's the UI layer's responsibility. + + Invariants: + - cwd_context is None if the current directory is suspicious (UI should not show it). 
+ - If options is empty, UI layer builds options from cwd_context/has_team_repos. + + Args: + title: Picker title text. + subtitle: Optional subtitle text. + context_label: Team context label (e.g., "Team: platform"). + standalone: Whether running in standalone mode (no org config). + allow_back: Whether back navigation is allowed. + has_team_repos: Whether team repositories are available. + cwd_context: Current directory context, or None if cwd is suspicious. + options: Prebuilt options (empty = UI builds from data flags). + """ + + title: str + subtitle: str | None + context_label: str | None + standalone: bool + allow_back: bool + has_team_repos: bool = False + cwd_context: CwdContext | None = None + options: Sequence[WorkspaceSourceOption] = () + + +@dataclass(frozen=True) +class WorkspacePickerViewModel: + """View model for workspace picker prompts.""" + + title: str + subtitle: str | None + context_label: str | None + standalone: bool + allow_back: bool + options: Sequence[WorkspaceSummary] + + +@dataclass(frozen=True) +class TeamRepoPickerViewModel: + """View model for team repository picker prompts.""" + + title: str + subtitle: str | None + context_label: str | None + standalone: bool + allow_back: bool + workspace_base: str + options: Sequence[TeamRepoOption] + + +StartWizardViewModel = ( + QuickResumeViewModel + | WorkspaceSourceViewModel + | WorkspacePickerViewModel + | TeamRepoPickerViewModel + | TeamSelectionViewModel + | None +) + + +@dataclass(frozen=True) +class StartWizardPrompt: + """Prompt returned for the start wizard UI layer. + + Invariants: + - Prompts are data-only and rendered at the UI edge. 
+ """ + + step: StartWizardStep + request: ConfirmRequest | SelectRequest[object] | InputRequest + select_options: Sequence[SelectOption[object]] | None = None + view_model: StartWizardViewModel = None + allow_team_switch: bool = False + default_response: bool | None = None + + +@dataclass(frozen=True) +class StartWizardProgress: + """Non-terminal wizard state prompting user input.""" + + state: StartWizardState + prompt: StartWizardPrompt + + +StartWizardOutcome = StartWizardProgress | StartWizardState + + +WORKSPACE_SOURCE_REQUEST_ID = "start-workspace-source" +WORKSPACE_PICKER_REQUEST_ID = "start-workspace-picker" +TEAM_SELECTION_REQUEST_ID = "start-team-selection" +WORKTREE_CONFIRM_REQUEST_ID = "start-worktree-confirm" +WORKTREE_NAME_REQUEST_ID = "start-worktree-name" +SESSION_NAME_REQUEST_ID = "start-session-name" +QUICK_RESUME_REQUEST_ID = "start-quick-resume" +CROSS_TEAM_RESUME_REQUEST_ID = "start-cross-team-resume" +TEAM_REPO_REQUEST_ID = "start-team-repo" +CUSTOM_WORKSPACE_REQUEST_ID = "start-workspace-path" +CLONE_REPO_REQUEST_ID = "start-clone-repo" + + +def _build_quick_resume_options( + contexts: Sequence[WorkContext], + *, + include_switch_team: bool, + new_session_label: str, + new_session_description: str, + current_branch: str | None = None, +) -> list[QuickResumeOption]: + options: list[QuickResumeOption] = [ + QuickResumeOption( + option_id="quick-resume:new-session", + label=new_session_label, + description=new_session_description, + is_new_session=True, + ) + ] + if include_switch_team: + options.append( + QuickResumeOption( + option_id="quick-resume:switch-team", + label="Switch team", + description="Choose a different team", + is_switch_team=True, + ) + ) + for index, context in enumerate(contexts, start=1): + description_parts: list[str] = [] + if context.last_session_id: + description_parts.append(f"session: {context.last_session_id}") + if current_branch and context.worktree_name == current_branch: + description_parts.append("current 
branch") + options.append( + QuickResumeOption( + option_id=f"quick-resume:context:{index}", + label=context.display_label, + description=" ".join(description_parts), + is_context=True, + context=context, + ) + ) + return options + + +def build_team_selection_prompt(*, view_model: TeamSelectionViewModel) -> StartWizardPrompt: + options: list[SelectOption[TeamOption]] = [] + for team in view_model.options: + options.append( + SelectOption( + option_id=f"team:{team.name}", + label=team.name, + description=team.description, + value=team, + ) + ) + subtitle = view_model.subtitle + if subtitle is None: + subtitle = f"{len(options)} teams available" if options else None + request = SelectRequest( + request_id=TEAM_SELECTION_REQUEST_ID, + title=view_model.title, + subtitle=subtitle, + options=options, + allow_back=False, + ) + return StartWizardPrompt( + step=StartWizardStep.TEAM_SELECTION, + request=request, + view_model=view_model, + allow_team_switch=False, + select_options=request.options, + ) + + +def build_workspace_source_prompt(*, view_model: WorkspaceSourceViewModel) -> StartWizardPrompt: + """Build a workspace source selection prompt. + + This function passes through the view model data to the UI layer, which + is responsible for building picker options from the data flags + (cwd_context, has_team_repos) when options is empty. 
+ + The application layer provides: + - cwd_context: Current directory data (None if suspicious/unavailable) + - has_team_repos: Whether team repositories are available + - options: Pre-built options (empty = UI builds from data flags) + + The UI layer: + - Builds picker options from data flags if options is empty + - Renders the picker with appropriate labels and descriptions + """ + options = list(view_model.options) + select_options: list[SelectOption[WorkspaceSourceOption]] = [] + for option in options: + select_options.append( + SelectOption( + option_id=f"workspace-source:{option.source.value}", + label=option.label, + description=option.description, + value=option, + ) + ) + request = SelectRequest( + request_id=WORKSPACE_SOURCE_REQUEST_ID, + title=view_model.title, + subtitle=view_model.subtitle, + options=select_options, + allow_back=view_model.allow_back, + ) + # Pass through the view model unchanged - UI layer will use cwd_context + # and has_team_repos to build options if options list is empty + return StartWizardPrompt( + step=StartWizardStep.WORKSPACE_SOURCE, + request=request, + view_model=view_model, + allow_team_switch=True, + select_options=request.options, + ) + + +def build_workspace_picker_prompt(*, view_model: WorkspacePickerViewModel) -> StartWizardPrompt: + options: list[SelectOption[WorkspaceSummary]] = [] + for option in view_model.options: + options.append( + SelectOption( + option_id=f"workspace:{option.workspace}", + label=option.label, + description=option.description, + value=option, + ) + ) + request = SelectRequest( + request_id=WORKSPACE_PICKER_REQUEST_ID, + title=view_model.title, + subtitle=view_model.subtitle, + options=options, + allow_back=view_model.allow_back, + ) + return StartWizardPrompt( + step=StartWizardStep.WORKSPACE_PICKER, + request=request, + view_model=view_model, + allow_team_switch=True, + select_options=request.options, + ) + + +def build_team_repo_prompt(*, view_model: TeamRepoPickerViewModel) -> 
StartWizardPrompt: + options: list[SelectOption[TeamRepoOption]] = [] + for option in view_model.options: + options.append( + SelectOption( + option_id=f"team-repo:{option.name}", + label=option.name, + description=option.description, + value=option, + ) + ) + request = SelectRequest( + request_id=TEAM_REPO_REQUEST_ID, + title=view_model.title, + subtitle=view_model.subtitle, + options=options, + allow_back=view_model.allow_back, + ) + return StartWizardPrompt( + step=StartWizardStep.WORKSPACE_PICKER, + request=request, + view_model=view_model, + allow_team_switch=True, + select_options=request.options, + ) + + +def build_quick_resume_prompt(*, view_model: QuickResumeViewModel) -> StartWizardPrompt: + team_label = view_model.effective_team or "standalone" + if view_model.standalone: + team_label = "standalone" + new_session_label = f"+ New session ({team_label})" + new_session_description = "Start fresh" + if not view_model.contexts: + new_session_description = "No sessions yet — press Enter to start" + + options = _build_quick_resume_options( + view_model.contexts, + include_switch_team=not view_model.standalone, + new_session_label=new_session_label, + new_session_description=new_session_description, + current_branch=view_model.current_branch, + ) + select_options: list[SelectOption[QuickResumeOption]] = [] + for option in options: + select_options.append( + SelectOption( + option_id=option.option_id, + label=option.label, + description=option.description, + value=option, + ) + ) + request = SelectRequest( + request_id=QUICK_RESUME_REQUEST_ID, + title=view_model.title, + subtitle=view_model.subtitle, + options=select_options, + allow_back=True, + ) + return StartWizardPrompt( + step=StartWizardStep.WORKSPACE_PICKER, + request=request, + view_model=view_model, + allow_team_switch=True, + select_options=request.options, + ) + + +def build_confirm_worktree_prompt() -> StartWizardPrompt: + request = ConfirmRequest( + request_id=WORKTREE_CONFIRM_REQUEST_ID, + 
prompt="Create a worktree for isolated feature development?", + ) + return StartWizardPrompt( + step=StartWizardStep.WORKTREE_DECISION, + request=request, + view_model=None, + allow_team_switch=False, + ) + + +def build_cross_team_resume_prompt(team: str) -> StartWizardPrompt: + request = ConfirmRequest( + request_id=CROSS_TEAM_RESUME_REQUEST_ID, + prompt=( + f"[yellow]Resume session from team '{team}'?[/yellow]\n" + f"[dim]This will use {team} plugins for this session.[/dim]" + ), + ) + return StartWizardPrompt( + step=StartWizardStep.QUICK_RESUME, + request=request, + view_model=None, + allow_team_switch=False, + default_response=False, + ) + + +def build_worktree_name_prompt() -> StartWizardPrompt: + request = InputRequest( + request_id=WORKTREE_NAME_REQUEST_ID, + prompt="Feature/worktree name", + default="", + ) + return StartWizardPrompt( + step=StartWizardStep.WORKTREE_DECISION, + request=request, + view_model=None, + allow_team_switch=False, + ) + + +def build_session_name_prompt() -> StartWizardPrompt: + request = InputRequest( + request_id=SESSION_NAME_REQUEST_ID, + prompt="Session name (optional, for easy resume)", + default="", + ) + return StartWizardPrompt( + step=StartWizardStep.SESSION_NAME, + request=request, + view_model=None, + allow_team_switch=False, + ) + + +def build_custom_workspace_prompt() -> StartWizardPrompt: + request = InputRequest( + request_id=CUSTOM_WORKSPACE_REQUEST_ID, + prompt="Enter workspace path", + default="", + ) + return StartWizardPrompt( + step=StartWizardStep.WORKSPACE_PICKER, + request=request, + view_model=None, + allow_team_switch=True, + ) + + +def build_clone_repo_prompt() -> StartWizardPrompt: + request = InputRequest( + request_id=CLONE_REPO_REQUEST_ID, + prompt="Repository URL (HTTPS or SSH)", + default="", + ) + return StartWizardPrompt( + step=StartWizardStep.WORKSPACE_PICKER, + request=request, + view_model=None, + allow_team_switch=True, + ) diff --git a/src/scc_cli/application/sessions/__init__.py 
b/src/scc_cli/application/sessions/__init__.py new file mode 100644 index 0000000..25b79cd --- /dev/null +++ b/src/scc_cli/application/sessions/__init__.py @@ -0,0 +1,5 @@ +"""Session use cases and helpers.""" + +from .use_cases import SessionService + +__all__ = ["SessionService"] diff --git a/src/scc_cli/application/sessions/use_cases.py b/src/scc_cli/application/sessions/use_cases.py new file mode 100644 index 0000000..464e094 --- /dev/null +++ b/src/scc_cli/application/sessions/use_cases.py @@ -0,0 +1,176 @@ +"""Session persistence use cases.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +from pathlib import Path + +from scc_cli.ports.session_models import ( + SessionFilter, + SessionListResult, + SessionRecord, + SessionSummary, +) +from scc_cli.ports.session_store import SessionStore + + +@dataclass(frozen=True) +class SessionService: + """Coordinate session persistence and retrieval. + + Invariants: + - Session records are sorted by last_used descending when listed. + - Workspace+branch pairs uniquely identify a session entry. + + Args: + store: SessionStore implementation for persistence. + """ + + store: SessionStore + + def list_recent(self, session_filter: SessionFilter) -> SessionListResult: + """Return recent sessions filtered by team and limit. + + Args: + session_filter: Filtering options for the session list. + + Returns: + SessionListResult with summaries and count. 
+ """ + sessions = self.store.load_sessions() + sessions = _filter_sessions(sessions, session_filter) + sessions.sort(key=lambda record: record.last_used or "", reverse=True) + sessions = sessions[: session_filter.limit] + summaries = [ + SessionSummary( + name=record.name or _generate_session_name(record), + workspace=record.workspace, + team=record.team, + last_used=record.last_used, + container_name=record.container_name, + branch=record.branch, + ) + for record in sessions + ] + team_label = None if session_filter.include_all else session_filter.team + return SessionListResult.from_sessions(summaries, team=team_label) + + def record_session( + self, + *, + workspace: str, + team: str | None = None, + session_name: str | None = None, + container_name: str | None = None, + branch: str | None = None, + ) -> SessionRecord: + """Record a session creation or update. + + Args: + workspace: Workspace path as a string. + team: Team identifier or None for standalone sessions. + session_name: Optional session display name. + container_name: Optional container name. + branch: Optional git branch name. + + Returns: + SessionRecord that was written. + """ + now = datetime.now().isoformat() + with self.store.lock(): + sessions = self.store.load_sessions() + existing_index = _find_session_index(sessions, workspace, branch) + created_at = sessions[existing_index].created_at if existing_index is not None else now + record = SessionRecord( + workspace=workspace, + team=team, + name=session_name, + container_name=container_name, + branch=branch, + last_used=now, + created_at=created_at, + ) + if existing_index is not None: + sessions[existing_index] = record + else: + sessions.insert(0, record) + self.store.save_sessions(sessions) + return record + + def update_session_container( + self, + *, + workspace: str, + container_name: str, + branch: str | None = None, + ) -> None: + """Update the container name for an existing session. + + Args: + workspace: Workspace path string. 
+ container_name: Container name to set. + branch: Optional branch to match when updating. + """ + now = datetime.now().isoformat() + with self.store.lock(): + sessions = self.store.load_sessions() + for record in sessions: + if record.workspace == workspace and (branch is None or record.branch == branch): + updated = SessionRecord( + workspace=record.workspace, + team=record.team, + name=record.name, + container_name=container_name, + branch=record.branch, + last_used=now, + created_at=record.created_at, + schema_version=record.schema_version, + ) + sessions[sessions.index(record)] = updated + break + self.store.save_sessions(sessions) + + def prune_orphaned_sessions(self) -> int: + """Remove sessions whose workspace paths no longer exist. + + Returns: + Number of sessions removed. + """ + with self.store.lock(): + sessions = self.store.load_sessions() + remaining = [ + record for record in sessions if Path(record.workspace).expanduser().exists() + ] + removed = len(sessions) - len(remaining) + self.store.save_sessions(remaining) + return removed + + +def _filter_sessions( + sessions: list[SessionRecord], + session_filter: SessionFilter, +) -> list[SessionRecord]: + if session_filter.include_all: + return sessions + if session_filter.team is None: + return [record for record in sessions if record.team is None] + return [record for record in sessions if record.team == session_filter.team] + + +def _generate_session_name(record: SessionRecord) -> str: + """Generate a display name for sessions without explicit names.""" + if record.workspace: + return Path(record.workspace).name + return "Unnamed" + + +def _find_session_index( + sessions: list[SessionRecord], + workspace: str, + branch: str | None, +) -> int | None: + for index, record in enumerate(sessions): + if record.workspace == workspace and record.branch == branch: + return index + return None diff --git a/src/scc_cli/application/start_session.py b/src/scc_cli/application/start_session.py index 
aa15da8..72d9573 100644 --- a/src/scc_cli/application/start_session.py +++ b/src/scc_cli/application/start_session.py @@ -15,6 +15,7 @@ SyncResult, sync_marketplace_settings, ) +from scc_cli.application.workspace import ResolveWorkspaceRequest, resolve_workspace from scc_cli.core.constants import AGENT_CONFIG_DIR, SANDBOX_IMAGE from scc_cli.core.errors import WorkspaceNotFoundError from scc_cli.core.workspace import ResolverResult @@ -25,7 +26,6 @@ from scc_cli.ports.models import AgentSettings, MountSpec, SandboxHandle, SandboxSpec from scc_cli.ports.remote_fetcher import RemoteFetcher from scc_cli.ports.sandbox_runtime import SandboxRuntime -from scc_cli.services.workspace import resolve_launch_context @dataclass(frozen=True) @@ -91,7 +91,7 @@ def prepare_start_session( """ resolver_result = _resolve_workspace_context(request) effective_config = _compute_effective_config(request) - sync_result, sync_error_message = _sync_marketplace_settings(request, dependencies) + sync_result, sync_error_message = sync_marketplace_settings_for_start(request, dependencies) agent_settings = _build_agent_settings(sync_result, dependencies.agent_runner) current_branch = _resolve_current_branch(request.workspace_path, dependencies.git_client) sandbox_spec = _build_sandbox_spec( @@ -128,14 +128,16 @@ def start_session( def _resolve_workspace_context(request: StartSessionRequest) -> ResolverResult: - result = resolve_launch_context( - request.entry_dir, - request.workspace_arg, - allow_suspicious=request.allow_suspicious, + context = resolve_workspace( + ResolveWorkspaceRequest( + cwd=request.entry_dir, + workspace_arg=request.workspace_arg, + allow_suspicious=request.allow_suspicious, + ) ) - if result is None: + if context is None: raise WorkspaceNotFoundError(path=str(request.workspace_path)) - return result + return context.resolver_result def _compute_effective_config(request: StartSessionRequest) -> EffectiveConfig | None: @@ -148,10 +150,23 @@ def 
_compute_effective_config(request: StartSessionRequest) -> EffectiveConfig | ) -def _sync_marketplace_settings( +def sync_marketplace_settings_for_start( request: StartSessionRequest, dependencies: StartSessionDependencies, ) -> tuple[SyncResult | None, str | None]: + """Sync marketplace settings for a start session. + + Invariants: + - Skips syncing in dry-run, offline, or standalone modes. + - Uses the same sync path as start session preparation. + + Args: + request: Start session request data. + dependencies: Dependencies used to perform the sync. + + Returns: + Tuple of sync result and optional error message. + """ if request.dry_run or request.offline or request.standalone: return None, None if request.org_config is None or request.team is None: diff --git a/src/scc_cli/application/support_bundle.py b/src/scc_cli/application/support_bundle.py new file mode 100644 index 0000000..92ad37d --- /dev/null +++ b/src/scc_cli/application/support_bundle.py @@ -0,0 +1,222 @@ +"""Support bundle use case for diagnostics output.""" + +from __future__ import annotations + +import json +import re +from dataclasses import dataclass +from pathlib import Path +from typing import Any + +from scc_cli import __version__ +from scc_cli.core.errors import SCCError +from scc_cli.doctor.serialization import build_doctor_json_data +from scc_cli.ports.archive_writer import ArchiveWriter +from scc_cli.ports.clock import Clock +from scc_cli.ports.doctor_runner import DoctorRunner +from scc_cli.ports.filesystem import Filesystem + +# ───────────────────────────────────────────────────────────────────────────── +# Redaction Patterns and Helpers +# ───────────────────────────────────────────────────────────────────────────── + +SECRET_KEY_PATTERNS = [ + r"^auth$", + r".*token.*", + r".*api[_-]?key.*", + r".*apikey.*", + r".*password.*", + r".*secret.*", + r"^authorization$", + r".*credential.*", +] + +_SECRET_PATTERNS = [re.compile(pattern, re.IGNORECASE) for pattern in SECRET_KEY_PATTERNS] + 
+ +def _is_secret_key(key: str) -> bool: + """Check if a key matches secret patterns.""" + return any(pattern.match(key) for pattern in _SECRET_PATTERNS) + + +def redact_secrets(data: dict[str, Any]) -> dict[str, Any]: + """Redact secret values from a dictionary. + + Recursively traverses the dictionary and replaces values for keys + matching secret patterns (auth, token, api_key, password, etc.) + with '[REDACTED]'. + + Args: + data: Dictionary to redact secrets from. + + Returns: + New dictionary with secret values redacted. + """ + result: dict[str, Any] = {} + + for key, value in data.items(): + if _is_secret_key(key) and isinstance(value, str): + result[key] = "[REDACTED]" + elif isinstance(value, dict): + result[key] = redact_secrets(value) + elif isinstance(value, list): + result[key] = [ + redact_secrets(item) if isinstance(item, dict) else item for item in value + ] + else: + result[key] = value + + return result + + +def redact_paths(data: dict[str, Any], *, redact: bool = True) -> dict[str, Any]: + """Redact home directory paths from a dictionary. + + Recursively traverses the dictionary and replaces home directory paths + with '~' for privacy. + + Args: + data: Dictionary to redact paths from. + redact: If False, returns data unchanged. + + Returns: + New dictionary with home paths redacted. 
+ """ + if not redact: + return data + + home = str(Path.home()) + result: dict[str, Any] = {} + + for key, value in data.items(): + if isinstance(value, str) and home in value: + result[key] = value.replace(home, "~") + elif isinstance(value, dict): + result[key] = redact_paths(value, redact=redact) + elif isinstance(value, list): + result[key] = [ + redact_paths(item, redact=redact) + if isinstance(item, dict) + else (item.replace(home, "~") if isinstance(item, str) and home in item else item) + for item in value + ] + else: + result[key] = value + + return result + + +# ───────────────────────────────────────────────────────────────────────────── +# Use Case Types +# ───────────────────────────────────────────────────────────────────────────── + + +@dataclass(frozen=True) +class SupportBundleDependencies: + """Dependencies for the support bundle use case.""" + + filesystem: Filesystem + clock: Clock + doctor_runner: DoctorRunner + archive_writer: ArchiveWriter + + +@dataclass(frozen=True) +class SupportBundleRequest: + """Inputs for generating a support bundle.""" + + output_path: Path + redact_paths: bool + workspace_path: Path | None = None + + +@dataclass(frozen=True) +class SupportBundleResult: + """Result of support bundle generation.""" + + manifest: dict[str, Any] + + +def _load_user_config(filesystem: Filesystem, path: Path) -> dict[str, Any]: + try: + if not filesystem.exists(path): + return {} + content = filesystem.read_text(path) + result = json.loads(content) + if isinstance(result, dict): + return result + return {"error": "Config is not a dictionary"} + except (OSError, json.JSONDecodeError): + return {"error": "Failed to load config"} + + +def build_support_bundle_manifest( + request: SupportBundleRequest, + *, + dependencies: SupportBundleDependencies, +) -> dict[str, Any]: + """Assemble the support bundle manifest without writing files.""" + system_info = { + "platform": __import__("platform").system(), + "platform_version": 
__import__("platform").version(), + "platform_release": __import__("platform").release(), + "machine": __import__("platform").machine(), + "python_version": __import__("sys").version, + "python_implementation": __import__("platform").python_implementation(), + } + + generated_at = dependencies.clock.now().isoformat() + + user_config_path = Path.home() / ".scc" / "config.json" + user_config = _load_user_config(dependencies.filesystem, user_config_path) + user_config = redact_secrets(user_config) if isinstance(user_config, dict) else user_config + + org_config_path = Path.home() / ".scc" / "org.json" + org_config = _load_user_config(dependencies.filesystem, org_config_path) + org_config = redact_secrets(org_config) if isinstance(org_config, dict) else org_config + + try: + doctor_result = dependencies.doctor_runner.run( + str(request.workspace_path) if request.workspace_path else None + ) + doctor_data = build_doctor_json_data(doctor_result) + except Exception as exc: + doctor_data = {"error": f"Failed to run doctor: {exc}"} + + bundle_data: dict[str, Any] = { + "generated_at": generated_at, + "cli_version": __version__, + "system": system_info, + "config": user_config, + "org_config": org_config, + "doctor": doctor_data, + } + + if request.workspace_path: + bundle_data["workspace"] = str(request.workspace_path) + + if request.redact_paths: + bundle_data = redact_paths(bundle_data) + + return bundle_data + + +def create_support_bundle( + request: SupportBundleRequest, + *, + dependencies: SupportBundleDependencies, +) -> SupportBundleResult: + """Generate a support bundle and write the archive manifest.""" + manifest = build_support_bundle_manifest(request, dependencies=dependencies) + manifest_json = json.dumps(manifest, indent=2) + + try: + dependencies.archive_writer.write_manifest(str(request.output_path), manifest_json) + except Exception as exc: + raise SCCError( + user_message="Failed to write support bundle", + suggested_action="Check the output path and try 
again", + debug_context=str(exc), + ) from exc + + return SupportBundleResult(manifest=manifest) diff --git a/src/scc_cli/application/workspace/__init__.py b/src/scc_cli/application/workspace/__init__.py new file mode 100644 index 0000000..1945aa3 --- /dev/null +++ b/src/scc_cli/application/workspace/__init__.py @@ -0,0 +1,19 @@ +from scc_cli.application.workspace.use_cases import ( + ResolveWorkspaceRequest, + WorkspaceContext, + WorkspaceValidationResult, + WorkspaceValidationStep, + WorkspaceWarning, + resolve_workspace, + validate_workspace, +) + +__all__ = [ + "ResolveWorkspaceRequest", + "WorkspaceContext", + "WorkspaceValidationResult", + "WorkspaceValidationStep", + "WorkspaceWarning", + "resolve_workspace", + "validate_workspace", +] diff --git a/src/scc_cli/application/workspace/use_cases.py b/src/scc_cli/application/workspace/use_cases.py new file mode 100644 index 0000000..4220f27 --- /dev/null +++ b/src/scc_cli/application/workspace/use_cases.py @@ -0,0 +1,288 @@ +from __future__ import annotations + +from collections.abc import Iterable +from dataclasses import dataclass +from pathlib import Path + +from scc_cli.application.interaction_requests import ConfirmRequest +from scc_cli.core.errors import UsageError, WorkspaceNotFoundError +from scc_cli.core.workspace import ResolverResult +from scc_cli.ports.platform_probe import PlatformProbe +from scc_cli.services.workspace import ( + get_suspicious_reason, + is_suspicious_directory, + resolve_launch_context, +) + +SUSPICIOUS_WARNING_ID = "workspace-suspicious" +WSL_WARNING_ID = "workspace-wsl-performance" + + +@dataclass(frozen=True) +class WorkspaceContext: + """Workspace context resolved for launch or worktree flows. + + Invariants: + - Resolution follows the same precedence rules as the CLI. + - Paths are resolved and stable for session identity. + + Args: + resolver_result: Raw resolver output with path and mount details. 
+ """ + + resolver_result: ResolverResult + + @property + def workspace_root(self) -> Path: + """Workspace root (WR).""" + return self.resolver_result.workspace_root + + @property + def entry_dir(self) -> Path: + """Entry directory (ED).""" + return self.resolver_result.entry_dir + + @property + def mount_root(self) -> Path: + """Mount root (MR).""" + return self.resolver_result.mount_root + + @property + def container_workdir(self) -> str: + """Container working directory (CW).""" + return self.resolver_result.container_workdir + + @property + def is_auto_detected(self) -> bool: + """Whether the workspace was auto-detected.""" + return self.resolver_result.is_auto_detected + + @property + def is_suspicious(self) -> bool: + """Whether the workspace is considered suspicious.""" + return self.resolver_result.is_suspicious + + @property + def is_mount_expanded(self) -> bool: + """Whether the mount root was expanded for worktrees.""" + return self.resolver_result.is_mount_expanded + + @property + def reason(self) -> str: + """Debug explanation of the resolution path.""" + return self.resolver_result.reason + + @property + def is_auto_eligible(self) -> bool: + """Whether the workspace can be auto-launched without prompts.""" + return self.resolver_result.is_auto_eligible() + + +@dataclass(frozen=True) +class ResolveWorkspaceRequest: + """Inputs for resolving workspace context. + + Invariants: + - Resolution order is stable for git and .scc.yaml detection. + - Paths are interpreted relative to the provided cwd. + + Args: + cwd: Directory where the user invoked the command. + workspace_arg: Explicit workspace argument, if provided. + allow_suspicious: Whether explicit suspicious paths are allowed. + include_git_dir_fallback: Whether to check for .git markers when git is unavailable. 
@dataclass(frozen=True)
class ResolveWorkspaceRequest:
    """Inputs for resolving workspace context.

    Invariants:
        - Resolution order is stable for git and .scc.yaml detection.
        - Paths are interpreted relative to the provided cwd.

    Args:
        cwd: Directory where the user invoked the command.
        workspace_arg: Explicit workspace argument, if provided.
        allow_suspicious: Whether explicit suspicious paths are allowed.
        include_git_dir_fallback: Whether to check for .git markers when git is unavailable.
    """

    cwd: Path
    workspace_arg: str | None
    allow_suspicious: bool = False
    include_git_dir_fallback: bool = False


@dataclass(frozen=True)
class WorkspaceWarning:
    """Warning details surfaced during workspace validation.

    Invariants:
        - Warning identifiers remain stable for UI adapters.
        - Messages mirror existing CLI prompts.

    Args:
        warning_id: Stable identifier for the warning.
        title: Short title used in warning panels.
        message: Main warning message text.
        suggestion: Optional follow-up hint for users.
        console_message: Text emitted to stderr when applicable.
        emit_stderr: Whether the warning should be written to stderr.
    """

    warning_id: str
    title: str
    message: str
    suggestion: str | None
    console_message: str
    emit_stderr: bool


@dataclass(frozen=True)
class WorkspaceValidationStep:
    """Validation step with optional confirmation request.

    Invariants:
        - ConfirmRequest prompts stay aligned with CLI confirmations.

    Args:
        warning: Warning metadata describing the issue.
        confirm_request: Optional request for user confirmation.
    """

    warning: WorkspaceWarning
    confirm_request: ConfirmRequest | None = None


@dataclass(frozen=True)
class WorkspaceValidationResult:
    """Validated workspace path plus warnings to surface at the edge.

    Invariants:
        - Steps are ordered in the sequence they should be displayed.

    Args:
        workspace_path: Resolved workspace path that passed validation.
        steps: Warning steps to render at the CLI/UI edge.
    """

    workspace_path: Path
    steps: tuple[WorkspaceValidationStep, ...]


def resolve_workspace(request: ResolveWorkspaceRequest) -> WorkspaceContext | None:
    """Resolve workspace context with unified precedence rules.

    Invariants:
        - Preserves existing resolution order and path canonicalization.
        - Does not emit UI output; callers render warnings separately.

    Args:
        request: Resolution inputs from CLI or UI flows.

    Returns:
        WorkspaceContext with resolved paths, or None if no workspace could be resolved.
    """
    # Delegate to the shared service so CLI and UI resolve identically.
    result = resolve_launch_context(
        request.cwd,
        request.workspace_arg,
        allow_suspicious=request.allow_suspicious,
        include_git_dir_fallback=request.include_git_dir_fallback,
    )
    if result is None:
        return None
    return WorkspaceContext(result)
def validate_workspace(
    workspace: str | None,
    *,
    allow_suspicious: bool,
    interactive_allowed: bool,
    platform_probe: PlatformProbe,
) -> WorkspaceValidationResult | None:
    """Validate a workspace path and emit warning metadata.

    Invariants:
        - Suspicious workspace messaging matches CLI prompts.
        - WSL performance warnings are reported without UI side effects.

    Args:
        workspace: Workspace path string, or None when unset.
        allow_suspicious: Whether to allow suspicious paths without confirmation.
        interactive_allowed: Whether the UI may prompt for confirmation.
        platform_probe: Platform probe dependency for WSL checks.

    Returns:
        WorkspaceValidationResult or None when no workspace path is provided.

    Raises:
        WorkspaceNotFoundError: If the workspace path does not exist.
        UsageError: If a suspicious path is blocked in non-interactive mode.
    """
    if workspace is None:
        return None

    workspace_path = Path(workspace).expanduser().resolve()
    if not workspace_path.exists():
        raise WorkspaceNotFoundError(path=str(workspace_path))

    steps: list[WorkspaceValidationStep] = []

    if is_suspicious_directory(workspace_path):
        reason = get_suspicious_reason(workspace_path) or "Suspicious directory"
        # emit_stderr mirrors the CLI: when the user explicitly allowed
        # suspicious paths we only note it on stderr instead of prompting.
        warning = WorkspaceWarning(
            warning_id=SUSPICIOUS_WARNING_ID,
            title="Suspicious Workspace",
            message=reason,
            suggestion="Consider using a project-specific directory instead.",
            console_message=reason,
            emit_stderr=allow_suspicious,
        )
        if allow_suspicious:
            # Allowed explicitly: surface the warning, no confirmation needed.
            steps.append(WorkspaceValidationStep(warning=warning))
        elif interactive_allowed:
            # Interactive: let the UI ask before proceeding.
            steps.append(
                WorkspaceValidationStep(
                    warning=warning,
                    confirm_request=ConfirmRequest(
                        request_id="confirm-suspicious-workspace",
                        prompt="Continue anyway?",
                    ),
                )
            )
        else:
            # Non-interactive and not explicitly allowed: hard refusal.
            raise UsageError(
                user_message=(
                    f"Refusing to start in suspicious directory: {workspace_path}\n → {reason}"
                ),
                suggested_action=(
                    "Either:\n"
                    f" • Run: scc start --allow-suspicious-workspace {workspace_path}\n"
                    " • Run: scc start --interactive (to choose a different workspace)\n"
                    " • Run from a project directory inside a git repository"
                ),
            )

    if platform_probe.is_wsl2():
        # NOTE(review): the probe's own warning text is discarded in favor of
        # the fixed copy below — confirm that is intentional.
        is_optimal, _warning = platform_probe.check_path_performance(workspace_path)
        if not is_optimal:
            warning = WorkspaceWarning(
                warning_id=WSL_WARNING_ID,
                title="Performance Warning",
                message="Your workspace is on the Windows filesystem.",
                suggestion="For better performance, move to ~/projects inside WSL.",
                console_message=(
                    "Workspace is on the Windows filesystem. Performance may be slow."
                ),
                emit_stderr=True,
            )
            # Only attach a confirmation when the UI is allowed to prompt;
            # otherwise the warning is informational.
            confirm_request = (
                ConfirmRequest(
                    request_id="confirm-wsl-performance",
                    prompt="Continue anyway?",
                )
                if interactive_allowed
                else None
            )
            steps.append(
                WorkspaceValidationStep(
                    warning=warning,
                    confirm_request=confirm_request,
                )
            )

    return WorkspaceValidationResult(
        workspace_path=workspace_path,
        steps=_freeze_steps(steps),
    )


def _freeze_steps(steps: Iterable[WorkspaceValidationStep]) -> tuple[WorkspaceValidationStep, ...]:
    # Freeze the mutable working list into the immutable result type.
    return tuple(steps)
@dataclass(frozen=True)
class WorktreeSummary:
    """Summary of a git worktree for selection and listing.

    Invariants:
        - Paths are absolute and refer to host filesystem locations.
        - Counts are zero when status data is unavailable.

    Args:
        path: Filesystem path to the worktree.
        branch: Branch name (may be empty for detached/bare worktrees).
        status: Raw status string from git worktree list.
        is_current: Whether this worktree matches the current working directory.
        has_changes: Whether the worktree has staged/modified/untracked files.
        staged_count: Number of staged files.
        modified_count: Number of modified files.
        untracked_count: Number of untracked files.
        status_timed_out: Whether status collection timed out.
    """

    path: Path
    branch: str
    status: str
    is_current: bool
    has_changes: bool
    staged_count: int
    modified_count: int
    untracked_count: int
    status_timed_out: bool

    @classmethod
    def from_info(
        cls,
        info: WorktreeInfo,
        *,
        path: Path,
        is_current: bool,
        staged_count: int,
        modified_count: int,
        untracked_count: int,
        status_timed_out: bool,
        has_changes: bool,
    ) -> WorktreeSummary:
        """Build a WorktreeSummary from a WorktreeInfo record.

        The caller supplies computed fields (counts, is_current) so this
        stays a pure mapping with no git access.
        """
        return cls(
            path=path,
            branch=info.branch,
            status=info.status,
            is_current=is_current,
            has_changes=has_changes,
            staged_count=staged_count,
            modified_count=modified_count,
            untracked_count=untracked_count,
            status_timed_out=status_timed_out,
        )


@dataclass(frozen=True)
class WorktreeListRequest:
    """Inputs for listing worktrees.

    Invariants:
        - Current directory is provided for stable current-worktree detection.

    Args:
        workspace_path: Repository root path.
        verbose: Whether to include git status counts.
        current_dir: Current working directory for current-worktree detection.
    """

    workspace_path: Path
    verbose: bool
    current_dir: Path


@dataclass(frozen=True)
class WorktreeListResult:
    """Worktree list output for rendering at the edge.

    Invariants:
        - Worktrees preserve the ordering returned by git.

    Args:
        workspace_path: Repository root path.
        worktrees: Tuple of worktree summaries.
    """

    workspace_path: Path
    worktrees: tuple[WorktreeSummary, ...]
@dataclass(frozen=True)
class WorktreeSelectionItem:
    """Selectable worktree or branch entry.

    Invariants:
        - Branch-only entries have no worktree path.

    Args:
        item_id: Stable identifier for selection tracking.
        branch: Branch name associated with the item.
        worktree: Worktree summary if this item represents a worktree.
        is_branch_only: True when this item represents a branch without worktree.
    """

    item_id: str
    branch: str
    worktree: WorktreeSummary | None
    is_branch_only: bool

    @property
    def path(self) -> Path | None:
        """Return the worktree path if present."""
        if not self.worktree:
            return None
        return self.worktree.path


@dataclass(frozen=True)
class WorktreeSelectionPrompt:
    """Selection prompt metadata for interactive worktree choices.

    Invariants:
        - Selection options must map to WorktreeSelectionItem values.

    Args:
        request: SelectRequest describing the options.
        initial_filter: Optional query used to seed interactive filters.
    """

    request: SelectRequest[WorktreeSelectionItem]
    initial_filter: str = ""


@dataclass(frozen=True)
class WorktreeWarning:
    """User-facing warning metadata.

    Invariants:
        - Titles and messages remain stable for characterization tests.

    Args:
        title: Warning title for panel rendering.
        message: Warning body text.
        suggestion: Optional follow-up guidance.
    """

    title: str
    message: str
    suggestion: str | None = None


@dataclass(frozen=True)
class WorktreeWarningOutcome:
    """Warning outcome with an exit code hint.

    Args:
        warning: Warning metadata to render.
        exit_code: Suggested exit code for the command.
    """

    warning: WorktreeWarning
    exit_code: int = 1


class WorktreeConfirmAction(str, Enum):
    """Confirm action identifiers for worktree flows."""

    CREATE_WORKTREE = "create-worktree"


@dataclass(frozen=True)
class WorktreeConfirmation:
    """Confirmation request for follow-up actions.

    Invariants:
        - Prompts mirror existing CLI confirmations.

    Args:
        action: Action that requires confirmation.
        request: ConfirmRequest describing the prompt.
        default_response: Default response value for UI adapters.
        branch_name: Optional branch name for creation actions.
    """

    action: WorktreeConfirmAction
    request: ConfirmRequest
    default_response: bool
    branch_name: str | None = None


@dataclass(frozen=True)
class WorktreeResolution:
    """Resolved worktree path for shell integration.

    Args:
        worktree_path: Resolved worktree path to output.
        worktree_name: Optional worktree name for environment configuration.
    """

    worktree_path: Path
    worktree_name: str | None = None


@dataclass(frozen=True)
class WorktreeCreateRequest:
    """Inputs for creating a new worktree.

    Invariants:
        - Name is sanitized for branch creation.
        - Base branch defaults follow git default branch logic.

    Args:
        workspace_path: Repository root path.
        name: Worktree name (feature name).
        base_branch: Optional base branch override.
        install_dependencies: Whether to install dependencies after creation.
    """

    workspace_path: Path
    name: str
    base_branch: str | None
    install_dependencies: bool = True


@dataclass(frozen=True)
class WorktreeCreateResult:
    """Result of creating a new worktree.

    Args:
        worktree_path: Filesystem path to the created worktree.
        worktree_name: Sanitized worktree name.
        branch_name: Full branch name created for the worktree.
        base_branch: Base branch used for the worktree.
        dependencies_installed: Whether dependency installation succeeded.
            None when installation was skipped or never attempted.
    """

    worktree_path: Path
    worktree_name: str
    branch_name: str
    base_branch: str
    dependencies_installed: bool | None


@dataclass(frozen=True)
class ShellCommand:
    """Shell command specification for entering a worktree."""

    # NOTE(review): list/dict fields make this frozen dataclass only
    # shallowly immutable (and unhashable in practice) — confirm callers
    # never mutate argv/env after construction.
    # argv[0] is the shell executable to exec.
    argv: list[str]
    # Directory the shell is started in (the worktree root).
    workdir: Path
    # Full child environment (SCC_WORKTREE is added by the builder).
    env: dict[str, str]


@dataclass(frozen=True)
class WorktreeShellResult:
    """Shell entry details for a worktree."""

    # Command to spawn the interactive subshell.
    shell_command: ShellCommand
    # Worktree the shell is entering.
    worktree_path: Path
    # Display/env name for the worktree.
    worktree_name: str


# Union outcome types: each flow returns exactly one of these shapes and the
# edge layer dispatches on the concrete type.
WorktreeSelectOutcome: TypeAlias = (
    WorktreeResolution
    | WorktreeSelectionPrompt
    | WorktreeWarningOutcome
    | WorktreeConfirmation
    | WorktreeCreateResult
)
WorktreeSwitchOutcome: TypeAlias = WorktreeSelectOutcome
WorktreeEnterOutcome: TypeAlias = (
    WorktreeShellResult | WorktreeSelectionPrompt | WorktreeWarningOutcome
)


@dataclass(frozen=True)
class WorktreeDependencies:
    """Dependencies for worktree use cases."""

    # Port for all git operations (worktrees, branches, status).
    git_client: GitClient
    # Port for post-creation dependency installation.
    dependency_installer: DependencyInstaller


@dataclass(frozen=True)
class WorktreeSelectRequest:
    """Inputs for selecting a worktree or branch.

    Args:
        workspace_path: Repository root path.
        include_branches: Whether to include branches without worktrees.
        current_dir: Current working directory for current-worktree detection.
        selection: Selected item from a prior prompt (if any).
        confirm_create: Confirmation response for branch creation.
    """

    workspace_path: Path
    include_branches: bool
    current_dir: Path
    selection: WorktreeSelectionItem | None = None
    confirm_create: bool | None = None
@dataclass(frozen=True)
class WorktreeSwitchRequest:
    """Inputs for switching to a worktree.

    Args:
        workspace_path: Repository root path.
        target: Target name or shortcut.
        oldpwd: Shell OLDPWD value for '-' shortcut.
        interactive_allowed: Whether prompts may be shown.
        current_dir: Current working directory for current-worktree detection.
        selection: Selected item from a prior prompt (if any).
        confirm_create: Confirmation response for branch creation.
    """

    workspace_path: Path
    target: str | None
    oldpwd: str | None
    interactive_allowed: bool
    current_dir: Path
    selection: WorktreeSelectionItem | None = None
    confirm_create: bool | None = None


@dataclass(frozen=True)
class WorktreeEnterRequest:
    """Inputs for entering a worktree in a subshell.

    Args:
        workspace_path: Repository root path.
        target: Target name or shortcut.
        oldpwd: Shell OLDPWD value for '-' shortcut.
        interactive_allowed: Whether prompts may be shown.
        current_dir: Current working directory for current-worktree detection.
        env: Environment mapping for shell resolution.
        platform_system: Platform system name (e.g., "Windows", "Linux").
        selection: Selected item from a prior prompt (if any).
    """

    workspace_path: Path
    target: str | None
    oldpwd: str | None
    interactive_allowed: bool
    current_dir: Path
    env: dict[str, str]
    platform_system: str
    selection: WorktreeSelectionItem | None = None


def list_worktrees(
    request: WorktreeListRequest,
    *,
    git_client: GitClient,
) -> WorktreeListResult:
    """List worktrees for a repository.

    Invariants:
        - Mirrors git worktree ordering and status calculations.
        - Does not emit UI output.
    """
    # realpath() both sides so symlinked cwds still match their worktree.
    resolved_cwd = os.path.realpath(request.current_dir)
    entries: list[WorktreeSummary] = []

    for info in git_client.list_worktrees(request.workspace_path):
        wt_path = Path(info.path)

        if request.verbose:
            # Verbose mode gathers per-worktree status counts from git.
            staged, modified, untracked, timed_out = git_client.get_worktree_status(wt_path)
            dirty = staged + modified + untracked > 0
        else:
            # Fast path: trust the flag from `git worktree list`, no counts.
            staged = modified = untracked = 0
            timed_out = False
            dirty = info.has_changes

        entries.append(
            WorktreeSummary.from_info(
                info,
                path=wt_path,
                is_current=os.path.realpath(info.path) == resolved_cwd,
                staged_count=staged,
                modified_count=modified,
                untracked_count=untracked,
                status_timed_out=timed_out,
                has_changes=dirty,
            )
        )

    return WorktreeListResult(workspace_path=request.workspace_path, worktrees=tuple(entries))
+ """ + _require_workspace(request.workspace_path) + if not dependencies.git_client.is_git_repo(request.workspace_path): + raise NotAGitRepoError(path=str(request.workspace_path)) + + if request.selection is not None: + return _resolve_selection(request, dependencies) + + worktrees = list_worktrees( + WorktreeListRequest( + workspace_path=request.workspace_path, + verbose=False, + current_dir=request.current_dir, + ), + git_client=dependencies.git_client, + ).worktrees + branch_items: list[str] = [] + if request.include_branches: + branch_items = dependencies.git_client.list_branches_without_worktrees( + request.workspace_path + ) + + items = _build_selection_items(worktrees, branch_items) + if not items: + return WorktreeWarningOutcome( + WorktreeWarning( + title="No Worktrees or Branches", + message="No worktrees found and no remote branches available.", + suggestion="Create a worktree with: scc worktree create ", + ) + ) + + subtitle = f"{len(worktrees)} worktrees" + if branch_items: + subtitle += f", {len(branch_items)} branches" + return WorktreeSelectionPrompt( + request=_build_select_request( + request_id="worktree-select", + title="Select Worktree", + subtitle=subtitle, + items=items, + ), + ) + + +def switch_worktree( + request: WorktreeSwitchRequest, + *, + dependencies: WorktreeDependencies, +) -> WorktreeSwitchOutcome: + """Resolve a worktree switch target. + + Invariants: + - Shortcut semantics for '-' and '^' remain stable. + - Matching behavior mirrors git worktree fuzzy matching rules. + + Raises: + WorkspaceNotFoundError: If the workspace path does not exist. + NotAGitRepoError: If the workspace is not a git repository. + WorktreeCreationError: If creation fails after confirmation. 
def switch_worktree(
    request: WorktreeSwitchRequest,
    *,
    dependencies: WorktreeDependencies,
) -> WorktreeSwitchOutcome:
    """Resolve a worktree switch target.

    Invariants:
        - Shortcut semantics for '-' and '^' remain stable.
        - Matching behavior mirrors git worktree fuzzy matching rules.

    Raises:
        WorkspaceNotFoundError: If the workspace path does not exist.
        NotAGitRepoError: If the workspace is not a git repository.
        WorktreeCreationError: If creation fails after confirmation.
    """
    _require_workspace(request.workspace_path)
    if not dependencies.git_client.is_git_repo(request.workspace_path):
        raise NotAGitRepoError(path=str(request.workspace_path))

    if request.selection is not None:
        # NOTE(review): confirm_create is not forwarded here. Switch prompts
        # list worktrees only (include_branches=False), so no branch-creation
        # confirmation path is expected — verify against UI adapters.
        return _resolve_selection(
            WorktreeSelectRequest(
                workspace_path=request.workspace_path,
                include_branches=False,
                current_dir=request.current_dir,
                selection=request.selection,
            ),
            dependencies,
        )

    if request.target is None:
        # No target: offer an interactive pick over existing worktrees.
        worktrees = list_worktrees(
            WorktreeListRequest(
                workspace_path=request.workspace_path,
                verbose=False,
                current_dir=request.current_dir,
            ),
            git_client=dependencies.git_client,
        ).worktrees
        if not worktrees:
            return WorktreeWarningOutcome(
                WorktreeWarning(
                    title="No Worktrees",
                    message="No worktrees found for this repository.",
                    suggestion="Create one with: scc worktree create ",
                )
            )
        return WorktreeSelectionPrompt(
            request=_build_select_request(
                request_id="worktree-switch",
                title="Select Worktree",
                subtitle=f"{len(worktrees)} worktrees",
                items=_build_selection_items(worktrees, []),
            )
        )

    # Shortcut: '-' jumps back to the shell's previous directory.
    if request.target == "-":
        if not request.oldpwd:
            return WorktreeWarningOutcome(
                WorktreeWarning(
                    title="No Previous Directory",
                    message="Shell $OLDPWD is not set.",
                    suggestion="This typically means you haven't changed directories yet.",
                )
            )
        return WorktreeResolution(worktree_path=Path(request.oldpwd))

    # Shortcut: '^' jumps to the main (default-branch) worktree.
    if request.target == "^":
        main_worktree = dependencies.git_client.find_main_worktree(request.workspace_path)
        if not main_worktree:
            default_branch = dependencies.git_client.get_default_branch(request.workspace_path)
            return WorktreeWarningOutcome(
                WorktreeWarning(
                    title="No Main Worktree",
                    message=f"No worktree found for default branch '{default_branch}'.",
                    suggestion="The main branch may not have a separate worktree.",
                )
            )
        return WorktreeResolution(worktree_path=Path(main_worktree.path))

    exact_match, matches = dependencies.git_client.find_worktree_by_query(
        request.workspace_path, request.target
    )
    if exact_match:
        return WorktreeResolution(worktree_path=Path(exact_match.path))

    if not matches:
        # No worktree matched; maybe the target is a branch without one.
        # Skip git revision shorthands ('@', '@{...}') and our own shortcuts.
        if request.target not in ("^", "-", "@") and not request.target.startswith("@{"):
            branches = dependencies.git_client.list_branches_without_worktrees(
                request.workspace_path
            )
            if request.target in branches:
                if request.confirm_create is False:
                    return WorktreeWarningOutcome(
                        WorktreeWarning(
                            title="Cancelled",
                            message="Cancelled.",
                            suggestion=None,
                        ),
                        exit_code=EXIT_CANCELLED,
                    )
                if request.confirm_create is True:
                    return create_worktree(
                        WorktreeCreateRequest(
                            workspace_path=request.workspace_path,
                            name=request.target,
                            base_branch=request.target,
                            install_dependencies=True,
                        ),
                        dependencies=dependencies,
                    )
                if not request.interactive_allowed:
                    return WorktreeWarningOutcome(
                        WorktreeWarning(
                            title="Branch Exists, No Worktree",
                            message=f"Branch '{request.target}' exists but has no worktree.",
                            suggestion=(
                                "Use: scc worktree create "
                                f"{request.target} --base {request.target}"
                            ),
                        )
                    )
                # confirm_create is None and prompting is allowed: ask first.
                return WorktreeConfirmation(
                    action=WorktreeConfirmAction.CREATE_WORKTREE,
                    request=ConfirmRequest(
                        request_id="worktree-create-branch",
                        prompt=f"No worktree for '{request.target}'. Create one?",
                    ),
                    default_response=False,
                    branch_name=request.target,
                )

        return WorktreeWarningOutcome(
            WorktreeWarning(
                title="Worktree Not Found",
                message=f"No worktree matches '{request.target}'.",
                suggestion="Tip: Use 'scc worktree select --branches' to pick from remote branches.",
            )
        )

    if request.interactive_allowed:
        # Ambiguous match, interactive: reuse the picker seeded with the query.
        return WorktreeSelectionPrompt(
            request=_build_select_request(
                request_id="worktree-switch",
                title="Multiple Matches",
                subtitle=f"'{request.target}' matches {len(matches)} worktrees",
                items=_build_selection_items(_summaries_from_matches(matches), []),
            ),
            initial_filter=request.target,
        )

    # Ambiguous match, non-interactive: render a ranked list (Rich markup).
    match_lines: list[str] = []
    for i, match in enumerate(matches):
        display_branch = get_display_branch(match.branch)
        dir_name = Path(match.path).name
        if i == 0:
            match_lines.append(
                f" 1. [bold]{display_branch}[/] -> {dir_name} [dim]<- best match[/]"
            )
        else:
            match_lines.append(f" {i + 1}. {display_branch} -> {dir_name}")
    top_match_dir = Path(matches[0].path).name

    return WorktreeWarningOutcome(
        WorktreeWarning(
            title="Ambiguous Match",
            message=f"'{request.target}' matches {len(matches)} worktrees (ranked by relevance):",
            suggestion=(
                "\n".join(match_lines)
                + f"\n\n[dim]Use explicit directory name: scc worktree switch {top_match_dir}[/]"
            ),
        )
    )
def enter_worktree_shell(
    request: WorktreeEnterRequest,
    *,
    dependencies: WorktreeDependencies,
) -> WorktreeEnterOutcome:
    """Resolve a worktree target into a shell command.

    Invariants:
        - Shell resolution mirrors platform defaults.
        - Worktree existence is verified before returning a command.

    Raises:
        WorkspaceNotFoundError: If the workspace path does not exist.
        NotAGitRepoError: If the workspace is not a git repository.
    """
    _require_workspace(request.workspace_path)
    if not dependencies.git_client.is_git_repo(request.workspace_path):
        raise NotAGitRepoError(path=str(request.workspace_path))

    # Second round-trip: the UI already prompted and passed the choice back.
    if request.selection is not None:
        return _build_shell_result(request, request.selection)

    if request.target is None:
        worktrees = list_worktrees(
            WorktreeListRequest(
                workspace_path=request.workspace_path,
                verbose=False,
                current_dir=request.current_dir,
            ),
            git_client=dependencies.git_client,
        ).worktrees
        if not worktrees:
            return WorktreeWarningOutcome(
                WorktreeWarning(
                    title="No Worktrees",
                    message="No worktrees found for this repository.",
                    suggestion="Create one with: scc worktree create ",
                )
            )
        return WorktreeSelectionPrompt(
            request=_build_select_request(
                request_id="worktree-enter",
                title="Enter Worktree",
                subtitle="Select a worktree to enter",
                items=_build_selection_items(worktrees, []),
            )
        )

    # Shortcut: '-' enters the shell's previous directory.
    if request.target == "-":
        if not request.oldpwd:
            return WorktreeWarningOutcome(
                WorktreeWarning(
                    title="No Previous Directory",
                    message="Shell $OLDPWD is not set.",
                    suggestion="This typically means you haven't changed directories yet.",
                )
            )
        # Synthesize a selection; OLDPWD may not be a real worktree, so the
        # directory name stands in for the branch and counts are zeroed.
        selection = WorktreeSelectionItem(
            item_id="oldpwd",
            branch=Path(request.oldpwd).name,
            worktree=WorktreeSummary(
                path=Path(request.oldpwd),
                branch=Path(request.oldpwd).name,
                status="",
                is_current=False,
                has_changes=False,
                staged_count=0,
                modified_count=0,
                untracked_count=0,
                status_timed_out=False,
            ),
            is_branch_only=False,
        )
        return _build_shell_result(request, selection)

    # Shortcut: '^' enters the main-branch worktree.
    if request.target == "^":
        default_branch = dependencies.git_client.get_default_branch(request.workspace_path)
        worktrees = list_worktrees(
            WorktreeListRequest(
                workspace_path=request.workspace_path,
                verbose=False,
                current_dir=request.current_dir,
            ),
            git_client=dependencies.git_client,
        ).worktrees
        selected = None
        for worktree in worktrees:
            # Accept the detected default branch, or conventional main/master.
            if worktree.branch == default_branch or worktree.branch in {"main", "master"}:
                selected = worktree
                break
        if not selected:
            return WorktreeWarningOutcome(
                WorktreeWarning(
                    title="Main Branch Not Found",
                    message=f"No worktree found for main branch ({default_branch}).",
                    suggestion="The main worktree may be in a different location.",
                )
            )
        selection = WorktreeSelectionItem(
            item_id=f"worktree:{selected.path}",
            branch=selected.branch or selected.path.name,
            worktree=selected,
            is_branch_only=False,
        )
        return _build_shell_result(request, selection)

    # Fuzzy match by query; only the best match is used here (no ambiguity
    # prompt in the enter flow, unlike switch_worktree).
    matched, _matches = dependencies.git_client.find_worktree_by_query(
        request.workspace_path, request.target
    )
    if not matched:
        return WorktreeWarningOutcome(
            WorktreeWarning(
                title="Worktree Not Found",
                message=f"No worktree matching '{request.target}'.",
                suggestion="Run 'scc worktree list' to see available worktrees.",
            )
        )
    selection = WorktreeSelectionItem(
        item_id=f"worktree:{matched.path}",
        branch=matched.branch or Path(matched.path).name,
        worktree=WorktreeSummary(
            path=Path(matched.path),
            branch=matched.branch,
            status=matched.status,
            is_current=False,
            has_changes=matched.has_changes,
            staged_count=matched.staged_count,
            modified_count=matched.modified_count,
            untracked_count=matched.untracked_count,
            status_timed_out=matched.status_timed_out,
        ),
        is_branch_only=False,
    )
    return _build_shell_result(request, selection)
def create_worktree(
    request: WorktreeCreateRequest,
    *,
    dependencies: WorktreeDependencies,
) -> WorktreeCreateResult:
    """Create a worktree using git and dependency installer ports.

    Invariants:
        - Uses the same branch naming and lock behavior as the CLI.
        - Cleans up partially created worktrees on failure.

    Raises:
        NotAGitRepoError: If the workspace is not a git repository.
        WorktreeCreationError: If worktree creation fails.
        ValueError: If the sanitized worktree name is empty.
    """
    if not dependencies.git_client.is_git_repo(request.workspace_path):
        raise NotAGitRepoError(path=str(request.workspace_path))

    safe_name = sanitize_branch_name(request.name)
    if not safe_name:
        raise ValueError(f"Invalid worktree name: {request.name!r}")

    branch_name = f"{WORKTREE_BRANCH_PREFIX}{safe_name}"
    # Worktrees live beside the repo in a sibling '<repo>-worktrees' directory.
    worktree_base = request.workspace_path.parent / f"{request.workspace_path.name}-worktrees"
    worktree_path = worktree_base / safe_name

    # File lock serializes concurrent creations against the same repo.
    lock_file = lock_path("worktree", request.workspace_path)
    with file_lock(lock_file):
        if worktree_path.exists():
            raise WorktreeCreationError(
                name=safe_name,
                user_message=f"Worktree already exists: {worktree_path}",
                suggested_action="Use existing worktree, remove it first, or choose a different name",
            )

        base_branch = request.base_branch or dependencies.git_client.get_default_branch(
            request.workspace_path
        )

        # Refresh the base branch from the remote when one exists.
        if dependencies.git_client.has_remote(request.workspace_path):
            dependencies.git_client.fetch_branch(request.workspace_path, base_branch)

        worktree_created = False
        try:
            dependencies.git_client.add_worktree(
                request.workspace_path,
                worktree_path,
                branch_name,
                base_branch,
            )
            worktree_created = True

            # None means installation was skipped or never attempted.
            dependencies_installed = None
            if request.install_dependencies:
                install_result = dependencies.dependency_installer.install(worktree_path)
                if install_result.attempted and not install_result.success:
                    raise WorktreeCreationError(
                        name=safe_name,
                        user_message="Dependency install failed for the new worktree",
                        suggested_action="Install dependencies manually and retry if needed",
                    )
                if install_result.attempted:
                    dependencies_installed = install_result.success

            return WorktreeCreateResult(
                worktree_path=worktree_path,
                worktree_name=safe_name,
                branch_name=branch_name,
                base_branch=base_branch,
                dependencies_installed=dependencies_installed,
            )
        except KeyboardInterrupt:
            # Ctrl-C mid-creation: remove the partial worktree, then re-raise.
            if worktree_created or worktree_path.exists():
                _cleanup_partial_worktree(
                    request.workspace_path, worktree_path, dependencies.git_client
                )
            raise
        except WorktreeCreationError:
            # Already a domain error (e.g. failed install): clean up, re-raise.
            if worktree_created or worktree_path.exists():
                _cleanup_partial_worktree(
                    request.workspace_path, worktree_path, dependencies.git_client
                )
            raise
        except Exception as exc:
            # Anything else (typically a git subprocess failure): clean up and
            # wrap in the domain error, preserving the failed command if any.
            if worktree_created or worktree_path.exists():
                _cleanup_partial_worktree(
                    request.workspace_path, worktree_path, dependencies.git_client
                )
            raise WorktreeCreationError(
                name=safe_name,
                user_message=f"Failed to create worktree: {safe_name}",
                suggested_action="Check if the branch already exists or if there are uncommitted changes",
                command=str(getattr(exc, "cmd", "")) or None,
            ) from exc


def _cleanup_partial_worktree(repo_path: Path, worktree_path: Path, git_client: GitClient) -> None:
    """Best-effort removal of a partially created worktree (never raises)."""
    try:
        git_client.remove_worktree(repo_path, worktree_path, force=True)
    except Exception:
        # Best-effort cleanup: the original error is what the caller reports.
        pass
    try:
        git_client.prune_worktrees(repo_path)
    except Exception:
        pass


def _require_workspace(workspace_path: Path) -> None:
    """Raise WorkspaceNotFoundError when the workspace path does not exist."""
    if not workspace_path.exists():
        raise WorkspaceNotFoundError(path=str(workspace_path))


def _build_selection_items(
    worktrees: Iterable[WorktreeSummary],
    branches: Sequence[str],
) -> list[WorktreeSelectionItem]:
    """Build selection items: worktrees first, then branch-only entries."""
    items: list[WorktreeSelectionItem] = []
    for worktree in worktrees:
        items.append(
            WorktreeSelectionItem(
                item_id=f"worktree:{worktree.path}",
                branch=worktree.branch,
                worktree=worktree,
                is_branch_only=False,
            )
        )
    for branch in branches:
        items.append(
            WorktreeSelectionItem(
                item_id=f"branch:{branch}",
                branch=branch,
                worktree=None,
                is_branch_only=True,
            )
        )
    return items
item.item_id), + description=None, + value=item, + ) + for item in items + ] + return SelectRequest( + request_id=request_id, + title=title, + subtitle=subtitle, + options=options, + allow_back=False, + ) + + +def _resolve_selection( + request: WorktreeSelectRequest, + dependencies: WorktreeDependencies, +) -> WorktreeSelectOutcome: + selection = request.selection + if selection is None: + raise ValueError("Selection must be provided to resolve a worktree selection") + + if not selection.is_branch_only: + if not selection.path: + raise ValueError("Selection missing worktree path") + worktree_name = selection.branch or selection.path.name + return WorktreeResolution(worktree_path=selection.path, worktree_name=worktree_name) + + if request.confirm_create is None: + return WorktreeConfirmation( + action=WorktreeConfirmAction.CREATE_WORKTREE, + request=ConfirmRequest( + request_id="worktree-create-branch", + prompt=f"Create worktree for branch '{selection.branch}'?", + ), + default_response=True, + branch_name=selection.branch, + ) + + if not request.confirm_create: + return WorktreeWarningOutcome( + WorktreeWarning( + title="Cancelled", + message="Cancelled.", + suggestion=None, + ), + exit_code=EXIT_CANCELLED, + ) + + return create_worktree( + WorktreeCreateRequest( + workspace_path=request.workspace_path, + name=selection.branch, + base_branch=selection.branch, + install_dependencies=True, + ), + dependencies=dependencies, + ) + + +def _summaries_from_matches(matches: Sequence[WorktreeInfo]) -> list[WorktreeSummary]: + summaries = [] + for match in matches: + summaries.append( + WorktreeSummary( + path=Path(match.path), + branch=match.branch, + status=match.status, + is_current=match.is_current, + has_changes=match.has_changes, + staged_count=match.staged_count, + modified_count=match.modified_count, + untracked_count=match.untracked_count, + status_timed_out=match.status_timed_out, + ) + ) + return summaries + + +def _build_shell_result( + request: 
WorktreeEnterRequest, + selection: WorktreeSelectionItem, +) -> WorktreeShellResult | WorktreeWarningOutcome: + if not selection.path: + raise ValueError("Selection must include a worktree path") + + if not selection.path.exists(): + return WorktreeWarningOutcome( + WorktreeWarning( + title="Worktree Missing", + message=f"Worktree path does not exist: {selection.path}", + suggestion="The worktree may have been removed. Run 'scc worktree prune'.", + ) + ) + + env = dict(request.env) + worktree_name = selection.branch or selection.path.name + env["SCC_WORKTREE"] = worktree_name + + if request.platform_system == "Windows": + shell = env.get("COMSPEC", "cmd.exe") + else: + shell = env.get("SHELL", "/bin/bash") + + return WorktreeShellResult( + shell_command=ShellCommand(argv=[shell], workdir=selection.path, env=env), + worktree_path=selection.path, + worktree_name=worktree_name, + ) diff --git a/src/scc_cli/bootstrap.py b/src/scc_cli/bootstrap.py index 2aace5a..25e41d2 100644 --- a/src/scc_cli/bootstrap.py +++ b/src/scc_cli/bootstrap.py @@ -7,16 +7,28 @@ from scc_cli.adapters.claude_agent_runner import ClaudeAgentRunner from scc_cli.adapters.docker_sandbox_runtime import DockerSandboxRuntime +from scc_cli.adapters.local_config_store import LocalConfigStore +from scc_cli.adapters.local_dependency_installer import LocalDependencyInstaller +from scc_cli.adapters.local_doctor_runner import LocalDoctorRunner from scc_cli.adapters.local_filesystem import LocalFilesystem from scc_cli.adapters.local_git_client import LocalGitClient +from scc_cli.adapters.personal_profile_service_local import LocalPersonalProfileService from scc_cli.adapters.requests_fetcher import RequestsFetcher +from scc_cli.adapters.session_store_json import JsonSessionStore from scc_cli.adapters.system_clock import SystemClock +from scc_cli.adapters.zip_archive_writer import ZipArchiveWriter from scc_cli.ports.agent_runner import AgentRunner +from scc_cli.ports.archive_writer import ArchiveWriter from 
scc_cli.ports.clock import Clock +from scc_cli.ports.config_store import ConfigStore +from scc_cli.ports.dependency_installer import DependencyInstaller +from scc_cli.ports.doctor_runner import DoctorRunner from scc_cli.ports.filesystem import Filesystem from scc_cli.ports.git_client import GitClient +from scc_cli.ports.personal_profile_service import PersonalProfileService from scc_cli.ports.remote_fetcher import RemoteFetcher from scc_cli.ports.sandbox_runtime import SandboxRuntime +from scc_cli.ports.session_store import SessionStore @dataclass(frozen=True) @@ -25,10 +37,15 @@ class DefaultAdapters: filesystem: Filesystem git_client: GitClient + dependency_installer: DependencyInstaller remote_fetcher: RemoteFetcher clock: Clock agent_runner: AgentRunner sandbox_runtime: SandboxRuntime + personal_profile_service: PersonalProfileService + doctor_runner: DoctorRunner + archive_writer: ArchiveWriter + config_store: ConfigStore @lru_cache(maxsize=1) @@ -38,8 +55,28 @@ def get_default_adapters() -> DefaultAdapters: return DefaultAdapters( filesystem=LocalFilesystem(), git_client=LocalGitClient(), + dependency_installer=LocalDependencyInstaller(), remote_fetcher=RequestsFetcher(), clock=SystemClock(), agent_runner=ClaudeAgentRunner(), sandbox_runtime=DockerSandboxRuntime(), + personal_profile_service=LocalPersonalProfileService(), + doctor_runner=LocalDoctorRunner(), + archive_writer=ZipArchiveWriter(), + config_store=LocalConfigStore(), ) + + +def build_session_store(filesystem: Filesystem | None = None) -> SessionStore: + """Build the default session store adapter. + + Args: + filesystem: Optional filesystem adapter override. + + Returns: + SessionStore implementation backed by JSON storage. 
+ """ + from scc_cli import config + + fs = filesystem or get_default_adapters().filesystem + return JsonSessionStore(filesystem=fs, sessions_file=config.SESSIONS_FILE) diff --git a/src/scc_cli/cli.py b/src/scc_cli/cli.py index 07dc45a..7f01736 100644 --- a/src/scc_cli/cli.py +++ b/src/scc_cli/cli.py @@ -113,39 +113,26 @@ def main_callback( if ctx.invoked_subcommand is None: from pathlib import Path - from rich.prompt import Prompt - from . import config as scc_config from . import setup as scc_setup - from .services.workspace import resolve_launch_context + from .application.workspace import ResolveWorkspaceRequest, resolve_workspace from .ui.gate import is_interactive_allowed # Use strong-signal resolver (git or .scc.yaml) for parity with 'scc start' # Weak markers (package.json, etc.) are NOT used for auto-launch cwd = Path.cwd() - result = resolve_launch_context(cwd, workspace_arg=None) - workspace_detected = result is not None and result.is_auto_eligible() + context = resolve_workspace(ResolveWorkspaceRequest(cwd=cwd, workspace_arg=None)) + workspace_detected = context is not None and context.is_auto_eligible if is_interactive_allowed(): - # If no org is configured and standalone isn't explicit, offer setup + # If no org is configured and standalone isn't explicit, run setup wizard user_cfg = scc_config.load_user_config() org_source = user_cfg.get("organization_source") or {} has_org = bool(org_source.get("url")) if not has_org and not user_cfg.get("standalone"): - choice = Prompt.ask( - "[yellow]No organization configured.[/yellow] Choose setup mode", - choices=["setup", "standalone", "quit"], - default="setup", - ) - if choice == "setup": - if not scc_setup.run_setup_wizard(console): - raise typer.Exit(0) - elif choice == "standalone": - user_cfg["standalone"] = True - scc_config.save_user_config(user_cfg) - else: + # Run the comprehensive setup wizard directly + if not scc_setup.run_setup_wizard(console): raise typer.Exit(0) - # Setup complete - return to 
prompt return diff --git a/src/scc_cli/cli_common.py b/src/scc_cli/cli_common.py index 7081fda..80a7ad5 100644 --- a/src/scc_cli/cli_common.py +++ b/src/scc_cli/cli_common.py @@ -68,6 +68,10 @@ def handle_errors(func: F) -> F: JSON Mode: This is the SINGLE LOCATION for JSON error envelope output. All errors in JSON mode are handled here to ensure consistency. + Error strategy: + - Use cases raise typed SCCError instances. + - CLI edges map errors via core.error_mapping + json_output helpers. + Args: func: The CLI command function to wrap. diff --git a/src/scc_cli/commands/launch/__init__.py b/src/scc_cli/commands/launch/__init__.py index 34dfbac..4013ce9 100644 --- a/src/scc_cli/commands/launch/__init__.py +++ b/src/scc_cli/commands/launch/__init__.py @@ -10,12 +10,7 @@ """ from .app import launch_app, start -from .flow import ( - _configure_team_settings, - _sync_marketplace_settings, - interactive_start, - run_start_wizard_flow, -) +from .flow import interactive_start, run_start_wizard_flow from .render import ( build_dry_run_data, show_dry_run_panel, @@ -23,6 +18,7 @@ warn_if_non_worktree, ) from .sandbox import extract_container_name, launch_sandbox +from .team_settings import _configure_team_settings from .workspace import ( prepare_workspace, resolve_mount_and_branch, @@ -47,7 +43,6 @@ "run_start_wizard_flow", # Private helpers (exposed for orchestrator) "_configure_team_settings", - "_sync_marketplace_settings", # Sandbox functions "launch_sandbox", "extract_container_name", diff --git a/src/scc_cli/commands/launch/flow.py b/src/scc_cli/commands/launch/flow.py index a67efa2..89a6918 100644 --- a/src/scc_cli/commands/launch/flow.py +++ b/src/scc_cli/commands/launch/flow.py @@ -16,61 +16,94 @@ from rich.status import Status from ... 
import config, git, sessions, setup, teams -from ...application.start_session import ( - StartSessionDependencies, - StartSessionRequest, - prepare_start_session, - start_session, +from ...application.launch import ( + ApplyPersonalProfileConfirmation, + ApplyPersonalProfileDependencies, + ApplyPersonalProfileRequest, + ApplyPersonalProfileResult, + BackRequested, + CwdContext, + QuickResumeDismissed, + QuickResumeViewModel, + SelectSessionDependencies, + SelectSessionRequest, + SelectSessionResult, + SessionNameEntered, + SessionSelectionItem, + SessionSelectionMode, + SessionSelectionPrompt, + SessionSelectionWarningOutcome, + StartWizardConfig, + StartWizardContext, + StartWizardState, + StartWizardStep, + TeamOption, + TeamRepoPickerViewModel, + TeamSelected, + TeamSelectionViewModel, + WorkspacePickerViewModel, + WorkspaceSource, + WorkspaceSourceChosen, + WorkspaceSourceViewModel, + WorkspaceSummary, + WorktreeSelected, + apply_personal_profile, + apply_start_wizard_event, + build_clone_repo_prompt, + build_confirm_worktree_prompt, + build_cross_team_resume_prompt, + build_custom_workspace_prompt, + build_quick_resume_prompt, + build_session_name_prompt, + build_team_repo_prompt, + build_team_selection_prompt, + build_workspace_picker_prompt, + build_workspace_source_prompt, + build_worktree_name_prompt, + finalize_launch, + initialize_start_wizard, + prepare_launch_plan, + select_session, ) +from ...application.sessions import SessionService +from ...application.start_session import StartSessionDependencies, StartSessionRequest from ...bootstrap import get_default_adapters from ...cli_common import console, err_console from ...contexts import WorkContext, load_recent_contexts, normalize_path, record_context -from ...core import personal_profiles from ...core.errors import WorkspaceNotFoundError from ...core.exit_codes import EXIT_CANCELLED, EXIT_CONFIG, EXIT_ERROR, EXIT_USAGE -from ...json_output import build_envelope -from ...kinds import Kind from 
...marketplace.materialize import materialize_marketplace from ...marketplace.resolve import resolve_effective_config -from ...marketplace.sync import SyncError, SyncResult, sync_marketplace_settings from ...output_mode import json_output_mode, print_human, print_json, set_pretty_mode from ...panels import create_info_panel, create_warning_panel -from ...theme import Colors, Indicators, Spinners, get_brand_header +from ...ports.git_client import GitClient +from ...ports.personal_profile_service import PersonalProfileService +from ...presentation.json.launch_json import build_start_dry_run_envelope +from ...presentation.launch_presenter import build_sync_output_view_model, render_launch_output +from ...services.workspace import has_project_markers, is_suspicious_directory +from ...theme import Colors, Spinners, get_brand_header from ...ui.chrome import print_with_layout, render_with_layout from ...ui.gate import is_interactive_allowed -from ...ui.git_interactive import clone_repo from ...ui.keys import _BackSentinel -from ...ui.picker import ( - QuickResumeResult, - TeamSwitchRequested, - pick_context_quick_resume, - pick_team, -) -from ...ui.prompts import ( - confirm_with_layout, - prompt_custom_workspace, - prompt_repo_url, - prompt_with_layout, - select_session, -) +from ...ui.picker import pick_session +from ...ui.prompts import confirm_with_layout from ...ui.wizard import ( BACK, - WorkspaceSource, - pick_recent_workspace, - pick_team_repo, - pick_workspace_source, -) -from .render import ( - build_dry_run_data, - show_dry_run_panel, - show_launch_panel, - warn_if_non_worktree, + StartWizardAction, + StartWizardAnswer, + StartWizardAnswerKind, + _normalize_path, + render_start_wizard_prompt, ) -from .workspace import ( - prepare_workspace, - resolve_workspace_team, - validate_and_resolve_workspace, +from .flow_types import ( + UserConfig, + reset_for_team_switch, + set_team_context, + set_workspace, ) +from .render import build_dry_run_data, 
show_dry_run_panel, show_launch_panel, warn_if_non_worktree +from .team_settings import _configure_team_settings +from .workspace import prepare_workspace, resolve_workspace_team, validate_and_resolve_workspace # ───────────────────────────────────────────────────────────────────────────── # Helper Functions (extracted for maintainability) @@ -82,12 +115,13 @@ def _resolve_session_selection( team: str | None, resume: bool, select: bool, - cfg: dict[str, Any], + cfg: UserConfig, *, json_mode: bool = False, standalone_override: bool = False, no_interactive: bool = False, dry_run: bool = False, + session_service: SessionService, ) -> tuple[str | None, str | None, str | None, str | None, bool, bool]: """ Handle session selection logic for --select, --resume, and interactive modes. @@ -114,17 +148,19 @@ def _resolve_session_selection( worktree_name = None cancelled = False + select_dependencies = SelectSessionDependencies(session_service=session_service) + # Interactive mode if no workspace provided and no session flags if workspace is None and not resume and not select: # For --dry-run without workspace, use resolver to auto-detect (skip interactive) if dry_run: from pathlib import Path - from ...services.workspace import resolve_launch_context + from ...application.workspace import ResolveWorkspaceRequest, resolve_workspace - result = resolve_launch_context(Path.cwd(), workspace_arg=None) - if result is not None: - return str(result.workspace_root), team, None, None, False, True # auto-detected + context = resolve_workspace(ResolveWorkspaceRequest(cwd=Path.cwd(), workspace_arg=None)) + if context is not None: + return str(context.workspace_root), team, None, None, False, True # auto-detected # No auto-detect possible, fall through to error err_console.print( "[red]Error:[/red] No workspace could be auto-detected.\n" @@ -141,11 +177,11 @@ def _resolve_session_selection( # Try auto-detect before failing from pathlib import Path - from ...services.workspace import 
resolve_launch_context + from ...application.workspace import ResolveWorkspaceRequest, resolve_workspace - result = resolve_launch_context(Path.cwd(), workspace_arg=None) - if result is not None: - return str(result.workspace_root), team, None, None, False, True # auto-detected + context = resolve_workspace(ResolveWorkspaceRequest(cwd=Path.cwd(), workspace_arg=None)) + if context is not None: + return str(context.workspace_root), team, None, None, False, True # auto-detected err_console.print( "[red]Error:[/red] Interactive mode requires a terminal (TTY).\n" @@ -153,9 +189,15 @@ def _resolve_session_selection( highlight=False, ) raise typer.Exit(EXIT_USAGE) + adapters = get_default_adapters() workspace_result, team, session_name, worktree_name = cast( tuple[str | None, str | None, str | None, str | None], - interactive_start(cfg, standalone_override=standalone_override, team_override=team), + interactive_start( + cfg, + standalone_override=standalone_override, + team_override=team, + git_client=adapters.git_client, + ), ) if workspace_result is None: return None, team, None, None, True, False @@ -196,28 +238,46 @@ def _resolve_session_selection( ) return None, team, None, None, False, False - recent_sessions = sessions.list_recent(limit=10) - if effective_team is None: - filtered_sessions = [s for s in recent_sessions if s.get("team") is None] - else: - filtered_sessions = [s for s in recent_sessions if s.get("team") == effective_team] + outcome = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.SELECT, + team=effective_team, + include_all=False, + limit=10, + ), + dependencies=select_dependencies, + ) - if not filtered_sessions: + if isinstance(outcome, SessionSelectionWarningOutcome): if not json_mode: console.print("[yellow]No recent sessions found.[/yellow]") return None, team, None, None, False, False - selected = select_session(console, filtered_sessions) - if selected is None: - return None, team, None, None, True, False - workspace = 
selected.get("workspace") - if not team: - team = selected.get("team") - # --standalone overrides any team from session (standalone means no team) - if standalone_override: - team = None - if not json_mode: - print_with_layout(console, f"[dim]Selected: {workspace}[/dim]") + if isinstance(outcome, SessionSelectionPrompt): + selected_item = _prompt_for_session_selection(outcome) + if selected_item is None: + return None, team, None, None, True, False + outcome = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.SELECT, + team=effective_team, + include_all=False, + limit=10, + selection=selected_item, + ), + dependencies=select_dependencies, + ) + + if isinstance(outcome, SelectSessionResult): + selected = outcome.session + workspace = selected.workspace + if not team: + team = selected.team + # --standalone overrides any team from session (standalone means no team) + if standalone_override: + team = None + if not json_mode: + print_with_layout(console, f"[dim]Selected: {workspace}[/dim]") # Handle --resume: auto-select most recent session elif resume and workspace is None: @@ -235,234 +295,130 @@ def _resolve_session_selection( ) return None, team, None, None, False, False - recent_sessions = sessions.list_recent(limit=50) - if effective_team is None: - filtered_sessions = [s for s in recent_sessions if s.get("team") is None] - else: - filtered_sessions = [s for s in recent_sessions if s.get("team") == effective_team] + outcome = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.RESUME, + team=effective_team, + include_all=False, + limit=50, + ), + dependencies=select_dependencies, + ) + + if isinstance(outcome, SessionSelectionWarningOutcome): + if not json_mode: + console.print("[yellow]No recent sessions found.[/yellow]") + return None, team, None, None, False, False - if filtered_sessions: - recent_session = filtered_sessions[0] - workspace = recent_session.get("workspace") + if isinstance(outcome, SelectSessionResult): + 
recent_session = outcome.session + workspace = recent_session.workspace if not team: - team = recent_session.get("team") + team = recent_session.team # --standalone overrides any team from session (standalone means no team) if standalone_override: team = None if not json_mode: print_with_layout(console, f"[dim]Resuming: {workspace}[/dim]") - else: - if not json_mode: - console.print("[yellow]No recent sessions found.[/yellow]") - return None, team, None, None, False, False return workspace, team, session_name, worktree_name, cancelled, False # explicit workspace -def _configure_team_settings(team: str | None, cfg: dict[str, Any]) -> None: - """ - Validate team profile exists. - - NOTE: Plugin settings are now sourced ONLY from workspace settings.local.json - (via _sync_marketplace_settings). Docker volume injection has been removed - to prevent plugin mixing across teams. - - IMPORTANT: This function must remain cache-only (no network calls). - It's called in offline mode where only cached org config is available. - If you need to add network operations, gate them with an offline check - or move them to _sync_marketplace_settings() which is already offline-aware. +def _apply_personal_profile( + workspace_path: Path, + *, + json_mode: bool, + non_interactive: bool, + profile_service: PersonalProfileService, +) -> tuple[str | None, bool]: + """Apply personal profile if available. - Raises: - typer.Exit: If team profile is not found. + Returns (profile_id, applied). 
""" - if not team: - return - - with Status( - f"[cyan]Validating {team} profile...[/cyan]", console=console, spinner=Spinners.SETUP - ): - # load_cached_org_config() reads from local cache only - safe for offline mode - org_config = config.load_cached_org_config() + request = _build_personal_profile_request( + workspace_path, + json_mode=json_mode, + non_interactive=non_interactive, + confirm_apply=None, + ) + dependencies = ApplyPersonalProfileDependencies(profile_service=profile_service) - validation = teams.validate_team_profile(team, org_config) - if not validation["valid"]: - print_with_layout( + while True: + outcome = apply_personal_profile(request, dependencies=dependencies) + if isinstance(outcome, ApplyPersonalProfileConfirmation): + _render_personal_profile_confirmation(outcome, json_mode=json_mode) + confirm = confirm_with_layout( console, - create_warning_panel( - "Team Not Found", - f"No team profile named '{team}'.", - "Run 'scc team list' to see available profiles", - ), - constrain=True, + outcome.request.prompt, + default=outcome.default_response, ) - raise typer.Exit(1) - - # NOTE: docker.inject_team_settings() removed - workspace settings.local.json - # is now the single source of truth for plugins (prevents cross-team mixing) - - -def _sync_marketplace_settings( - workspace_path: Path | None, - team: str | None, - org_config_url: str | None = None, -) -> SyncResult | None: - """ - Sync marketplace settings for the workspace. - - Orchestrates the full marketplace pipeline: - 1. Compute effective plugins for team - 2. Materialize required marketplaces - 3. Render settings (NOT written to workspace to prevent host leakage) - 4. Return rendered_settings for container injection - - IMPORTANT: This uses container-only mode to prevent host Claude from seeing - SCC-managed plugins. Marketplaces are still materialized to workspace (for - container access via bind-mount), but settings.local.json is NOT written. 
- Instead, rendered_settings is returned for injection into container HOME. - - Args: - workspace_path: Path to the workspace directory. - team: Selected team profile name. - org_config_url: URL of the org config (for tracking). - - Returns: - SyncResult with details (including rendered_settings for container injection), - or None if no sync needed. - - Raises: - typer.Exit: If marketplace sync fails critically. - """ - if workspace_path is None or team is None: - return None - - org_config = config.load_cached_org_config() - if org_config is None: - return None - - with Status( - "[cyan]Syncing marketplace settings...[/cyan]", console=console, spinner=Spinners.NETWORK - ): - try: - # Use container-only mode: - # - write_to_workspace=False: Don't write settings.local.json (prevents host leakage) - # - container_path_prefix: Workspace path for absolute paths in container - # - # Docker sandbox mounts workspace at the same absolute path, so paths like - # "/Users/foo/project/.claude/.scc-marketplaces/..." 
will resolve correctly - # when settings are in container HOME (/home/agent/.claude/settings.json) - result = sync_marketplace_settings( - project_dir=workspace_path, - org_config_data=org_config, - team_id=team, - org_config_url=org_config_url, - write_to_workspace=False, # Container-only mode - container_path_prefix=str(workspace_path), # Absolute paths for container + request = _build_personal_profile_request( + workspace_path, + json_mode=json_mode, + non_interactive=non_interactive, + confirm_apply=confirm, ) + continue - # Display any warnings - if result.warnings: - console.print() - for warning in result.warnings: - print_with_layout(console, f"[yellow]{warning}[/yellow]") - console.print() - - # Log success - if result.plugins_enabled: - print_with_layout( - console, - f"[green]{Indicators.get('PASS')} Enabled {len(result.plugins_enabled)} team plugin(s)[/green]", - ) - if result.marketplaces_materialized: - print_with_layout( - console, - f"[green]{Indicators.get('PASS')} Materialized {len(result.marketplaces_materialized)} marketplace(s)[/green]", - ) - - # rendered_settings will be passed to launch_sandbox for container injection - return result + if isinstance(outcome, ApplyPersonalProfileResult): + _render_personal_profile_result(outcome, json_mode=json_mode) + return outcome.profile_id, outcome.applied - except SyncError as e: - panel = create_warning_panel( - "Marketplace Sync Failed", - str(e), - "Team plugins may not be available. Use --dry-run to diagnose.", - ) - print_with_layout(console, panel, constrain=True) - # Non-fatal: continue without marketplace sync - return None + return None, False -def _apply_personal_profile( +def _build_personal_profile_request( workspace_path: Path, *, json_mode: bool, non_interactive: bool, -) -> tuple[str | None, bool]: - """Apply personal profile if available. - - Returns (profile_id, applied). 
- """ - profile, corrupt = personal_profiles.load_personal_profile_with_status(workspace_path) - if corrupt: - if not json_mode: - console.print("[yellow]Personal profile is invalid JSON. Skipping.[/yellow]") - return None, False - if profile is None: - return None, False + confirm_apply: bool | None, +) -> ApplyPersonalProfileRequest: + return ApplyPersonalProfileRequest( + workspace_path=workspace_path, + interactive_allowed=is_interactive_allowed( + json_mode=json_mode, + no_interactive_flag=non_interactive, + ), + confirm_apply=confirm_apply, + ) - drift = personal_profiles.detect_drift(workspace_path) - if drift and not personal_profiles.workspace_has_overrides(workspace_path): - drift = False - if drift and not is_interactive_allowed( - json_mode=json_mode, no_interactive_flag=non_interactive - ): - if not json_mode: - console.print( - "[yellow]Workspace overrides detected; personal profile not applied.[/yellow]" - ) - return profile.profile_id, False +def _render_personal_profile_confirmation( + outcome: ApplyPersonalProfileConfirmation, *, json_mode: bool +) -> None: + if json_mode: + return + if outcome.message: + console.print(outcome.message) - if drift and not json_mode: - console.print("[yellow]Workspace overrides detected.[/yellow]") - if not confirm_with_layout(console, "Apply personal profile anyway?", default=False): - return profile.profile_id, False - existing_settings, settings_invalid = personal_profiles.load_workspace_settings_with_status( - workspace_path - ) - existing_mcp, mcp_invalid = personal_profiles.load_workspace_mcp_with_status(workspace_path) - if settings_invalid: - if not json_mode: - console.print("[yellow]Invalid JSON in .claude/settings.local.json[/yellow]") - return profile.profile_id, False - if mcp_invalid: - if not json_mode: - console.print("[yellow]Invalid JSON in .mcp.json[/yellow]") - return profile.profile_id, False - - existing_settings = existing_settings or {} - existing_mcp = existing_mcp or {} - - merged_settings 
= personal_profiles.merge_personal_settings( - workspace_path, existing_settings, profile.settings or {} - ) - merged_mcp = personal_profiles.merge_personal_mcp(existing_mcp, profile.mcp or {}) +def _render_personal_profile_result( + outcome: ApplyPersonalProfileResult, *, json_mode: bool +) -> None: + if json_mode: + return + if outcome.message: + console.print(outcome.message) - personal_profiles.write_workspace_settings(workspace_path, merged_settings) - if profile.mcp: - personal_profiles.write_workspace_mcp(workspace_path, merged_mcp) - personal_profiles.save_applied_state( - workspace_path, - profile.profile_id, - personal_profiles.compute_fingerprints(workspace_path), +def _prompt_for_session_selection(prompt: SessionSelectionPrompt) -> SessionSelectionItem | None: + items = [option.value for option in prompt.request.options if option.value is not None] + if not items: + return None + summaries = [item.summary for item in items] + selected = pick_session( + summaries, + title=prompt.request.title, + subtitle=prompt.request.subtitle, ) - - if not json_mode: - console.print("[green]Applied personal profile.[/green]") - - return profile.profile_id, True + if selected is None: + return None + try: + index = summaries.index(selected) + except ValueError: + return None + return items[index] def _record_session_and_context( @@ -611,6 +567,7 @@ def start( cfg = config.load_user_config() adapters = get_default_adapters() + session_service = sessions.get_session_service(adapters.filesystem) # ── Step 2: Session selection (interactive, --select, --resume) ────────── workspace, team, session_name, worktree_name, cancelled, was_auto_detected = ( @@ -624,6 +581,7 @@ def start( standalone_override=standalone, no_interactive=non_interactive, dry_run=dry_run, + session_service=session_service, ) ) if workspace is None: @@ -721,35 +679,12 @@ def start( console=console, spinner=Spinners.NETWORK, ): - start_plan = prepare_start_session(start_request, 
dependencies=start_dependencies) + start_plan = prepare_launch_plan(start_request, dependencies=start_dependencies) else: - start_plan = prepare_start_session(start_request, dependencies=start_dependencies) + start_plan = prepare_launch_plan(start_request, dependencies=start_dependencies) - if start_plan.sync_error_message: - panel = create_warning_panel( - "Marketplace Sync Failed", - start_plan.sync_error_message, - "Team plugins may not be available. Use --dry-run to diagnose.", - ) - print_with_layout(console, panel, constrain=True) - elif start_plan.sync_result: - if start_plan.sync_result.warnings: - console.print() - for warning in start_plan.sync_result.warnings: - print_with_layout(console, f"[yellow]{warning}[/yellow]") - console.print() - if start_plan.sync_result.plugins_enabled: - print_with_layout( - console, - f"[green]{Indicators.get('PASS')} Enabled " - f"{len(start_plan.sync_result.plugins_enabled)} team plugin(s)[/green]", - ) - if start_plan.sync_result.marketplaces_materialized: - print_with_layout( - console, - f"[green]{Indicators.get('PASS')} Materialized " - f"{len(start_plan.sync_result.marketplaces_materialized)} marketplace(s)[/green]", - ) + output_view_model = build_sync_output_view_model(start_plan) + render_launch_output(output_view_model, console=console, json_mode=(json_output or pretty)) # ── Step 6.55: Apply personal profile (local overlay) ───────────────────── personal_profile_id = None @@ -759,6 +694,7 @@ def start( workspace_path, json_mode=(json_output or pretty), non_interactive=non_interactive, + profile_service=adapters.personal_profile_service, ) # ── Step 6.6: Active stack summary ─────────────────────────────────────── @@ -767,7 +703,9 @@ def start( if personal_profile_id and not personal_applied: personal_label = "skipped" workspace_label = ( - "overrides" if personal_profiles.workspace_has_overrides(workspace_path) else "none" + "overrides" + if 
adapters.personal_profile_service.workspace_has_overrides(workspace_path) + else "none" ) print_with_layout( console, @@ -819,7 +757,7 @@ def start( if pretty: set_pretty_mode(True) try: - envelope = build_envelope(Kind.START_DRY_RUN, data=dry_run_data) + envelope = build_start_dry_run_envelope(dry_run_data) print_json(envelope) finally: if pretty: @@ -845,7 +783,7 @@ def start( branch=current_branch, is_resume=False, ) - start_session(start_plan, dependencies=start_dependencies) + finalize_launch(start_plan, dependencies=start_dependencies) # ───────────────────────────────────────────────────────────────────────────── @@ -854,12 +792,13 @@ def start( def interactive_start( - cfg: dict[str, Any], + cfg: UserConfig, *, skip_quick_resume: bool = False, allow_back: bool = False, standalone_override: bool = False, team_override: str | None = None, + git_client: GitClient | None = None, ) -> tuple[str | _BackSentinel | None, str | None, str | None, str | None]: """Guide user through interactive session setup. @@ -897,6 +836,7 @@ def interactive_start( config. Used when --standalone CLI flag is passed. team_override: If provided, use this team for filtering instead of selected_profile. Set by --team CLI flag. + git_client: Optional git client for branch detection in Quick Resume. Returns: Tuple of (workspace, team, session_name, worktree_name). 
@@ -933,35 +873,207 @@ def interactive_start( org_config = config.load_cached_org_config() available_teams = teams.list_teams(org_config) - # Track if user dismissed global Quick Resume (to skip workspace-scoped QR) + if git_client is None: + adapters = get_default_adapters() + git_client = adapters.git_client + + try: + current_branch = git_client.get_current_branch(Path.cwd()) + except Exception: + current_branch = None + + has_active_team = team_override is not None or selected_profile is not None + wizard_config = StartWizardConfig( + quick_resume_enabled=not skip_quick_resume, + team_selection_required=not standalone_mode and not has_active_team, + allow_back=allow_back, + ) + state = initialize_start_wizard(wizard_config) + if team_override: + state = StartWizardState( + step=state.step, + context=StartWizardContext(team=team_override), + config=state.config, + ) + user_dismissed_quick_resume = False + show_all_teams = False + workspace_base = cfg.get("workspace_base", "~/projects") - # Step 0: Global Quick Resume - # Skip when: - # - entering from dashboard empty state (skip_quick_resume=True) - # - org mode with no active team (force team selection first) - # User can press 't' to switch teams (raises TeamSwitchRequested → skip to Step 1) - # - # In org mode without an effective team, skip Quick Resume entirely. - # This prevents showing cross-team sessions and forces user to pick a team first. 
- should_skip_quick_resume = skip_quick_resume - if not standalone_mode and not effective_team and available_teams: - # Org mode with no active team - skip to team picker - should_skip_quick_resume = True - console.print("[dim]Tip: Select a team first to see team-specific sessions[/dim]") - console.print() + def _prompt_workspace_quick_resume( + workspace: str, *, team: str | None + ) -> StartWizardAnswer | None: + if user_dismissed_quick_resume: + return None - if not should_skip_quick_resume: - # Track whether showing all teams (toggled by 'a' key) - show_all_teams = False + normalized_workspace = normalize_path(workspace) + workspace_contexts: list[WorkContext] = [] + team_filter = None if standalone_mode else team if team else "all" + for ctx in load_recent_contexts(limit=30, team_filter=team_filter): + if standalone_mode and ctx.team is not None: + continue + if ctx.worktree_path == normalized_workspace: + workspace_contexts.append(ctx) + continue + if ctx.repo_root == normalized_workspace: + workspace_contexts.append(ctx) + continue + try: + if normalized_workspace.is_relative_to(ctx.worktree_path): + workspace_contexts.append(ctx) + continue + if normalized_workspace.is_relative_to(ctx.repo_root): + workspace_contexts.append(ctx) + except ValueError: + pass + + if not workspace_contexts: + return None - # Quick Resume loop: allows toggling between filtered and all-teams view + console.print() + workspace_show_all_teams = False while True: - # Filter by effective_team unless user toggled to show all + displayed_contexts = workspace_contexts + if workspace_show_all_teams: + displayed_contexts = [] + for ctx in load_recent_contexts(limit=30, team_filter="all"): + if ctx.worktree_path == normalized_workspace: + displayed_contexts.append(ctx) + continue + if ctx.repo_root == normalized_workspace: + displayed_contexts.append(ctx) + continue + try: + if normalized_workspace.is_relative_to(ctx.worktree_path): + displayed_contexts.append(ctx) + continue + if 
normalized_workspace.is_relative_to(ctx.repo_root): + displayed_contexts.append(ctx) + except ValueError: + pass + + qr_subtitle = "Existing sessions found for this workspace" + if workspace_show_all_teams: + qr_subtitle = "All teams for this workspace — resuming uses that team's plugins" + + quick_resume_view = QuickResumeViewModel( + title=f"Resume session in {Path(workspace).name}?", + subtitle=qr_subtitle, + context_label="All teams" + if workspace_show_all_teams + else f"Team: {team or active_team_label}", + standalone=standalone_mode, + effective_team=team or effective_team, + contexts=displayed_contexts, + current_branch=current_branch, + ) + prompt = build_quick_resume_prompt(view_model=quick_resume_view) + answer = render_start_wizard_prompt( + prompt, + console=console, + allow_back=True, + standalone=standalone_mode, + context_label=quick_resume_view.context_label, + current_branch=current_branch, + effective_team=team or effective_team, + ) + + if answer.kind is StartWizardAnswerKind.CANCELLED: + return answer + if answer.kind is StartWizardAnswerKind.BACK: + return answer + if answer.value is StartWizardAction.SWITCH_TEAM: + # Signal to caller that team switch was requested + return answer + + if answer.value is StartWizardAction.NEW_SESSION: + console.print() + return answer + + if answer.value is StartWizardAction.TOGGLE_ALL_TEAMS: + if standalone_mode: + console.print("[dim]All teams view is unavailable in standalone mode[/dim]") + console.print() + continue + workspace_show_all_teams = not workspace_show_all_teams + continue + + selected_context = cast(WorkContext, answer.value) + current_team = team or effective_team + if current_team and selected_context.team and selected_context.team != current_team: + console.print() + prompt = build_cross_team_resume_prompt(selected_context.team) + confirm_answer = render_start_wizard_prompt(prompt, console=console) + if not bool(confirm_answer.value): + continue + return answer + + def 
_resolve_workspace_resume( + state: StartWizardState, + workspace: str, + *, + workspace_source: WorkspaceSource, + ) -> ( + StartWizardState + | tuple[str | _BackSentinel | None, str | None, str | None, str | None] + | None + ): + nonlocal show_all_teams + + resume_answer = _prompt_workspace_quick_resume(workspace, team=state.context.team) + + if resume_answer is None: + return set_workspace( + state, + workspace, + workspace_source, + standalone_mode=standalone_mode, + team_override=team_override, + effective_team=effective_team, + ) + + if resume_answer.kind is StartWizardAnswerKind.CANCELLED: + return (None, None, None, None) + if resume_answer.kind is StartWizardAnswerKind.BACK: + return None + + if resume_answer.value is StartWizardAction.SWITCH_TEAM: + show_all_teams = False + reset_state = reset_for_team_switch(state) + return set_team_context(reset_state, team_override) + + if resume_answer.value is StartWizardAction.NEW_SESSION: + return set_workspace( + state, + workspace, + workspace_source, + standalone_mode=standalone_mode, + team_override=team_override, + effective_team=effective_team, + ) + + selected_context = cast(WorkContext, resume_answer.value) + return ( + str(selected_context.worktree_path), + selected_context.team, + selected_context.last_session_id, + None, + ) + + while state.step not in { + StartWizardStep.COMPLETE, + StartWizardStep.CANCELLED, + StartWizardStep.BACK, + }: + if state.step is StartWizardStep.QUICK_RESUME: + if not standalone_mode and not effective_team and available_teams: + console.print("[dim]Tip: Select a team first to see team-specific sessions[/dim]") + console.print() + state = apply_start_wizard_event(state, QuickResumeDismissed()) + continue + team_filter = "all" if show_all_teams else effective_team recent_contexts = load_recent_contexts(limit=10, team_filter=team_filter) - - # Update header based on view mode and build helpful subtitle qr_subtitle: str | None = None if show_all_teams: qr_context_label = "All 
teams" @@ -986,461 +1098,413 @@ def interactive_start( else: qr_subtitle = "No sessions yet — start fresh" - try: - result, selected_context = pick_context_quick_resume( - recent_contexts, - title=qr_title, - subtitle=qr_subtitle, - standalone=standalone_mode, - context_label=qr_context_label, - effective_team=effective_team, + quick_resume_view = QuickResumeViewModel( + title=qr_title, + subtitle=qr_subtitle, + context_label=qr_context_label, + standalone=standalone_mode, + effective_team=effective_team, + contexts=recent_contexts, + current_branch=current_branch, + ) + prompt = build_quick_resume_prompt(view_model=quick_resume_view) + answer = render_start_wizard_prompt( + prompt, + console=console, + allow_back=allow_back, + standalone=standalone_mode, + context_label=qr_context_label, + current_branch=current_branch, + effective_team=effective_team, + ) + + if answer.kind is StartWizardAnswerKind.CANCELLED: + return (None, None, None, None) + if answer.kind is StartWizardAnswerKind.BACK: + if allow_back: + return (BACK, None, None, None) + return (None, None, None, None) + + if answer.value is StartWizardAction.SWITCH_TEAM: + show_all_teams = False + state = apply_start_wizard_event(state, QuickResumeDismissed()) + # User explicitly requested team switch - go to TEAM_SELECTION + # regardless of team_selection_required config + state = StartWizardState( + step=StartWizardStep.TEAM_SELECTION, + context=StartWizardContext(team=None), # Clear for fresh selection + config=state.config, ) + continue - match result: - case QuickResumeResult.SELECTED: - # User pressed Enter on a context - resume it - if selected_context is not None: - # Cross-team resume requires confirmation - if ( - effective_team - and selected_context.team - and selected_context.team != effective_team - ): - console.print() - if not confirm_with_layout( - console, - f"[yellow]Resume session from team '{selected_context.team}'?[/yellow]\n" - f"[dim]This will use {selected_context.team} plugins for 
this session.[/dim]", - default=False, - ): - continue # Back to QR picker loop - return ( - str(selected_context.worktree_path), - selected_context.team, - selected_context.last_session_id, - None, # worktree_name - not creating new worktree - ) - - case QuickResumeResult.BACK: - # User pressed Esc - go back if we can (Dashboard context) - if allow_back: - return (BACK, None, None, None) - # CLI context: no previous screen, treat as cancel - return (None, None, None, None) - - case QuickResumeResult.NEW_SESSION: - # User pressed 'n' or selected "New Session" entry - user_dismissed_quick_resume = True - console.print() - break # Exit QR loop, continue to wizard - - case QuickResumeResult.TOGGLE_ALL_TEAMS: - # User pressed 'a' - toggle all-teams view - if standalone_mode: - console.print( - "[dim]All teams view is unavailable in standalone mode[/dim]" - ) - console.print() - continue - show_all_teams = not show_all_teams - continue # Re-render with new filter + if answer.value is StartWizardAction.NEW_SESSION: + console.print() + state = apply_start_wizard_event(state, QuickResumeDismissed()) + continue - case QuickResumeResult.CANCELLED: - # User pressed q - cancel entire wizard - return (None, None, None, None) + if answer.value is StartWizardAction.TOGGLE_ALL_TEAMS: + if standalone_mode: + console.print("[dim]All teams view is unavailable in standalone mode[/dim]") + console.print() + continue + show_all_teams = not show_all_teams + continue - except TeamSwitchRequested: - # User pressed 't' - skip to team selection (Step 1) - # Reset Quick Resume dismissal so new team's contexts are shown - user_dismissed_quick_resume = False - show_all_teams = False + selected_context = cast(WorkContext, answer.value) + if effective_team and selected_context.team and selected_context.team != effective_team: console.print() - break # Exit QR loop, continue to team selection + prompt = build_cross_team_resume_prompt(selected_context.team) + confirm_answer = 
render_start_wizard_prompt(prompt, console=console) + if not bool(confirm_answer.value): + continue + return ( + str(selected_context.worktree_path), + selected_context.team, + selected_context.last_session_id, + None, + ) - # ───────────────────────────────────────────────────────────────────────── - # MEGA-LOOP: Wraps Steps 1-2.5 to handle 't' key (TeamSwitchRequested) - # When user presses 't' anywhere, we restart from Step 1 (team selection) - # ───────────────────────────────────────────────────────────────────────── - while True: - # Step 1: Select team (mode-aware handling) - team: str | None = None - - if standalone_mode: - # P0.1: Standalone mode - skip team picker entirely - # Solo devs don't need team selection friction - # Only print banner if detected from config (CLI --standalone already printed in start()) - if not standalone_override: - console.print("[dim]Running in standalone mode (no organization config)[/dim]") - console.print() - elif not available_teams: - # P0.2: Org mode with no teams configured - exit with clear error - # Get org URL for context in error message - user_cfg = config.load_user_config() - org_source = user_cfg.get("organization_source", {}) - org_url = org_source.get("url", "unknown") + if state.step is StartWizardStep.TEAM_SELECTION: + if standalone_mode: + if not standalone_override: + console.print("[dim]Running in standalone mode (no organization config)[/dim]") + console.print() + state = apply_start_wizard_event(state, TeamSelected(team=None)) + continue - console.print() - console.print( - create_warning_panel( - "No Teams Configured", - f"Organization config from: {org_url}\n" - "No team profiles are defined in this organization.", - "Contact your admin to add profiles, or use: scc start --standalone", + if not available_teams: + user_cfg = config.load_user_config() + org_source = user_cfg.get("organization_source", {}) + org_url = org_source.get("url", "unknown") + console.print() + console.print( + 
create_warning_panel( + "No Teams Configured", + f"Organization config from: {org_url}\n" + "No team profiles are defined in this organization.", + "Contact your admin to add profiles, or use: scc start --standalone", + ) ) - ) - console.print() - raise typer.Exit(EXIT_CONFIG) - elif team_override: - # --team flag provided - use it directly, skip team picker - team = team_override - console.print(f"[dim]Using team from --team flag: {team}[/dim]") - console.print() - else: - # Normal flow: org mode with teams available - selected = pick_team( - available_teams, - current_team=str(selected_profile) if selected_profile else None, + console.print() + raise typer.Exit(EXIT_CONFIG) + + team_options = [ + TeamOption( + name=option.get("name", ""), + description=option.get("description", ""), + credential_status=option.get("credential_status"), + ) + for option in available_teams + ] + team_view = TeamSelectionViewModel( title="Select Team", + subtitle=None, + current_team=str(selected_profile) if selected_profile else None, + options=team_options, ) - if selected is None: + prompt = build_team_selection_prompt(view_model=team_view) + answer = render_start_wizard_prompt( + prompt, + console=console, + available_teams=available_teams, + ) + if answer.kind is StartWizardAnswerKind.CANCELLED: return (None, None, None, None) + if answer.value is StartWizardAction.SWITCH_TEAM: + state = apply_start_wizard_event(state, BackRequested()) + continue + + selected = cast(dict[str, Any], answer.value) team = selected.get("name") if team and team != selected_profile: config.set_selected_profile(team) selected_profile = team effective_team = team + state = apply_start_wizard_event(state, TeamSelected(team=team)) + continue - # Step 2: Select workspace source (with back navigation support) - workspace: str | None = None - team_context_label = active_team_context - if team: - team_context_label = f"Team: {team}" - - # Check if team has repositories configured (must be inside mega-loop 
since team can change) - team_config = cfg.get("profiles", {}).get(team, {}) if team else {} - team_repos: list[dict[str, Any]] = team_config.get("repositories", []) - has_team_repos = bool(team_repos) - - try: - # Outer loop: allows Step 2.5 to go BACK to Step 2 (workspace picker) - while True: - # Step 2: Workspace selection loop - while workspace is None: - # Top-level picker: supports three-state contract - source = pick_workspace_source( - has_team_repos=has_team_repos, - team=team, - standalone=standalone_mode, - allow_back=allow_back or (team is not None), - context_label=team_context_label, - ) - - # Handle three-state return contract - if source is BACK: - if team is not None: - # Esc in org mode: go back to Step 1 (team selection) - raise TeamSwitchRequested() # Will be caught by mega-loop - elif allow_back: - # Esc in standalone mode with allow_back: return to dashboard - return (BACK, None, None, None) - else: - # Esc in standalone CLI mode: cancel wizard - return (None, None, None, None) - - if source is None: - # q pressed: quit entirely - return (None, None, None, None) - - if source == WorkspaceSource.CURRENT_DIR: - from ...services.workspace import resolve_launch_context - - # Detect workspace root from CWD (handles subdirs + worktrees) - resolver_result = resolve_launch_context(Path.cwd(), workspace_arg=None) - if resolver_result is not None: - workspace = str(resolver_result.workspace_root) - else: - # Fall back to CWD if no workspace root detected - workspace = str(Path.cwd()) - - elif source == WorkspaceSource.RECENT: - recent = sessions.list_recent(10) - picker_result = pick_recent_workspace( - recent, - standalone=standalone_mode, - context_label=team_context_label, - ) - if picker_result is None: - return (None, None, None, None) # User pressed q - quit wizard - if picker_result is BACK: - continue # User pressed Esc - go back to source picker - workspace = cast(str, picker_result) - - elif source == WorkspaceSource.TEAM_REPOS: - 
workspace_base = cfg.get("workspace_base", "~/projects") - picker_result = pick_team_repo( - team_repos, - workspace_base, - standalone=standalone_mode, - context_label=team_context_label, - ) - if picker_result is None: - return (None, None, None, None) # User pressed q - quit wizard - if picker_result is BACK: - continue # User pressed Esc - go back to source picker - workspace = cast(str, picker_result) - - elif source == WorkspaceSource.CUSTOM: - workspace = prompt_custom_workspace(console) - # Empty input means go back - if workspace is None: - continue - - elif source == WorkspaceSource.CLONE: - repo_url = prompt_repo_url(console) - if repo_url: - workspace = clone_repo( - repo_url, - workspace_base, - ) - - # Empty URL means go back - if workspace is None: - continue - - # ───────────────────────────────────────────────────────────────── - # Step 2.5: Workspace-scoped Quick Resume - # After selecting a workspace, check if existing contexts exist - # and offer to resume one instead of starting fresh - # ───────────────────────────────────────────────────────────────── - normalized_workspace = normalize_path(workspace) - - # Smart filter: Match contexts related to this workspace AND team - workspace_contexts = [] - for ctx in load_recent_contexts(limit=30): - # Standalone: only show standalone contexts - if standalone_mode and ctx.team is not None: - continue - # Org mode: filter by team (prevents cross-team resume confusion) - if team is not None and ctx.team != team: - continue - - # Case 1: Exact worktree match (fastest check) - if ctx.worktree_path == normalized_workspace: - workspace_contexts.append(ctx) - continue - - # Case 2: User picked repo root - show all worktree contexts for this repo - if ctx.repo_root == normalized_workspace: - workspace_contexts.append(ctx) - continue - - # Case 3: User picked a subdir - match if inside a known worktree/repo - try: - if normalized_workspace.is_relative_to(ctx.worktree_path): - workspace_contexts.append(ctx) - 
continue - if normalized_workspace.is_relative_to(ctx.repo_root): - workspace_contexts.append(ctx) - except ValueError: - # is_relative_to raises ValueError if paths are on different drives - pass + if state.step is StartWizardStep.WORKSPACE_SOURCE: + team_context_label = active_team_context + if state.context.team: + team_context_label = f"Team: {state.context.team}" - # Skip workspace-scoped Quick Resume if user already dismissed global Quick Resume - if workspace_contexts and not user_dismissed_quick_resume: - console.print() + team_config = ( + cfg.get("profiles", {}).get(state.context.team, {}) if state.context.team else {} + ) + team_repos = team_config.get("repositories", []) + + # Gather current directory context for UI to build options + # Command layer does I/O via service functions; application layer + # receives data flags; UI layer builds presentation options + cwd = Path.cwd() + cwd_context: CwdContext | None = None + if not is_suspicious_directory(cwd): + cwd_context = CwdContext( + path=str(cwd), + name=cwd.name or str(cwd), + is_git=git.is_git_repo(cwd), + has_project_markers=has_project_markers(cwd), + ) - show_all_teams = False - while True: - # Filter contexts based on toggle state - displayed_contexts = workspace_contexts - if show_all_teams: - # Show all contexts for this workspace (ignore team filter) - # Use same 3-case matching logic as above - displayed_contexts = [] - for ctx in load_recent_contexts(limit=30): - # Case 1: Exact worktree match - if ctx.worktree_path == normalized_workspace: - displayed_contexts.append(ctx) - continue - # Case 2: User picked repo root - if ctx.repo_root == normalized_workspace: - displayed_contexts.append(ctx) - continue - # Case 3: User picked a subdir - try: - if normalized_workspace.is_relative_to(ctx.worktree_path): - displayed_contexts.append(ctx) - continue - if normalized_workspace.is_relative_to(ctx.repo_root): - displayed_contexts.append(ctx) - except ValueError: - pass - - qr_subtitle = "Existing 
sessions found for this workspace" - if show_all_teams: - qr_subtitle = ( - "All teams for this workspace — resuming uses that team's plugins" - ) - - result, selected_context = pick_context_quick_resume( - displayed_contexts, - title=f"Resume session in {Path(workspace).name}?", - subtitle=qr_subtitle, - standalone=standalone_mode, - context_label="All teams" - if show_all_teams - else f"Team: {team or active_team_label}", - effective_team=team or effective_team, - ) - # Note: TeamSwitchRequested bubbles up to mega-loop handler - - match result: - case QuickResumeResult.SELECTED: - # User pressed Enter on a context - resume it - if selected_context is not None: - # Cross-team resume requires confirmation - current_team = team or effective_team - if ( - current_team - and selected_context.team - and selected_context.team != current_team - ): - console.print() - if not confirm_with_layout( - console, - f"[yellow]Resume session from team '{selected_context.team}'?[/yellow]\n" - f"[dim]This will use {selected_context.team} plugins for this session.[/dim]", - default=False, - ): - continue # Back to QR picker loop - return ( - str(selected_context.worktree_path), - selected_context.team, - selected_context.last_session_id, - None, - ) - - case QuickResumeResult.BACK: - # User pressed Esc - go back to workspace picker - workspace = None - break - - case QuickResumeResult.NEW_SESSION: - # User pressed 'n' or selected "New Session" entry - console.print() - break # Exit workspace QR, continue to wizard - - case QuickResumeResult.TOGGLE_ALL_TEAMS: - # User pressed 'a' - toggle all-teams view - if standalone_mode: - console.print( - "[dim]All teams view is unavailable in standalone mode[/dim]" - ) - console.print() - continue - show_all_teams = not show_all_teams - continue # Re-render with new filter - - case QuickResumeResult.CANCELLED: - # User pressed q - cancel entire wizard - return (None, None, None, None) - - # Check if we need to go back to workspace picker - if 
workspace is None: - continue # Continue outer loop to re-enter Step 2 - - # No contexts or user dismissed global Quick Resume - proceed to Step 3 - break # Exit outer loop (Step 2 + 2.5) - - except TeamSwitchRequested: - # User pressed 't' somewhere - restart at Step 1 (team selection) - # Reset Quick Resume dismissal so new team's contexts are shown - user_dismissed_quick_resume = False - console.print() - continue # Continue mega-loop + source_view = WorkspaceSourceViewModel( + title="Where is your project?", + subtitle="Pick a project source (press 't' to switch team)", + context_label=team_context_label, + standalone=standalone_mode, + allow_back=allow_back or (state.context.team is not None), + has_team_repos=bool(team_repos), + cwd_context=cwd_context, + options=[], + ) + prompt = build_workspace_source_prompt(view_model=source_view) + answer = render_start_wizard_prompt( + prompt, + console=console, + team_repos=team_repos, + allow_back=allow_back or (state.context.team is not None), + standalone=standalone_mode, + context_label=team_context_label, + effective_team=state.context.team or effective_team, + ) - # Successfully got a workspace - exit mega-loop - break + if answer.kind is StartWizardAnswerKind.CANCELLED: + return (None, None, None, None) + if answer.value is StartWizardAction.SWITCH_TEAM: + state = reset_for_team_switch(state) + state = set_team_context(state, team_override) + continue + + if answer.kind is StartWizardAnswerKind.BACK: + if state.context.team is not None: + state = apply_start_wizard_event(state, BackRequested()) + elif allow_back: + return (BACK, None, None, None) + else: + return (None, None, None, None) + continue - # Step 3: Worktree option - worktree_name = None - console.print() - if confirm_with_layout( - console, - "[cyan]Create a worktree for isolated feature development?[/cyan]", - default=False, - ): - workspace_path = Path(workspace) - can_create_worktree = True + source = cast(WorkspaceSource, answer.value) + if 
source is WorkspaceSource.CURRENT_DIR: + from ...application.workspace import ResolveWorkspaceRequest, resolve_workspace - # Check if directory is a git repository - if not git.is_git_repo(workspace_path): - console.print() - if confirm_with_layout( - console, - "[yellow]⚠️ Not a git repository. Initialize git?[/yellow]", - default=False, - ): - if git.init_repo(workspace_path): - console.print( - f" [green]{Indicators.get('PASS')}[/green] Initialized git repository" - ) - else: - err_console.print( - f" [red]{Indicators.get('FAIL')}[/red] Failed to initialize git" - ) - can_create_worktree = False - else: - # User declined git init - can't create worktree - console.print( - f" [dim]{Indicators.get('INFO')}[/dim] " - "Skipping worktree (requires git repository)" + context = resolve_workspace( + ResolveWorkspaceRequest(cwd=Path.cwd(), workspace_arg=None) ) - can_create_worktree = False - - # Check if repository has commits (worktree requires at least one) - if can_create_worktree and git.is_git_repo(workspace_path): - if not git.has_commits(workspace_path): - console.print() - if confirm_with_layout( - console, - "[yellow]⚠️ Worktree requires initial commit. 
" - "Create empty initial commit?[/yellow]", - default=True, - ): - success, error_msg = git.create_empty_initial_commit(workspace_path) - if success: - console.print( - f" [green]{Indicators.get('PASS')}[/green] Created initial commit" - ) - else: - err_console.print(f" [red]{Indicators.get('FAIL')}[/red] {error_msg}") - can_create_worktree = False + if context is not None: + workspace = str(context.workspace_root) else: - # User declined empty commit - can't create worktree - console.print( - f" [dim]{Indicators.get('INFO')}[/dim] " - "Skipping worktree (requires initial commit)" + workspace = str(Path.cwd()) + resume_state = _resolve_workspace_resume( + state, + workspace, + workspace_source=WorkspaceSource.CURRENT_DIR, + ) + if resume_state is None: + continue + if isinstance(resume_state, tuple): + return resume_state + state = resume_state + continue + + state = apply_start_wizard_event(state, WorkspaceSourceChosen(source=source)) + continue + + if state.step is StartWizardStep.WORKSPACE_PICKER: + team_context_label = active_team_context + if state.context.team: + team_context_label = f"Team: {state.context.team}" + + team_config = ( + cfg.get("profiles", {}).get(state.context.team, {}) if state.context.team else {} + ) + team_repos = team_config.get("repositories", []) + workspace_source = state.context.workspace_source + + if workspace_source is WorkspaceSource.RECENT: + recent = sessions.list_recent(limit=10, include_all=True) + summaries = [ + WorkspaceSummary( + label=_normalize_path(session.workspace), + description=session.last_used or "", + workspace=session.workspace, ) - can_create_worktree = False - - # Only ask for worktree name if we have a valid git repo with commits - if can_create_worktree: - worktree_name = prompt_with_layout(console, "[cyan]Feature/worktree name[/cyan]") - - # Step 4: Session name - console.print() - session_name = ( - prompt_with_layout( - console, - "[cyan]Session name[/cyan] [dim](optional, for easy resume)[/dim]", - 
default="", - ) - or None + for session in recent + ] + recent_view_model = WorkspacePickerViewModel( + title="Recent Workspaces", + subtitle=None, + context_label=team_context_label, + standalone=standalone_mode, + allow_back=True, + options=summaries, + ) + prompt = build_workspace_picker_prompt(view_model=recent_view_model) + answer = render_start_wizard_prompt( + prompt, + console=console, + recent_sessions=recent, + allow_back=True, + standalone=standalone_mode, + context_label=team_context_label, + ) + if answer.kind is StartWizardAnswerKind.CANCELLED: + return (None, None, None, None) + if answer.value is StartWizardAction.SWITCH_TEAM: + state = reset_for_team_switch(state) + continue + if answer.kind is StartWizardAnswerKind.BACK: + state = apply_start_wizard_event(state, BackRequested()) + continue + workspace = cast(str, answer.value) + resume_state = _resolve_workspace_resume( + state, + workspace, + workspace_source=WorkspaceSource.RECENT, + ) + if resume_state is None: + continue + if isinstance(resume_state, tuple): + return resume_state + state = resume_state + continue + + if workspace_source is WorkspaceSource.TEAM_REPOS: + repo_view_model = TeamRepoPickerViewModel( + title="Team Repositories", + subtitle=None, + context_label=team_context_label, + standalone=standalone_mode, + allow_back=True, + workspace_base=workspace_base, + options=[], + ) + prompt = build_team_repo_prompt(view_model=repo_view_model) + answer = render_start_wizard_prompt( + prompt, + console=console, + team_repos=team_repos, + workspace_base=workspace_base, + allow_back=True, + standalone=standalone_mode, + context_label=team_context_label, + ) + if answer.kind is StartWizardAnswerKind.CANCELLED: + return (None, None, None, None) + if answer.value is StartWizardAction.SWITCH_TEAM: + state = reset_for_team_switch(state) + continue + if answer.kind is StartWizardAnswerKind.BACK: + state = apply_start_wizard_event(state, BackRequested()) + continue + workspace = cast(str, 
answer.value) + resume_state = _resolve_workspace_resume( + state, + workspace, + workspace_source=WorkspaceSource.TEAM_REPOS, + ) + if resume_state is None: + continue + if isinstance(resume_state, tuple): + return resume_state + state = resume_state + continue + + if workspace_source is WorkspaceSource.CUSTOM: + prompt = build_custom_workspace_prompt() + answer = render_start_wizard_prompt(prompt, console=console) + if answer.kind is StartWizardAnswerKind.BACK: + state = apply_start_wizard_event(state, BackRequested()) + continue + workspace = cast(str, answer.value) + resume_state = _resolve_workspace_resume( + state, + workspace, + workspace_source=WorkspaceSource.CUSTOM, + ) + if resume_state is None: + continue + if isinstance(resume_state, tuple): + return resume_state + state = resume_state + continue + + if workspace_source is WorkspaceSource.CLONE: + prompt = build_clone_repo_prompt() + answer = render_start_wizard_prompt( + prompt, + console=console, + workspace_base=workspace_base, + ) + if answer.kind is StartWizardAnswerKind.BACK: + state = apply_start_wizard_event(state, BackRequested()) + continue + workspace = cast(str, answer.value) + resume_state = _resolve_workspace_resume( + state, + workspace, + workspace_source=WorkspaceSource.CLONE, + ) + if resume_state is None: + continue + if isinstance(resume_state, tuple): + return resume_state + state = resume_state + continue + + if state.step is StartWizardStep.WORKTREE_DECISION: + prompt = build_confirm_worktree_prompt() + answer = render_start_wizard_prompt( + prompt, + console=console, + allow_back=True, + ) + if answer.kind is StartWizardAnswerKind.CANCELLED: + return (None, None, None, None) + if answer.kind is StartWizardAnswerKind.BACK: + state = apply_start_wizard_event(state, BackRequested()) + continue + + wants_worktree = cast(bool, answer.value) + worktree_name: str | None = None + if wants_worktree: + prompt = build_worktree_name_prompt() + answer = render_start_wizard_prompt(prompt, 
console=console) + if answer.kind is StartWizardAnswerKind.BACK: + state = apply_start_wizard_event(state, BackRequested()) + continue + worktree_name = cast(str, answer.value) + state = apply_start_wizard_event(state, WorktreeSelected(worktree_name=worktree_name)) + continue + + if state.step is StartWizardStep.SESSION_NAME: + prompt = build_session_name_prompt() + answer = render_start_wizard_prompt(prompt, console=console) + if answer.kind is StartWizardAnswerKind.CANCELLED: + return (None, None, None, None) + if answer.kind is StartWizardAnswerKind.BACK: + state = apply_start_wizard_event(state, BackRequested()) + continue + session_name_value = cast(str | None, answer.value) + state = apply_start_wizard_event( + state, + SessionNameEntered(session_name=session_name_value), + ) + continue + + if state.step is StartWizardStep.BACK: + return (BACK, None, None, None) + if state.step is StartWizardStep.CANCELLED: + return (None, None, None, None) + + if state.context.workspace is None: + return (None, state.context.team, state.context.session_name, state.context.worktree_name) + return ( + cast(str, state.context.workspace), + state.context.team, + state.context.session_name, + state.context.worktree_name, ) - return workspace, team, session_name, worktree_name - # ───────────────────────────────────────────────────────────────────────────── # Wizard entrypoint (dashboard + CLI) @@ -1479,13 +1543,17 @@ def run_start_wizard_flow( return None # Error during setup cfg = config.load_user_config() + adapters = get_default_adapters() # Step 2: Run interactive wizard # Note: standalone_override=False (default) is correct here - dashboard path # doesn't have CLI flags, so we rely on config.is_standalone_mode() inside # interactive_start() to detect standalone mode from user's config file. 
workspace, team, session_name, worktree_name = interactive_start( - cfg, skip_quick_resume=skip_quick_resume, allow_back=allow_back + cfg, + skip_quick_resume=skip_quick_resume, + allow_back=allow_back, + git_client=adapters.git_client, ) # Three-state return handling: @@ -1499,7 +1567,6 @@ def run_start_wizard_flow( workspace_value = cast(str, workspace) try: - adapters = get_default_adapters() with Status("[cyan]Checking Docker...[/cyan]", console=console, spinner=Spinners.DOCKER): adapters.sandbox_runtime.ensure_available() workspace_path = validate_and_resolve_workspace(workspace_value) @@ -1543,35 +1610,12 @@ def run_start_wizard_flow( console=console, spinner=Spinners.NETWORK, ): - start_plan = prepare_start_session(start_request, dependencies=start_dependencies) + start_plan = prepare_launch_plan(start_request, dependencies=start_dependencies) else: - start_plan = prepare_start_session(start_request, dependencies=start_dependencies) + start_plan = prepare_launch_plan(start_request, dependencies=start_dependencies) - if start_plan.sync_error_message: - panel = create_warning_panel( - "Marketplace Sync Failed", - start_plan.sync_error_message, - "Team plugins may not be available. 
Use --dry-run to diagnose.", - ) - print_with_layout(console, panel, constrain=True) - elif start_plan.sync_result: - if start_plan.sync_result.warnings: - console.print() - for warning in start_plan.sync_result.warnings: - print_with_layout(console, f"[yellow]{warning}[/yellow]") - console.print() - if start_plan.sync_result.plugins_enabled: - print_with_layout( - console, - f"[green]{Indicators.get('PASS')} Enabled " - f"{len(start_plan.sync_result.plugins_enabled)} team plugin(s)[/green]", - ) - if start_plan.sync_result.marketplaces_materialized: - print_with_layout( - console, - f"[green]{Indicators.get('PASS')} Materialized " - f"{len(start_plan.sync_result.marketplaces_materialized)} marketplace(s)[/green]", - ) + output_view_model = build_sync_output_view_model(start_plan) + render_launch_output(output_view_model, console=console, json_mode=False) resolver_result = start_plan.resolver_result if resolver_result.is_mount_expanded: @@ -1598,7 +1642,7 @@ def run_start_wizard_flow( branch=current_branch, is_resume=False, ) - start_session(start_plan, dependencies=start_dependencies) + finalize_launch(start_plan, dependencies=start_dependencies) return True except Exception as e: err_console.print(f"[red]Error launching sandbox: {e}[/red]") diff --git a/src/scc_cli/commands/launch/flow_types.py b/src/scc_cli/commands/launch/flow_types.py new file mode 100644 index 0000000..da29ad9 --- /dev/null +++ b/src/scc_cli/commands/launch/flow_types.py @@ -0,0 +1,114 @@ +"""Shared typing helpers for launch flow modules.""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +from typing import Any, TypeAlias + +from ...application.launch import ( + StartWizardContext, + StartWizardState, + StartWizardStep, + WorkspaceSource, +) +from ...contexts import WorkContext + +UserConfig: TypeAlias = dict[str, Any] + + +@dataclass +class WizardRenderContext: + """Shared context for wizard rendering helpers. 
+ + Holds configuration that would otherwise be captured as closure variables + in nested functions. Passing this explicitly makes the helpers testable + and allows them to live at module level. + """ + + standalone_mode: bool + effective_team: str | None + team_override: str | None + active_team_label: str + active_team_context: str + current_branch: str | None + workspace_base: str + allow_back: bool + available_teams: list[dict[str, Any]] + selected_profile: str | None + + +def set_team_context(state: StartWizardState, team: str | None) -> StartWizardState: + """Return new wizard state with updated team, preserving other context fields.""" + context = StartWizardContext( + team=team, + workspace_source=state.context.workspace_source, + workspace=state.context.workspace, + worktree_name=state.context.worktree_name, + session_name=state.context.session_name, + ) + return StartWizardState(step=state.step, context=context, config=state.config) + + +def set_workspace( + state: StartWizardState, + workspace: str, + workspace_source: WorkspaceSource | None = None, + *, + standalone_mode: bool, + team_override: str | None, + effective_team: str | None, +) -> StartWizardState: + """Return new wizard state with workspace set and step advanced to WORKTREE_DECISION.""" + resolved_team = state.context.team + if resolved_team is None and not standalone_mode: + resolved_team = team_override or effective_team + context = StartWizardContext( + team=resolved_team, + workspace_source=workspace_source or state.context.workspace_source, + workspace=workspace, + worktree_name=state.context.worktree_name, + session_name=state.context.session_name, + ) + return StartWizardState( + step=StartWizardStep.WORKTREE_DECISION, context=context, config=state.config + ) + + +def reset_for_team_switch(state: StartWizardState) -> StartWizardState: + """Reset wizard state for team switch, clearing workspace selections.""" + next_step = ( + StartWizardStep.TEAM_SELECTION + if 
state.config.team_selection_required + else StartWizardStep.WORKSPACE_SOURCE + ) + reset_team = None if state.config.team_selection_required else state.context.team + return StartWizardState( + step=next_step, + context=StartWizardContext(team=reset_team), + config=state.config, + ) + + +def filter_contexts_for_workspace( + workspace: Path, + contexts: list[WorkContext], +) -> list[WorkContext]: + """Filter work contexts that match the given workspace path.""" + result: list[WorkContext] = [] + for ctx in contexts: + if ctx.worktree_path == workspace: + result.append(ctx) + continue + if ctx.repo_root == workspace: + result.append(ctx) + continue + try: + if workspace.is_relative_to(ctx.worktree_path): + result.append(ctx) + continue + if workspace.is_relative_to(ctx.repo_root): + result.append(ctx) + except ValueError: + pass + return result diff --git a/src/scc_cli/commands/launch/team_settings.py b/src/scc_cli/commands/launch/team_settings.py new file mode 100644 index 0000000..7f89bb0 --- /dev/null +++ b/src/scc_cli/commands/launch/team_settings.py @@ -0,0 +1,55 @@ +"""Team configuration checks for launch flows.""" + +from __future__ import annotations + +import typer +from rich.status import Status + +from ... import config, teams +from ...cli_common import console +from ...core.exit_codes import EXIT_CONFIG +from ...panels import create_warning_panel +from ...theme import Spinners +from ...ui.chrome import print_with_layout +from .flow_types import UserConfig + + +def _configure_team_settings(team: str | None, cfg: UserConfig) -> None: + """Validate team profile exists. + + NOTE: Plugin settings are now sourced ONLY from workspace settings.local.json + (via start session preparation). Docker volume injection has been removed + to prevent plugin mixing across teams. + + IMPORTANT: This function must remain cache-only (no network calls). + It's called in offline mode where only cached org config is available. 
+ If you need to add network operations, gate them with an offline check + or move them to start session preparation which is already offline-aware. + + Raises: + typer.Exit: If team profile is not found. + """ + if not team: + return + + with Status( + f"[cyan]Validating {team} profile...[/cyan]", console=console, spinner=Spinners.SETUP + ): + # load_cached_org_config() reads from local cache only - safe for offline mode + org_config = config.load_cached_org_config() + + validation = teams.validate_team_profile(team, org_config) + if not validation["valid"]: + print_with_layout( + console, + create_warning_panel( + "Team Not Found", + f"No team profile named '{team}'.", + "Run 'scc team list' to see available profiles", + ), + constrain=True, + ) + raise typer.Exit(EXIT_CONFIG) + + # NOTE: docker.inject_team_settings() removed - workspace settings.local.json + # is now the single source of truth for plugins (prevents cross-team mixing) diff --git a/src/scc_cli/commands/launch/workspace.py b/src/scc_cli/commands/launch/workspace.py index 8d53e04..2bd4a9a 100644 --- a/src/scc_cli/commands/launch/workspace.py +++ b/src/scc_cli/commands/launch/workspace.py @@ -18,12 +18,14 @@ import typer from rich.status import Status -from ... import config, deps, git -from ... import platform as platform_module +from ... 
import config, git +from ...adapters.local_platform_probe import LocalPlatformProbe +from ...application.workspace import WorkspaceValidationResult, validate_workspace +from ...bootstrap import get_default_adapters from ...cli_common import console from ...confirm import Confirm from ...core.constants import WORKTREE_BRANCH_PREFIX -from ...core.errors import NotAGitRepoError, WorkspaceNotFoundError +from ...core.errors import NotAGitRepoError from ...core.exit_codes import EXIT_CANCELLED from ...core.workspace import ResolverResult from ...output_mode import print_human @@ -121,79 +123,44 @@ def validate_and_resolve_workspace( UsageError: If workspace is suspicious in non-interactive mode without --allow-suspicious-workspace. typer.Exit: If user declines to continue after warnings. """ - from ...core.errors import UsageError - from ...services.workspace.suspicious import get_suspicious_reason, is_suspicious_directory - - if workspace is None: + validation = validate_workspace( + workspace, + allow_suspicious=allow_suspicious, + interactive_allowed=is_interactive_allowed( + json_mode=json_mode, + no_interactive_flag=no_interactive, + ), + platform_probe=LocalPlatformProbe(), + ) + if validation is None: return None - workspace_path = Path(workspace).expanduser().resolve() - - if not workspace_path.exists(): - raise WorkspaceNotFoundError(path=str(workspace_path)) + _render_workspace_validation(validation) + return validation.workspace_path - # Check for suspicious workspace (home, /tmp, system directories) - if is_suspicious_directory(workspace_path): - reason = get_suspicious_reason(workspace_path) or "Suspicious directory" - # If --allow-suspicious-workspace is set, skip confirmation entirely - if allow_suspicious: +def _render_workspace_validation(result: WorkspaceValidationResult) -> None: + for step in result.steps: + if step.warning.emit_stderr: print_human( - f"[yellow]Warning:[/yellow] {reason}", + f"[yellow]Warning:[/yellow] {step.warning.console_message}", 
file=sys.stderr, highlight=False, ) - elif is_interactive_allowed(json_mode=json_mode, no_interactive_flag=no_interactive): - # Interactive mode: warn but allow user to continue + if step.confirm_request: console.print() console.print( create_warning_panel( - "Suspicious Workspace", - reason, - "Consider using a project-specific directory instead.", + step.warning.title, + step.warning.message, + step.warning.suggestion or "", ) ) console.print() - if not Confirm.ask("[cyan]Continue anyway?[/cyan]", default=True): + prompt = step.confirm_request.prompt + if not Confirm.ask(f"[cyan]{prompt}[/cyan]", default=True): console.print("[dim]Cancelled.[/dim]") raise typer.Exit(EXIT_CANCELLED) - else: - # Non-interactive mode without flag: block - raise UsageError( - user_message=f"Refusing to start in suspicious directory: {workspace_path}\n → {reason}", - suggested_action=( - "Either:\n" - f" • Run: scc start --allow-suspicious-workspace {workspace_path}\n" - " • Run: scc start --interactive (to choose a different workspace)\n" - " • Run from a project directory inside a git repository" - ), - ) - - # WSL2 performance warning - if platform_module.is_wsl2(): - is_optimal, warning = platform_module.check_path_performance(workspace_path) - if not is_optimal and warning: - print_human( - "[yellow]Warning:[/yellow] Workspace is on the Windows filesystem." 
- " Performance may be slow.", - file=sys.stderr, - highlight=False, - ) - if is_interactive_allowed(no_interactive_flag=no_interactive): - console.print() - console.print( - create_warning_panel( - "Performance Warning", - "Your workspace is on the Windows filesystem.", - "For better performance, move to ~/projects inside WSL.", - ) - ) - console.print() - if not Confirm.ask("[cyan]Continue anyway?[/cyan]", default=True): - console.print("[dim]Cancelled.[/dim]") - raise typer.Exit(EXIT_CANCELLED) - - return workspace_path def prepare_workspace( @@ -225,11 +192,13 @@ def prepare_workspace( # Install dependencies if requested if install_deps: + adapters = get_default_adapters() + installer = adapters.dependency_installer with Status( "[cyan]Installing dependencies...[/cyan]", console=console, spinner=Spinners.SETUP ): - success = deps.auto_install_dependencies(workspace_path) - if success: + install_result = installer.install(workspace_path) + if install_result.success: console.print(f"[green]{Indicators.get('PASS')} Dependencies installed[/green]") else: console.print("[yellow]⚠ Could not detect package manager or install failed[/yellow]") diff --git a/src/scc_cli/commands/reset.py b/src/scc_cli/commands/reset.py index 373bf88..819172b 100644 --- a/src/scc_cli/commands/reset.py +++ b/src/scc_cli/commands/reset.py @@ -22,6 +22,7 @@ from rich.console import Console from rich.prompt import Confirm, Prompt +from ..application.workspace import ResolveWorkspaceRequest, resolve_workspace from ..cli_common import handle_errors from ..core.exit_codes import ( EXIT_CANCELLED, @@ -42,7 +43,6 @@ preview_operation, run_task, ) -from ..services.git import detect_workspace_root console = Console() @@ -86,8 +86,14 @@ def _normalize_exception_scope(scope: str | None) -> ExceptionScope: def _resolve_repo_root() -> Path: """Resolve repo root for repo-scoped exception tasks.""" - root, _ = detect_workspace_root(Path.cwd()) - return root or Path.cwd() + context = resolve_workspace( + 
ResolveWorkspaceRequest( + cwd=Path.cwd(), + workspace_arg=None, + include_git_dir_fallback=True, + ) + ) + return context.workspace_root if context else Path.cwd() def _build_context( diff --git a/src/scc_cli/commands/support.py b/src/scc_cli/commands/support.py index d18759e..43be07f 100644 --- a/src/scc_cli/commands/support.py +++ b/src/scc_cli/commands/support.py @@ -5,19 +5,21 @@ and path redaction for safe sharing. """ +from datetime import datetime from pathlib import Path import typer +from ..application.support_bundle import ( + SupportBundleDependencies, + SupportBundleRequest, + build_support_bundle_manifest, + create_support_bundle, +) +from ..bootstrap import get_default_adapters from ..cli_common import console, handle_errors -from ..json_output import build_envelope -from ..kinds import Kind from ..output_mode import json_output_mode, print_json, set_pretty_mode -from ..support_bundle import ( - build_bundle_data, - create_bundle, - get_default_bundle_path, -) # noqa: F401 +from ..presentation.json.support_json import build_support_bundle_envelope # ───────────────────────────────────────────────────────────────────────────── # Support App @@ -31,6 +33,12 @@ ) +def _get_default_bundle_path() -> Path: + """Get default path for support bundle.""" + timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") + return Path.cwd() / f"scc-support-bundle-{timestamp}.zip" + + # ───────────────────────────────────────────────────────────────────────────── # Support Bundle Command # ───────────────────────────────────────────────────────────────────────────── @@ -77,22 +85,33 @@ def support_bundle_cmd( set_pretty_mode(True) redact_paths_flag = not no_redact_paths + output_path = Path(output) if output else _get_default_bundle_path() + + # Build dependencies from adapters + adapters = get_default_adapters() + dependencies = SupportBundleDependencies( + filesystem=adapters.filesystem, + clock=adapters.clock, + doctor_runner=adapters.doctor_runner, + 
archive_writer=adapters.archive_writer, + ) + + request = SupportBundleRequest( + output_path=output_path, + redact_paths=redact_paths_flag, + workspace_path=None, + ) if json_output: with json_output_mode(): - bundle_data = build_bundle_data(redact_paths_flag=redact_paths_flag) - envelope = build_envelope(Kind.SUPPORT_BUNDLE, data=bundle_data) + manifest = build_support_bundle_manifest(request, dependencies=dependencies) + envelope = build_support_bundle_envelope(manifest) print_json(envelope) raise typer.Exit(0) # Create the bundle zip file - output_path = Path(output) if output else get_default_bundle_path() - console.print("[cyan]Generating support bundle...[/cyan]") - create_bundle( - output_path=output_path, - redact_paths_flag=redact_paths_flag, - ) + create_support_bundle(request, dependencies=dependencies) console.print() console.print(f"[green]Support bundle created:[/green] {output_path}") diff --git a/src/scc_cli/commands/worktree/_helpers.py b/src/scc_cli/commands/worktree/_helpers.py index ff91f16..79f0f65 100644 --- a/src/scc_cli/commands/worktree/_helpers.py +++ b/src/scc_cli/commands/worktree/_helpers.py @@ -5,33 +5,15 @@ mocks and serve as building blocks for the higher-level command logic. Functions: - build_worktree_list_data: Build worktree list data for JSON output. + build_worktree_list_data: JSON mapping helper for worktree list output. is_container_stopped: Check if a Docker container status indicates stopped. """ from __future__ import annotations -from typing import Any +from ...presentation.json.worktree_json import build_worktree_list_data - -def build_worktree_list_data( - worktrees: list[dict[str, Any]], - workspace: str, -) -> dict[str, Any]: - """Build worktree list data for JSON output. 
- - Args: - worktrees: List of worktree dictionaries from ui.list_worktrees() - workspace: Path to the workspace - - Returns: - Dictionary with worktrees, count, and workspace - """ - return { - "worktrees": worktrees, - "count": len(worktrees), - "workspace": workspace, - } +__all__ = ["build_worktree_list_data", "is_container_stopped"] def is_container_stopped(status: str) -> bool: diff --git a/src/scc_cli/commands/worktree/app.py b/src/scc_cli/commands/worktree/app.py index 4ab38ec..105cc02 100644 --- a/src/scc_cli/commands/worktree/app.py +++ b/src/scc_cli/commands/worktree/app.py @@ -116,13 +116,22 @@ def session_callback( select: bool = typer.Option( False, "--select", "-s", help="Interactive picker to select a session" ), + json_output: bool = typer.Option(False, "--json", help="Output as JSON"), + pretty: bool = typer.Option(False, "--pretty", help="Pretty-print JSON (implies --json)"), ) -> None: """List recent sessions (default). This makes `scc session` behave like `scc session list` for convenience. """ if ctx.invoked_subcommand is None: - session_list_cmd(limit=limit, team=team, all_teams=all_teams, select=select) + session_list_cmd( + limit=limit, + team=team, + all_teams=all_teams, + select=select, + json_output=json_output, + pretty=pretty, + ) # ───────────────────────────────────────────────────────────────────────────── diff --git a/src/scc_cli/commands/worktree/session_commands.py b/src/scc_cli/commands/worktree/session_commands.py index 0c5abe0..c67ae6a 100644 --- a/src/scc_cli/commands/worktree/session_commands.py +++ b/src/scc_cli/commands/worktree/session_commands.py @@ -3,7 +3,8 @@ from __future__ import annotations import re -from typing import Annotated +from datetime import datetime +from typing import Annotated, Any import typer from rich.prompt import Confirm @@ -11,11 +12,26 @@ from ... 
import config, sessions from ...cli_common import console, handle_errors, render_responsive_table from ...core.exit_codes import EXIT_CANCELLED +from ...json_command import json_command +from ...kinds import Kind from ...maintenance import prune_sessions as maintenance_prune_sessions +from ...output_mode import is_json_mode from ...panels import create_warning_panel +from ...presentation.json.sessions_json import build_session_list_data from ...ui.picker import TeamSwitchRequested, pick_session +def _format_last_used(last_used: str | None) -> str: + if not last_used: + return "-" + try: + dt = datetime.fromisoformat(last_used) + except ValueError: + return last_used + return sessions.format_relative_time(dt) + + +@json_command(Kind.SESSION_LIST) @handle_errors def sessions_cmd( limit: int = typer.Option(10, "-n", "--limit", help="Number of sessions to show"), @@ -33,7 +49,9 @@ def sessions_cmd( select: bool = typer.Option( False, "--select", "-s", help="Interactive picker to select a session" ), -) -> None: + json_output: bool = typer.Option(False, "--json", help="Output as JSON"), + pretty: bool = typer.Option(False, "--pretty", help="Pretty-print JSON (implies --json)"), +) -> dict[str, Any]: """List recent Claude Code sessions.""" cfg = config.load_user_config() active_team = cfg.get("selected_profile") @@ -51,14 +69,34 @@ def sessions_cmd( filter_team = active_team else: filter_team = "__all__" - console.print( - "[dim]No active team selected — showing all sessions. " - "Use 'scc team switch' or --team to filter.[/dim]" - ) + if not is_json_mode(): + console.print( + "[dim]No active team selected — showing all sessions. 
" + "Use 'scc team switch' or --team to filter.[/dim]" + ) - recent = sessions.list_recent(limit) - if filter_team != "__all__": - recent = [s for s in recent if s.get("team") == filter_team] + include_all = filter_team == "__all__" + recent = sessions.list_recent( + limit=limit, + team=None if include_all else filter_team, + include_all=include_all, + ) + + session_dicts = [ + { + "name": session.name, + "workspace": session.workspace, + "team": session.team, + "last_used": session.last_used, + "container_name": session.container_name, + "branch": session.branch, + } + for session in recent + ] + data = build_session_list_data(session_dicts, team=None if include_all else filter_team) + + if is_json_mode(): + return data # Interactive picker mode if select and recent: @@ -69,11 +107,11 @@ def sessions_cmd( subtitle=f"{len(recent)} recent sessions", ) if selected: - console.print(f"[green]Selected session:[/green] {selected.get('name', '-')}") - console.print(f"[dim]Workspace: {selected.get('workspace', '-')}[/dim]") + console.print(f"[green]Selected session:[/green] {selected.name}") + console.print(f"[dim]Workspace: {selected.workspace}[/dim]") except TeamSwitchRequested: console.print("[dim]Use 'scc team switch' to change teams[/dim]") - return + return data if not recent: hint = "Start a session with: scc start " @@ -86,16 +124,23 @@ def sessions_cmd( hint, ) ) - return + return data # Build rows for responsive table rows = [] - for s in recent: + for session in recent: # Shorten workspace path if needed - ws = s.get("workspace", "-") + ws = session.workspace or "-" if len(ws) > 40: ws = "..." 
+ ws[-37:] - rows.append([s.get("name", "-"), ws, s.get("last_used", "-"), s.get("team", "-")]) + rows.append( + [ + session.name, + ws, + _format_last_used(session.last_used), + session.team or "-", + ] + ) title = "Recent Sessions" if filter_team not in ("__all__", None): @@ -116,6 +161,8 @@ def sessions_cmd( ], ) + return data + @handle_errors def session_list_cmd( @@ -129,6 +176,8 @@ def session_list_cmd( select: bool = typer.Option( False, "--select", "-s", help="Interactive picker to select a session" ), + json_output: bool = typer.Option(False, "--json", help="Output as JSON"), + pretty: bool = typer.Option(False, "--pretty", help="Pretty-print JSON (implies --json)"), ) -> None: """List recent Claude Code sessions. @@ -140,7 +189,14 @@ def session_list_cmd( scc session list --select """ # Delegate to sessions_cmd to avoid duplication - sessions_cmd(limit=limit, team=team, all_teams=all_teams, select=select) + sessions_cmd( + limit=limit, + team=team, + all_teams=all_teams, + select=select, + json_output=json_output, + pretty=pretty, + ) def _parse_duration(duration: str) -> int: diff --git a/src/scc_cli/commands/worktree/worktree_commands.py b/src/scc_cli/commands/worktree/worktree_commands.py index 038283d..5333d49 100644 --- a/src/scc_cli/commands/worktree/worktree_commands.py +++ b/src/scc_cli/commands/worktree/worktree_commands.py @@ -2,25 +2,35 @@ from __future__ import annotations -from dataclasses import asdict from pathlib import Path from typing import TYPE_CHECKING, Any import typer from rich.status import Status -from ... import config, deps, docker, git +from ... 
import config +from ...application import worktree as worktree_use_cases +from ...application.start_session import ( + StartSessionDependencies, + StartSessionRequest, + prepare_start_session, + start_session, +) +from ...bootstrap import get_default_adapters from ...cli_common import console, err_console, handle_errors from ...confirm import Confirm from ...core.constants import WORKTREE_BRANCH_PREFIX from ...core.errors import NotAGitRepoError, WorkspaceNotFoundError from ...core.exit_codes import EXIT_CANCELLED +from ...git import WorktreeInfo from ...json_command import json_command from ...kinds import Kind +from ...marketplace.materialize import materialize_marketplace +from ...marketplace.resolve import resolve_effective_config from ...output_mode import is_json_mode from ...panels import create_success_panel, create_warning_panel from ...theme import Indicators, Spinners -from ...ui import cleanup_worktree, create_worktree, list_worktrees, render_worktrees +from ...ui import cleanup_worktree, render_worktrees from ...ui.gate import InteractivityContext from ...ui.picker import TeamSwitchRequested, pick_worktree from ._helpers import build_worktree_list_data @@ -29,6 +39,96 @@ pass +def _build_worktree_dependencies() -> tuple[worktree_use_cases.WorktreeDependencies, Any]: + adapters = get_default_adapters() + dependencies = worktree_use_cases.WorktreeDependencies( + git_client=adapters.git_client, + dependency_installer=adapters.dependency_installer, + ) + return dependencies, adapters + + +def _to_worktree_info(summary: worktree_use_cases.WorktreeSummary) -> WorktreeInfo: + return WorktreeInfo( + path=str(summary.path), + branch=summary.branch, + status=summary.status, + is_current=summary.is_current, + has_changes=summary.has_changes, + staged_count=summary.staged_count, + modified_count=summary.modified_count, + untracked_count=summary.untracked_count, + status_timed_out=summary.status_timed_out, + ) + + +def _serialize_worktree_summary(summary: 
worktree_use_cases.WorktreeSummary) -> dict[str, Any]: + return { + "path": str(summary.path), + "branch": summary.branch, + "status": summary.status, + "is_current": summary.is_current, + "has_changes": summary.has_changes, + "staged_count": summary.staged_count, + "modified_count": summary.modified_count, + "untracked_count": summary.untracked_count, + "status_timed_out": summary.status_timed_out, + } + + +def _prompt_for_worktree( + prompt: worktree_use_cases.WorktreeSelectionPrompt, +) -> worktree_use_cases.WorktreeSelectionItem | None: + items = [option.value for option in prompt.request.options if option.value is not None] + worktree_infos: list[WorktreeInfo] = [] + for item in items: + if item.worktree is None: + worktree_infos.append(WorktreeInfo(path="", branch=item.branch, status="branch")) + continue + worktree_infos.append(_to_worktree_info(item.worktree)) + + selected = pick_worktree( + worktree_infos, + title=prompt.request.title, + subtitle=prompt.request.subtitle, + initial_filter=prompt.initial_filter, + ) + if selected is None: + return None + + try: + index = worktree_infos.index(selected) + except ValueError: + return None + if index >= len(items): + return None + return items[index] + + +def _render_worktree_ready(result: worktree_use_cases.WorktreeCreateResult) -> None: + console.print() + console.print( + create_success_panel( + "Worktree Ready", + { + "Path": str(result.worktree_path), + "Branch": result.branch_name, + "Base": result.base_branch, + "Next": f"cd {result.worktree_path}", + }, + ) + ) + + +def _raise_worktree_warning(outcome: worktree_use_cases.WorktreeWarningOutcome) -> None: + if outcome.warning.title == "Cancelled": + err_console.print("[dim]Cancelled.[/dim]") + raise typer.Exit(outcome.exit_code) + hint = outcome.warning.suggestion or "" + err_console.print(create_warning_panel(outcome.warning.title, outcome.warning.message, hint)) + raise typer.Exit(outcome.exit_code) + + @handle_errors def worktree_create_cmd( workspace: 
str = typer.Argument(..., help="Path to the main repository"), @@ -51,12 +151,15 @@ def worktree_create_cmd( if not workspace_path.exists(): raise WorkspaceNotFoundError(path=str(workspace_path)) + dependencies, adapters = _build_worktree_dependencies() + git_client = dependencies.git_client + # Handle non-git repo: offer to initialize in interactive mode - if not git.is_git_repo(workspace_path): + if not git_client.is_git_repo(workspace_path): if is_interactive(): err_console.print(f"[yellow]'{workspace_path}' is not a git repository.[/yellow]") if Confirm.ask("[cyan]Initialize git repository here?[/cyan]", default=True): - if git.init_repo(workspace_path): + if git_client.init_repo(workspace_path): err_console.print("[green]+ Git repository initialized[/green]") else: err_console.print("[red]Failed to initialize git repository[/red]") @@ -68,13 +171,13 @@ def worktree_create_cmd( raise NotAGitRepoError(path=str(workspace_path)) # Handle repo with no commits: offer to create initial commit - if not git.has_commits(workspace_path): + if not git_client.has_commits(workspace_path): if is_interactive(): err_console.print( "[yellow]Repository has no commits. 
Worktrees require at least one commit.[/yellow]" ) if Confirm.ask("[cyan]Create an empty initial commit?[/cyan]", default=True): - success, error_msg = git.create_empty_initial_commit(workspace_path) + success, error_msg = git_client.create_empty_initial_commit(workspace_path) if success: err_console.print("[green]+ Initial commit created[/green]") else: @@ -99,13 +202,23 @@ def worktree_create_cmd( ) raise typer.Exit(1) - worktree_path = create_worktree(workspace_path, name, base_branch) + result = worktree_use_cases.create_worktree( + worktree_use_cases.WorktreeCreateRequest( + workspace_path=workspace_path, + name=name, + base_branch=base_branch, + install_dependencies=True, + ), + dependencies=dependencies, + ) + + _render_worktree_ready(result) console.print( create_success_panel( "Worktree Created", { - "Path": str(worktree_path), + "Path": str(result.worktree_path), "Branch": f"{WORKTREE_BRANCH_PREFIX}{name}", "Base": base_branch or "current branch", }, @@ -117,8 +230,8 @@ def worktree_create_cmd( with Status( "[cyan]Installing dependencies...[/cyan]", console=console, spinner=Spinners.SETUP ): - success = deps.auto_install_dependencies(worktree_path) - if success: + install_result = dependencies.dependency_installer.install(result.worktree_path) + if install_result.success: console.print(f"[green]{Indicators.get('PASS')} Dependencies installed[/green]") else: console.print("[yellow]! 
Could not detect package manager or install failed[/yellow]") @@ -126,18 +239,33 @@ def worktree_create_cmd( if start_claude: console.print() if Confirm.ask("[cyan]Start Claude Code in this worktree?[/cyan]", default=True): - docker.check_docker_available() - # For worktrees, mount the common parent (contains .git/worktrees/) - # but set CWD to the worktree path - mount_path, _ = git.get_workspace_mount_path(worktree_path) - docker_cmd, _ = docker.get_or_create_container( - workspace=mount_path, - branch=f"{WORKTREE_BRANCH_PREFIX}{name}", + adapters.sandbox_runtime.ensure_available() + start_dependencies = StartSessionDependencies( + filesystem=adapters.filesystem, + remote_fetcher=adapters.remote_fetcher, + clock=adapters.clock, + git_client=adapters.git_client, + agent_runner=adapters.agent_runner, + sandbox_runtime=adapters.sandbox_runtime, + resolve_effective_config=resolve_effective_config, + materialize_marketplace=materialize_marketplace, ) - # Load org config for safety-net policy injection - org_config = config.load_cached_org_config() - # Pass container_workdir explicitly for correct CWD in worktree - docker.run(docker_cmd, org_config=org_config, container_workdir=worktree_path) + start_request = StartSessionRequest( + workspace_path=result.worktree_path, + workspace_arg=str(result.worktree_path), + entry_dir=result.worktree_path, + team=None, + session_name=None, + resume=False, + fresh=False, + offline=False, + standalone=config.is_standalone_mode(), + dry_run=False, + allow_suspicious=False, + org_config=config.load_cached_org_config(), + ) + start_plan = prepare_start_session(start_request, dependencies=start_dependencies) + start_session(start_plan, dependencies=start_dependencies) @json_command(Kind.WORKTREE_LIST) @@ -166,16 +294,23 @@ def worktree_list_cmd( if not workspace_path.exists(): raise WorkspaceNotFoundError(path=str(workspace_path)) - worktree_list = list_worktrees(workspace_path, verbose=verbose) + dependencies, _ = 
_build_worktree_dependencies() + result = worktree_use_cases.list_worktrees( + worktree_use_cases.WorktreeListRequest( + workspace_path=workspace_path, + verbose=verbose, + current_dir=Path.cwd(), + ), + git_client=dependencies.git_client, + ) - # Convert WorktreeInfo dataclasses to dicts for JSON serialization - worktree_dicts = [asdict(wt) for wt in worktree_list] + worktree_dicts = [_serialize_worktree_summary(summary) for summary in result.worktrees] data = build_worktree_list_data(worktree_dicts, str(workspace_path)) if is_json_mode(): return data - if not worktree_list: + if not result.worktrees: console.print( create_warning_panel( "No Worktrees", @@ -185,13 +320,15 @@ def worktree_list_cmd( ) return None + worktree_infos = [_to_worktree_info(summary) for summary in result.worktrees] + # Interactive mode: use worktree picker if interactive: try: selected = pick_worktree( - worktree_list, + worktree_infos, title="Select Worktree", - subtitle=f"{len(worktree_list)} worktrees in {workspace_path.name}", + subtitle=f"{len(worktree_infos)} worktrees in {workspace_path.name}", ) if selected: # Print just the path for scripting: cd $(scc worktree list -i) @@ -201,14 +338,14 @@ def worktree_list_cmd( return None # Use the worktree rendering from the UI layer - render_worktrees(worktree_list, console) + render_worktrees(worktree_infos, console) return data @handle_errors def worktree_switch_cmd( - target: str = typer.Argument( + target: str | None = typer.Argument( None, help="Target: worktree name, '-' (previous via $OLDPWD), '^' (main branch)", ), @@ -237,162 +374,66 @@ def worktree_switch_cmd( if not workspace_path.exists(): raise WorkspaceNotFoundError(path=str(workspace_path)) - if not git.is_git_repo(workspace_path): - raise NotAGitRepoError(path=str(workspace_path)) + dependencies, _ = _build_worktree_dependencies() + ctx = InteractivityContext.create() + oldpwd = os.environ.get("OLDPWD") + selection: worktree_use_cases.WorktreeSelectionItem | None = None + + 
request = worktree_use_cases.WorktreeSwitchRequest( + workspace_path=workspace_path, + target=target, + oldpwd=oldpwd, + interactive_allowed=ctx.allows_prompt(), + current_dir=Path.cwd(), + ) - # No target: interactive picker - if target is None: - worktree_list = list_worktrees(workspace_path) - if not worktree_list: - err_console.print( - create_warning_panel( - "No Worktrees", - "No worktrees found for this repository.", - "Create one with: scc worktree create ", - ), - ) - raise typer.Exit(1) + while True: + outcome = worktree_use_cases.switch_worktree(request, dependencies=dependencies) - try: - selected = pick_worktree( - worktree_list, - title="Select Worktree", - subtitle=f"{len(worktree_list)} worktrees", - ) - if selected: - print(selected.path) # noqa: T201 - else: + if isinstance(outcome, worktree_use_cases.WorktreeSelectionPrompt): + try: + selection = _prompt_for_worktree(outcome) + except TeamSwitchRequested: + err_console.print("[dim]Use 'scc team switch' to change teams[/dim]") raise typer.Exit(EXIT_CANCELLED) - except TeamSwitchRequested: - err_console.print("[dim]Use 'scc team switch' to change teams[/dim]") - raise typer.Exit(1) - return + if selection is None: + raise typer.Exit(EXIT_CANCELLED) + request = worktree_use_cases.WorktreeSwitchRequest( + workspace_path=workspace_path, + target=target, + oldpwd=oldpwd, + interactive_allowed=ctx.allows_prompt(), + current_dir=Path.cwd(), + selection=selection, + ) + continue - # Handle special shortcuts - if target == "-": - # Previous directory via shell's OLDPWD - oldpwd = os.environ.get("OLDPWD") - if not oldpwd: - err_console.print( - create_warning_panel( - "No Previous Directory", - "Shell $OLDPWD is not set.", - "This typically means you haven't changed directories yet.", - ), + if isinstance(outcome, worktree_use_cases.WorktreeConfirmation): + confirmed = Confirm.ask( + f"[cyan]{outcome.request.prompt}[/cyan]", + default=outcome.default_response, ) - raise typer.Exit(1) - print(oldpwd) # noqa: 
T201 - return - - if target == "^": - # Main/default branch worktree - main_wt = git.find_main_worktree(workspace_path) - if not main_wt: - default_branch = git.get_default_branch(workspace_path) - err_console.print( - create_warning_panel( - "No Main Worktree", - f"No worktree found for default branch '{default_branch}'.", - "The main branch may not have a separate worktree.", - ), + request = worktree_use_cases.WorktreeSwitchRequest( + workspace_path=workspace_path, + target=target, + oldpwd=oldpwd, + interactive_allowed=ctx.allows_prompt(), + current_dir=Path.cwd(), + confirm_create=confirmed, ) - raise typer.Exit(1) - print(main_wt.path) # noqa: T201 - return - - # Fuzzy match worktree - exact_match, matches = git.find_worktree_by_query(workspace_path, target) - - if exact_match: - print(exact_match.path) # noqa: T201 - return - - if not matches: - # Skip branch check for special targets (handled earlier: -, ^, @) - if target not in ("^", "-", "@") and not target.startswith("@{"): - # Check if EXACT branch exists without worktree - branches = git.list_branches_without_worktrees(workspace_path) - if target in branches: # Exact match only - no substring matching - ctx = InteractivityContext.create() - if ctx.allows_prompt(): - if Confirm.ask( - f"[cyan]No worktree for '{target}'. 
Create one?[/cyan]", - default=False, # Explicit > implicit - ): - worktree_path = create_worktree( - workspace_path, - name=target, - base_branch=target, - ) - print(worktree_path) # noqa: T201 - return - else: - # User declined - use EXIT_CANCELLED so shell wrappers don't cd - err_console.print("[dim]Cancelled.[/dim]") - raise typer.Exit(EXIT_CANCELLED) - else: - # Non-interactive: hint at explicit command - err_console.print( - create_warning_panel( - "Branch Exists, No Worktree", - f"Branch '{target}' exists but has no worktree.", - f"Use: scc worktree create {target} --base {target}", - ), - ) - raise typer.Exit(1) - - # Original "not found" error with select --branches hint - err_console.print( - create_warning_panel( - "Worktree Not Found", - f"No worktree matches '{target}'.", - "Tip: Use 'scc worktree select --branches' to pick from remote branches.", - ), - ) - raise typer.Exit(1) + continue - # Multiple matches: show picker or list - ctx = InteractivityContext.create() - if ctx.allows_prompt(): - try: - selected = pick_worktree( - matches, - title="Multiple Matches", - subtitle=f"'{target}' matches {len(matches)} worktrees", - initial_filter=target, - ) - if selected: - print(selected.path) # noqa: T201 - else: - raise typer.Exit(EXIT_CANCELLED) - except TeamSwitchRequested: - raise typer.Exit(EXIT_CANCELLED) - else: - # Non-interactive: print ranked matches with explicit selection commands - match_lines = [] - for i, wt in enumerate(matches): - display_branch = git.get_display_branch(wt.branch) - dir_name = Path(wt.path).name - if i == 0: - # Highlight top match (would be auto-selected interactively) - match_lines.append( - f" 1. [bold]{display_branch}[/] -> {dir_name} [dim]<- best match[/]" - ) - else: - match_lines.append(f" {i + 1}. 
{display_branch} -> {dir_name}") + if isinstance(outcome, worktree_use_cases.WorktreeCreateResult): + _render_worktree_ready(outcome) + print(outcome.worktree_path) # noqa: T201 + return - # Get the top match for the suggested command - top_match_dir = Path(matches[0].path).name + if isinstance(outcome, worktree_use_cases.WorktreeResolution): + print(outcome.worktree_path) # noqa: T201 + return - err_console.print( - create_warning_panel( - "Ambiguous Match", - f"'{target}' matches {len(matches)} worktrees (ranked by relevance):", - "\n".join(match_lines) - + f"\n\n[dim]Use explicit directory name: scc worktree switch {top_match_dir}[/]", - ), - ) - raise typer.Exit(1) + if isinstance(outcome, worktree_use_cases.WorktreeWarningOutcome): + _raise_worktree_warning(outcome) @handle_errors @@ -422,89 +463,74 @@ def worktree_select_cmd( if not workspace_path.exists(): raise WorkspaceNotFoundError(path=str(workspace_path)) - if not git.is_git_repo(workspace_path): - raise NotAGitRepoError(path=str(workspace_path)) + dependencies, _ = _build_worktree_dependencies() + selection: worktree_use_cases.WorktreeSelectionItem | None = None - worktree_list = list_worktrees(workspace_path) - - # Build combined list if including branches - from ...git import WorktreeInfo + request = worktree_use_cases.WorktreeSelectRequest( + workspace_path=workspace_path, + include_branches=branches, + current_dir=Path.cwd(), + ) - items: list[WorktreeInfo] = list(worktree_list) - branch_items: list[str] = [] + while True: + outcome = worktree_use_cases.select_worktree(request, dependencies=dependencies) - if branches: - branch_items = git.list_branches_without_worktrees(workspace_path) - # Create placeholder WorktreeInfo for branches (with empty path) - for branch in branch_items: - items.append( - WorktreeInfo( - path="", # Empty path indicates this is a branch, not worktree - branch=branch, - status="branch", # Mark as branch-only - ) + if isinstance(outcome, 
worktree_use_cases.WorktreeSelectionPrompt): + try: + selection = _prompt_for_worktree(outcome) + except TeamSwitchRequested: + err_console.print("[dim]Use 'scc team switch' to change teams[/dim]") + raise typer.Exit(EXIT_CANCELLED) + if selection is None: + raise typer.Exit(EXIT_CANCELLED) + request = worktree_use_cases.WorktreeSelectRequest( + workspace_path=workspace_path, + include_branches=branches, + current_dir=Path.cwd(), + selection=selection, ) - - if not items: - err_console.print( - create_warning_panel( - "No Worktrees or Branches", - "No worktrees found and no remote branches available.", - "Create a worktree with: scc worktree create ", - ), - ) - raise typer.Exit(1) - - try: - selected = pick_worktree( - items, - title="Select Worktree", - subtitle=f"{len(worktree_list)} worktrees" - + (f", {len(branch_items)} branches" if branch_items else ""), - ) - - if not selected: - raise typer.Exit(EXIT_CANCELLED) - - # If selected item is a worktree (has path), print it - if selected.path: - print(selected.path) # noqa: T201 - return - - # Selected a branch without worktree - offer to create - if Confirm.ask( - f"[cyan]Create worktree for branch '{selected.branch}'?[/cyan]", - default=True, - console=console, - ): - with Status( - "[cyan]Creating worktree...[/cyan]", + continue + + if isinstance(outcome, worktree_use_cases.WorktreeConfirmation): + if selection is None: + raise ValueError("Selection required to confirm worktree creation") + confirmed = Confirm.ask( + f"[cyan]{outcome.request.prompt}[/cyan]", + default=outcome.default_response, console=console, - spinner=Spinners.SETUP, - ): - worktree_path = create_worktree( - workspace_path, - selected.branch, - base_branch=selected.branch, - ) + ) + request = worktree_use_cases.WorktreeSelectRequest( + workspace_path=workspace_path, + include_branches=branches, + current_dir=Path.cwd(), + selection=selection, + confirm_create=confirmed, + ) + continue + + if isinstance(outcome, 
worktree_use_cases.WorktreeCreateResult): + _render_worktree_ready(outcome) + branch_label = selection.branch if selection is not None else outcome.worktree_name err_console.print( create_success_panel( "Worktree Created", - {"Branch": selected.branch, "Path": str(worktree_path)}, + {"Branch": branch_label, "Path": str(outcome.worktree_path)}, ) ) - print(worktree_path) # noqa: T201 - else: - raise typer.Exit(EXIT_CANCELLED) + print(outcome.worktree_path) # noqa: T201 + return - except TeamSwitchRequested: - err_console.print("[dim]Use 'scc team switch' to change teams[/dim]") - raise typer.Exit(EXIT_CANCELLED) + if isinstance(outcome, worktree_use_cases.WorktreeResolution): + print(outcome.worktree_path) # noqa: T201 + return + + if isinstance(outcome, worktree_use_cases.WorktreeWarningOutcome): + _raise_worktree_warning(outcome) @handle_errors def worktree_enter_cmd( - target: str = typer.Argument( + target: str | None = typer.Argument( None, help="Target: worktree name, '-' (previous), '^' (main branch)", ), @@ -528,6 +554,7 @@ def worktree_enter_cmd( scc worktree enter ^ # Enter main branch worktree """ import os + import platform import subprocess workspace_path = Path(workspace).expanduser().resolve() @@ -535,126 +562,66 @@ def worktree_enter_cmd( if not workspace_path.exists(): raise WorkspaceNotFoundError(path=str(workspace_path)) - if not git.is_git_repo(workspace_path): - raise NotAGitRepoError(path=str(workspace_path)) + dependencies, _ = _build_worktree_dependencies() + ctx = InteractivityContext.create() + selection: worktree_use_cases.WorktreeSelectionItem | None = None + + request = worktree_use_cases.WorktreeEnterRequest( + workspace_path=workspace_path, + target=target, + oldpwd=os.environ.get("OLDPWD"), + interactive_allowed=ctx.allows_prompt(), + current_dir=Path.cwd(), + env=dict(os.environ), + platform_system=platform.system(), + ) - # Resolve target to worktree path - worktree_path: Path | None = None - worktree_name: str = "" + while True: + 
outcome = worktree_use_cases.enter_worktree_shell(request, dependencies=dependencies) - if target is None: - # No target: interactive picker - worktree_list = list_worktrees(workspace_path) - if not worktree_list: - err_console.print( - create_warning_panel( - "No Worktrees", - "No worktrees found for this repository.", - "Create one with: scc worktree create ", - ), - ) - raise typer.Exit(1) - - try: - selected = pick_worktree( - worktree_list, - title="Enter Worktree", - subtitle="Select a worktree to enter", - ) - if selected: - worktree_path = Path(selected.path) - worktree_name = selected.branch or Path(selected.path).name - else: + if isinstance(outcome, worktree_use_cases.WorktreeSelectionPrompt): + try: + selection = _prompt_for_worktree(outcome) + except TeamSwitchRequested: + err_console.print("[dim]Use 'scc team switch' to change teams[/dim]") + raise typer.Exit(1) + if selection is None: raise typer.Exit(EXIT_CANCELLED) - except TeamSwitchRequested: - err_console.print("[dim]Use 'scc team switch' to change teams[/dim]") - raise typer.Exit(1) - elif target == "-": - # Previous directory - oldpwd = os.environ.get("OLDPWD") - if not oldpwd: - err_console.print( - create_warning_panel( - "No Previous Directory", - "Shell $OLDPWD is not set.", - "This typically means you haven't changed directories yet.", - ), + request = worktree_use_cases.WorktreeEnterRequest( + workspace_path=workspace_path, + target=target, + oldpwd=request.oldpwd, + interactive_allowed=ctx.allows_prompt(), + current_dir=Path.cwd(), + env=request.env, + platform_system=request.platform_system, + selection=selection, ) - raise typer.Exit(1) - worktree_path = Path(oldpwd) - worktree_name = worktree_path.name - elif target == "^": - # Main branch worktree - main_branch = git.get_default_branch(workspace_path) - worktree_list = list_worktrees(workspace_path) - for wt in worktree_list: - if wt.branch == main_branch or wt.branch in {"main", "master"}: - worktree_path = Path(wt.path) - 
worktree_name = wt.branch or worktree_path.name - break - if not worktree_path: - err_console.print( - create_warning_panel( - "Main Branch Not Found", - f"No worktree found for main branch ({main_branch}).", - "The main worktree may be in a different location.", - ), - ) - raise typer.Exit(1) - else: - # Fuzzy match target - matched, _matches = git.find_worktree_by_query(workspace_path, target) - if matched: - worktree_path = Path(matched.path) - worktree_name = matched.branch or Path(matched.path).name - else: - err_console.print( - create_warning_panel( - "Worktree Not Found", - f"No worktree matching '{target}'.", - "Run 'scc worktree list' to see available worktrees.", - ), - ) - raise typer.Exit(1) - - # Verify worktree path exists - if not worktree_path or not worktree_path.exists(): - err_console.print( - create_warning_panel( - "Worktree Missing", - f"Worktree path does not exist: {worktree_path}", - "The worktree may have been removed. Run 'scc worktree prune'.", - ), - ) - raise typer.Exit(1) - - # Print entry message to stderr (stdout stays clean) - err_console.print(f"[cyan]Entering worktree:[/cyan] {worktree_path}") - err_console.print("[dim]Type 'exit' to return.[/dim]") - err_console.print() - - # Set up environment with SCC_WORKTREE variable - env = os.environ.copy() - env["SCC_WORKTREE"] = worktree_name - - # Get user's shell (default to /bin/bash on Unix, cmd.exe on Windows) - import platform - - if platform.system() == "Windows": - shell = os.environ.get("COMSPEC", "cmd.exe") - else: - shell = os.environ.get("SHELL", "/bin/bash") - - # Run subshell in worktree directory - try: - subprocess.run([shell], cwd=str(worktree_path), env=env) - except FileNotFoundError: - err_console.print(f"[red]Shell not found: {shell}[/red]") - raise typer.Exit(1) + continue + + if isinstance(outcome, worktree_use_cases.WorktreeWarningOutcome): + _raise_worktree_warning(outcome) + + if isinstance(outcome, worktree_use_cases.WorktreeShellResult): + worktree_path = 
outcome.worktree_path + err_console.print(f"[cyan]Entering worktree:[/cyan] {worktree_path}") + err_console.print("[dim]Type 'exit' to return.[/dim]") + err_console.print() + + try: + subprocess.run( + outcome.shell_command.argv, + cwd=str(outcome.shell_command.workdir), + env=outcome.shell_command.env, + ) + except FileNotFoundError: + shell = outcome.shell_command.argv[0] + err_console.print(f"[red]Shell not found: {shell}[/red]") + raise typer.Exit(1) - # After subshell exits, print a message - err_console.print() - err_console.print("[dim]Exited worktree subshell[/dim]") + err_console.print() + err_console.print("[dim]Exited worktree subshell[/dim]") + return @handle_errors @@ -701,7 +668,8 @@ def worktree_prune_cmd( """ workspace_path = Path(workspace).expanduser().resolve() - if not git.is_git_repo(workspace_path): + dependencies, _ = _build_worktree_dependencies() + if not dependencies.git_client.is_git_repo(workspace_path): raise NotAGitRepoError(path=str(workspace_path)) cmd = ["git", "-C", str(workspace_path), "worktree", "prune"] diff --git a/src/scc_cli/core/enums.py b/src/scc_cli/core/enums.py new file mode 100644 index 0000000..4dcab8a --- /dev/null +++ b/src/scc_cli/core/enums.py @@ -0,0 +1,107 @@ +"""Domain enums for SCC CLI. + +Centralized location for string-based enums that replace magic strings +throughout the codebase. All enums inherit from str to maintain +JSON serialization compatibility. 
+""" + +from __future__ import annotations + +from enum import Enum + + +class SeverityLevel(str, Enum): + """Severity levels for doctor checks, validation, and warnings.""" + + ERROR = "error" + WARNING = "warning" + INFO = "info" + + +class MCPServerType(str, Enum): + """MCP server transport types.""" + + SSE = "sse" + STDIO = "stdio" + HTTP = "http" + + +class TargetType(str, Enum): + """Target types for blocked/denied items.""" + + PLUGIN = "plugin" + MCP_SERVER = "mcp_server" + + +class RequestSource(str, Enum): + """Source of config additions (team or project level).""" + + TEAM = "team" + PROJECT = "project" + + +class CredentialStatus(str, Enum): + """Status of team credentials.""" + + VALID = "valid" + EXPIRED = "expired" + EXPIRING = "expiring" + + +class GovernanceStatus(str, Enum): + """Governance status for teams.""" + + BLOCKED = "blocked" + WARNING = "warning" + + +class OrgConfigUpdateStatus(str, Enum): + """Status of organization config update checks.""" + + UPDATED = "updated" + UNCHANGED = "unchanged" + OFFLINE = "offline" + AUTH_FAILED = "auth_failed" + NO_CACHE = "no_cache" + STANDALONE = "standalone" + THROTTLED = "throttled" + + +class DiffItemStatus(str, Enum): + """Status of items in a diff.""" + + ADDED = "added" + REMOVED = "removed" + MODIFIED = "modified" + + +class DiffItemSection(str, Enum): + """Sections for diff items.""" + + PLUGINS = "plugins" + MCP_SERVERS = "mcp_servers" + MARKETPLACES = "marketplaces" + + +class MarketplaceSourceType(str, Enum): + """Marketplace source types.""" + + GITHUB = "github" + GIT = "git" + URL = "url" + + +class NetworkPolicy(str, Enum): + """Network policy options.""" + + CORP_PROXY_ONLY = "corp-proxy-only" + UNRESTRICTED = "unrestricted" + ISOLATED = "isolated" + + +class DecisionResult(str, Enum): + """Evaluation decision results.""" + + ALLOWED = "allowed" + BLOCKED = "blocked" + DENIED = "denied" diff --git a/src/scc_cli/core/personal_profiles.py b/src/scc_cli/core/personal_profiles.py index 
5a70234..7ed65d6 100644 --- a/src/scc_cli/core/personal_profiles.py +++ b/src/scc_cli/core/personal_profiles.py @@ -15,6 +15,7 @@ from urllib.parse import urlparse from scc_cli import config as config_module +from scc_cli.core.enums import DiffItemSection, DiffItemStatus from scc_cli.marketplace.managed import load_managed_state from scc_cli.subprocess_utils import run_command @@ -677,8 +678,8 @@ class DiffItem: """A single diff item for the TUI overlay.""" name: str - status: str # "added" (+), "removed" (-), "modified" (~) - section: str # "plugins", "mcp_servers", "marketplaces" + status: DiffItemStatus # ADDED (+), REMOVED (-), MODIFIED (~) + section: DiffItemSection # PLUGINS, MCP_SERVERS, MARKETPLACES @dataclass @@ -724,12 +725,18 @@ def compute_structured_diff( # Plugins in profile but not workspace (would be added on apply) for plugin in sorted(prof_plugins.keys()): if plugin not in ws_plugins: - items.append(DiffItem(name=plugin, status="added", section="plugins")) + items.append( + DiffItem(name=plugin, status=DiffItemStatus.ADDED, section=DiffItemSection.PLUGINS) + ) # Plugins in workspace but not profile (would be removed on apply) for plugin in sorted(ws_plugins.keys()): if plugin not in prof_plugins: - items.append(DiffItem(name=plugin, status="removed", section="plugins")) + items.append( + DiffItem( + name=plugin, status=DiffItemStatus.REMOVED, section=DiffItemSection.PLUGINS + ) + ) # Compare marketplaces ws_markets = _normalize_marketplaces(workspace_settings.get("extraKnownMarketplaces")) @@ -737,13 +744,25 @@ def compute_structured_diff( for name in sorted(prof_markets.keys()): if name not in ws_markets: - items.append(DiffItem(name=name, status="added", section="marketplaces")) + items.append( + DiffItem( + name=name, status=DiffItemStatus.ADDED, section=DiffItemSection.MARKETPLACES + ) + ) elif prof_markets[name] != ws_markets[name]: - items.append(DiffItem(name=name, status="modified", section="marketplaces")) + items.append( + DiffItem( + 
name=name, status=DiffItemStatus.MODIFIED, section=DiffItemSection.MARKETPLACES + ) + ) for name in sorted(ws_markets.keys()): if name not in prof_markets: - items.append(DiffItem(name=name, status="removed", section="marketplaces")) + items.append( + DiffItem( + name=name, status=DiffItemStatus.REMOVED, section=DiffItemSection.MARKETPLACES + ) + ) # Compare MCP servers ws_servers = workspace_mcp.get("mcpServers", {}) @@ -751,13 +770,25 @@ def compute_structured_diff( for name in sorted(prof_servers.keys()): if name not in ws_servers: - items.append(DiffItem(name=name, status="added", section="mcp_servers")) + items.append( + DiffItem( + name=name, status=DiffItemStatus.ADDED, section=DiffItemSection.MCP_SERVERS + ) + ) elif prof_servers[name] != ws_servers[name]: - items.append(DiffItem(name=name, status="modified", section="mcp_servers")) + items.append( + DiffItem( + name=name, status=DiffItemStatus.MODIFIED, section=DiffItemSection.MCP_SERVERS + ) + ) for name in sorted(ws_servers.keys()): if name not in prof_servers: - items.append(DiffItem(name=name, status="removed", section="mcp_servers")) + items.append( + DiffItem( + name=name, status=DiffItemStatus.REMOVED, section=DiffItemSection.MCP_SERVERS + ) + ) return StructuredDiff(items=items, total_count=len(items)) diff --git a/src/scc_cli/docker/core.py b/src/scc_cli/docker/core.py index f310aad..a254051 100644 --- a/src/scc_cli/docker/core.py +++ b/src/scc_cli/docker/core.py @@ -246,7 +246,7 @@ def build_command( - Agent `claude` is ALWAYS included, even in detached mode - Session flags passed via docker exec in detached mode (see run_sandbox) """ - from ..core.constants import SAFETY_NET_POLICY_FILENAME, SANDBOX_DATA_MOUNT + from ..core.constants import SANDBOX_DATA_MOUNT cmd = ["docker", "sandbox", "run"] @@ -258,17 +258,20 @@ def build_command( # Add read-only bind mount for safety net policy (kernel-enforced security) # This MUST be added before the agent name in the command # - # Design note: We mount 
the FILE directly (not a directory) because: - # - Containers are ephemeral (recreated each `scc start`) - # - Policy is written before container creation, so new containers get current policy - # - If we ever support container reuse or hot-reload, switch to directory mount - # (file mounts pin to inode; atomic rename would be invisible to running container) + # Design note: We mount the DIRECTORY (not the file) because: + # - Docker Desktop's VirtioFS can have delays before newly created files are visible + # - Directory mounts are more reliable as the directory already exists + # - The file can appear "later" as VirtioFS propagation catches up + # - Avoids inode pinning issues with atomic file replacement if policy_host_path is not None: - container_policy_path = f"{SANDBOX_DATA_MOUNT}/{SAFETY_NET_POLICY_FILENAME}" - # -v host_path:container_path:ro ← Kernel-enforced read-only + # Mount the parent directory containing the policy file + policy_dir = policy_host_path.parent + policy_filename = policy_host_path.name + container_policy_dir = f"{SANDBOX_DATA_MOUNT}/policy" + container_policy_path = f"{container_policy_dir}/{policy_filename}" + # -v host_dir:container_dir:ro ← Kernel-enforced read-only # Even sudo inside container cannot bypass `:ro` - requires CAP_SYS_ADMIN - # Use os.fspath() to reliably convert Path to string - cmd.extend(["-v", f"{os.fspath(policy_host_path)}:{container_policy_path}:ro"]) + cmd.extend(["-v", f"{os.fspath(policy_dir)}:{container_policy_dir}:ro"]) # Set SCC_POLICY_PATH env var so plugin knows where to read policy cmd.extend(["-e", f"SCC_POLICY_PATH={container_policy_path}"]) diff --git a/src/scc_cli/docker/launch.py b/src/scc_cli/docker/launch.py index af337a7..a028022 100644 --- a/src/scc_cli/docker/launch.py +++ b/src/scc_cli/docker/launch.py @@ -9,6 +9,7 @@ import os import subprocess import tempfile +import time from pathlib import Path from typing import Any, cast @@ -221,6 +222,25 @@ def _cleanup_fallback_policy_files() -> 
None: pass # Silently ignore - this is optional hygiene +def _is_mount_race_error(stderr: str) -> bool: + """Check if Docker error is a mount race condition (retryable). + + Docker Desktop's VirtioFS can have delays before newly created files + are visible. This function detects these specific errors. + + Args: + stderr: The stderr output from the Docker command. + + Returns: + True if the error indicates a mount race condition. + """ + error_lower = stderr.lower() + return ( + "bind source path does not exist" in error_lower + or "no such file or directory" in error_lower + ) + + def run( cmd: list[str], ensure_credentials: bool = True, @@ -350,26 +370,52 @@ def run_sandbox( _preinit_credential_volume() # STEP 3: Start container in DETACHED mode (no Claude running yet) + # Use retry-with-backoff for Docker Desktop VirtioFS race conditions + # (newly created files may not be immediately visible to Docker) detached_cmd = build_command( workspace=workspace, detached=True, policy_host_path=policy_host_path, ) - result = subprocess.run( - detached_cmd, - capture_output=True, - text=True, - timeout=60, - ) - if result.returncode != 0: + max_retries = 5 + base_delay = 0.5 # Start with 500ms, exponential backoff + last_result: subprocess.CompletedProcess[str] | None = None + + for attempt in range(max_retries): + result = subprocess.run( + detached_cmd, + capture_output=True, + text=True, + timeout=60, + ) + last_result = result + + if result.returncode == 0: + break # Success! + + # Check if this is a retryable mount race error + if _is_mount_race_error(result.stderr) and attempt < max_retries - 1: + delay = base_delay * (2**attempt) # 0.5s, 1s, 2s, 4s + err_line( + f"Docker mount race detected, retrying in {delay:.1f}s " + f"({attempt + 1}/{max_retries})..." 
+ ) + time.sleep(delay) + else: + # Non-retryable error or last attempt failed + break + + # After retry loop, check final result + if last_result is None or last_result.returncode != 0: + stderr = last_result.stderr if last_result else "" raise SandboxLaunchError( user_message="Failed to create Docker sandbox", command=" ".join(detached_cmd), - stderr=result.stderr, + stderr=stderr, ) - container_id = result.stdout.strip() + container_id = last_result.stdout.strip() if not container_id: raise SandboxLaunchError( user_message="Docker sandbox returned empty container ID", @@ -567,21 +613,76 @@ def inject_settings(settings: dict[str, Any]) -> bool: ) +def reset_plugin_caches() -> bool: + """ + Reset Claude Code's plugin caches in Docker sandbox volume. + + Claude Code maintains its own plugin caches (known_marketplaces.json, + installed_plugins.json) that can contain stale paths from previous sessions. + When switching between workspaces (e.g., main repo vs worktrees), these + stale paths cause "Plugin not found in marketplace" errors. + + This function clears those caches to ensure Claude uses fresh paths from + the injected settings.json. + + Returns: + True if reset successful, False otherwise + """ + try: + # Clear and recreate plugin caches in a single atomic operation + # This matches the manual fix that was verified to work + result = subprocess.run( + [ + "docker", + "run", + "--rm", + "-v", + f"{SANDBOX_DATA_VOLUME}:/data", + "alpine", + "sh", + "-c", + ( + "rm -rf /data/plugins && " + "mkdir -p /data/plugins && " + "echo '{}' > /data/plugins/known_marketplaces.json && " + 'echo \'{"version":2,"plugins":{}}\' > /data/plugins/installed_plugins.json' + ), + ], + capture_output=True, + text=True, + timeout=30, + ) + return result.returncode == 0 + except (subprocess.TimeoutExpired, FileNotFoundError, OSError): + return False + + def reset_global_settings() -> bool: """ - Reset global settings in Docker sandbox volume to empty state. 
+ Reset global settings and plugin caches in Docker sandbox volume. - This prevents plugin mixing across teams by ensuring the volume doesn't - retain old plugin configurations. Workspace settings.local.json is the - single source of truth for plugins. + This prevents plugin mixing across teams/workspaces by ensuring the volume + doesn't retain stale configurations. Clears: + - settings.json (main settings) + - plugins/known_marketplaces.json (cached marketplace paths) + - plugins/installed_plugins.json (cached plugin install info) Called once per `scc start` flow, before container exec. Returns: - True if reset successful, False otherwise + True if all resets successful, False otherwise """ - # Write empty settings to the volume (overwrites any existing) - return inject_file_to_sandbox_volume("settings.json", "{}") + success = True + + # Clear main settings + if not inject_file_to_sandbox_volume("settings.json", "{}"): + success = False + + # Clear plugin caches to prevent stale paths across workspaces + if not reset_plugin_caches(): + success = False + + return success def inject_plugin_settings_to_container( diff --git a/src/scc_cli/doctor/checks/cache.py b/src/scc_cli/doctor/checks/cache.py index 75648d2..b9454a8 100644 --- a/src/scc_cli/doctor/checks/cache.py +++ b/src/scc_cli/doctor/checks/cache.py @@ -11,6 +11,8 @@ from datetime import datetime, timezone from pathlib import Path +from scc_cli.core.enums import SeverityLevel + from ..types import CheckResult from .json_helpers import get_json_error_hints, validate_json_file @@ -32,7 +34,7 @@ def check_cache_readable() -> CheckResult: name="Org Cache", passed=True, message="No cache file (will fetch on first use)", - severity="info", + severity=SeverityLevel.INFO, ) # Use the new validation helper for enhanced error display @@ -58,7 +60,7 @@ def check_cache_readable() -> CheckResult: passed=False, message=f"Cannot read cache file: {e}", fix_hint="Run 'scc setup' to refresh organization config", - 
severity="error", + severity=SeverityLevel.ERROR, ) # Invalid JSON - build detailed error message @@ -81,7 +83,7 @@ def check_cache_readable() -> CheckResult: passed=False, message=error_msg, fix_hint=fix_hint, - severity="error", + severity=SeverityLevel.ERROR, code_frame=result.code_frame, ) @@ -108,7 +110,7 @@ def check_cache_ttl_status() -> CheckResult | None: passed=False, message="Cache metadata is corrupted", fix_hint="Run 'scc setup' to refresh organization config", - severity="warning", + severity=SeverityLevel.WARNING, ) org_meta = meta.get("org_config", {}) @@ -119,7 +121,7 @@ def check_cache_ttl_status() -> CheckResult | None: name="Cache TTL", passed=True, message="No expiration set in cache", - severity="info", + severity=SeverityLevel.INFO, ) try: @@ -143,7 +145,7 @@ def check_cache_ttl_status() -> CheckResult | None: passed=False, message=f"Cache expired {hours:.1f} hours ago", fix_hint="Run 'scc setup' to refresh organization config", - severity="warning", + severity=SeverityLevel.WARNING, ) except (ValueError, TypeError): return CheckResult( @@ -151,7 +153,7 @@ def check_cache_ttl_status() -> CheckResult | None: passed=False, message="Invalid expiration date in cache metadata", fix_hint="Run 'scc setup' to refresh organization config", - severity="warning", + severity=SeverityLevel.WARNING, ) @@ -220,7 +222,7 @@ def check_exception_stores() -> CheckResult: passed=False, message="; ".join(issues), fix_hint="Run 'scc exceptions reset --user --yes' to reset corrupt stores", - severity="error", + severity=SeverityLevel.ERROR, ) if warnings: @@ -229,7 +231,7 @@ def check_exception_stores() -> CheckResult: passed=True, message="; ".join(warnings), fix_hint="Consider upgrading SCC or running 'scc exceptions cleanup'", - severity="warning", + severity=SeverityLevel.WARNING, ) return CheckResult( @@ -271,5 +273,5 @@ def check_proxy_environment() -> CheckResult: name="Proxy Environment", passed=True, message=message, - severity="info", + 
severity=SeverityLevel.INFO, ) diff --git a/src/scc_cli/doctor/checks/config.py b/src/scc_cli/doctor/checks/config.py index 15b4a4f..5588afb 100644 --- a/src/scc_cli/doctor/checks/config.py +++ b/src/scc_cli/doctor/checks/config.py @@ -5,6 +5,8 @@ from __future__ import annotations +from scc_cli.core.enums import SeverityLevel + from ..types import CheckResult from .json_helpers import get_json_error_hints, validate_json_file @@ -27,7 +29,7 @@ def check_user_config_valid() -> CheckResult: name="User Config", passed=True, message="No user config file (using defaults)", - severity="info", + severity=SeverityLevel.INFO, ) result = validate_json_file(config_file) @@ -84,7 +86,7 @@ def check_config_directory() -> CheckResult: passed=False, message=f"Cannot create config directory: {config_dir}", fix_hint="Check permissions on parent directory", - severity="error", + severity=SeverityLevel.ERROR, ) # Check if writable @@ -103,5 +105,5 @@ def check_config_directory() -> CheckResult: passed=False, message=f"Config directory is not writable: {config_dir}", fix_hint=f"Check permissions: chmod 755 {config_dir}", - severity="error", + severity=SeverityLevel.ERROR, ) diff --git a/src/scc_cli/doctor/checks/environment.py b/src/scc_cli/doctor/checks/environment.py index 3734754..a039a87 100644 --- a/src/scc_cli/doctor/checks/environment.py +++ b/src/scc_cli/doctor/checks/environment.py @@ -8,6 +8,8 @@ import subprocess from pathlib import Path +from scc_cli.core.enums import SeverityLevel + from ..types import CheckResult @@ -22,7 +24,7 @@ def check_git() -> CheckResult: message="Git is not installed or not in PATH", fix_hint="Install Git from https://git-scm.com/downloads", fix_url="https://git-scm.com/downloads", - severity="error", + severity=SeverityLevel.ERROR, ) version = git_module.get_git_version() @@ -47,7 +49,7 @@ def check_docker() -> CheckResult: message="Docker is not installed or not running", fix_hint="Install Docker Desktop from 
https://docker.com/products/docker-desktop", fix_url="https://docker.com/products/docker-desktop", - severity="error", + severity=SeverityLevel.ERROR, ) return CheckResult( @@ -70,7 +72,7 @@ def check_docker_desktop() -> CheckResult: message="Docker Desktop CLI not detected", fix_hint=("Install or update Docker Desktop 4.50+ and ensure its CLI is first in PATH"), fix_url="https://docker.com/products/docker-desktop", - severity="warning", + severity=SeverityLevel.WARNING, ) current = docker_module._parse_version(desktop_version) @@ -87,7 +89,7 @@ def check_docker_desktop() -> CheckResult: version=desktop_version, fix_hint="Update Docker Desktop to 4.50+", fix_url="https://docker.com/products/docker-desktop", - severity="error", + severity=SeverityLevel.ERROR, ) return CheckResult( @@ -112,7 +114,7 @@ def check_docker_sandbox() -> CheckResult: "Run 'docker sandbox --help' and verify Docker Desktop is first in PATH" ), fix_url="https://docs.docker.com/desktop/features/sandbox/", - severity="error", + severity=SeverityLevel.ERROR, ) return CheckResult( @@ -142,7 +144,7 @@ def check_docker_running() -> CheckResult: passed=False, message="Docker daemon is not running", fix_hint="Start Docker Desktop or run 'sudo systemctl start docker'", - severity="error", + severity=SeverityLevel.ERROR, ) except (subprocess.TimeoutExpired, FileNotFoundError): return CheckResult( @@ -150,7 +152,7 @@ def check_docker_running() -> CheckResult: passed=False, message="Could not connect to Docker daemon", fix_hint="Ensure Docker Desktop is running", - severity="error", + severity=SeverityLevel.ERROR, ) @@ -166,7 +168,7 @@ def check_wsl2() -> tuple[CheckResult, bool]: name="WSL2 Environment", passed=True, message="Running in WSL2 (recommended for Windows)", - severity="info", + severity=SeverityLevel.INFO, ), True, ) @@ -200,7 +202,7 @@ def check_workspace_path(workspace: Path | None = None) -> CheckResult: passed=False, message=f"Workspace is on Windows filesystem: {workspace}", 
fix_hint="Move project to ~/projects inside WSL for better performance", - severity="warning", + severity=SeverityLevel.WARNING, ) return CheckResult( diff --git a/src/scc_cli/doctor/checks/organization.py b/src/scc_cli/doctor/checks/organization.py index ae80617..e49bd4c 100644 --- a/src/scc_cli/doctor/checks/organization.py +++ b/src/scc_cli/doctor/checks/organization.py @@ -8,6 +8,8 @@ import json from typing import Any, cast +from scc_cli.core.enums import SeverityLevel + from ..types import CheckResult @@ -75,7 +77,7 @@ def check_org_config_reachable() -> CheckResult | None: passed=False, message=f"Failed to fetch org config: {e}", fix_hint="Check network connection and URL", - severity="error", + severity=SeverityLevel.ERROR, ) if status == 401: @@ -84,7 +86,7 @@ def check_org_config_reachable() -> CheckResult | None: passed=False, message=f"Authentication required (401) for {url}", fix_hint="Configure auth with: scc setup", - severity="error", + severity=SeverityLevel.ERROR, ) if status == 403: @@ -93,7 +95,7 @@ def check_org_config_reachable() -> CheckResult | None: passed=False, message=f"Access denied (403) for {url}", fix_hint="Check your access permissions", - severity="error", + severity=SeverityLevel.ERROR, ) if status != 200 or org_config is None: @@ -102,7 +104,7 @@ def check_org_config_reachable() -> CheckResult | None: passed=False, message=f"Failed to fetch org config (status: {status})", fix_hint="Check URL and network connection", - severity="error", + severity=SeverityLevel.ERROR, ) org_name = org_config.get("organization", {}).get("name", "Unknown") @@ -150,7 +152,7 @@ def check_marketplace_auth_available() -> CheckResult | None: name="Marketplace Auth", passed=False, message=f"Marketplace '{marketplace_name}' not found in org config", - severity="error", + severity=SeverityLevel.ERROR, ) # Check auth requirement @@ -186,14 +188,14 @@ def check_marketplace_auth_available() -> CheckResult | None: passed=False, message=f"{auth_spec} not set or 
invalid", fix_hint=hint, - severity="error", + severity=SeverityLevel.ERROR, ) except Exception as e: return CheckResult( name="Marketplace Auth", passed=False, message=f"Auth resolution failed: {e}", - severity="error", + severity=SeverityLevel.ERROR, ) diff --git a/src/scc_cli/doctor/checks/worktree.py b/src/scc_cli/doctor/checks/worktree.py index 4fee80b..6c833c7 100644 --- a/src/scc_cli/doctor/checks/worktree.py +++ b/src/scc_cli/doctor/checks/worktree.py @@ -9,6 +9,8 @@ from pathlib import Path from typing import Any +from scc_cli.core.enums import SeverityLevel + from ..types import CheckResult @@ -97,7 +99,7 @@ def check_worktree_health(cwd: Path | None = None) -> CheckResult | None: message=f"{total} worktree{'s' if total != 1 else ''}: {', '.join(issues)}", fix_hint="; ".join(fix_hints) if fix_hints else None, fix_commands=fix_commands if fix_commands else None, - severity="warning" if prunable_count > 0 else "info", + severity=SeverityLevel.WARNING if prunable_count > 0 else SeverityLevel.INFO, ) @@ -215,7 +217,7 @@ def check_git_version_for_worktrees() -> CheckResult | None: name="Git Version (Worktrees)", passed=True, # Still pass, just warn message=f"Git {version_str} works, but 2.20+ recommended for worktrees", - severity="info", + severity=SeverityLevel.INFO, ) return CheckResult( @@ -274,5 +276,5 @@ def check_worktree_branch_conflicts(cwd: Path | None = None) -> CheckResult | No passed=False, message=f"Branch checked out in multiple worktrees: {'; '.join(conflict_msgs)}", fix_hint="Each branch can only be checked out in one worktree at a time", - severity="error", + severity=SeverityLevel.ERROR, ) diff --git a/src/scc_cli/doctor/core.py b/src/scc_cli/doctor/core.py index 1defec7..afd58c7 100644 --- a/src/scc_cli/doctor/core.py +++ b/src/scc_cli/doctor/core.py @@ -2,6 +2,8 @@ from pathlib import Path +from scc_cli.core.enums import SeverityLevel + from .checks import ( check_config_directory, check_docker, @@ -50,7 +52,9 @@ def run_doctor(workspace: 
Path | None = None) -> DoctorResult: if workspace: path_check = check_workspace_path(workspace) result.checks.append(path_check) - result.windows_path_warning = not path_check.passed and path_check.severity == "warning" + result.windows_path_warning = ( + not path_check.passed and path_check.severity == SeverityLevel.WARNING + ) config_check = check_config_directory() result.checks.append(config_check) diff --git a/src/scc_cli/doctor/render.py b/src/scc_cli/doctor/render.py index 27614a8..e3df04b 100644 --- a/src/scc_cli/doctor/render.py +++ b/src/scc_cli/doctor/render.py @@ -18,6 +18,7 @@ from rich.text import Text from scc_cli import __version__ +from scc_cli.core.enums import SeverityLevel from .core import run_doctor from .types import DoctorResult @@ -56,7 +57,7 @@ def render_doctor_results(console: Console, result: DoctorResult) -> None: # Status icon with color if check.passed: status = Text(" ", style="bold green") - elif check.severity == "warning": + elif check.severity == SeverityLevel.WARNING: status = Text(" ", style="bold yellow") else: status = Text(" ", style="bold red") @@ -180,7 +181,9 @@ def render_quick_status(console: Console, result: DoctorResult) -> None: if result.all_ok: console.print("[green] All systems operational[/green]") else: - failed = [c.name for c in result.checks if not c.passed and c.severity == "error"] + failed = [ + c.name for c in result.checks if not c.passed and c.severity == SeverityLevel.ERROR + ] console.print(f"[red] Issues detected:[/red] {', '.join(failed)}") diff --git a/src/scc_cli/doctor/serialization.py b/src/scc_cli/doctor/serialization.py index d2cff49..2496a55 100644 --- a/src/scc_cli/doctor/serialization.py +++ b/src/scc_cli/doctor/serialization.py @@ -2,6 +2,8 @@ from typing import Any +from scc_cli.core.enums import SeverityLevel + from .types import DoctorResult @@ -30,8 +32,8 @@ def build_doctor_json_data(result: DoctorResult) -> dict[str, Any]: total = len(result.checks) passed = sum(1 for c in 
result.checks if c.passed) - errors = sum(1 for c in result.checks if not c.passed and c.severity == "error") - warnings = sum(1 for c in result.checks if not c.passed and c.severity == "warning") + errors = sum(1 for c in result.checks if not c.passed and c.severity == SeverityLevel.ERROR) + warnings = sum(1 for c in result.checks if not c.passed and c.severity == SeverityLevel.WARNING) return { "checks": checks_data, diff --git a/src/scc_cli/doctor/types.py b/src/scc_cli/doctor/types.py index 534048e..e51afde 100644 --- a/src/scc_cli/doctor/types.py +++ b/src/scc_cli/doctor/types.py @@ -9,6 +9,8 @@ from dataclasses import dataclass, field from pathlib import Path +from scc_cli.core.enums import SeverityLevel + @dataclass class CheckResult: @@ -20,7 +22,7 @@ class CheckResult: version: str | None = None fix_hint: str | None = None fix_url: str | None = None - severity: str = "error" # "error", "warning", "info" + severity: str = SeverityLevel.ERROR code_frame: str | None = None # Optional code frame for syntax errors fix_commands: list[str] | None = None # Copy-pasteable fix commands @@ -58,9 +60,9 @@ def all_ok(self) -> bool: @property def error_count(self) -> int: """Return the count of failed critical checks.""" - return sum(1 for c in self.checks if not c.passed and c.severity == "error") + return sum(1 for c in self.checks if not c.passed and c.severity == SeverityLevel.ERROR) @property def warning_count(self) -> int: """Return the count of warnings.""" - return sum(1 for c in self.checks if not c.passed and c.severity == "warning") + return sum(1 for c in self.checks if not c.passed and c.severity == SeverityLevel.WARNING) diff --git a/src/scc_cli/maintenance/health_checks.py b/src/scc_cli/maintenance/health_checks.py index 3c01dec..924cb6a 100644 --- a/src/scc_cli/maintenance/health_checks.py +++ b/src/scc_cli/maintenance/health_checks.py @@ -143,7 +143,7 @@ def preview_operation(action_id: str, **kwargs: Any) -> MaintenancePreview: elif action_id in 
("prune_sessions", "delete_all_sessions"): paths = [config.SESSIONS_FILE] try: - item_count = len(sessions._load_sessions()) + item_count = len(sessions.get_session_store().load_sessions()) except Exception: item_count = 0 bytes_estimate = _get_size(config.SESSIONS_FILE) diff --git a/src/scc_cli/maintenance/repair_sessions.py b/src/scc_cli/maintenance/repair_sessions.py index 619ece2..7cda547 100644 --- a/src/scc_cli/maintenance/repair_sessions.py +++ b/src/scc_cli/maintenance/repair_sessions.py @@ -1,10 +1,9 @@ from __future__ import annotations from datetime import datetime, timedelta, timezone -from typing import Any from scc_cli import config, sessions -from scc_cli.utils.locks import file_lock, lock_path +from scc_cli.ports.session_models import SessionRecord from .backups import _create_backup from .health_checks import _get_size @@ -36,9 +35,9 @@ def prune_sessions( ) try: - lock_file = lock_path("sessions") - with file_lock(lock_file): - all_sessions = sessions._load_sessions() + store = sessions.get_session_store() + with store.lock(): + all_sessions = store.load_sessions() original_count = len(all_sessions) if original_count == 0: @@ -47,23 +46,19 @@ def prune_sessions( cutoff = datetime.now(timezone.utc) - timedelta(days=older_than_days) - by_team: dict[str | None, list[dict[str, Any]]] = {} + by_team: dict[str | None, list[SessionRecord]] = {} for session in all_sessions: - session_team = session.get("team") - if team is not None and session_team != team: - by_team.setdefault(session_team, []).append(session) - else: - by_team.setdefault(session_team, []).append(session) + by_team.setdefault(session.team, []).append(session) - kept_sessions: list[dict[str, Any]] = [] + kept_sessions: list[SessionRecord] = [] for _team, team_sessions in by_team.items(): - team_sessions.sort(key=lambda s: s.get("last_used", ""), reverse=True) + team_sessions.sort(key=lambda s: s.last_used or "", reverse=True) kept = team_sessions[:keep_n] remaining = 
team_sessions[keep_n:] for session in remaining: - last_used = session.get("last_used", "") + last_used = session.last_used or "" if last_used: try: dt = datetime.fromisoformat(last_used.replace("Z", "+00:00")) @@ -89,7 +84,7 @@ def prune_sessions( result.bytes_freed = _get_size(config.SESSIONS_FILE) - sessions._save_sessions(kept_sessions) + store.save_sessions(kept_sessions) new_size = _get_size(config.SESSIONS_FILE) result.bytes_freed = result.bytes_freed - new_size @@ -129,7 +124,8 @@ def delete_all_sessions( return result try: - all_sessions = sessions._load_sessions() + store = sessions.get_session_store() + all_sessions = store.load_sessions() result.removed_count = len(all_sessions) except Exception: result.removed_count = 0 diff --git a/src/scc_cli/ports/archive_writer.py b/src/scc_cli/ports/archive_writer.py new file mode 100644 index 0000000..b7fc7e9 --- /dev/null +++ b/src/scc_cli/ports/archive_writer.py @@ -0,0 +1,12 @@ +"""Archive writer port definition.""" + +from __future__ import annotations + +from typing import Protocol + + +class ArchiveWriter(Protocol): + """Write support bundles to an archive destination.""" + + def write_manifest(self, output_path: str, manifest_json: str) -> None: + """Write a manifest JSON file into the archive.""" diff --git a/src/scc_cli/ports/config_models.py b/src/scc_cli/ports/config_models.py new file mode 100644 index 0000000..7cab6cb --- /dev/null +++ b/src/scc_cli/ports/config_models.py @@ -0,0 +1,181 @@ +"""Normalized typed config models for application layer. + +These models provide type-safe access to configuration data. They are: +- Parsed/validated at config load edges (adapters/services) +- Passed inward to application code +- Used instead of raw dict[str, Any] access + +The models are minimal - only fields that use cases need are included. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass, field + + +@dataclass(frozen=True) +class OrganizationSource: + """Source configuration for fetching org config.""" + + url: str + auth: str | None = None + auth_header: str | None = None + + +@dataclass(frozen=True) +class NormalizedUserConfig: + """Normalized user configuration. + + Represents the local user's SCC configuration. + """ + + selected_profile: str | None = None + standalone: bool = False + organization_source: OrganizationSource | None = None + workspace_team_map: dict[str, str] = field(default_factory=dict) + onboarding_seen: bool = False + + +@dataclass(frozen=True) +class SessionSettings: + """Session configuration settings.""" + + timeout_hours: int | None = None + auto_resume: bool = False + + +@dataclass(frozen=True) +class MCPServerConfig: + """MCP server configuration.""" + + name: str + type: str = "sse" + url: str | None = None + command: str | None = None + args: list[str] = field(default_factory=list) + env: dict[str, str] = field(default_factory=dict) + headers: dict[str, str] = field(default_factory=dict) + + +@dataclass(frozen=True) +class TeamDelegation: + """Team-level delegation settings.""" + + allow_project_overrides: bool = False + + +@dataclass(frozen=True) +class NormalizedTeamConfig: + """Normalized team/profile configuration. + + Represents a single team profile from the org config. + """ + + name: str + description: str = "" + plugin: str | None = None + marketplace: str | None = None + additional_plugins: tuple[str, ...] = () + additional_mcp_servers: tuple[MCPServerConfig, ...] = () + session: SessionSettings = field(default_factory=SessionSettings) + delegation: TeamDelegation = field(default_factory=TeamDelegation) + + +@dataclass(frozen=True) +class SecurityConfig: + """Organization security configuration.""" + + blocked_plugins: tuple[str, ...] = () + blocked_mcp_servers: tuple[str, ...] 
= () + allow_stdio_mcp: bool = False + allowed_stdio_prefixes: tuple[str, ...] = () + + +@dataclass(frozen=True) +class DefaultsConfig: + """Organization default configuration.""" + + enabled_plugins: tuple[str, ...] = () + disabled_plugins: tuple[str, ...] = () + allowed_plugins: tuple[str, ...] | None = None + allowed_mcp_servers: tuple[str, ...] | None = None + network_policy: str | None = None + session: SessionSettings = field(default_factory=SessionSettings) + + +@dataclass(frozen=True) +class TeamsDelegation: + """Delegation rules for teams.""" + + allow_additional_plugins: tuple[str, ...] = () + allow_additional_mcp_servers: tuple[str, ...] = () + + +@dataclass(frozen=True) +class ProjectsDelegation: + """Delegation rules for projects.""" + + inherit_team_delegation: bool = False + + +@dataclass(frozen=True) +class DelegationConfig: + """Organization delegation configuration.""" + + teams: TeamsDelegation = field(default_factory=TeamsDelegation) + projects: ProjectsDelegation = field(default_factory=ProjectsDelegation) + + +@dataclass(frozen=True) +class MarketplaceConfig: + """Marketplace source configuration.""" + + name: str + source: str + owner: str | None = None + repo: str | None = None + branch: str | None = None + url: str | None = None + host: str | None = None + path: str | None = None + headers: dict[str, str] = field(default_factory=dict) + + +@dataclass(frozen=True) +class OrganizationInfo: + """Basic organization information.""" + + name: str + + +@dataclass(frozen=True) +class NormalizedOrgConfig: + """Normalized organization configuration. + + Represents the full organization config with all sections normalized. + This is the primary config type used by application-layer use cases. 
+ """ + + organization: OrganizationInfo + security: SecurityConfig = field(default_factory=SecurityConfig) + defaults: DefaultsConfig = field(default_factory=DefaultsConfig) + delegation: DelegationConfig = field(default_factory=DelegationConfig) + profiles: dict[str, NormalizedTeamConfig] = field(default_factory=dict) + marketplaces: dict[str, MarketplaceConfig] = field(default_factory=dict) + + def get_profile(self, name: str) -> NormalizedTeamConfig | None: + """Get a team profile by name.""" + return self.profiles.get(name) + + def list_profile_names(self) -> list[str]: + """List all available profile names.""" + return list(self.profiles.keys()) + + +@dataclass(frozen=True) +class NormalizedProjectConfig: + """Normalized project configuration from .scc.yaml.""" + + additional_plugins: tuple[str, ...] = () + additional_mcp_servers: tuple[MCPServerConfig, ...] = () + session: SessionSettings = field(default_factory=SessionSettings) diff --git a/src/scc_cli/ports/config_store.py b/src/scc_cli/ports/config_store.py new file mode 100644 index 0000000..9654086 --- /dev/null +++ b/src/scc_cli/ports/config_store.py @@ -0,0 +1,77 @@ +"""Port for accessing configuration data. + +Provides typed access to user, organization, and project configuration. +All config is normalized at load time to provide type-safe access. +""" + +from __future__ import annotations + +from pathlib import Path +from typing import Protocol + +from scc_cli.ports.config_models import ( + NormalizedOrgConfig, + NormalizedProjectConfig, + NormalizedUserConfig, +) + + +class ConfigStore(Protocol): + """Protocol for configuration storage and retrieval. + + Implementations should: + - Load and normalize config at retrieval time + - Return typed models instead of raw dicts + - Handle missing/invalid config gracefully + """ + + def load_user_config(self) -> NormalizedUserConfig: + """Load and normalize user configuration. + + Returns: + NormalizedUserConfig with typed fields. + """ + ... 
+ + def load_org_config(self) -> NormalizedOrgConfig | None: + """Load and normalize cached organization configuration. + + Returns: + NormalizedOrgConfig if available, None otherwise. + """ + ... + + def load_project_config(self, workspace_path: Path) -> NormalizedProjectConfig | None: + """Load and normalize project configuration from workspace. + + Args: + workspace_path: Path to the workspace containing .scc.yaml. + + Returns: + NormalizedProjectConfig if available, None otherwise. + """ + ... + + def get_selected_profile(self) -> str | None: + """Get the currently selected profile/team name. + + Returns: + Profile name if selected, None otherwise. + """ + ... + + def is_standalone_mode(self) -> bool: + """Check if running in standalone (solo) mode. + + Returns: + True if standalone mode is enabled. + """ + ... + + def is_organization_configured(self) -> bool: + """Check if organization source is configured. + + Returns: + True if organization source URL is set. + """ + ... diff --git a/src/scc_cli/ports/dependency_installer.py b/src/scc_cli/ports/dependency_installer.py new file mode 100644 index 0000000..8b79285 --- /dev/null +++ b/src/scc_cli/ports/dependency_installer.py @@ -0,0 +1,49 @@ +"""Dependency installer port definition.""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +from typing import Protocol + + +@dataclass(frozen=True) +class DependencyInstallResult: + """Outcome of a dependency installation attempt. + + Invariants: + - `attempted` is False when no package manager was detected. + - `success` is meaningful only when `attempted` is True. + + Args: + attempted: Whether an installation was attempted. + success: Whether the installation succeeded. + package_manager: Detected package manager name when available. + """ + + attempted: bool + success: bool + package_manager: str | None = None + + +class DependencyInstaller(Protocol): + """Abstract dependency installation operations. 
+ + Invariants: + - Installation attempts are best-effort and never prompt for input. + - Results are returned to the caller for rendering decisions. + + Args: + workspace: Path to the workspace where dependencies should be installed. + """ + + def install(self, workspace: Path) -> DependencyInstallResult: + """Install dependencies for a workspace. + + Args: + workspace: Workspace directory to inspect and install dependencies. + + Returns: + Result describing whether installation was attempted and succeeded. + """ + ... diff --git a/src/scc_cli/ports/doctor_runner.py b/src/scc_cli/ports/doctor_runner.py new file mode 100644 index 0000000..8e5a08e --- /dev/null +++ b/src/scc_cli/ports/doctor_runner.py @@ -0,0 +1,14 @@ +"""Doctor runner port definition.""" + +from __future__ import annotations + +from typing import Protocol + +from scc_cli.doctor.types import DoctorResult + + +class DoctorRunner(Protocol): + """Run doctor checks via injected adapter.""" + + def run(self, workspace: str | None = None) -> DoctorResult: + """Return doctor results for an optional workspace.""" diff --git a/src/scc_cli/ports/git_client.py b/src/scc_cli/ports/git_client.py index e82906d..9a8527f 100644 --- a/src/scc_cli/ports/git_client.py +++ b/src/scc_cli/ports/git_client.py @@ -5,30 +5,98 @@ from pathlib import Path from typing import Protocol +from scc_cli.services.git.worktree import WorktreeInfo + class GitClient(Protocol): """Abstract git operations used by application logic.""" def check_available(self) -> None: """Ensure git is installed and available.""" + ... def check_installed(self) -> bool: """Return True if git is available.""" + ... def get_version(self) -> str | None: """Return the git version string.""" + ... def is_git_repo(self, path: Path) -> bool: """Return True if the path is within a git repository.""" + ... def init_repo(self, path: Path) -> bool: """Initialize a git repository.""" + ... 
def create_empty_initial_commit(self, path: Path) -> tuple[bool, str | None]: """Create an empty initial commit if needed.""" + ... def detect_workspace_root(self, start_dir: Path) -> tuple[Path | None, Path]: """Detect the git workspace root from a starting directory.""" + ... def get_current_branch(self, path: Path) -> str | None: """Return the current branch name.""" + ... + + def has_commits(self, path: Path) -> bool: + """Return True if the repository has at least one commit.""" + ... + + def has_remote(self, path: Path) -> bool: + """Return True if the repository has a remote origin.""" + ... + + def get_default_branch(self, path: Path) -> str: + """Return the default branch name for a repository.""" + ... + + def list_worktrees(self, path: Path) -> list[WorktreeInfo]: + """Return the worktrees configured for the repository.""" + ... + + def get_worktree_status(self, path: Path) -> tuple[int, int, int, bool]: + """Return (staged, modified, untracked, timed_out) for a worktree.""" + ... + + def find_worktree_by_query( + self, + path: Path, + query: str, + ) -> tuple[WorktreeInfo | None, list[WorktreeInfo]]: + """Find a worktree by name, branch, or path using fuzzy matching.""" + ... + + def find_main_worktree(self, path: Path) -> WorktreeInfo | None: + """Return the worktree for the default/main branch if present.""" + ... + + def list_branches_without_worktrees(self, path: Path) -> list[str]: + """Return remote branches that do not have worktrees.""" + ... + + def fetch_branch(self, path: Path, branch: str) -> None: + """Fetch a branch from the remote origin if available.""" + ... + + def add_worktree( + self, + repo_path: Path, + worktree_path: Path, + branch_name: str, + base_branch: str, + ) -> None: + """Create a worktree directory for the given branch.""" + ... + + def remove_worktree(self, repo_path: Path, worktree_path: Path, *, force: bool) -> None: + """Remove a worktree from the repository.""" + ... 
+ + def prune_worktrees(self, repo_path: Path) -> None: + """Prune stale worktree metadata from the repository.""" + ... diff --git a/src/scc_cli/ports/personal_profile_service.py b/src/scc_cli/ports/personal_profile_service.py new file mode 100644 index 0000000..10f7352 --- /dev/null +++ b/src/scc_cli/ports/personal_profile_service.py @@ -0,0 +1,63 @@ +"""Personal profile port for application use cases.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any, Protocol + +from scc_cli.core.personal_profiles import PersonalProfile + + +class PersonalProfileService(Protocol): + """Operations for reading and applying personal profiles. + + Invariants: + - Returned profile data mirrors persisted profile content. + - Drift detection and merge behavior stay consistent with existing CLI logic. + """ + + def load_personal_profile_with_status( + self, workspace: Path + ) -> tuple[PersonalProfile | None, bool]: + """Load the profile for a workspace, returning (profile, invalid).""" + + def detect_drift(self, workspace: Path) -> bool: + """Return True when workspace overlays differ from last applied state.""" + + def workspace_has_overrides(self, workspace: Path) -> bool: + """Return True when workspace has local overrides.""" + + def load_workspace_settings_with_status( + self, workspace: Path + ) -> tuple[dict[str, Any] | None, bool]: + """Load workspace settings, returning (data, invalid).""" + + def load_workspace_mcp_with_status(self, workspace: Path) -> tuple[dict[str, Any] | None, bool]: + """Load workspace MCP config, returning (data, invalid).""" + + def merge_personal_settings( + self, + workspace: Path, + existing: dict[str, Any], + personal: dict[str, Any], + ) -> dict[str, Any]: + """Merge personal settings into existing settings.""" + + def merge_personal_mcp( + self, existing: dict[str, Any], personal: dict[str, Any] + ) -> dict[str, Any]: + """Merge personal MCP data into existing data.""" + + def 
write_workspace_settings(self, workspace: Path, data: dict[str, Any]) -> None: + """Persist workspace settings.""" + + def write_workspace_mcp(self, workspace: Path, data: dict[str, Any]) -> None: + """Persist workspace MCP config.""" + + def save_applied_state( + self, workspace: Path, profile_id: str, fingerprints: dict[str, str] + ) -> None: + """Persist applied profile state.""" + + def compute_fingerprints(self, workspace: Path) -> dict[str, str]: + """Compute fingerprints for workspace profile files.""" diff --git a/src/scc_cli/ports/platform_probe.py b/src/scc_cli/ports/platform_probe.py new file mode 100644 index 0000000..ef28aff --- /dev/null +++ b/src/scc_cli/ports/platform_probe.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Protocol + + +class PlatformProbe(Protocol): + """Probe platform-specific behavior for workspace validation. + + Invariants: + - Results must reflect the local runtime environment. + - WSL2 detection remains consistent with CLI warnings. + """ + + def is_wsl2(self) -> bool: + """Return True when running inside WSL2. + + Returns: + True when the current runtime is WSL2, otherwise False. + """ + ... + + def check_path_performance(self, path: Path) -> tuple[bool, str | None]: + """Return whether a path is optimal and an optional warning message. + + Args: + path: Workspace path to evaluate. + + Returns: + Tuple of (is_optimal, warning_message). When is_optimal is False, + warning_message should describe the performance concern. + """ + ... 
diff --git a/src/scc_cli/ports/session_models.py b/src/scc_cli/ports/session_models.py new file mode 100644 index 0000000..807ec55 --- /dev/null +++ b/src/scc_cli/ports/session_models.py @@ -0,0 +1,142 @@ +"""Session models used by session ports and services.""" + +from __future__ import annotations + +from dataclasses import asdict, dataclass +from typing import Any + + +@dataclass(frozen=True) +class SessionRecord: + """Persisted session record stored on disk. + + Invariants: + - Serialized field names must remain stable for the sessions JSON schema. + - Optional fields are omitted when serialized. + + Args: + workspace: Workspace path as a string. + team: Team identifier or None for standalone sessions. + name: Optional friendly session name. + container_name: Container name linked to the session. + branch: Git branch name for the session. + last_used: ISO 8601 timestamp string of last use. + created_at: ISO 8601 timestamp string of creation time. + schema_version: Schema version for migration support. + """ + + workspace: str + team: str | None = None + name: str | None = None + container_name: str | None = None + branch: str | None = None + last_used: str | None = None + created_at: str | None = None + schema_version: int = 1 + + def to_dict(self) -> dict[str, Any]: + """Serialize the record for JSON storage. + + Returns: + Dictionary suitable for JSON serialization. + """ + return {key: value for key, value in asdict(self).items() if value is not None} + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> SessionRecord: + """Hydrate a record from stored JSON. + + Args: + data: Raw session dictionary from the sessions file. + + Returns: + Parsed SessionRecord instance. 
+ """ + return cls( + workspace=data.get("workspace", ""), + team=data.get("team"), + name=data.get("name"), + container_name=data.get("container_name"), + branch=data.get("branch"), + last_used=data.get("last_used"), + created_at=data.get("created_at"), + schema_version=data.get("schema_version", 1), + ) + + +@dataclass(frozen=True) +class SessionSummary: + """Summary view of a session for list output. + + Invariants: + - Fields must align with existing CLI session list keys. + + Args: + name: Display name for the session. + workspace: Workspace path string. + team: Team identifier or None. + last_used: ISO 8601 timestamp string (format at edges). + container_name: Linked container name. + branch: Git branch name for the session. + """ + + name: str + workspace: str + team: str | None + last_used: str | None + container_name: str | None + branch: str | None + + +@dataclass(frozen=True) +class SessionFilter: + """Filter options for listing sessions. + + Invariants: + - Limit values must remain non-negative. + + Args: + limit: Maximum number of sessions to return. + team: Optional team filter. + include_all: Whether to ignore team filtering. + """ + + limit: int = 10 + team: str | None = None + include_all: bool = False + + +@dataclass(frozen=True) +class SessionListResult: + """Result payload for session list operations. + + Invariants: + - Count reflects the number of session summaries. + + Args: + sessions: Summaries returned by a list operation. + team: Team filter applied to the list. + count: Count of sessions returned. + """ + + sessions: list[SessionSummary] + team: str | None = None + count: int = 0 + + @classmethod + def from_sessions( + cls, + sessions: list[SessionSummary], + *, + team: str | None = None, + ) -> SessionListResult: + """Build a list result with count calculated. + + Args: + sessions: Session summaries returned by the list operation. + team: Team filter applied to the list. + + Returns: + SessionListResult populated with count. 
+ """ + return cls(sessions=sessions, team=team, count=len(sessions)) diff --git a/src/scc_cli/ports/session_store.py b/src/scc_cli/ports/session_store.py new file mode 100644 index 0000000..4fddd02 --- /dev/null +++ b/src/scc_cli/ports/session_store.py @@ -0,0 +1,41 @@ +"""Session persistence port for application use cases.""" + +from __future__ import annotations + +from contextlib import AbstractContextManager +from typing import Protocol + +from .session_models import SessionRecord + + +class SessionStore(Protocol): + """Persist and retrieve session records for the application layer. + + Invariants: + - Persistence must preserve session JSON schema fields. + - Calls must be safe for concurrent CLI invocations. + """ + + def lock(self) -> AbstractContextManager[None]: + """Return a context manager for exclusive session store access. + + Returns: + Context manager enforcing exclusive access while reading/writing. + """ + ... + + def load_sessions(self) -> list[SessionRecord]: + """Load all session records. + + Returns: + List of stored session records. + """ + ... + + def save_sessions(self, sessions: list[SessionRecord]) -> None: + """Persist all session records. + + Args: + sessions: Session records to store. + """ + ... diff --git a/src/scc_cli/presentation/__init__.py b/src/scc_cli/presentation/__init__.py new file mode 100644 index 0000000..0dc9d77 --- /dev/null +++ b/src/scc_cli/presentation/__init__.py @@ -0,0 +1 @@ +"""Presentation-layer helpers for CLI/UI output.""" diff --git a/src/scc_cli/presentation/json/__init__.py b/src/scc_cli/presentation/json/__init__.py new file mode 100644 index 0000000..b3677c8 --- /dev/null +++ b/src/scc_cli/presentation/json/__init__.py @@ -0,0 +1,5 @@ +"""Edge-only JSON mapping helpers for CLI/UI boundaries. + +This package centralizes JSON envelope shaping for machine output. Application +use cases should return typed results and avoid importing this package. 
+""" diff --git a/src/scc_cli/presentation/json/launch_json.py b/src/scc_cli/presentation/json/launch_json.py new file mode 100644 index 0000000..8890223 --- /dev/null +++ b/src/scc_cli/presentation/json/launch_json.py @@ -0,0 +1,24 @@ +"""JSON mapping helpers for launch/start flows.""" + +from __future__ import annotations + +from typing import Any + +from ...json_output import build_envelope +from ...kinds import Kind + + +def build_start_dry_run_envelope(dry_run_data: dict[str, Any]) -> dict[str, Any]: + """Build the JSON envelope for `scc start --dry-run` output. + + Invariants: + - Keep `Kind.START_DRY_RUN` stable. + - Preserve dry-run data keys for downstream tooling. + + Args: + dry_run_data: Precomputed dry-run data payload. + + Returns: + JSON envelope for the dry-run preview. + """ + return build_envelope(Kind.START_DRY_RUN, data=dry_run_data) diff --git a/src/scc_cli/presentation/json/sessions_json.py b/src/scc_cli/presentation/json/sessions_json.py new file mode 100644 index 0000000..de97dc2 --- /dev/null +++ b/src/scc_cli/presentation/json/sessions_json.py @@ -0,0 +1,47 @@ +"""JSON mapping helpers for session flows.""" + +from __future__ import annotations + +from typing import Any + +from ...json_output import build_envelope +from ...kinds import Kind + + +def build_session_list_data( + sessions: list[dict[str, Any]], + *, + team: str | None = None, +) -> dict[str, Any]: + """Build JSON-ready session list data. + + Invariants: + - Preserve keys: `sessions`, `count`, and `team`. + + Args: + sessions: Serialized session dictionaries. + team: Optional team filter label. + + Returns: + Dictionary payload for session list output. + """ + return { + "sessions": sessions, + "count": len(sessions), + "team": team, + } + + +def build_session_list_envelope(data: dict[str, Any]) -> dict[str, Any]: + """Build the JSON envelope for session list output. + + Invariants: + - Keep `Kind.SESSION_LIST` stable. + + Args: + data: Session list data payload. 
+ + Returns: + JSON envelope for the session list. + """ + return build_envelope(Kind.SESSION_LIST, data=data) diff --git a/src/scc_cli/presentation/json/support_json.py b/src/scc_cli/presentation/json/support_json.py new file mode 100644 index 0000000..9dcb339 --- /dev/null +++ b/src/scc_cli/presentation/json/support_json.py @@ -0,0 +1,24 @@ +"""JSON mapping helpers for support bundle output.""" + +from __future__ import annotations + +from typing import Any + +from ...json_output import build_envelope +from ...kinds import Kind + + +def build_support_bundle_envelope(bundle_data: dict[str, Any]) -> dict[str, Any]: + """Build the JSON envelope for support bundle output. + + Invariants: + - Keep `Kind.SUPPORT_BUNDLE` stable. + - Preserve bundle manifest keys. + + Args: + bundle_data: Support bundle manifest data. + + Returns: + JSON envelope for the support bundle manifest. + """ + return build_envelope(Kind.SUPPORT_BUNDLE, data=bundle_data) diff --git a/src/scc_cli/presentation/json/worktree_json.py b/src/scc_cli/presentation/json/worktree_json.py new file mode 100644 index 0000000..a39d20c --- /dev/null +++ b/src/scc_cli/presentation/json/worktree_json.py @@ -0,0 +1,43 @@ +"""JSON mapping helpers for worktree flows.""" + +from __future__ import annotations + +from typing import Any + +from ...json_output import build_envelope +from ...kinds import Kind + + +def build_worktree_list_data(worktrees: list[dict[str, Any]], workspace: str) -> dict[str, Any]: + """Build JSON-ready worktree list data. + + Invariants: + - Preserve keys: `worktrees`, `count`, and `workspace`. + + Args: + worktrees: Serialized worktree dictionaries. + workspace: Workspace path as a string. + + Returns: + Dictionary payload for the worktree list envelope. + """ + return { + "worktrees": worktrees, + "count": len(worktrees), + "workspace": workspace, + } + + +def build_worktree_list_envelope(data: dict[str, Any]) -> dict[str, Any]: + """Build the JSON envelope for worktree list output. 
+ + Invariants: + - Keep `Kind.WORKTREE_LIST` stable. + + Args: + data: Worktree list data payload. + + Returns: + JSON envelope for the worktree list. + """ + return build_envelope(Kind.WORKTREE_LIST, data=data) diff --git a/src/scc_cli/presentation/launch_presenter.py b/src/scc_cli/presentation/launch_presenter.py new file mode 100644 index 0000000..2228fa5 --- /dev/null +++ b/src/scc_cli/presentation/launch_presenter.py @@ -0,0 +1,91 @@ +"""Presentation helpers for launch flow output.""" + +from __future__ import annotations + +from rich.console import Console + +from scc_cli.application.launch.output_models import ( + LaunchInfoEvent, + LaunchOutputViewModel, + LaunchSuccessEvent, + LaunchWarningEvent, +) +from scc_cli.application.start_session import StartSessionPlan +from scc_cli.panels import create_warning_panel +from scc_cli.theme import Indicators +from scc_cli.ui.chrome import print_with_layout + + +def build_sync_output_view_model(plan: StartSessionPlan) -> LaunchOutputViewModel: + """Build output view model for marketplace sync messages. + + Invariants: + - Messages mirror existing CLI text for sync warnings and counts. + + Args: + plan: Start session plan with sync result metadata. + + Returns: + LaunchOutputViewModel describing sync output to render. 
+ """ + events: list[LaunchInfoEvent | LaunchWarningEvent | LaunchSuccessEvent] = [] + if plan.sync_result and plan.sync_result.warnings: + for warning in plan.sync_result.warnings: + events.append(LaunchWarningEvent(message=warning)) + if plan.sync_result and plan.sync_result.plugins_enabled: + events.append( + LaunchSuccessEvent( + message=( + f"{Indicators.get('PASS')} Enabled " + f"{len(plan.sync_result.plugins_enabled)} team plugin(s)" + ) + ) + ) + if plan.sync_result and plan.sync_result.marketplaces_materialized: + events.append( + LaunchSuccessEvent( + message=( + f"{Indicators.get('PASS')} Materialized " + f"{len(plan.sync_result.marketplaces_materialized)} marketplace(s)" + ) + ) + ) + return LaunchOutputViewModel( + events=events, + sync_result=plan.sync_result, + sync_error_message=plan.sync_error_message, + ) + + +def render_launch_output( + view_model: LaunchOutputViewModel, + *, + console: Console, + json_mode: bool, +) -> None: + """Render launch output events at the CLI edge. + + Args: + view_model: Launch output view model. + console: Console for rendering. + json_mode: Whether JSON output is enabled (suppresses human output). + """ + if json_mode: + return + if view_model.sync_error_message: + panel = create_warning_panel( + "Marketplace Sync Failed", + view_model.sync_error_message, + "Team plugins may not be available. 
Use --dry-run to diagnose.", + ) + print_with_layout(console, panel, constrain=True) + return + if not view_model.events: + return + console.print() + for event in view_model.events: + if isinstance(event, LaunchWarningEvent): + print_with_layout(console, f"[yellow]{event.message}[/yellow]") + else: + print_with_layout(console, f"[green]{event.message}[/green]") + console.print() diff --git a/src/scc_cli/remote.py b/src/scc_cli/remote.py index cc99d08..2001fb6 100644 --- a/src/scc_cli/remote.py +++ b/src/scc_cli/remote.py @@ -27,6 +27,7 @@ from scc_cli.auth import is_remote_command_allowed from scc_cli.auth import resolve_auth as _resolve_auth_impl from scc_cli.bootstrap import get_default_adapters +from scc_cli.core.enums import SeverityLevel from scc_cli.output_mode import print_human from scc_cli.ports.remote_fetcher import RemoteFetcher from scc_cli.utils.locks import file_lock, lock_path @@ -382,7 +383,7 @@ def _validate_org_config(config: dict[str, Any]) -> None: # Step 2: Semantic validation (governance invariants) violations: list[InvariantViolation] = validate_config_invariants(config) - errors = [v for v in violations if v.severity == "error"] + errors = [v for v in violations if v.severity == SeverityLevel.ERROR] if errors: # Format violations for user-friendly message error_messages = [v.message for v in errors[:3]] # Show first 3 diff --git a/src/scc_cli/services/git/core.py b/src/scc_cli/services/git/core.py index 12f3da9..77e07d9 100644 --- a/src/scc_cli/services/git/core.py +++ b/src/scc_cli/services/git/core.py @@ -137,7 +137,7 @@ def detect_workspace_root(start_dir: Path) -> tuple[Path | None, Path]: the user runs `scc start` from a subdirectory or git worktree. 
Resolution order: - 1) git rev-parse --show-toplevel (works for subdirs + worktrees) + 1) git rev-parse --show-toplevel (handles subdirs + worktrees) 2) Parent-walk for .scc.yaml (repo root config marker) 3) Parent-walk for .git (directory OR file - worktree-safe) 4) None (no workspace detected) @@ -150,35 +150,16 @@ def detect_workspace_root(start_dir: Path) -> tuple[Path | None, Path]: - root: The detected workspace root, or None if not found - start_cwd: The original start_dir (preserved for container cwd) """ - start_dir = start_dir.resolve() + from scc_cli.services.workspace import resolve_launch_context - # Priority 1: Use git rev-parse --show-toplevel (handles subdirs + worktrees) - if check_git_installed(): - toplevel = run_command( - ["git", "-C", str(start_dir), "rev-parse", "--show-toplevel"], - timeout=5, - ) - if toplevel: - return (Path(toplevel.strip()), start_dir) - - # Priority 2: Parent-walk for .scc.yaml (SCC project marker) - current = start_dir - while current != current.parent: - scc_config = current / ".scc.yaml" - if scc_config.is_file(): - return (current, start_dir) - current = current.parent - - # Priority 3: Parent-walk for .git (directory OR file - worktree-safe) - current = start_dir - while current != current.parent: - git_marker = current / ".git" - if git_marker.exists(): # Works for both directory and file - return (current, start_dir) - current = current.parent - - # No workspace detected - return (None, start_dir) + result = resolve_launch_context( + start_dir, + workspace_arg=None, + include_git_dir_fallback=True, + ) + if result is None: + return (None, start_dir.resolve()) + return (result.workspace_root, result.entry_dir) def is_file_ignored(file_path: str | Path, repo_root: Path | None = None) -> bool: diff --git a/src/scc_cli/services/git/worktree.py b/src/scc_cli/services/git/worktree.py index f95f18d..25964c5 100644 --- a/src/scc_cli/services/git/worktree.py +++ b/src/scc_cli/services/git/worktree.py @@ -3,11 +3,13 @@ 
Pure functions with no UI dependencies. """ +import shutil import subprocess from dataclasses import dataclass from pathlib import Path from ...core.constants import WORKTREE_BRANCH_PREFIX +from ...core.errors import WorktreeCreationError from .branch import get_default_branch, sanitize_branch_name @@ -442,3 +444,113 @@ def find_main_worktree(repo_path: Path) -> WorktreeInfo | None: return wt return None + + +def fetch_branch(repo_path: Path, branch: str) -> None: + """Fetch a branch from origin for worktree creation. + + Raises: + WorktreeCreationError: If the fetch fails. + """ + result = subprocess.run( + ["git", "-C", str(repo_path), "fetch", "origin", branch], + capture_output=True, + text=True, + timeout=30, + ) + if result.returncode == 0: + return + + error_msg = result.stderr.strip() if result.stderr else "Unknown fetch error" + lower = error_msg.lower() + user_message = f"Failed to fetch branch '{branch}'" + suggested_action = "Check the branch name and your network connection" + + if "couldn't find remote ref" in lower or ("remote ref" in lower and "not found" in lower): + user_message = f"Branch '{branch}' not found on origin" + suggested_action = "Check the branch name or fetch remote branches" + elif "could not resolve host" in lower or "failed to connect" in lower: + user_message = "Network error while fetching from origin" + suggested_action = "Check your network or VPN connection" + elif "permission denied" in lower or "authentication" in lower: + user_message = "Authentication error while fetching from origin" + suggested_action = "Check your git credentials and remote access" + + raise WorktreeCreationError( + name=branch, + user_message=user_message, + suggested_action=suggested_action, + command=f"git -C {repo_path} fetch origin {branch}", + stderr=error_msg, + ) + + +def add_worktree( + repo_path: Path, + worktree_path: Path, + branch_name: str, + base_branch: str, +) -> None: + """Create the worktree directory using git worktree add.""" + 
worktree_path.parent.mkdir(parents=True, exist_ok=True) + + try: + subprocess.run( + [ + "git", + "-C", + str(repo_path), + "worktree", + "add", + "-b", + branch_name, + str(worktree_path), + f"origin/{base_branch}", + ], + check=True, + capture_output=True, + timeout=30, + ) + except subprocess.CalledProcessError: + subprocess.run( + [ + "git", + "-C", + str(repo_path), + "worktree", + "add", + "-b", + branch_name, + str(worktree_path), + base_branch, + ], + check=True, + capture_output=True, + timeout=30, + ) + + +def remove_worktree(repo_path: Path, worktree_path: Path, *, force: bool) -> None: + """Remove a worktree entry and directory.""" + force_flag = ["--force"] if force else [] + try: + subprocess.run( + ["git", "-C", str(repo_path), "worktree", "remove", str(worktree_path)] + force_flag, + check=True, + capture_output=True, + timeout=30, + ) + except subprocess.CalledProcessError: + shutil.rmtree(worktree_path, ignore_errors=True) + + +def prune_worktrees(repo_path: Path) -> None: + """Prune stale worktree metadata.""" + try: + subprocess.run( + ["git", "-C", str(repo_path), "worktree", "prune"], + capture_output=True, + timeout=10, + ) + except (subprocess.TimeoutExpired, FileNotFoundError): + return diff --git a/src/scc_cli/services/workspace/__init__.py b/src/scc_cli/services/workspace/__init__.py index e1530f1..5696086 100644 --- a/src/scc_cli/services/workspace/__init__.py +++ b/src/scc_cli/services/workspace/__init__.py @@ -6,6 +6,8 @@ resolve_launch_context: Main entry point for workspace resolution is_suspicious_directory: Check if a path is inappropriate as workspace get_suspicious_reason: Get human-readable reason for suspicious status + has_project_markers: Check if a directory has common project markers + is_valid_workspace: Check if a directory looks like a valid workspace ResolverResult: Complete workspace resolution result (from core) Example usage: @@ -25,12 +27,15 @@ from scc_cli.core.workspace import ResolverResult +from .markers import 
has_project_markers, is_valid_workspace from .resolver import resolve_launch_context from .suspicious import get_suspicious_reason, is_suspicious_directory __all__ = [ "ResolverResult", "get_suspicious_reason", + "has_project_markers", "is_suspicious_directory", + "is_valid_workspace", "resolve_launch_context", ] diff --git a/src/scc_cli/services/workspace/markers.py b/src/scc_cli/services/workspace/markers.py new file mode 100644 index 0000000..6e50d6a --- /dev/null +++ b/src/scc_cli/services/workspace/markers.py @@ -0,0 +1,108 @@ +"""Project marker detection for workspace resolution. + +This module provides functions to detect whether a directory contains +common project markers that indicate it's a valid workspace root. + +Project markers include: +- Version control: .git, .gitignore +- SCC config: .scc.yaml +- Package managers: package.json, pyproject.toml, Cargo.toml, go.mod, etc. +- Build systems: Makefile, CMakeLists.txt, build.gradle, etc. +- IDE/editor configs: .project (Eclipse), *.sln (Visual Studio) + +This logic is extracted from ui/wizard.py for reuse across the codebase +without UI dependencies, following the architecture principle that +filesystem logic belongs in services, not UI. +""" + +from __future__ import annotations + +from pathlib import Path + +# Common project markers across languages/frameworks +# Split into direct checks (fast) and glob patterns (slower, checked only if needed) +_PROJECT_MARKERS_DIRECT: tuple[str, ...] 
= ( + ".git", # Git repository (directory or file for worktrees) + ".scc.yaml", # SCC config + ".gitignore", # Often at project root + "package.json", # Node.js / JavaScript + "tsconfig.json", # TypeScript + "pyproject.toml", # Python (modern) + "setup.py", # Python (legacy) + "requirements.txt", # Python dependencies + "Pipfile", # Pipenv + "Cargo.toml", # Rust + "go.mod", # Go + "pom.xml", # Java Maven + "build.gradle", # Java/Kotlin Gradle + "gradlew", # Gradle wrapper (strong signal) + "Gemfile", # Ruby + "composer.json", # PHP + "mix.exs", # Elixir + "Makefile", # Make-based projects + "CMakeLists.txt", # CMake C/C++ + ".project", # Eclipse + "Dockerfile", # Docker projects + "docker-compose.yml", # Docker Compose + "compose.yaml", # Docker Compose (new name) +) + +# Glob patterns for project markers (checked only if direct checks fail) +_PROJECT_MARKERS_GLOB: tuple[str, ...] = ( + "*.sln", # .NET solution + "*.csproj", # .NET C# project +) + + +def has_project_markers(path: Path) -> bool: + """Check if a directory has common project markers. + + Uses a two-phase approach for performance: + 1. Fast direct existence checks for common markers + 2. Slower glob patterns only if direct checks fail + + This function is used to determine whether a directory is likely + a valid project root (as opposed to a random directory like $HOME). + + Args: + path: Directory to check. + + Returns: + True if directory has any recognizable project markers. + """ + if not path.is_dir(): + return False + + # Phase 1: Fast direct checks + for marker in _PROJECT_MARKERS_DIRECT: + if (path / marker).exists(): + return True + + # Phase 2: Slower glob checks (only if no direct markers found) + for pattern in _PROJECT_MARKERS_GLOB: + try: + if next(path.glob(pattern), None) is not None: + return True + except (OSError, StopIteration): + continue + + return False + + +def is_valid_workspace(path: Path) -> bool: + """Check if a directory looks like a valid workspace. 
+ + A valid workspace must have at least one of: + - .git directory or file (for worktrees) + - .scc.yaml config file + - Common project markers (package.json, pyproject.toml, etc.) + + Random directories (like $HOME) are NOT valid workspaces. + + Args: + path: Directory to check. + + Returns: + True if directory exists and has workspace markers. + """ + return has_project_markers(path) diff --git a/src/scc_cli/services/workspace/resolver.py b/src/scc_cli/services/workspace/resolver.py index 7492c94..c227f38 100644 --- a/src/scc_cli/services/workspace/resolver.py +++ b/src/scc_cli/services/workspace/resolver.py @@ -96,6 +96,24 @@ def _detect_scc_config_root(cwd: Path) -> Path | None: return None +def _detect_git_marker_root(cwd: Path) -> Path | None: + """Find a .git marker by walking up from cwd. + + Args: + cwd: Current working directory. + + Returns: + Directory containing .git, or None if not found. + """ + current = cwd.resolve() + while current != current.parent: + git_marker = current / ".git" + if git_marker.exists(): + return current + current = current.parent + return None + + def _calculate_container_workdir( entry_dir: Path, mount_root: Path, @@ -129,6 +147,7 @@ def resolve_launch_context( workspace_arg: str | None, *, allow_suspicious: bool = False, + include_git_dir_fallback: bool = False, ) -> ResolverResult | None: """Resolve workspace with complete context for launch. @@ -139,7 +158,8 @@ def resolve_launch_context( Auto-detect policy (simple, explicit): 1. git rev-parse --show-toplevel -> use git root 2. .scc.yaml parent walk -> use config dir - 3. Anything else -> None (requires wizard or explicit path) + 3. Optional .git marker fallback (when enabled) + 4. Anything else -> None (requires wizard or explicit path) Suspicious handling: - Auto-detected + suspicious -> is_suspicious=True (blocks auto-launch) @@ -152,6 +172,7 @@ def resolve_launch_context( workspace_arg: Explicit workspace path from --workspace arg, or None. 
allow_suspicious: If True, allow explicit paths to suspicious locations. This is typically set via --force or after user confirmation. + include_git_dir_fallback: If True, use .git marker discovery when git is unavailable. Returns: ResolverResult with all paths canonicalized, or None if: @@ -187,6 +208,14 @@ def resolve_launch_context( if scc_config_root is not None: workspace_root = scc_config_root reason = f".scc.yaml found at: {scc_config_root}" + elif include_git_dir_fallback: + git_marker_root = _detect_git_marker_root(cwd) + if git_marker_root is not None: + workspace_root = git_marker_root + reason = f".git marker found at: {git_marker_root}" + else: + # No auto-detection possible + return None else: # No auto-detection possible return None diff --git a/src/scc_cli/sessions.py b/src/scc_cli/sessions.py index 2b9a935..cac741d 100644 --- a/src/scc_cli/sessions.py +++ b/src/scc_cli/sessions.py @@ -9,51 +9,35 @@ - This enables seamless resume of Claude Code conversations """ +from __future__ import annotations + import json -from dataclasses import asdict, dataclass from datetime import datetime from pathlib import Path from typing import Any, cast -from . 
import config +from scc_cli.application.sessions import SessionService +from scc_cli.bootstrap import build_session_store +from scc_cli.ports.filesystem import Filesystem +from scc_cli.ports.session_models import SessionFilter, SessionRecord, SessionSummary +from scc_cli.ports.session_store import SessionStore +from scc_cli.ui.time_format import format_relative_time_from_datetime + from .core.constants import AGENT_CONFIG_DIR -from .utils.locks import file_lock, lock_path # ═══════════════════════════════════════════════════════════════════════════════ -# Data Classes +# Store Wiring # ═══════════════════════════════════════════════════════════════════════════════ -@dataclass -class SessionRecord: - """A recorded Claude Code session with container linking.""" - - workspace: str - team: str | None = None - name: str | None = None - container_name: str | None = None - branch: str | None = None - last_used: str | None = None - created_at: str | None = None - schema_version: int = 1 # For future migration support - - def to_dict(self) -> dict[str, Any]: - """Convert the record to a dictionary for JSON serialization.""" - return {k: v for k, v in asdict(self).items() if v is not None} - - @classmethod - def from_dict(cls, data: dict[str, Any]) -> "SessionRecord": - """Create a SessionRecord from a dictionary.""" - return cls( - workspace=data.get("workspace", ""), - team=data.get("team"), - name=data.get("name"), - container_name=data.get("container_name"), - branch=data.get("branch"), - last_used=data.get("last_used"), - created_at=data.get("created_at"), - schema_version=data.get("schema_version", 1), - ) +def get_session_store(filesystem: Filesystem | None = None) -> SessionStore: + """Return the JSON session store adapter.""" + return build_session_store(filesystem) + + +def get_session_service(filesystem: Filesystem | None = None) -> SessionService: + """Return the session service wired to the JSON store.""" + return 
SessionService(store=get_session_store(filesystem)) # ═══════════════════════════════════════════════════════════════════════════════ @@ -61,68 +45,26 @@ def from_dict(cls, data: dict[str, Any]) -> "SessionRecord": # ═══════════════════════════════════════════════════════════════════════════════ -def get_most_recent() -> dict[str, Any] | None: - """ - Return the most recently used session. - - Returns: - Session dict with workspace, team, container_name, etc. or None if no sessions. - """ - sessions = _load_sessions() - - if not sessions: - return None - - # Sort by last_used descending and return first - sessions.sort(key=lambda s: s.get("last_used", ""), reverse=True) - return sessions[0] - - -def list_recent(limit: int = 10) -> list[dict[str, Any]]: - """ - Return recent sessions with container and relative time info. - - Returns list of dicts with: name, workspace, team, last_used, container_name, branch - """ - sessions = _load_sessions() - - # Sort by last_used descending - sessions.sort(key=lambda s: s.get("last_used", ""), reverse=True) - - # Limit results - sessions = sessions[:limit] - - # Format for display - result = [] - for s in sessions: - last_used = s.get("last_used", "") - if last_used: - try: - dt = datetime.fromisoformat(last_used) - last_used = format_relative_time(dt) - except ValueError: - pass - - result.append( - { - "name": s.get("name") or _generate_session_name(s), - "workspace": s.get("workspace", ""), - "team": s.get("team"), - "last_used": last_used, - "container_name": s.get("container_name"), - "branch": s.get("branch"), - } - ) - - return result +def get_most_recent(filesystem: Filesystem | None = None) -> SessionSummary | None: + """Return the most recently used session summary.""" + recent = list_recent(limit=1, include_all=True, filesystem=filesystem) + return recent[0] if recent else None -def _generate_session_name(session: dict[str, Any]) -> str: - """Generate a display name for a session without an explicit name.""" - 
workspace = session.get("workspace", "") - if workspace: - return Path(workspace).name - return "Unnamed" +def list_recent( + limit: int = 10, + team: str | None = None, + include_all: bool | None = None, + *, + filesystem: Filesystem | None = None, +) -> list[SessionSummary]: + """Return recent sessions from the store.""" + resolved_include_all = team is None if include_all is None else include_all + service = get_session_service(filesystem) + result = service.list_recent( + SessionFilter(limit=limit, team=team, include_all=resolved_include_all) + ) + return result.sessions def record_session( @@ -131,118 +73,73 @@ def record_session( session_name: str | None = None, container_name: str | None = None, branch: str | None = None, + *, + filesystem: Filesystem | None = None, ) -> SessionRecord: - """ - Record a new session or update an existing one. - - Key sessions by workspace + branch combination. - """ - lock_file = lock_path("sessions") - with file_lock(lock_file): - sessions = _load_sessions() - now = datetime.now().isoformat() - - # Find existing session for this workspace+branch - existing_idx = None - for idx, s in enumerate(sessions): - if s.get("workspace") == workspace and s.get("branch") == branch: - existing_idx = idx - break - - record = SessionRecord( - workspace=workspace, - team=team, - name=session_name, - container_name=container_name, - branch=branch, - last_used=now, - created_at=( - sessions[existing_idx].get("created_at", now) if existing_idx is not None else now - ), - ) - - if existing_idx is not None: - # Update existing - sessions[existing_idx] = record.to_dict() - else: - # Add new - sessions.insert(0, record.to_dict()) - - _save_sessions(sessions) - return record + """Record a new session or update an existing one.""" + service = get_session_service(filesystem) + return service.record_session( + workspace=workspace, + team=team, + session_name=session_name, + container_name=container_name, + branch=branch, + ) def 
update_session_container( workspace: str, container_name: str, branch: str | None = None, + *, + filesystem: Filesystem | None = None, ) -> None: - """ - Update the container name for an existing session. - - Call when a container is created for a session. - """ - lock_file = lock_path("sessions") - with file_lock(lock_file): - sessions = _load_sessions() - - for s in sessions: - if s.get("workspace") == workspace: - if branch is None or s.get("branch") == branch: - s["container_name"] = container_name - s["last_used"] = datetime.now().isoformat() - break - - _save_sessions(sessions) - - -def find_session_by_container(container_name: str) -> dict[str, Any] | None: - """ - Find a session by its container name. - - Use for resume operations. - """ - sessions = _load_sessions() - for s in sessions: - if s.get("container_name") == container_name: - return s + """Update the container name for an existing session.""" + service = get_session_service(filesystem) + service.update_session_container( + workspace=workspace, + container_name=container_name, + branch=branch, + ) + + +def find_session_by_container( + container_name: str, + *, + filesystem: Filesystem | None = None, +) -> SessionRecord | None: + """Find a session by its container name.""" + sessions_list = get_session_store(filesystem).load_sessions() + for record in sessions_list: + if record.container_name == container_name: + return record return None def find_session_by_workspace( workspace: str, branch: str | None = None, -) -> dict[str, Any] | None: - """ - Find a session by workspace and optionally branch. - - Return the most recent matching session. 
- """ - sessions = _load_sessions() - - # Sort by last_used descending - sessions.sort(key=lambda s: s.get("last_used", ""), reverse=True) - - for s in sessions: - if s.get("workspace") == workspace: - if branch is None or s.get("branch") == branch: - return s + *, + filesystem: Filesystem | None = None, +) -> SessionRecord | None: + """Find a session by workspace and optionally branch.""" + sessions_list = get_session_store(filesystem).load_sessions() + sessions_list.sort(key=lambda record: record.last_used or "", reverse=True) + for record in sessions_list: + if record.workspace == workspace and (branch is None or record.branch == branch): + return record return None def get_container_for_workspace( workspace: str, branch: str | None = None, + *, + filesystem: Filesystem | None = None, ) -> str | None: - """ - Return the container name for a workspace (and optionally branch). - - Return None if no container has been recorded. - """ - session = find_session_by_workspace(workspace, branch) - if session: - return session.get("container_name") - return None + """Return the container name for a workspace (and optionally branch).""" + session = find_session_by_workspace(workspace, branch, filesystem=filesystem) + return session.container_name if session else None # ═══════════════════════════════════════════════════════════════════════════════ @@ -250,64 +147,45 @@ def get_container_for_workspace( # ═══════════════════════════════════════════════════════════════════════════════ -def clear_history() -> int: - """ - Clear all session history. - - Return the number of sessions cleared. 
- """ - lock_file = lock_path("sessions") - with file_lock(lock_file): - sessions = _load_sessions() - count = len(sessions) - _save_sessions([]) +def clear_history(filesystem: Filesystem | None = None) -> int: + """Clear all session history and return count cleared.""" + store = get_session_store(filesystem) + with store.lock(): + sessions_list = store.load_sessions() + count = len(sessions_list) + store.save_sessions([]) return count -def remove_session(workspace: str, branch: str | None = None) -> bool: - """ - Remove a specific session from history. - - Args: - workspace: Workspace path to remove - branch: Optional branch (if None, removes all sessions for workspace) - - Returns: - True if session was found and removed - """ - lock_file = lock_path("sessions") - with file_lock(lock_file): - sessions = _load_sessions() - original_count = len(sessions) +def remove_session( + workspace: str, + branch: str | None = None, + *, + filesystem: Filesystem | None = None, +) -> bool: + """Remove a specific session from history.""" + store = get_session_store(filesystem) + with store.lock(): + sessions_list = store.load_sessions() + original_count = len(sessions_list) if branch: - sessions = [ - s - for s in sessions - if not (s.get("workspace") == workspace and s.get("branch") == branch) + sessions_list = [ + record + for record in sessions_list + if not (record.workspace == workspace and record.branch == branch) ] else: - sessions = [s for s in sessions if s.get("workspace") != workspace] - - _save_sessions(sessions) - return len(sessions) < original_count - - -def prune_orphaned_sessions() -> int: - """ - Remove sessions whose workspaces no longer exist. + sessions_list = [record for record in sessions_list if record.workspace != workspace] - Return the number of sessions pruned. 
- """ - lock_file = lock_path("sessions") - with file_lock(lock_file): - sessions = _load_sessions() - original_count = len(sessions) + store.save_sessions(sessions_list) + return len(sessions_list) < original_count - valid_sessions = [s for s in sessions if Path(s.get("workspace", "")).expanduser().exists()] - _save_sessions(valid_sessions) - return original_count - len(valid_sessions) +def prune_orphaned_sessions(filesystem: Filesystem | None = None) -> int: + """Remove sessions whose workspaces no longer exist.""" + service = get_session_service(filesystem) + return service.prune_orphaned_sessions() # ═══════════════════════════════════════════════════════════════════════════════ @@ -317,17 +195,11 @@ def prune_orphaned_sessions() -> int: def get_claude_sessions_dir() -> Path: """Return the Claude Code sessions directory.""" - # Claude Code stores sessions in its config directory return Path.home() / AGENT_CONFIG_DIR def get_claude_recent_sessions() -> list[dict[Any, Any]]: - """ - Return recent sessions from Claude Code's own storage. - - Read from ~/.claude/ if available. - Note: Claude Code's session format may change; this is best-effort. - """ + """Return recent sessions from Claude Code's own storage.""" claude_dir = get_claude_sessions_dir() sessions_file = claude_dir / "sessions.json" @@ -343,83 +215,10 @@ def get_claude_recent_sessions() -> list[dict[Any, Any]]: # ═══════════════════════════════════════════════════════════════════════════════ -# Internal Helpers +# Formatting Helpers # ═══════════════════════════════════════════════════════════════════════════════ -def _migrate_legacy_sessions(sessions: list[dict[Any, Any]]) -> list[dict[Any, Any]]: - """Migrate legacy session records to current format. - - Migrations performed: - - team == "base" → team = None (standalone mode) - - This allows sessions created with the old hardcoded "base" fallback - to be safely loaded without causing "Team Not Found" errors. 
- - Args: - sessions: List of raw session dicts from JSON. - - Returns: - Migrated session list (same list, mutated in place). - """ - for session in sessions: - # Migration: "base" was never a real team, treat as standalone - if session.get("team") == "base": - session["team"] = None - - return sessions - - -def _load_sessions() -> list[dict[Any, Any]]: - """Load and return sessions from the config file. - - Performs legacy migrations on load to handle sessions saved - with older schema versions. - """ - sessions_file = config.SESSIONS_FILE - - if sessions_file.exists(): - try: - with open(sessions_file) as f: - data = json.load(f) - sessions = cast(list[dict[Any, Any]], data.get("sessions", [])) - # Apply migrations for legacy sessions - return _migrate_legacy_sessions(sessions) - except (OSError, json.JSONDecodeError): - pass - - return [] - - -def _save_sessions(sessions: list[dict[str, Any]]) -> None: - """Save the sessions list to the config file.""" - sessions_file = config.SESSIONS_FILE - - # Ensure parent directory exists - sessions_file.parent.mkdir(parents=True, exist_ok=True) - - with open(sessions_file, "w") as f: - json.dump({"sessions": sessions}, f, indent=2) - - def format_relative_time(dt: datetime) -> str: """Format a datetime as a relative time string (e.g., '2h ago').""" - now = datetime.now() - diff = now - dt - - seconds = diff.total_seconds() - - if seconds < 60: - return "just now" - elif seconds < 3600: - minutes = int(seconds / 60) - return f"{minutes}m ago" - elif seconds < 86400: - hours = int(seconds / 3600) - return f"{hours}h ago" - elif seconds < 604800: - days = int(seconds / 86400) - return f"{days}d ago" - else: - weeks = int(seconds / 604800) - return f"{weeks}w ago" + return format_relative_time_from_datetime(dt) diff --git a/src/scc_cli/ui/dashboard/_dashboard.py b/src/scc_cli/ui/dashboard/_dashboard.py index afd6193..15c7dde 100644 --- a/src/scc_cli/ui/dashboard/_dashboard.py +++ b/src/scc_cli/ui/dashboard/_dashboard.py @@ 
-13,6 +13,7 @@ from __future__ import annotations +from datetime import datetime from typing import Any from rich import box @@ -64,6 +65,7 @@ WorktreeActionMenuRequested, ) from ..list_screen import ListItem +from ..time_format import format_relative_time_from_datetime from .models import DashboardState @@ -394,15 +396,10 @@ def _render_container_details(self, item: ListItem[Any]) -> RenderableType: return Group(header, table, commands) def _render_session_details(self, item: ListItem[Any]) -> RenderableType: - """Render details for a session item using structured key/value table. - - Uses the raw session dict stored in item.value for field access. - """ + """Render details for a session item using structured key/value table.""" session_source = item.value if isinstance(session_source, SessionItem): session = session_source.session - elif isinstance(session_source, dict): - session = session_source else: return Text("Session details unavailable", style="dim italic") @@ -415,38 +412,37 @@ def _render_session_details(self, item: ListItem[Any]) -> RenderableType: table.add_row("Name", Text(item.label, style="bold")) - # Read fields directly from session dict (with None protection) - if session.get("team"): - table.add_row("Team", str(session["team"])) - if session.get("branch"): - table.add_row("Branch", str(session["branch"])) - if session.get("workspace"): - table.add_row("Workspace", str(session["workspace"])) - if session.get("last_used"): - table.add_row("Last Used", str(session["last_used"])) - - # Commands section with None protection and helpful tips + if session.team: + table.add_row("Team", str(session.team)) + if session.branch: + table.add_row("Branch", str(session.branch)) + if session.workspace: + table.add_row("Workspace", str(session.workspace)) + if session.last_used: + table.add_row("Last Used", self._format_session_last_used(session.last_used)) + commands = Text() commands.append("\nCommands\n", style="dim") - container_name = 
session.get("container_name") - session_id = session.get("id") + container_name = session.container_name if container_name: - # Container is available - show resume command commands.append(f" scc resume {container_name}\n", style="cyan") - elif session_id: - # Session exists but container stopped - show restart tip + elif session.workspace: commands.append(" Container stopped. Start new session:\n", style="dim italic") - commands.append( - f" scc start --workspace {session.get('workspace', '.')}\n", style="cyan" - ) + commands.append(f" scc start --workspace {session.workspace}\n", style="cyan") else: - # Minimal session info - generic tip commands.append(" Start session: scc start\n", style="cyan dim") return Group(header, table, commands) + def _format_session_last_used(self, iso_timestamp: str) -> str: + try: + dt = datetime.fromisoformat(iso_timestamp) + except ValueError: + return iso_timestamp + return format_relative_time_from_datetime(dt) + def _render_worktree_details(self, item: ListItem[Any]) -> RenderableType: """Render details for a worktree item using structured key/value table.""" header = self._build_details_header("Worktree Details") diff --git a/src/scc_cli/ui/dashboard/loaders.py b/src/scc_cli/ui/dashboard/loaders.py index a54d1c3..90c9152 100644 --- a/src/scc_cli/ui/dashboard/loaders.py +++ b/src/scc_cli/ui/dashboard/loaders.py @@ -4,15 +4,30 @@ from datetime import datetime +from scc_cli import sessions from scc_cli.application import dashboard as app_dashboard from ..list_screen import ListItem +from ..time_format import format_relative_time_from_datetime from .models import DashboardTab, TabData +def _format_last_used(iso_timestamp: str) -> str: + try: + dt = datetime.fromisoformat(iso_timestamp) + except ValueError: + return iso_timestamp + return format_relative_time_from_datetime(dt) + + def _load_status_tab_data(refresh_at: datetime | None = None) -> TabData: """Load Status tab data showing quick actions and context.""" - tab_data = 
app_dashboard.load_status_tab_data(refresh_at=refresh_at) + session_service = sessions.get_session_service() + tab_data = app_dashboard.load_status_tab_data( + refresh_at=refresh_at, + session_service=session_service, + format_last_used=_format_last_used, + ) return _to_tab_data(tab_data) @@ -23,7 +38,12 @@ def _load_containers_tab_data() -> TabData: def _load_sessions_tab_data() -> TabData: """Load Sessions tab data showing recent Claude sessions.""" - return _to_tab_data(app_dashboard.load_sessions_tab_data()) + session_service = sessions.get_session_service() + tab_data = app_dashboard.load_sessions_tab_data( + session_service=session_service, + format_last_used=_format_last_used, + ) + return _to_tab_data(tab_data) def _load_worktrees_tab_data(verbose: bool = False) -> TabData: @@ -33,10 +53,13 @@ def _load_worktrees_tab_data(verbose: bool = False) -> TabData: def _load_all_tab_data(verbose_worktrees: bool = False) -> dict[DashboardTab, TabData]: """Load data for all dashboard tabs.""" - return { - tab: _to_tab_data(tab_data) - for tab, tab_data in app_dashboard.load_all_tab_data(verbose_worktrees).items() - } + session_service = sessions.get_session_service() + all_tab_data = app_dashboard.load_all_tab_data( + session_service=session_service, + format_last_used=_format_last_used, + verbose_worktrees=verbose_worktrees, + ) + return {tab: _to_tab_data(tab_data) for tab, tab_data in all_tab_data.items()} def _to_tab_data(tab_data: app_dashboard.DashboardTabData) -> TabData: diff --git a/src/scc_cli/ui/dashboard/orchestrator.py b/src/scc_cli/ui/dashboard/orchestrator.py index 4e7d625..67723e8 100644 --- a/src/scc_cli/ui/dashboard/orchestrator.py +++ b/src/scc_cli/ui/dashboard/orchestrator.py @@ -12,14 +12,18 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from collections.abc import Mapping +from datetime import datetime +from typing import TYPE_CHECKING +from ... 
import sessions from ...console import get_err_console if TYPE_CHECKING: from rich.console import Console from scc_cli.application import dashboard as app_dashboard +from scc_cli.ports.session_models import SessionSummary from ...confirm import Confirm from ..chrome import print_with_layout @@ -44,11 +48,20 @@ WorktreeActionMenuRequested, ) from ..list_screen import ListState +from ..time_format import format_relative_time_from_datetime from ._dashboard import Dashboard from .loaders import _to_tab_data from .models import DashboardState +def _format_last_used(iso_timestamp: str) -> str: + try: + dt = datetime.fromisoformat(iso_timestamp) + except ValueError: + return iso_timestamp + return format_relative_time_from_datetime(dt) + + def run_dashboard() -> None: """Run the main SCC dashboard. @@ -73,11 +86,24 @@ def run_dashboard() -> None: scc_config.mark_onboarding_seen() flow_state = app_dashboard.DashboardFlowState() + session_service = sessions.get_session_service() + + def _load_tabs( + verbose_worktrees: bool = False, + ) -> Mapping[ + app_dashboard.DashboardTab, + app_dashboard.DashboardTabData, + ]: + return app_dashboard.load_all_tab_data( + session_service=session_service, + format_last_used=_format_last_used, + verbose_worktrees=verbose_worktrees, + ) while True: view, flow_state = app_dashboard.build_dashboard_view( flow_state, - app_dashboard.load_all_tab_data, + _load_tabs, ) tabs = {tab: _to_tab_data(tab_data) for tab, tab_data in view.tabs.items()} state = DashboardState( @@ -453,13 +479,20 @@ def _handle_worktree_start(worktree_path: str) -> app_dashboard.StartFlowResult: from rich.status import Status from ... 
import config, docker + from ...application.start_session import ( + StartSessionDependencies, + StartSessionRequest, + sync_marketplace_settings_for_start, + ) + from ...bootstrap import get_default_adapters from ...commands.launch import ( - _configure_team_settings, _launch_sandbox, _resolve_mount_and_branch, - _sync_marketplace_settings, _validate_and_resolve_workspace, ) + from ...commands.launch.team_settings import _configure_team_settings + from ...marketplace.materialize import materialize_marketplace + from ...marketplace.resolve import resolve_effective_config from ...theme import Spinners console = get_err_console() @@ -493,7 +526,36 @@ def _handle_worktree_start(worktree_path: str) -> app_dashboard.StartFlowResult: _configure_team_settings(team, cfg) # Sync marketplace settings - sync_result = _sync_marketplace_settings(workspace_path, team) + adapters = get_default_adapters() + start_dependencies = StartSessionDependencies( + filesystem=adapters.filesystem, + remote_fetcher=adapters.remote_fetcher, + clock=adapters.clock, + git_client=adapters.git_client, + agent_runner=adapters.agent_runner, + sandbox_runtime=adapters.sandbox_runtime, + resolve_effective_config=resolve_effective_config, + materialize_marketplace=materialize_marketplace, + ) + start_request = StartSessionRequest( + workspace_path=workspace_path, + workspace_arg=str(workspace_path), + entry_dir=workspace_path, + team=team, + session_name=None, + resume=False, + fresh=False, + offline=False, + standalone=team is None, + dry_run=False, + allow_suspicious=False, + org_config=config.load_cached_org_config(), + org_config_url=None, + ) + sync_result, _sync_error = sync_marketplace_settings_for_start( + start_request, + start_dependencies, + ) plugin_settings = sync_result.rendered_settings if sync_result else None # Resolve mount path and branch @@ -527,44 +589,49 @@ def _handle_worktree_start(worktree_path: str) -> app_dashboard.StartFlowResult: return 
app_dashboard.StartFlowResult.from_legacy(False) -def _handle_session_resume(session: dict[str, Any]) -> bool: - """Handle session resume request from dashboard. - - Resumes an existing session by launching the Docker container with - the stored workspace, team, and branch configuration. +def _handle_session_resume(session: SessionSummary) -> bool: + """Resume a Claude Code session from the dashboard. This function executes OUTSIDE Rich Live context (the dashboard has already exited via the exception unwind before this is called). Args: - session: Session dict containing workspace, team, branch, container_name, etc. + session: Session summary containing workspace, team, branch, container_name, etc. Returns: True if session was resumed successfully, False if resume failed (e.g., workspace no longer exists). """ + from pathlib import Path from rich.status import Status from ... import config, docker + from ...application.start_session import ( + StartSessionDependencies, + StartSessionRequest, + sync_marketplace_settings_for_start, + ) + from ...bootstrap import get_default_adapters from ...commands.launch import ( - _configure_team_settings, _launch_sandbox, _resolve_mount_and_branch, - _sync_marketplace_settings, _validate_and_resolve_workspace, ) + from ...commands.launch.team_settings import _configure_team_settings + from ...marketplace.materialize import materialize_marketplace + from ...marketplace.resolve import resolve_effective_config from ...theme import Spinners console = get_err_console() _prepare_for_nested_ui(console) # Extract session info - workspace = session.get("workspace", "") - team = session.get("team") # May be None for standalone - session_name = session.get("name") - branch = session.get("branch") + workspace = session.workspace + team = session.team # May be None for standalone + session_name = session.name + branch = session.branch if not workspace: console.print("[red]Session has no workspace path[/red]") @@ -594,7 +661,36 @@ def 
_handle_session_resume(session: dict[str, Any]) -> bool: _configure_team_settings(team, cfg) # Sync marketplace settings - sync_result = _sync_marketplace_settings(workspace_path, team) + adapters = get_default_adapters() + start_dependencies = StartSessionDependencies( + filesystem=adapters.filesystem, + remote_fetcher=adapters.remote_fetcher, + clock=adapters.clock, + git_client=adapters.git_client, + agent_runner=adapters.agent_runner, + sandbox_runtime=adapters.sandbox_runtime, + resolve_effective_config=resolve_effective_config, + materialize_marketplace=materialize_marketplace, + ) + start_request = StartSessionRequest( + workspace_path=workspace_path, + workspace_arg=str(workspace_path), + entry_dir=workspace_path, + team=team, + session_name=session_name, + resume=True, + fresh=False, + offline=False, + standalone=team is None, + dry_run=False, + allow_suspicious=False, + org_config=config.load_cached_org_config(), + org_config_url=None, + ) + sync_result, _sync_error = sync_marketplace_settings_for_start( + start_request, + start_dependencies, + ) plugin_settings = sync_result.rendered_settings if sync_result else None # Resolve mount path and branch @@ -970,7 +1066,7 @@ def _handle_container_action_menu(container_id: str, container_name: str) -> str return None -def _handle_session_action_menu(session: dict[str, Any]) -> str | None: +def _handle_session_action_menu(session: SessionSummary) -> str | None: """Show a session actions menu and execute the selected action.""" from ... 
import sessions as session_store from ..list_screen import ListItem, ListScreen @@ -1003,8 +1099,8 @@ def _handle_session_action_menu(session: dict[str, Any]) -> str | None: return "Resume failed" if selected == "remove": - workspace = session.get("workspace") - branch = session.get("branch") + workspace = session.workspace + branch = session.branch if not workspace: return "Missing workspace" removed = session_store.remove_session(workspace, branch) diff --git a/src/scc_cli/ui/formatters.py b/src/scc_cli/ui/formatters.py index 6553289..7a46144 100644 --- a/src/scc_cli/ui/formatters.py +++ b/src/scc_cli/ui/formatters.py @@ -20,13 +20,14 @@ from __future__ import annotations -from datetime import datetime, timezone from typing import TYPE_CHECKING, Any, TypedDict from ..docker.core import ContainerInfo from ..git import WorktreeInfo, get_display_branch +from ..ports.session_models import SessionSummary from ..theme import Indicators from .list_screen import ListItem +from .time_format import format_relative_time_compact if TYPE_CHECKING: from ..contexts import WorkContext @@ -188,50 +189,34 @@ def format_container(container: ContainerInfo) -> ListItem[ContainerInfo]: ) -def format_session(session: dict[str, Any]) -> ListItem[dict[str, Any]]: - """Format a session dict for display in a picker. +def format_session(session: SessionSummary) -> ListItem[SessionSummary]: + """Format a session summary for display in a picker. Args: - session: Session dictionary with name, team, branch, etc. + session: Session summary with name, team, branch, and timestamps. Returns: ListItem suitable for ListScreen display. - - Example: - >>> session = { - ... "name": "project-feature", - ... "team": "platform", - ... "branch": "feature/auth", - ... "last_used": "2 hours ago", - ... 
} - >>> item = format_session(session) - >>> item.label - 'project-feature' """ - name = session.get("name", "Unnamed") + name = session.name or "Unnamed" - # Build description parts desc_parts: list[str] = [] - if session.get("team"): - desc_parts.append(str(session["team"])) + if session.team: + desc_parts.append(str(session.team)) - if session.get("branch"): - desc_parts.append(str(session["branch"])) + if session.branch: + desc_parts.append(str(session.branch)) - if session.get("last_used"): - desc_parts.append(str(session["last_used"])) - - # Check for governance warnings (e.g., expiring exceptions) - governance_status: str | None = None - if session.get("has_exception_warning"): - governance_status = "warning" + if session.last_used: + relative_time = format_relative_time_compact(session.last_used) + desc_parts.append(relative_time or session.last_used) return ListItem( value=session, label=name, description=" ".join(desc_parts), - governance_status=governance_status, + governance_status=None, ) @@ -404,30 +389,7 @@ def _format_relative_time(iso_timestamp: str) -> str: Returns: Human-readable relative time string, or empty if parsing fails. 
""" - try: - # Parse ISO format, handling Z suffix - timestamp = datetime.fromisoformat(iso_timestamp.replace("Z", "+00:00")) - now = datetime.now(timezone.utc) - delta = now - timestamp - - seconds = int(delta.total_seconds()) - if seconds < 0: - return "" - if seconds < 60: - return "just now" - if seconds < 3600: - minutes = seconds // 60 - return f"{minutes}m ago" - if seconds < 86400: - hours = seconds // 3600 - return f"{hours}h ago" - if seconds < 604800: - days = seconds // 86400 - return f"{days}d ago" - weeks = seconds // 604800 - return f"{weeks}w ago" - except (ValueError, TypeError): - return "" + return format_relative_time_compact(iso_timestamp) def _extract_container_time(status: str) -> str: diff --git a/src/scc_cli/ui/keys.py b/src/scc_cli/ui/keys.py index 3f87501..2615ca8 100644 --- a/src/scc_cli/ui/keys.py +++ b/src/scc_cli/ui/keys.py @@ -21,10 +21,12 @@ from dataclasses import dataclass from enum import Enum, auto -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from typing import TYPE_CHECKING, Generic, TypeVar import readchar +from scc_cli.ports.session_models import SessionSummary + if TYPE_CHECKING: pass @@ -108,11 +110,11 @@ class SessionResumeRequested(Exception): # noqa: N818 The orchestrator (run_dashboard) catches this and calls the resume flow. Attributes: - session: Session dict containing workspace, team, name, etc. + session: Session summary containing workspace, team, name, etc. return_to: Tab name to restore after flow (e.g., "SESSIONS"). 
""" - def __init__(self, session: dict[str, Any], return_to: str = "") -> None: + def __init__(self, session: SessionSummary, return_to: str = "") -> None: self.session = session self.return_to = return_to super().__init__() @@ -251,7 +253,7 @@ def __init__(self, container_id: str, container_name: str, return_to: str = "") class SessionActionMenuRequested(Exception): # noqa: N818 """Raised when user requests the session actions menu from the dashboard.""" - def __init__(self, session: dict[str, Any], return_to: str = "") -> None: + def __init__(self, session: SessionSummary, return_to: str = "") -> None: self.session = session self.return_to = return_to super().__init__() diff --git a/src/scc_cli/ui/picker.py b/src/scc_cli/ui/picker.py index e218454..7298551 100644 --- a/src/scc_cli/ui/picker.py +++ b/src/scc_cli/ui/picker.py @@ -43,6 +43,7 @@ from rich.text import Text from ..contexts import normalize_path +from ..ports.session_models import SessionSummary from ..theme import Indicators from .chrome import Chrome, ChromeConfig from .formatters import ( @@ -235,23 +236,23 @@ def pick_containers( def pick_session( - sessions: Sequence[dict[str, Any]], + sessions: Sequence[SessionSummary], *, title: str = "Select Session", subtitle: str | None = None, -) -> dict[str, Any] | None: +) -> SessionSummary | None: """Show interactive session picker. Display a list of sessions with team, branch, and last used info. User can navigate with arrow keys, filter by typing, and select with Enter. Args: - sessions: Sequence of session dicts. + sessions: Sequence of session summaries. title: Title shown in chrome header. subtitle: Optional subtitle for additional context. Returns: - Selected session dict, or None if cancelled. + Selected session summary, or None if cancelled. 
""" if not sessions: return None diff --git a/src/scc_cli/ui/prompts.py b/src/scc_cli/ui/prompts.py index c44907c..6fc8637 100644 --- a/src/scc_cli/ui/prompts.py +++ b/src/scc_cli/ui/prompts.py @@ -12,6 +12,7 @@ prompt_repo_url: Prompt for Git repository URL """ +from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING, Any @@ -22,8 +23,10 @@ from rich.table import Table from scc_cli.confirm import Confirm +from scc_cli.ports.session_models import SessionSummary from scc_cli.theme import Borders, Colors from scc_cli.ui.chrome import get_layout_metrics, print_with_layout +from scc_cli.ui.time_format import format_relative_time_from_datetime if TYPE_CHECKING: from scc_cli.core.errors import SCCError @@ -85,15 +88,28 @@ def confirm_with_layout(console: Console, prompt: str, **kwargs: Any) -> bool: return Confirm.ask(f"{prefix}{prompt}", **kwargs) -def select_session(console: Console, sessions_list: list[dict[str, Any]]) -> dict[str, Any] | None: +def _format_last_used(last_used: str | None) -> str: + if not last_used: + return "-" + try: + dt = datetime.fromisoformat(last_used) + except ValueError: + return last_used + return format_relative_time_from_datetime(dt) + + +def select_session( + console: Console, + sessions_list: list[SessionSummary], +) -> SessionSummary | None: """Display an interactive session selection menu. Args: console: Rich console for output. - sessions_list: List of session dicts with 'name', 'workspace', 'last_used', etc. + sessions_list: List of session summaries with name, workspace, and timestamps. Returns: - Selected session dict or None if cancelled. + Selected session summary or None if cancelled. 
""" if not sessions_list: console.print(f"[{Colors.WARNING}]No sessions available.[/{Colors.WARNING}]") @@ -108,9 +124,9 @@ def select_session(console: Console, sessions_list: list[dict[str, Any]]) -> dic table.add_column("Last Used", style=Colors.SECONDARY) for i, session in enumerate(sessions_list, 1): - name = session.get("name", "-") - workspace = session.get("workspace", "-") - last_used = session.get("last_used", "-") + name = session.name or "-" + workspace = session.workspace or "-" + last_used = _format_last_used(session.last_used) table.add_row(f"[{i}]", name, workspace, last_used) table.add_row("[0]", "<- Cancel", "", "") @@ -172,17 +188,21 @@ def select_team(console: Console, cfg: dict[str, Any]) -> str | None: return selected -def prompt_custom_workspace(console: Console) -> str | None: +def prompt_custom_workspace(console: Console, prompt: str | None = None) -> str | None: """Prompt the user to enter a custom workspace path. Args: console: Rich console for output. + prompt: Optional prompt text override. Returns: Resolved absolute path string, or None if cancelled or path invalid. """ console.print() - path = prompt_with_layout(console, f"[{Colors.BRAND}]Enter workspace path[/{Colors.BRAND}]") + prompt_text = ( + prompt or f"[{Colors.BRAND}]Enter workspace path (tab to autocomplete)[/{Colors.BRAND}]" + ) + path = prompt_with_layout(console, prompt_text) if not path: return None @@ -203,17 +223,16 @@ def prompt_custom_workspace(console: Console) -> str | None: return str(expanded) -def prompt_repo_url(console: Console) -> str: +def prompt_repo_url(console: Console, prompt: str | None = None) -> str: """Prompt the user to enter a Git repository URL. Args: console: Rich console for output. + prompt: Optional prompt text override. Returns: The entered URL string (may be empty if user pressed Enter). 
""" console.print() - return prompt_with_layout( - console, - f"[{Colors.BRAND}]Repository URL (HTTPS or SSH)[/{Colors.BRAND}]", - ) + prompt_text = prompt or f"[{Colors.BRAND}]Repository URL (HTTPS or SSH)[/{Colors.BRAND}]" + return prompt_with_layout(console, prompt_text) diff --git a/src/scc_cli/ui/time_format.py b/src/scc_cli/ui/time_format.py new file mode 100644 index 0000000..72059a0 --- /dev/null +++ b/src/scc_cli/ui/time_format.py @@ -0,0 +1,112 @@ +"""Relative time formatting helpers for UI output.""" + +from __future__ import annotations + +from datetime import datetime, timezone + + +def format_relative_time_compact(iso_timestamp: str) -> str: + """Format an ISO timestamp as compact relative time. + + Args: + iso_timestamp: ISO 8601 timestamp string. + + Returns: + Relative time string (e.g., "2h ago"), or empty if parsing fails. + """ + try: + timestamp = datetime.fromisoformat(iso_timestamp.replace("Z", "+00:00")) + now = datetime.now(timezone.utc) + delta = now - timestamp + + seconds = int(delta.total_seconds()) + if seconds < 0: + return "" + if seconds < 60: + return "just now" + if seconds < 3600: + minutes = seconds // 60 + return f"{minutes}m ago" + if seconds < 86400: + hours = seconds // 3600 + return f"{hours}h ago" + if seconds < 604800: + days = seconds // 86400 + return f"{days}d ago" + weeks = seconds // 604800 + return f"{weeks}w ago" + except (ValueError, TypeError): + return "" + + +def format_relative_time_calendar(iso_timestamp: str) -> str: + """Format an ISO timestamp with calendar-style labels. + + Examples: + 2 minutes ago → "2m ago" + yesterday → "yesterday" + older → "Dec 20" + + Args: + iso_timestamp: ISO 8601 timestamp string. + + Returns: + Calendar-style relative time string, or empty if parsing fails. 
+ """ + try: + if iso_timestamp.endswith("Z"): + iso_timestamp = iso_timestamp[:-1] + "+00:00" + + timestamp = datetime.fromisoformat(iso_timestamp) + now = datetime.now(timezone.utc) + if timestamp.tzinfo is None: + timestamp = timestamp.replace(tzinfo=timezone.utc) + + delta = now - timestamp + seconds = delta.total_seconds() + + if seconds < 60: + return "just now" + if seconds < 3600: + minutes = int(seconds / 60) + return f"{minutes}m ago" + if seconds < 86400: + hours = int(seconds / 3600) + return f"{hours}h ago" + if seconds < 172800: + return "yesterday" + if seconds < 604800: + days = int(seconds / 86400) + return f"{days}d ago" + return timestamp.strftime("%b %d") + except (ValueError, AttributeError): + return "" + + +def format_relative_time_from_datetime(dt: datetime) -> str: + """Format a datetime as a relative time string. + + Args: + dt: Datetime to format. + + Returns: + Relative time string (e.g., "2h ago"). + """ + now = datetime.now() + diff = now - dt + + seconds = diff.total_seconds() + + if seconds < 60: + return "just now" + if seconds < 3600: + minutes = int(seconds / 60) + return f"{minutes}m ago" + if seconds < 86400: + hours = int(seconds / 3600) + return f"{hours}h ago" + if seconds < 604800: + days = int(seconds / 86400) + return f"{days}d ago" + weeks = int(seconds / 604800) + return f"{weeks}w ago" diff --git a/src/scc_cli/ui/wizard.py b/src/scc_cli/ui/wizard.py index e8ce819..0afa12f 100644 --- a/src/scc_cli/ui/wizard.py +++ b/src/scc_cli/ui/wizard.py @@ -17,14 +17,16 @@ Example: >>> from scc_cli.ui.wizard import ( - ... BACK, WorkspaceSource, - ... pick_workspace_source, pick_recent_workspace + ... BACK, pick_workspace_source, pick_recent_workspace ... ) + >>> from scc_cli.application.launch.start_wizard import WorkspaceSource >>> >>> while True: ... source = pick_workspace_source(team="platform") ... if source is None: ... break # User pressed q or Esc at top level - quit + ... if source is BACK: + ... break ... ... 
if source == WorkspaceSource.RECENT: ... workspace = pick_recent_workspace(recent_sessions) @@ -37,36 +39,68 @@ from __future__ import annotations -from datetime import datetime, timezone +from dataclasses import dataclass from enum import Enum from pathlib import Path -from typing import TYPE_CHECKING, Any, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from rich.console import Console + +from scc_cli.application.interaction_requests import ConfirmRequest, InputRequest, SelectRequest +from scc_cli.application.launch.start_wizard import ( + CLONE_REPO_REQUEST_ID, + CROSS_TEAM_RESUME_REQUEST_ID, + CUSTOM_WORKSPACE_REQUEST_ID, + QUICK_RESUME_REQUEST_ID, + SESSION_NAME_REQUEST_ID, + TEAM_SELECTION_REQUEST_ID, + WORKSPACE_PICKER_REQUEST_ID, + WORKSPACE_SOURCE_REQUEST_ID, + WORKTREE_CONFIRM_REQUEST_ID, + WORKTREE_NAME_REQUEST_ID, + QuickResumeOption, + QuickResumeViewModel, + StartWizardPrompt, + TeamOption, + TeamRepoOption, + TeamRepoPickerViewModel, + TeamSelectionViewModel, + WorkspacePickerViewModel, + WorkspaceSource, + WorkspaceSourceOption, + WorkspaceSourceViewModel, + WorkspaceSummary, +) -from ..services.workspace import is_suspicious_directory +from ..ports.session_models import SessionSummary +from ..services.workspace import has_project_markers, is_suspicious_directory from .keys import BACK, _BackSentinel from .list_screen import ListItem -from .picker import _run_single_select_picker +from .picker import ( + QuickResumeResult, + TeamSwitchRequested, + _run_single_select_picker, + pick_context_quick_resume, + pick_team, +) +from .prompts import ( + confirm_with_layout, + prompt_custom_workspace, + prompt_repo_url, + prompt_with_layout, +) +from .time_format import format_relative_time_calendar if TYPE_CHECKING: pass -# Type variable for generic picker return types -T = TypeVar("T") - - -# ═══════════════════════════════════════════════════════════════════════════════ -# Workspace Source Enum -# 
═══════════════════════════════════════════════════════════════════════════════ +class StartWizardRendererError(RuntimeError): + """Error raised for unexpected prompt types in the start wizard renderer.""" -class WorkspaceSource(Enum): - """Options for where to get the workspace from.""" - CURRENT_DIR = "current_dir" # Use current working directory - RECENT = "recent" - TEAM_REPOS = "team_repos" - CUSTOM = "custom" - CLONE = "clone" +# Type variable for generic picker return types +T = TypeVar("T") # ═══════════════════════════════════════════════════════════════════════════════ @@ -122,40 +156,400 @@ def _format_relative_time(iso_timestamp: str) -> str: 5 days ago → "5d ago" older → "Dec 20" (month day format) """ - try: - # Handle Z suffix for UTC - if iso_timestamp.endswith("Z"): - iso_timestamp = iso_timestamp[:-1] + "+00:00" - - timestamp = datetime.fromisoformat(iso_timestamp) - - # Ensure timezone-aware comparison - now = datetime.now(timezone.utc) - if timestamp.tzinfo is None: - timestamp = timestamp.replace(tzinfo=timezone.utc) - - delta = now - timestamp - seconds = delta.total_seconds() - - if seconds < 60: - return "just now" - elif seconds < 3600: - minutes = int(seconds / 60) - return f"{minutes}m ago" - elif seconds < 86400: - hours = int(seconds / 3600) - return f"{hours}h ago" - elif seconds < 172800: # 2 days - return "yesterday" - elif seconds < 604800: # 7 days - days = int(seconds / 86400) - return f"{days}d ago" + return format_relative_time_calendar(iso_timestamp) + + +@dataclass(frozen=True) +class StartWizardAnswer: + """Result of rendering a start wizard prompt.""" + + kind: StartWizardAnswerKind + value: object | None = None + + +class StartWizardAnswerKind(Enum): + """Response outcomes for the start wizard prompt renderer.""" + + SELECTED = "selected" + BACK = "back" + CANCELLED = "cancelled" + + +class StartWizardAction(Enum): + """Synthetic wizard actions emitted by the prompt renderer.""" + + NEW_SESSION = "new_session" + 
TOGGLE_ALL_TEAMS = "toggle_all_teams" + SWITCH_TEAM = "switch_team" + + +def _answer_cancelled() -> StartWizardAnswer: + return StartWizardAnswer(kind=StartWizardAnswerKind.CANCELLED) + + +def _answer_back() -> StartWizardAnswer: + return StartWizardAnswer(kind=StartWizardAnswerKind.BACK) + + +def _answer_selected(value: object) -> StartWizardAnswer: + return StartWizardAnswer(kind=StartWizardAnswerKind.SELECTED, value=value) + + +def render_start_wizard_prompt( + prompt: StartWizardPrompt, + *, + console: Console, + recent_sessions: list[SessionSummary] | None = None, + available_teams: list[dict[str, Any]] | None = None, + team_repos: list[dict[str, Any]] | None = None, + workspace_base: str | None = None, + allow_back: bool = False, + standalone: bool = False, + context_label: str | None = None, + current_branch: str | None = None, + effective_team: str | None = None, +) -> StartWizardAnswer: + """Render a start wizard prompt using existing UI pickers/prompts.""" + request_id = prompt.request.request_id + + if request_id == QUICK_RESUME_REQUEST_ID: + quick_resume_view = cast(QuickResumeViewModel, prompt.view_model) + quick_resume_request = cast(SelectRequest[QuickResumeOption], prompt.request) + contexts = quick_resume_view.contexts + try: + result, selected_context = pick_context_quick_resume( + contexts, + title=quick_resume_request.title, + subtitle=quick_resume_request.subtitle, + standalone=standalone, + context_label=quick_resume_view.context_label, + effective_team=effective_team, + current_branch=current_branch, + ) + except TeamSwitchRequested: + return _answer_selected(StartWizardAction.SWITCH_TEAM) + if result is QuickResumeResult.SELECTED: + if selected_context is None: + return _answer_cancelled() + return _answer_selected(selected_context) + if result is QuickResumeResult.NEW_SESSION: + return _answer_selected(StartWizardAction.NEW_SESSION) + if result is QuickResumeResult.TOGGLE_ALL_TEAMS: + return 
_answer_selected(StartWizardAction.TOGGLE_ALL_TEAMS) + if result is QuickResumeResult.BACK: + return _answer_back() + return _answer_cancelled() + + if request_id == TEAM_SELECTION_REQUEST_ID: + if available_teams is None: + raise StartWizardRendererError("available_teams required for team selection") + team_view = cast(TeamSelectionViewModel, prompt.view_model) + team_request = cast(SelectRequest[TeamOption], prompt.request) + try: + selected = pick_team( + available_teams, + current_team=team_view.current_team, + title=team_request.title, + subtitle=team_request.subtitle, + ) + except TeamSwitchRequested: + return _answer_selected(StartWizardAction.SWITCH_TEAM) + if selected is None: + return _answer_cancelled() + return _answer_selected(selected) + + if request_id == WORKSPACE_SOURCE_REQUEST_ID: + source_view = cast(WorkspaceSourceViewModel, prompt.view_model) + source_request = cast(SelectRequest[WorkspaceSourceOption], prompt.request) + try: + source = pick_workspace_source( + has_team_repos=any(team_repos or []), + team=effective_team, + standalone=standalone, + allow_back=allow_back, + context_label=context_label or source_view.context_label, + subtitle=source_request.subtitle, + options=list(source_view.options), + view_model=source_view, + ) + except TeamSwitchRequested: + return _answer_selected(StartWizardAction.SWITCH_TEAM) + if source is BACK: + return _answer_back() + if source is None: + return _answer_cancelled() + return _answer_selected(source) + + if request_id == WORKSPACE_PICKER_REQUEST_ID: + if prompt.view_model is None: + raise StartWizardRendererError("workspace picker view model required") + + if isinstance(prompt.view_model, WorkspacePickerViewModel): + picker_view = prompt.view_model + try: + picker_result = pick_recent_workspace( + recent_sessions or [], + standalone=standalone, + context_label=context_label or picker_view.context_label, + options=list(picker_view.options), + ) + except TeamSwitchRequested: + return 
_answer_selected(StartWizardAction.SWITCH_TEAM) + if picker_result is BACK: + return _answer_back() + if picker_result is None: + return _answer_cancelled() + return _answer_selected(picker_result) + + if isinstance(prompt.view_model, TeamRepoPickerViewModel): + repo_view = prompt.view_model + if team_repos is None: + raise StartWizardRendererError("team_repos required for team repo selection") + resolved_workspace_base = workspace_base or repo_view.workspace_base + try: + picker_result = pick_team_repo( + team_repos, + resolved_workspace_base, + standalone=standalone, + context_label=context_label or repo_view.context_label, + options=list(repo_view.options), + ) + except TeamSwitchRequested: + return _answer_selected(StartWizardAction.SWITCH_TEAM) + if picker_result is BACK: + return _answer_back() + if picker_result is None: + return _answer_cancelled() + return _answer_selected(picker_result) + + msg = f"Unsupported workspace picker view model: {type(prompt.view_model)}" + raise StartWizardRendererError(msg) + + if request_id == CUSTOM_WORKSPACE_REQUEST_ID: + custom_request = cast(InputRequest, prompt.request) + prompt_text = f"[cyan]{custom_request.prompt}[/cyan]" + workspace_path = prompt_custom_workspace(console, prompt=prompt_text) + if workspace_path is None: + return _answer_back() + return _answer_selected(workspace_path) + + if request_id == CLONE_REPO_REQUEST_ID: + clone_request = cast(InputRequest, prompt.request) + prompt_text = f"[cyan]{clone_request.prompt}[/cyan]" + repo_url = prompt_repo_url(console, prompt=prompt_text) + if not repo_url: + return _answer_back() + from .git_interactive import clone_repo + + resolved_base = workspace_base or "~/projects" + workspace = clone_repo(repo_url, resolved_base) + if workspace is None: + return _answer_back() + return _answer_selected(workspace) + + if request_id == CROSS_TEAM_RESUME_REQUEST_ID: + confirm_request = cast(ConfirmRequest, prompt.request) + prompt_text = confirm_request.prompt + confirm = 
confirm_with_layout( + console, + prompt_text, + default=prompt.default_response or False, + ) + return _answer_selected(confirm) + + if request_id == WORKTREE_CONFIRM_REQUEST_ID: + confirm_request = cast(ConfirmRequest, prompt.request) + prompt_text = f"[cyan]{confirm_request.prompt}[/cyan]" + confirm = confirm_with_layout( + console, + prompt_text, + default=prompt.default_response or False, + ) + return _answer_selected(confirm) + + if request_id == WORKTREE_NAME_REQUEST_ID: + worktree_request = cast(InputRequest, prompt.request) + prompt_text = f"[cyan]{worktree_request.prompt}[/cyan]" + worktree_name = prompt_with_layout(console, prompt_text) + if worktree_name is None: + return _answer_back() + return _answer_selected(worktree_name) + + if request_id == SESSION_NAME_REQUEST_ID: + session_request = cast(InputRequest, prompt.request) + prompt_text = "[cyan]Session name[/cyan] [dim](optional, for easy resume)[/dim]" + session_name_value = prompt_with_layout( + console, + prompt_text, + default=session_request.default or "", + ) + return _answer_selected(session_name_value or None) + + msg = f"Unsupported start wizard prompt: {prompt.request.request_id}" + raise StartWizardRendererError(msg) + + +# ═══════════════════════════════════════════════════════════════════════════════ +# Workspace Source Option Builder +# ═══════════════════════════════════════════════════════════════════════════════ + + +def build_workspace_source_options( + *, + has_team_repos: bool, + include_current_dir: bool = True, +) -> list[WorkspaceSourceOption]: + options: list[WorkspaceSourceOption] = [] + + if include_current_dir: + # Check current directory for project markers and git status + # Import here to avoid circular dependencies + from scc_cli.services import git as git_service + + cwd = Path.cwd() + cwd_name = cwd.name or str(cwd) + is_git = git_service.is_git_repo(cwd) + + # Three-tier logic with git awareness: + # 1. Suspicious directory (home, /, tmp) -> don't show + # 2. 
Has project markers + git -> show folder name (confident) + # 3. Has project markers, no git -> show "folder (no git)" + # 4. No markers, not suspicious -> show "folder (no git)" + if not is_suspicious_directory(cwd): + if _has_project_markers(cwd): + if is_git: + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.CURRENT_DIR, + label="• Current directory", + description=cwd_name, + ) + ) + else: + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.CURRENT_DIR, + label="• Current directory", + description=f"{cwd_name} (no git)", + ) + ) + else: + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.CURRENT_DIR, + label="• Current directory", + description=f"{cwd_name} (no git)", + ) + ) + + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.RECENT, + label="• Recent workspaces", + description="Continue working on previous project", + ) + ) + + if has_team_repos: + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.TEAM_REPOS, + label="• Team repositories", + description="Choose from team's common repos", + ) + ) + + options.extend( + [ + WorkspaceSourceOption( + source=WorkspaceSource.CUSTOM, + label="• Enter path", + description="Specify a local directory path", + ), + WorkspaceSourceOption( + source=WorkspaceSource.CLONE, + label="• Clone repository", + description="Clone a Git repository", + ), + ] + ) + + return options + + +def build_workspace_source_options_from_view_model( + view_model: WorkspaceSourceViewModel, +) -> list[WorkspaceSourceOption]: + """Build workspace source options from view model data flags. + + This function is called by the UI layer when the view model has empty + options. It builds presentation options based on the data flags + provided by the application layer (cwd_context, has_team_repos). 
+ + The design follows clean architecture: + - Application layer provides data (cwd_context, has_team_repos) + - UI layer decides how to present that data (this function) + + Args: + view_model: WorkspaceSourceViewModel with data flags populated. + + Returns: + List of WorkspaceSourceOption for the picker. + """ + options: list[WorkspaceSourceOption] = [] + + # Current directory - only if cwd_context is provided (means it's not suspicious) + if view_model.cwd_context is not None: + ctx = view_model.cwd_context + # Format description based on git status + if ctx.is_git: + description = ctx.name else: - # Older than a week - show month day - return timestamp.strftime("%b %d") + description = f"{ctx.name} (no git)" + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.CURRENT_DIR, + label="• Current directory", + description=description, + ) + ) + + # Recent workspaces - always available + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.RECENT, + label="• Recent workspaces", + description="Continue working on previous project", + ) + ) + + # Team repositories - only if available + if view_model.has_team_repos: + options.append( + WorkspaceSourceOption( + source=WorkspaceSource.TEAM_REPOS, + label="• Team repositories", + description="Choose from team's common repos", + ) + ) + + # Enter path and Clone - always available + options.extend( + [ + WorkspaceSourceOption( + source=WorkspaceSource.CUSTOM, + label="• Enter path", + description="Specify a local directory path", + ), + WorkspaceSourceOption( + source=WorkspaceSource.CLONE, + label="• Clone repository", + description="Clone a Git repository", + ), + ] + ) - except (ValueError, AttributeError): - return "" + return options # ═══════════════════════════════════════════════════════════════════════════════ @@ -207,47 +601,16 @@ def _run_subscreen_picker( # ═══════════════════════════════════════════════════════════════════════════════ -# Common project markers across 
languages/frameworks -# Split into direct checks (fast) and glob patterns (slower, checked only if needed) -_PROJECT_MARKERS_DIRECT = ( - ".git", # Git repository (directory or file for worktrees) - ".scc.yaml", # SCC config - ".gitignore", # Often at project root - "package.json", # Node.js / JavaScript - "tsconfig.json", # TypeScript - "pyproject.toml", # Python (modern) - "setup.py", # Python (legacy) - "requirements.txt", # Python dependencies - "Pipfile", # Pipenv - "Cargo.toml", # Rust - "go.mod", # Go - "pom.xml", # Java Maven - "build.gradle", # Java/Kotlin Gradle - "gradlew", # Gradle wrapper (strong signal) - "Gemfile", # Ruby - "composer.json", # PHP - "mix.exs", # Elixir - "Makefile", # Make-based projects - "CMakeLists.txt", # CMake C/C++ - ".project", # Eclipse - "Dockerfile", # Docker projects - "docker-compose.yml", # Docker Compose - "compose.yaml", # Docker Compose (new name) -) - -# Glob patterns for project markers (checked only if direct checks fail) -_PROJECT_MARKERS_GLOB = ( - "*.sln", # .NET solution - "*.csproj", # .NET C# project -) +# ───────────────────────────────────────────────────────────────────────────── +# Project Marker Detection (delegates to services layer) +# ───────────────────────────────────────────────────────────────────────────── def _has_project_markers(path: Path) -> bool: """Check if a directory has common project markers. - Uses a two-phase approach for performance: - 1. Fast direct existence checks for common markers - 2. Slower glob patterns only if direct checks fail + Delegates to the service layer for the actual check. + This wrapper is kept for backwards compatibility with existing callers. Args: path: Directory to check. @@ -255,23 +618,7 @@ def _has_project_markers(path: Path) -> bool: Returns: True if directory has any recognizable project markers. 
""" - if not path.is_dir(): - return False - - # Phase 1: Fast direct checks - for marker in _PROJECT_MARKERS_DIRECT: - if (path / marker).exists(): - return True - - # Phase 2: Slower glob checks (only if no direct markers found) - for pattern in _PROJECT_MARKERS_GLOB: - try: - if next(path.glob(pattern), None) is not None: - return True - except (OSError, StopIteration): - continue - - return False + return has_project_markers(path) def _is_valid_workspace(path: Path) -> bool: @@ -284,13 +631,15 @@ def _is_valid_workspace(path: Path) -> bool: Random directories (like $HOME) are NOT valid workspaces. + Delegates to the service layer for the actual check. + Args: path: Directory to check. Returns: True if directory exists and has workspace markers. """ - return _has_project_markers(path) + return has_project_markers(path) def pick_workspace_source( @@ -300,6 +649,10 @@ def pick_workspace_source( standalone: bool = False, allow_back: bool = False, context_label: str | None = None, + include_current_dir: bool = True, + subtitle: str | None = None, + options: list[WorkspaceSourceOption] | None = None, + view_model: WorkspaceSourceViewModel | None = None, ) -> WorkspaceSource | _BackSentinel | None: """Show picker for workspace source selection. @@ -315,12 +668,23 @@ def pick_workspace_source( allow_back: If True, Esc returns BACK (for sub-screen context like Dashboard). If False, Esc returns None (for top-level CLI context). context_label: Optional context label (e.g., "Team: platform") shown in header. + include_current_dir: Whether to include current directory as an option. + subtitle: Optional subtitle override. + options: Optional prebuilt workspace source options to render. + view_model: Optional view model with data flags (cwd_context, has_team_repos). + When provided with empty options, uses these flags to build options. Returns: Selected WorkspaceSource, BACK if allow_back and Esc pressed, or None if quit. 
""" # Build subtitle based on context - subtitle = "Pick a project source (press 't' to switch team)" + resolved_subtitle = subtitle + if resolved_subtitle is None: + resolved_subtitle = "Pick a project source (press 't' to switch team)" + if options is not None: + resolved_subtitle = None + elif standalone: + resolved_subtitle = "Pick a project source" resolved_context_label = context_label if resolved_context_label is None and team: resolved_context_label = f"Team: {team}" @@ -328,99 +692,56 @@ def pick_workspace_source( # Build items list - start with CWD option if appropriate items: list[ListItem[WorkspaceSource]] = [] - # Check current directory for project markers and git status - # Import here to avoid circular dependencies - from scc_cli.services import git as git_service - - cwd = Path.cwd() - cwd_name = cwd.name or str(cwd) - is_git = git_service.is_git_repo(cwd) - - # Three-tier logic with git awareness: - # 1. Suspicious directory (home, /, tmp) → don't show - # 2. Has project markers + git → show folder name (confident) - # 3. Has project markers, no git → show "folder (no git)" - # 4. 
No markers, not suspicious → show "folder (no git)" - if not is_suspicious_directory(cwd): - if _has_project_markers(cwd): - if is_git: - # Valid project with git - show with confidence - items.append( - ListItem( - label="• Current directory", - description=cwd_name, - value=WorkspaceSource.CURRENT_DIR, - ) - ) - else: - # Has project markers but no git - items.append( - ListItem( - label="• Current directory", - description=f"{cwd_name} (no git)", - value=WorkspaceSource.CURRENT_DIR, - ) - ) + source_options = options + if not source_options: + # If view model is provided, build options from it + # This is the clean architecture approach: application provides data, + # UI layer builds presentation options + if view_model is not None: + source_options = build_workspace_source_options_from_view_model(view_model) else: - # Not a project but still allow - show with hint about git - items.append( - ListItem( - label="• Current directory", - description=f"{cwd_name} (no git)", - value=WorkspaceSource.CURRENT_DIR, - ) + # Fallback to original logic for backwards compatibility + # (when called without view_model from legacy code paths) + source_options = build_workspace_source_options( + has_team_repos=has_team_repos, + include_current_dir=include_current_dir, ) - # Add standard options - items.append( - ListItem( - label="• Recent workspaces", - description="Continue working on previous project", - value=WorkspaceSource.RECENT, - ) - ) - - if has_team_repos: + for option in source_options: items.append( ListItem( - label="• Team repositories", - description="Choose from team's common repos", - value=WorkspaceSource.TEAM_REPOS, + label=option.label, + description=option.description, + value=option.source, ) ) - items.extend( - [ - ListItem( - label="• Enter path", - description="Specify a local directory path", - value=WorkspaceSource.CUSTOM, - ), - ListItem( - label="• Clone repository", - description="Clone a Git repository", - value=WorkspaceSource.CLONE, - ), - ] - ) 
- if allow_back: - return _run_single_select_picker( + result = _run_single_select_picker( items=items, title="Where is your project?", - subtitle=subtitle, + subtitle=resolved_subtitle, standalone=standalone, allow_back=True, context_label=resolved_context_label, ) - return _run_single_select_picker( - items=items, - title="Where is your project?", - subtitle=subtitle, - standalone=standalone, - allow_back=False, - context_label=resolved_context_label, - ) + else: + result = _run_single_select_picker( + items=items, + title="Where is your project?", + subtitle=resolved_subtitle, + standalone=standalone, + allow_back=False, + context_label=resolved_context_label, + ) + + if result is BACK: + return BACK + if result is None: + return None + if isinstance(result, WorkspaceSource): + return result + return None # ═══════════════════════════════════════════════════════════════════════════════ @@ -429,10 +750,11 @@ def pick_workspace_source( def pick_recent_workspace( - recent: list[dict[str, Any]], + recent: list[SessionSummary], *, standalone: bool = False, context_label: str | None = None, + options: list[WorkspaceSummary] | None = None, ) -> str | _BackSentinel | None: """Show picker for recent workspace selection. @@ -442,9 +764,10 @@ def pick_recent_workspace( - None: User pressed q (quit app entirely) Args: - recent: List of recent session dicts with 'workspace' and 'last_used' keys. + recent: List of recent session summaries with workspace and last_used fields. standalone: If True, dim the "t teams" hint (not available without org). context_label: Optional context label (e.g., "Team: platform") shown in header. + options: Optional prebuilt workspace summaries to render. Returns: Selected workspace path, BACK if Esc pressed, or None if q pressed (quit). 
@@ -458,16 +781,26 @@ def pick_recent_workspace( ), ] - # Add recent workspaces - for session in recent: - workspace = session.get("workspace", "") - last_used = session.get("last_used", "") + summaries = options or [] + if not summaries: + for session in recent: + workspace = session.workspace + last_used = session.last_used or "" + summaries.append( + WorkspaceSummary( + label=_normalize_path(workspace), + description=_format_relative_time(last_used), + workspace=workspace, + ) + ) + # Add recent workspaces + for summary in summaries: items.append( ListItem( - label=_normalize_path(workspace), - description=_format_relative_time(last_used), - value=workspace, # Full path as value + label=summary.label, + description=summary.description, + value=summary.workspace, ) ) @@ -497,6 +830,7 @@ def pick_team_repo( *, standalone: bool = False, context_label: str | None = None, + options: list[TeamRepoOption] | None = None, ) -> str | _BackSentinel | None: """Show picker for team repository selection. @@ -513,12 +847,13 @@ def pick_team_repo( workspace_base: Base directory for cloning new repos. standalone: If True, dim the "t teams" hint (not available without org). context_label: Optional context label (e.g., "Team: platform") shown in header. + options: Optional prebuilt repo options to render. Returns: Workspace path (existing or newly cloned), BACK if Esc pressed, or None if q pressed. 
""" # Build items with "← Back" first - items: list[ListItem[dict[str, Any] | _BackSentinel]] = [ + items: list[ListItem[TeamRepoOption | _BackSentinel]] = [ ListItem( label="← Back", description="", @@ -526,16 +861,25 @@ def pick_team_repo( ), ] - # Add team repos - for repo in repos: - name = repo.get("name", repo.get("url", "Unknown")) - description = repo.get("description", "") + resolved_options: list[TeamRepoOption] = list(options) if options is not None else [] + if not resolved_options: + for repo in repos: + resolved_options.append( + TeamRepoOption( + name=repo.get("name", repo.get("url", "Unknown")), + description=repo.get("description", ""), + url=repo.get("url"), + local_path=repo.get("local_path"), + ) + ) + # Add team repos + for repo_option in resolved_options: items.append( ListItem( - label=name, - description=description, - value=repo, # Full repo dict as value + label=repo_option.name, + description=repo_option.description, + value=repo_option, ) ) @@ -561,20 +905,22 @@ def pick_team_repo( if result is BACK: return BACK + # Need to clone - import here to avoid circular imports + from .git_interactive import clone_repo + + clone_handler = clone_repo + # Handle repo selection - check for existing local path or clone - if isinstance(result, dict): - local_path = result.get("local_path") + if isinstance(result, TeamRepoOption): + local_path = result.local_path if local_path: expanded = Path(local_path).expanduser() if expanded.exists(): return str(expanded) - # Need to clone - import here to avoid circular imports - from .git_interactive import clone_repo - - repo_url = result.get("url", "") + repo_url = result.url or "" if repo_url: - cloned_path = clone_repo(repo_url, workspace_base) + cloned_path = clone_handler(repo_url, workspace_base) if cloned_path: return cloned_path diff --git a/src/scc_cli/update.py b/src/scc_cli/update.py index f9567cb..329fa42 100644 --- a/src/scc_cli/update.py +++ b/src/scc_cli/update.py @@ -29,6 +29,8 @@ from 
rich.console import Console from rich.panel import Panel +from scc_cli.core.enums import OrgConfigUpdateStatus + if TYPE_CHECKING: pass @@ -71,7 +73,7 @@ class UpdateInfo: class OrgConfigUpdateResult: """Result of org config update check.""" - status: str # 'updated', 'unchanged', 'offline', 'auth_failed', 'no_cache', 'standalone' + status: OrgConfigUpdateStatus message: str | None = None cached_age_hours: float | None = None @@ -410,16 +412,16 @@ def check_org_config_update( # Standalone mode - no org config to update if user_config.get("standalone"): - return OrgConfigUpdateResult(status="standalone") + return OrgConfigUpdateResult(status=OrgConfigUpdateStatus.STANDALONE) # No organization source configured org_source = user_config.get("organization_source") if not org_source: - return OrgConfigUpdateResult(status="standalone") + return OrgConfigUpdateResult(status=OrgConfigUpdateStatus.STANDALONE) url = org_source.get("url") if not url: - return OrgConfigUpdateResult(status="standalone") + return OrgConfigUpdateResult(status=OrgConfigUpdateStatus.STANDALONE) auth_spec = org_source.get("auth") auth_header = org_source.get("auth_header") @@ -427,7 +429,7 @@ def check_org_config_update( # Check throttle (unless forced) if not force and not _should_check_org_config(): # Return early - too soon to check - return OrgConfigUpdateResult(status="throttled") + return OrgConfigUpdateResult(status=OrgConfigUpdateStatus.THROTTLED) # Try to load existing cache cached_config, meta = remote.load_from_cache() @@ -461,10 +463,10 @@ def check_org_config_update( _mark_org_config_check_done() if cached_config: return OrgConfigUpdateResult( - status="offline", + status=OrgConfigUpdateStatus.OFFLINE, cached_age_hours=cached_age_hours, ) - return OrgConfigUpdateResult(status="no_cache") + return OrgConfigUpdateResult(status=OrgConfigUpdateStatus.NO_CACHE) # Mark check as done _mark_org_config_check_done() @@ -472,7 +474,7 @@ def check_org_config_update( # 304 Not Modified - cache is 
current if status == 304: return OrgConfigUpdateResult( - status="unchanged", + status=OrgConfigUpdateStatus.UNCHANGED, cached_age_hours=cached_age_hours, ) @@ -482,7 +484,7 @@ def check_org_config_update( ttl_hours = config.get("defaults", {}).get("cache_ttl_hours", 24) remote.save_to_cache(config, url, new_etag, ttl_hours) return OrgConfigUpdateResult( - status="updated", + status=OrgConfigUpdateStatus.UPDATED, message="Organization config updated from remote", ) @@ -490,23 +492,23 @@ def check_org_config_update( if status in (401, 403): if cached_config: return OrgConfigUpdateResult( - status="auth_failed", + status=OrgConfigUpdateStatus.AUTH_FAILED, message="Auth failed for org config, using cached version", cached_age_hours=cached_age_hours, ) return OrgConfigUpdateResult( - status="auth_failed", + status=OrgConfigUpdateStatus.AUTH_FAILED, message="Auth failed and no cached config available", ) # Other errors - use cache if available if cached_config: return OrgConfigUpdateResult( - status="offline", + status=OrgConfigUpdateStatus.OFFLINE, cached_age_hours=cached_age_hours, ) - return OrgConfigUpdateResult(status="no_cache") + return OrgConfigUpdateResult(status=OrgConfigUpdateStatus.NO_CACHE) # ═══════════════════════════════════════════════════════════════════════════════ diff --git a/src/scc_cli/validate.py b/src/scc_cli/validate.py index d8d74ff..c0fdd29 100644 --- a/src/scc_cli/validate.py +++ b/src/scc_cli/validate.py @@ -17,11 +17,12 @@ import json from dataclasses import dataclass, field from importlib.resources import files -from typing import TYPE_CHECKING, Any, Literal, cast +from typing import TYPE_CHECKING, Any, cast from jsonschema import Draft7Validator from .core.constants import CLI_VERSION, CURRENT_SCHEMA_VERSION +from .core.enums import SeverityLevel if TYPE_CHECKING: pass @@ -39,12 +40,12 @@ class InvariantViolation: Attributes: rule: The invariant rule that was violated (e.g., "enabled_must_be_allowed"). 
message: Human-readable description of the violation. - severity: "error" for hard failures, "warning" for advisory. + severity: SeverityLevel.ERROR for hard failures, SeverityLevel.WARNING for advisory. """ rule: str message: str - severity: Literal["error", "warning"] + severity: SeverityLevel # ═══════════════════════════════════════════════════════════════════════════════ @@ -324,7 +325,7 @@ def normalize_plugin_safe(ref: str, context: str) -> str | None: InvariantViolation( rule="invalid_plugin_reference", message=f"{context} plugin '{ref}' is invalid: {exc}", - severity="error", + severity=SeverityLevel.ERROR, ) ) return None @@ -377,7 +378,7 @@ def mcp_candidates(server: dict[str, Any]) -> list[str]: message=( f"Team '{team_name}' plugin '{normalized}' is not allowed by defaults.allowed_plugins" ), - severity="error", + severity=SeverityLevel.ERROR, ) ) @@ -389,7 +390,7 @@ def mcp_candidates(server: dict[str, Any]) -> list[str]: message=( f"Team '{team_name}' plugin '{normalized}' matches blocked pattern '{pattern}'" ), - severity="error", + severity=SeverityLevel.ERROR, ) ) break @@ -407,7 +408,7 @@ def mcp_candidates(server: dict[str, Any]) -> list[str]: message=( f"Default plugin '{normalized}' matches blocked pattern '{pattern}'" ), - severity="error", + severity=SeverityLevel.ERROR, ) ) break @@ -423,7 +424,7 @@ def mcp_candidates(server: dict[str, Any]) -> list[str]: InvariantViolation( rule="mcp_missing_identifier", message=f"Team '{team_name}' MCP server entry is missing identifiers", - severity="error", + severity=SeverityLevel.ERROR, ) ) continue @@ -437,7 +438,7 @@ def mcp_candidates(server: dict[str, Any]) -> list[str]: message=( f"Team '{team_name}' MCP server '{candidates[0]}' is not allowed by {allowed_desc}" ), - severity="error", + severity=SeverityLevel.ERROR, ) ) @@ -449,7 +450,7 @@ def mcp_candidates(server: dict[str, Any]) -> list[str]: message=( f"Team '{team_name}' MCP server '{candidates[0]}' matches blocked pattern '{pattern}'" ), - 
severity="error", + severity=SeverityLevel.ERROR, ) ) break diff --git a/tasks.yaml b/tasks.yaml new file mode 100644 index 0000000..d0a5683 --- /dev/null +++ b/tasks.yaml @@ -0,0 +1,216 @@ +tasks: + # ============================================================================ + # Phase 3 Cleanup — MUST FIX FIRST (Single consolidated task) + # These fixes are interdependent - do ALL before running validation. + # ============================================================================ + + - title: "Phase 3 Cleanup: Fix ALL lint/test issues - (1) Fix ruff import sorting in src/scc_cli/application/worktree/__init__.py and tests/test_worktree_use_cases.py, (2) Replace typing.Iterable/Sequence with collections.abc in src/scc_cli/application/worktree/use_cases.py, (3) Remove unused 'dependencies, _ = worktree_command_dependencies' in tests/test_worktree_cli.py, (4) Fix test_start_with_install_deps in tests/test_cli.py and tests/test_integration.py by patching _build_worktree_dependencies correctly. Run full validation (uv run ruff format && uv run ruff check && uv run mypy src/scc_cli && uv run pytest) and fix any failures until ALL pass." + completed: true + + # ============================================================================ + # Phase 4 — Launch Flow Split + # Outcome: launch flow is not a monolith; wizard is a state machine; output is model-driven; JSON mapping is centralized at edges. 
+ # ============================================================================ + + - title: "Phase 4: Create application/launch/ package with SelectSession use case" + completed: true + + - title: "Phase 4: Create application/launch/ package with StartSessionWizard state machine (explicit states)" + completed: true + + - title: "Phase 4: Create application/launch/ package with PrepareLaunchPlan use case" + completed: true + + - title: "Phase 4: Create application/launch/ package with ApplyPersonalProfile use case" + completed: true + + - title: "Phase 4: Create application/launch/ package with FinalizeLaunch use case" + completed: true + + - title: "Phase 4: Introduce LaunchOutput view model (structured info/warn/success events)" + completed: true + + - title: "Phase 4: Convert wizard prompts to InteractionRequests; UI renders them" + completed: false + + - title: "Phase 4: Keep JSON mapping in presentation/json/launch_json.py (edge-only)" + completed: false + + - title: "Phase 4: Reduce commands/launch/flow.py to orchestration glue only - parse args" + completed: false + + - title: "Phase 4: Reduce commands/launch/flow.py to orchestration glue only - call use cases" + completed: false + + - title: "Phase 4: Reduce commands/launch/flow.py to orchestration glue only - map output (human/JSON)" + completed: false + + - title: "Phase 4: Reduce commands/launch/flow.py to orchestration glue only - execute side effects through ports" + completed: false + + - title: "Phase 4: Unit tests for wizard state transitions without Rich UI" + completed: false + + - title: "Phase 4: Unit tests for session selection/resume logic" + completed: false + + - title: "Phase 4: Unit tests for personal profile overlay decisions" + completed: false + + - title: "Phase 4: Characterization tests for quick resume flows" + completed: false + + - title: "Phase 4: Characterization tests for cross-team resume confirmation labels/hotkeys" + completed: false + + - title: "Phase 4: Characterization 
tests for start --dry-run unchanged output" + completed: false + + - title: "Phase 4: Characterization tests for JSON envelope unchanged" + completed: false + + # ============================================================================ + # Phase 5 — Support Bundle Use Case + # Outcome: support bundle generation is deterministic and testable; doctor is injected; archive writing is an adapter; commands only handle flags/output. + # ============================================================================ + + - title: "Phase 5: Create application/support_bundle.py use case" + completed: false + + - title: "Phase 5: Introduce DoctorRunner port" + completed: false + + - title: "Phase 5: Introduce ArchiveWriter port (zip writer adapter)" + completed: false + + - title: "Phase 5: Reuse Filesystem and Clock ports in support bundle use case" + completed: false + + - title: "Phase 5: Extract redaction logic into pure helpers - redact secrets" + completed: false + + - title: "Phase 5: Extract redaction logic into pure helpers - redact paths" + completed: false + + - title: "Phase 5: Update support command - call use case" + completed: false + + - title: "Phase 5: Update support command - map JSON via presentation/json/support_json.py" + completed: false + + - title: "Phase 5: Update support command - print human output only at edge" + completed: false + + - title: "Phase 5: Unit tests for redaction rules" + completed: false + + - title: "Phase 5: Unit tests for doctor failure handling" + completed: false + + - title: "Phase 5: ZIP assertions via in-memory archive writer or temp dir + zipfile inspection" + completed: false + + # ============================================================================ + # Phase 6 — Typed Config Models + # Outcome: application layer no longer consumes raw dict[str, Any] config; schema drift risk decreases; merge/policy logic is safer and clearer. 
+ # ============================================================================ + + - title: "Phase 6: Define minimal normalized typed config model - NormalizedOrgConfig" + completed: false + + - title: "Phase 6: Define minimal normalized typed config model - NormalizedTeamConfig" + completed: false + + - title: "Phase 6: Define minimal normalized typed config model - NormalizedProjectConfig (if needed)" + completed: false + + - title: "Phase 6: Parse/validate once at config load edges (adapters/services), then pass normalized models inward" + completed: false + + - title: "Phase 6: Replace dict access in profiles/effective config computation with typed models" + completed: false + + - title: "Phase 6: Replace dict access in dashboard loading paths (if applicable) with typed models" + completed: false + + - title: "Phase 6: Replace dict access in launch/worktree flows where config is consumed with typed models" + completed: false + + - title: "Phase 6: Add tests for normalization/mapping + invariant preservation" + completed: false + + # ============================================================================ + # Phase 7 — Cleanup and Polish + # Outcome: consistency improvements, removal of remaining duplication, and long-term cleanliness guardrails. + # ============================================================================ + + - title: "Phase 7: Consolidate time formatting in ui/time_format.py and reuse it everywhere" + completed: false + + - title: "Phase 7: Replace magic strings (__all__, etc.) 
with enums/constants" + completed: false + + - title: "Phase 7: Add ConfigStore port for user/org config access (reduce direct config reads in commands)" + completed: false + + - title: "Phase 7: Tighten optional dependency import discipline - external libs import only in adapters" + completed: false + + - title: "Phase 7: Tighten optional dependency import discipline - no side effects at import time" + completed: false + + - title: "Phase 7: Expand boundary/invariant tests if new layers are introduced" + completed: false + + - title: "Phase 7: Add minimal internal docs updates - module boundaries" + completed: false + + - title: "Phase 7: Add minimal internal docs updates - how to add a new agent backend" + completed: false + + - title: "Phase 7: Add minimal internal docs updates - how to add a new runtime backend" + completed: false + + - title: "Phase 7: Optional - add complexity budget guardrails (manual checklist or automated) to prevent reintroducing god files" + completed: false + + # ============================================================================ + # Final Verification — Definition of 10/10 Maintainability + # Mark these as you achieve them to confirm 10/10 maintainability. 
+ # ============================================================================ + + - title: "Final: Verify commands are thin wrappers (no business logic)" + completed: false + + - title: "Final: Verify application use cases are testable with fakes (no docker/git/network)" + completed: false + + - title: "Final: Verify IO only in adapters or CLI/UI edges" + completed: false + + - title: "Final: Verify workspace resolution is unified (one authoritative entry point)" + completed: false + + - title: "Final: Verify sessions persistence is typed and port-based" + completed: false + + - title: "Final: Verify worktree flows are use-case driven and deterministic" + completed: false + + - title: "Final: Verify launch wizard is a state machine with explicit transitions" + completed: false + + - title: "Final: Verify InteractionRequest schema is stable and tested for labels/hotkeys/BACK" + completed: false + + - title: "Final: Verify JSON mapping is centralized in presentation/json and stays at edges" + completed: false + + - title: "Final: Verify error strategy is consistent - use cases raise SCCError, edges map" + completed: false + + - title: "Final: Verify architectural invariants + filesystem contracts are enforced by tests" + completed: false + + - title: "Final: Optional - verify complexity budget prevents reintroducing mega-files" + completed: false diff --git a/tests/conftest.py b/tests/conftest.py index 1d6f58c..076ef09 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,10 +3,14 @@ import os import tempfile from pathlib import Path +from types import SimpleNamespace from unittest.mock import MagicMock, patch import pytest +from scc_cli.application.worktree import WorktreeDependencies +from scc_cli.ports.dependency_installer import DependencyInstallResult + # ═══════════════════════════════════════════════════════════════════════════════ # Path Fixtures # ═══════════════════════════════════════════════════════════════════════════════ @@ -192,3 +196,28 @@ 
def app(): from scc_cli.cli import app return app + + +@pytest.fixture +def worktree_dependencies(): + """Build mock worktree dependencies for CLI tests.""" + git_client = MagicMock() + dependency_installer = MagicMock() + dependency_installer.install.return_value = DependencyInstallResult( + attempted=False, + success=False, + ) + dependencies = WorktreeDependencies( + git_client=git_client, + dependency_installer=dependency_installer, + ) + adapters = SimpleNamespace( + filesystem=MagicMock(), + remote_fetcher=MagicMock(), + clock=MagicMock(), + git_client=git_client, + agent_runner=MagicMock(), + sandbox_runtime=MagicMock(), + dependency_installer=dependency_installer, + ) + return dependencies, adapters diff --git a/tests/fakes/__init__.py b/tests/fakes/__init__.py index 656d947..9dd8e79 100644 --- a/tests/fakes/__init__.py +++ b/tests/fakes/__init__.py @@ -2,10 +2,15 @@ from __future__ import annotations +from scc_cli.adapters.local_config_store import LocalConfigStore +from scc_cli.adapters.local_dependency_installer import LocalDependencyInstaller +from scc_cli.adapters.local_doctor_runner import LocalDoctorRunner from scc_cli.adapters.local_filesystem import LocalFilesystem from scc_cli.adapters.local_git_client import LocalGitClient +from scc_cli.adapters.personal_profile_service_local import LocalPersonalProfileService from scc_cli.adapters.requests_fetcher import RequestsFetcher from scc_cli.adapters.system_clock import SystemClock +from scc_cli.adapters.zip_archive_writer import ZipArchiveWriter from scc_cli.bootstrap import DefaultAdapters from tests.fakes.fake_agent_runner import FakeAgentRunner from tests.fakes.fake_sandbox_runtime import FakeSandboxRuntime @@ -16,8 +21,13 @@ def build_fake_adapters() -> DefaultAdapters: return DefaultAdapters( filesystem=LocalFilesystem(), git_client=LocalGitClient(), + dependency_installer=LocalDependencyInstaller(), remote_fetcher=RequestsFetcher(), clock=SystemClock(), agent_runner=FakeAgentRunner(), 
sandbox_runtime=FakeSandboxRuntime(), + personal_profile_service=LocalPersonalProfileService(), + doctor_runner=LocalDoctorRunner(), + archive_writer=ZipArchiveWriter(), + config_store=LocalConfigStore(), ) diff --git a/tests/test_application_start_session.py b/tests/test_application_start_session.py index 3d64430..8e899db 100644 --- a/tests/test_application_start_session.py +++ b/tests/test_application_start_session.py @@ -11,6 +11,7 @@ start_session, ) from scc_cli.application.sync_marketplace import SyncError, SyncResult +from scc_cli.application.workspace import WorkspaceContext from scc_cli.core.constants import AGENT_CONFIG_DIR, SANDBOX_IMAGE from scc_cli.core.workspace import ResolverResult from scc_cli.ports.models import MountSpec, SandboxSpec @@ -100,8 +101,8 @@ def test_prepare_start_session_builds_plan_with_sync_result(tmp_path: Path) -> N with ( patch( - "scc_cli.application.start_session.resolve_launch_context", - return_value=resolver_result, + "scc_cli.application.start_session.resolve_workspace", + return_value=WorkspaceContext(resolver_result), ), patch( "scc_cli.application.start_session.sync_marketplace_settings", @@ -149,8 +150,8 @@ def test_prepare_start_session_captures_sync_error(tmp_path: Path) -> None: with ( patch( - "scc_cli.application.start_session.resolve_launch_context", - return_value=resolver_result, + "scc_cli.application.start_session.resolve_workspace", + return_value=WorkspaceContext(resolver_result), ), patch( "scc_cli.application.start_session.sync_marketplace_settings", diff --git a/tests/test_architecture_invariants.py b/tests/test_architecture_invariants.py new file mode 100644 index 0000000..d976c7a --- /dev/null +++ b/tests/test_architecture_invariants.py @@ -0,0 +1,91 @@ +"""Architecture guardrail tests for application boundaries.""" + +from __future__ import annotations + +import ast +from pathlib import Path + +APPLICATION_ROOT = Path(__file__).resolve().parents[1] / "src" / "scc_cli" / "application" + 
+FORBIDDEN_EXTERNAL_MODULES = { + "rich", + "typer", + "subprocess", + "zipfile", + "requests", + "httpx", +} + +FORBIDDEN_INTERNAL_PREFIXES = ( + "scc_cli.ui", + "scc_cli.commands", +) + +DIRECT_IO_METHODS = { + "read_text", + "write_text", +} + + +def _iter_application_files() -> list[Path]: + return sorted(APPLICATION_ROOT.rglob("*.py")) + + +def _is_forbidden_module(module: str) -> bool: + base = module.split(".")[0] + if base in FORBIDDEN_EXTERNAL_MODULES: + return True + return module.startswith(FORBIDDEN_INTERNAL_PREFIXES) + + +def _attribute_chain(node: ast.Attribute) -> list[str]: + parts: list[str] = [] + current: ast.AST = node + while isinstance(current, ast.Attribute): + parts.append(current.attr) + current = current.value + if isinstance(current, ast.Name): + parts.append(current.id) + return list(reversed(parts)) + + +def test_application_forbidden_imports() -> None: + """Application modules do not import forbidden modules.""" + violations: list[str] = [] + + for path in _iter_application_files(): + tree = ast.parse(path.read_text(encoding="utf-8"), filename=str(path)) + for node in ast.walk(tree): + if isinstance(node, ast.Import): + for alias in node.names: + if _is_forbidden_module(alias.name): + violations.append(f"{path}: import {alias.name}") + elif isinstance(node, ast.ImportFrom): + module = node.module or "" + if node.level > 0 and module.startswith(("ui", "commands")): + violations.append(f"{path}: from {'.' 
* node.level}{module} import ...") + continue + if module and _is_forbidden_module(module): + violations.append(f"{path}: from {module} import ...") + + assert not violations, "\n".join(violations) + + +def test_application_no_direct_io_calls() -> None: + """Application modules avoid direct filesystem IO and prints.""" + violations: list[str] = [] + + for path in _iter_application_files(): + tree = ast.parse(path.read_text(encoding="utf-8"), filename=str(path)) + for node in ast.walk(tree): + if isinstance(node, ast.Call) and isinstance(node.func, ast.Name): + if node.func.id == "print": + violations.append(f"{path}: print call") + if isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute): + chain = _attribute_chain(node.func) + if node.func.attr in DIRECT_IO_METHODS and "filesystem" not in chain: + violations.append(f"{path}: {'.'.join(chain)}") + if node.func.attr == "print" and {"console", "err_console"} & set(chain): + violations.append(f"{path}: {'.'.join(chain)}") + + assert not violations, "\n".join(violations) diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 4486613..ce32cc9 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -4,8 +4,10 @@ from scc_cli.adapters.claude_agent_runner import ClaudeAgentRunner from scc_cli.adapters.docker_sandbox_runtime import DockerSandboxRuntime +from scc_cli.adapters.local_dependency_installer import LocalDependencyInstaller from scc_cli.adapters.local_filesystem import LocalFilesystem from scc_cli.adapters.local_git_client import LocalGitClient +from scc_cli.adapters.personal_profile_service_local import LocalPersonalProfileService from scc_cli.adapters.requests_fetcher import RequestsFetcher from scc_cli.adapters.system_clock import SystemClock from scc_cli.bootstrap import DefaultAdapters, get_default_adapters @@ -17,7 +19,9 @@ def test_get_default_adapters_returns_expected_types() -> None: assert isinstance(adapters, DefaultAdapters) assert 
isinstance(adapters.filesystem, LocalFilesystem) assert isinstance(adapters.git_client, LocalGitClient) + assert isinstance(adapters.dependency_installer, LocalDependencyInstaller) assert isinstance(adapters.remote_fetcher, RequestsFetcher) assert isinstance(adapters.clock, SystemClock) assert isinstance(adapters.agent_runner, ClaudeAgentRunner) assert isinstance(adapters.sandbox_runtime, DockerSandboxRuntime) + assert isinstance(adapters.personal_profile_service, LocalPersonalProfileService) diff --git a/tests/test_cli.py b/tests/test_cli.py index c205102..97e3990 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -24,6 +24,8 @@ SandboxNotAvailableError, ) from scc_cli.core.exit_codes import EXIT_USAGE +from scc_cli.ports.dependency_installer import DependencyInstallResult +from scc_cli.ports.session_models import SessionSummary from tests.fakes import build_fake_adapters runner = CliRunner() @@ -127,23 +129,48 @@ class TestStartCommand: def test_start_with_install_deps_runs_dependency_install(self, tmp_path): """Should install dependencies when --install-deps flag set.""" + from scc_cli.bootstrap import DefaultAdapters + # Create a workspace with package.json (tmp_path / "package.json").write_text("{}") + dependency_installer = MagicMock() + dependency_installer.install.return_value = DependencyInstallResult( + attempted=True, + success=True, + package_manager="npm", + ) + base_adapters = build_fake_adapters() + adapters = DefaultAdapters( + filesystem=base_adapters.filesystem, + git_client=base_adapters.git_client, + dependency_installer=dependency_installer, + remote_fetcher=base_adapters.remote_fetcher, + clock=base_adapters.clock, + agent_runner=base_adapters.agent_runner, + sandbox_runtime=base_adapters.sandbox_runtime, + personal_profile_service=base_adapters.personal_profile_service, + doctor_runner=base_adapters.doctor_runner, + archive_writer=base_adapters.archive_writer, + config_store=base_adapters.config_store, + ) + with ( 
patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), patch( "scc_cli.commands.launch.flow.get_default_adapters", - return_value=build_fake_adapters(), + return_value=adapters, + ), + patch( + "scc_cli.commands.launch.workspace.get_default_adapters", + return_value=adapters, ), patch("scc_cli.commands.launch.workspace.check_branch_safety"), - patch("scc_cli.commands.launch.workspace.deps.auto_install_dependencies") as mock_deps, ): - mock_deps.return_value = True runner.invoke(app, ["start", str(tmp_path), "--install-deps"]) - # Should have called auto_install_dependencies - mock_deps.assert_called_once() + # Should have called dependency installer + dependency_installer.install.assert_called_once() def test_start_with_offline_uses_cache_only(self, tmp_path): """Should use cached config only when --offline flag set.""" @@ -192,24 +219,40 @@ def test_start_with_standalone_skips_org_config(self, tmp_path): class TestWorktreeCommand: """Tests for worktree command with new options.""" - def test_worktree_with_install_deps_installs_after_create(self, tmp_path): + def test_worktree_with_install_deps_installs_after_create( + self, tmp_path, worktree_dependencies + ): """Should install dependencies after worktree creation.""" + from scc_cli.application.worktree import WorktreeCreateResult + worktree_path = tmp_path / "worktree" worktree_path.mkdir() + dependencies, adapters = worktree_dependencies + dependencies.git_client.is_git_repo.return_value = True + dependencies.git_client.has_commits.return_value = True + dependencies.dependency_installer.install.return_value = DependencyInstallResult( + attempted=True, + success=True, + package_manager="uv", + ) with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.git.has_commits", return_value=True), patch( - 
"scc_cli.commands.worktree.worktree_commands.create_worktree", - return_value=worktree_path, + "scc_cli.commands.worktree.worktree_commands._build_worktree_dependencies", + return_value=(dependencies, adapters), ), patch( - "scc_cli.commands.worktree.worktree_commands.deps.auto_install_dependencies" - ) as mock_deps, + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.create_worktree", + return_value=WorktreeCreateResult( + worktree_path=worktree_path, + worktree_name="feature-x", + branch_name="scc/feature-x", + base_branch="main", + dependencies_installed=True, + ), + ), patch("rich.prompt.Confirm.ask", return_value=False), # Don't start claude ): - mock_deps.return_value = True # CLI structure: scc worktree [group-workspace] create # The "." is needed as explicit group workspace so Typer knows "create" is the subcommand runner.invoke( @@ -224,7 +267,8 @@ def test_worktree_with_install_deps_installs_after_create(self, tmp_path): "--no-start", ], ) - mock_deps.assert_called_once_with(worktree_path) + + dependencies.dependency_installer.install.assert_called_once_with(worktree_path) # ═══════════════════════════════════════════════════════════════════════════════ @@ -238,12 +282,14 @@ class TestSessionsCommand: def test_sessions_shows_recent_sessions(self): """Should list recent sessions.""" mock_sessions = [ - { - "name": "session1", - "workspace": "/tmp/proj1", - "last_used": "2025-01-01", - "team": "dev", - }, + SessionSummary( + name="session1", + workspace="/tmp/proj1", + team="dev", + last_used="2025-01-01", + container_name=None, + branch=None, + ), ] with patch( "scc_cli.commands.worktree.session_commands.sessions.list_recent", @@ -257,8 +303,22 @@ def test_sessions_shows_recent_sessions(self): def test_sessions_interactive_picker_when_select_flag(self): """Should show interactive picker with --select flag.""" mock_sessions = [ - {"name": "session1", "workspace": "/tmp/proj1"}, - {"name": "session2", "workspace": "/tmp/proj2"}, + 
SessionSummary( + name="session1", + workspace="/tmp/proj1", + team=None, + last_used=None, + container_name=None, + branch=None, + ), + SessionSummary( + name="session2", + workspace="/tmp/proj2", + team=None, + last_used=None, + container_name=None, + branch=None, + ), ] with ( patch( @@ -555,10 +615,13 @@ def test_stop_all_when_no_containers(self): class TestWorktreeCommandErrors: """Tests for worktree command error handling.""" - def test_worktree_not_git_repo_shows_error(self, tmp_path): + def test_worktree_not_git_repo_shows_error(self, tmp_path, worktree_dependencies): """Should show error when not in a git repo.""" + dependencies, adapters = worktree_dependencies + dependencies.git_client.is_git_repo.return_value = False with patch( - "scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=False + "scc_cli.commands.worktree.worktree_commands._build_worktree_dependencies", + return_value=(dependencies, adapters), ): result = runner.invoke(app, ["worktree", "create", str(tmp_path), "feature-x"]) @@ -657,7 +720,14 @@ def test_sessions_empty_list(self): def test_sessions_with_limit(self): """Should respect limit option.""" mock_sessions = [ - {"name": f"session-{i}", "workspace": f"/path/{i}", "timestamp": "2024-01-01"} + SessionSummary( + name=f"session-{i}", + workspace=f"/path/{i}", + team=None, + last_used="2024-01-01", + container_name=None, + branch=None, + ) for i in range(5) ] with patch( @@ -667,4 +737,5 @@ def test_sessions_with_limit(self): result = runner.invoke(app, ["sessions", "-n", "5"]) assert result.exit_code == 0 - mock_list.assert_called_once_with(5) + mock_list.assert_called_once() + assert mock_list.call_args.kwargs["limit"] == 5 diff --git a/tests/test_config_normalization.py b/tests/test_config_normalization.py new file mode 100644 index 0000000..ffc042a --- /dev/null +++ b/tests/test_config_normalization.py @@ -0,0 +1,315 @@ +"""Tests for config normalization. 
+ +Verify that raw dict configs are correctly normalized to typed models. +""" + +from __future__ import annotations + +import pytest + +from scc_cli.adapters.config_normalizer import ( + normalize_org_config, + normalize_project_config, + normalize_user_config, +) + + +class TestNormalizeUserConfig: + """Test user config normalization.""" + + def test_empty_config_returns_defaults(self) -> None: + """Empty dict should return default values.""" + result = normalize_user_config({}) + + assert result.selected_profile is None + assert result.standalone is False + assert result.organization_source is None + assert result.workspace_team_map == {} + assert result.onboarding_seen is False + + def test_selected_profile_preserved(self) -> None: + """Selected profile should be preserved.""" + result = normalize_user_config({"selected_profile": "platform"}) + + assert result.selected_profile == "platform" + + def test_standalone_mode_normalized(self) -> None: + """Standalone flag should be normalized to bool.""" + result = normalize_user_config({"standalone": True}) + + assert result.standalone is True + + def test_organization_source_normalized(self) -> None: + """Organization source should be normalized.""" + raw = { + "organization_source": { + "url": "https://example.com/org.json", + "auth": "token123", + "auth_header": "X-Custom-Auth", + } + } + result = normalize_user_config(raw) + + assert result.organization_source is not None + assert result.organization_source.url == "https://example.com/org.json" + assert result.organization_source.auth == "token123" + assert result.organization_source.auth_header == "X-Custom-Auth" + + def test_workspace_team_map_preserved(self) -> None: + """Workspace team map should be preserved.""" + raw = {"workspace_team_map": {"/path/to/project": "backend"}} + result = normalize_user_config(raw) + + assert result.workspace_team_map == {"/path/to/project": "backend"} + + def test_invalid_workspace_map_becomes_empty(self) -> None: + """Invalid 
workspace map type should become empty dict.""" + result = normalize_user_config({"workspace_team_map": "invalid"}) + + assert result.workspace_team_map == {} + + +class TestNormalizeOrgConfig: + """Test organization config normalization.""" + + def test_minimal_config_returns_defaults(self) -> None: + """Minimal config should return default values for optional fields.""" + result = normalize_org_config({"organization": {"name": "TestOrg"}}) + + assert result.organization.name == "TestOrg" + assert result.security.blocked_plugins == () + assert result.defaults.enabled_plugins == () + assert result.profiles == {} + assert result.marketplaces == {} + + def test_security_config_normalized(self) -> None: + """Security config should be normalized.""" + raw = { + "organization": {"name": "TestOrg"}, + "security": { + "blocked_plugins": ["malicious-*", "bad-plugin"], + "blocked_mcp_servers": ["evil.com"], + "allow_stdio_mcp": True, + "allowed_stdio_prefixes": ["/usr/bin", "/opt"], + }, + } + result = normalize_org_config(raw) + + assert result.security.blocked_plugins == ("malicious-*", "bad-plugin") + assert result.security.blocked_mcp_servers == ("evil.com",) + assert result.security.allow_stdio_mcp is True + assert result.security.allowed_stdio_prefixes == ("/usr/bin", "/opt") + + def test_defaults_config_normalized(self) -> None: + """Defaults config should be normalized.""" + raw = { + "organization": {"name": "TestOrg"}, + "defaults": { + "enabled_plugins": ["plugin-a", "plugin-b"], + "disabled_plugins": ["deprecated-plugin"], + "allowed_plugins": ["plugin-*"], + "network_policy": "restrictive", + "session": {"timeout_hours": 8, "auto_resume": True}, + }, + } + result = normalize_org_config(raw) + + assert result.defaults.enabled_plugins == ("plugin-a", "plugin-b") + assert result.defaults.disabled_plugins == ("deprecated-plugin",) + assert result.defaults.allowed_plugins == ("plugin-*",) + assert result.defaults.network_policy == "restrictive" + assert 
result.defaults.session.timeout_hours == 8 + assert result.defaults.session.auto_resume is True + + def test_allowed_plugins_none_vs_empty(self) -> None: + """None allowed_plugins (no allowlist) differs from empty (deny all).""" + no_allowlist = normalize_org_config({"organization": {"name": "Test"}}) + empty_allowlist = normalize_org_config( + { + "organization": {"name": "Test"}, + "defaults": {"allowed_plugins": []}, + } + ) + + assert no_allowlist.defaults.allowed_plugins is None + assert empty_allowlist.defaults.allowed_plugins == () + + def test_delegation_config_normalized(self) -> None: + """Delegation config should be normalized.""" + raw = { + "organization": {"name": "TestOrg"}, + "delegation": { + "teams": { + "allow_additional_plugins": ["platform-*", "backend-*"], + "allow_additional_mcp_servers": ["platform-*"], + }, + "projects": {"inherit_team_delegation": True}, + }, + } + result = normalize_org_config(raw) + + assert result.delegation.teams.allow_additional_plugins == ("platform-*", "backend-*") + assert result.delegation.teams.allow_additional_mcp_servers == ("platform-*",) + assert result.delegation.projects.inherit_team_delegation is True + + def test_profiles_normalized(self) -> None: + """Team profiles should be normalized.""" + raw = { + "organization": {"name": "TestOrg"}, + "profiles": { + "platform": { + "description": "Platform team", + "plugin": "platform-plugin", + "marketplace": "internal", + "additional_plugins": ["extra-plugin"], + "additional_mcp_servers": [ + {"name": "server1", "type": "sse", "url": "https://example.com"} + ], + "session": {"timeout_hours": 12}, + "delegation": {"allow_project_overrides": True}, + } + }, + } + result = normalize_org_config(raw) + + assert "platform" in result.profiles + profile = result.profiles["platform"] + assert profile.name == "platform" + assert profile.description == "Platform team" + assert profile.plugin == "platform-plugin" + assert profile.marketplace == "internal" + assert 
profile.additional_plugins == ("extra-plugin",) + assert len(profile.additional_mcp_servers) == 1 + assert profile.additional_mcp_servers[0].name == "server1" + assert profile.session.timeout_hours == 12 + assert profile.delegation.allow_project_overrides is True + + def test_marketplaces_normalized(self) -> None: + """Marketplaces should be normalized.""" + raw = { + "organization": {"name": "TestOrg"}, + "marketplaces": { + "internal": { + "source": "github", + "owner": "myorg", + "repo": "plugins", + "branch": "main", + } + }, + } + result = normalize_org_config(raw) + + assert "internal" in result.marketplaces + marketplace = result.marketplaces["internal"] + assert marketplace.name == "internal" + assert marketplace.source == "github" + assert marketplace.owner == "myorg" + assert marketplace.repo == "plugins" + assert marketplace.branch == "main" + + def test_get_profile_returns_profile(self) -> None: + """get_profile should return the requested profile.""" + raw = { + "organization": {"name": "TestOrg"}, + "profiles": {"platform": {"description": "Platform"}}, + } + result = normalize_org_config(raw) + + profile = result.get_profile("platform") + assert profile is not None + assert profile.name == "platform" + + def test_get_profile_returns_none_for_missing(self) -> None: + """get_profile should return None for missing profile.""" + result = normalize_org_config({"organization": {"name": "TestOrg"}}) + + assert result.get_profile("nonexistent") is None + + def test_list_profile_names(self) -> None: + """list_profile_names should return all profile names.""" + raw = { + "organization": {"name": "TestOrg"}, + "profiles": {"platform": {}, "backend": {}, "frontend": {}}, + } + result = normalize_org_config(raw) + + names = result.list_profile_names() + assert set(names) == {"platform", "backend", "frontend"} + + +class TestNormalizeProjectConfig: + """Test project config normalization.""" + + def test_none_input_returns_none(self) -> None: + """None input should 
return None.""" + result = normalize_project_config(None) + + assert result is None + + def test_empty_config_returns_defaults(self) -> None: + """Empty dict should return default values.""" + result = normalize_project_config({}) + + assert result is not None + assert result.additional_plugins == () + assert result.additional_mcp_servers == () + assert result.session.timeout_hours is None + + def test_plugins_normalized(self) -> None: + """Additional plugins should be normalized.""" + raw = {"additional_plugins": ["local-plugin"]} + result = normalize_project_config(raw) + + assert result is not None + assert result.additional_plugins == ("local-plugin",) + + def test_mcp_servers_normalized(self) -> None: + """MCP servers should be normalized.""" + raw = { + "additional_mcp_servers": [ + {"name": "local-mcp", "type": "stdio", "command": "/usr/bin/tool"} + ] + } + result = normalize_project_config(raw) + + assert result is not None + assert len(result.additional_mcp_servers) == 1 + server = result.additional_mcp_servers[0] + assert server.name == "local-mcp" + assert server.type == "stdio" + assert server.command == "/usr/bin/tool" + + def test_session_normalized(self) -> None: + """Session config should be normalized.""" + raw = {"session": {"timeout_hours": 16}} + result = normalize_project_config(raw) + + assert result is not None + assert result.session.timeout_hours == 16 + + +class TestConfigModelImmutability: + """Test that config models are immutable (frozen dataclasses).""" + + def test_user_config_is_frozen(self) -> None: + """NormalizedUserConfig should be immutable.""" + config = normalize_user_config({"selected_profile": "test"}) + + with pytest.raises(AttributeError): + config.selected_profile = "new" # type: ignore[misc] + + def test_org_config_is_frozen(self) -> None: + """NormalizedOrgConfig should be immutable.""" + config = normalize_org_config({"organization": {"name": "Test"}}) + + with pytest.raises(AttributeError): + config.organization = 
None # type: ignore[misc] + + def test_team_config_is_frozen(self) -> None: + """NormalizedTeamConfig should be immutable.""" + raw = {"organization": {"name": "Test"}, "profiles": {"team": {}}} + config = normalize_org_config(raw) + profile = config.profiles["team"] + + with pytest.raises(AttributeError): + profile.name = "new" # type: ignore[misc] diff --git a/tests/test_docker_core.py b/tests/test_docker_core.py index df3f034..1976001 100644 --- a/tests/test_docker_core.py +++ b/tests/test_docker_core.py @@ -338,17 +338,19 @@ def test_default_not_detached(self): # ─────────────────────────────────────────────────────────────────────────── def test_policy_mount_when_path_provided(self, tmp_path): - """When policy_host_path is provided, should include -v mount flag.""" + """When policy_host_path is provided, should include -v mount for parent directory.""" policy_file = tmp_path / "effective_policy.json" policy_file.write_text('{"action": "block"}') cmd = docker.build_command(policy_host_path=policy_file) assert "-v" in cmd - # Find the -v argument and verify it includes the host path + # Find the -v argument and verify it mounts the parent directory (not the file) v_idx = cmd.index("-v") mount_arg = cmd[v_idx + 1] - assert str(policy_file) in mount_arg + # Directory mounting is more reliable for Docker Desktop VirtioFS + assert str(tmp_path) in mount_arg + assert "/mnt/claude-data/policy:ro" in mount_arg def test_policy_env_var_when_path_provided(self, tmp_path): """When policy_host_path is provided, should set SCC_POLICY_PATH env var.""" @@ -361,8 +363,8 @@ def test_policy_env_var_when_path_provided(self, tmp_path): e_idx = cmd.index("-e") env_arg = cmd[e_idx + 1] assert env_arg.startswith("SCC_POLICY_PATH=") - # Should point to container path, not host path - assert "/mnt/claude-data/effective_policy.json" in env_arg + # Should point to container path under the mounted policy directory + assert "/mnt/claude-data/policy/effective_policy.json" in env_arg def 
test_no_policy_mount_when_path_none(self): """When policy_host_path is None, should not include policy mount.""" @@ -399,8 +401,8 @@ def test_policy_path_handles_path_object(self, tmp_path): v_idx = cmd.index("-v") mount_arg = cmd[v_idx + 1] - # Should contain the resolved path string - assert str(policy_file) in mount_arg + # Should contain the parent directory path (directory mounting) + assert str(tmp_path) in mount_arg def test_policy_mount_with_workspace(self, tmp_path): """Policy mount should work alongside workspace mount.""" diff --git a/tests/test_filesystem_contracts.py b/tests/test_filesystem_contracts.py new file mode 100644 index 0000000..9830ecd --- /dev/null +++ b/tests/test_filesystem_contracts.py @@ -0,0 +1,75 @@ +"""Contract tests for the Filesystem port.""" + +from __future__ import annotations + +from pathlib import Path + +from scc_cli.adapters.local_filesystem import LocalFilesystem + + +def test_write_text_creates_parent_directories(tmp_path: Path) -> None: + """write_text creates parents before writing content.""" + filesystem = LocalFilesystem() + target = tmp_path / "nested" / "file.txt" + + filesystem.write_text(target, "hello") + + assert target.exists() + assert target.read_text(encoding="utf-8") == "hello" + + +def test_write_text_atomic_overwrites_content(tmp_path: Path) -> None: + """write_text_atomic overwrites existing content atomically.""" + filesystem = LocalFilesystem() + target = tmp_path / "atomic" / "file.txt" + + filesystem.write_text_atomic(target, "first") + filesystem.write_text_atomic(target, "second") + + assert target.read_text(encoding="utf-8") == "second" + + +def test_write_text_atomic_cleans_temp_files(tmp_path: Path) -> None: + """write_text_atomic does not leave temp files behind.""" + filesystem = LocalFilesystem() + target = tmp_path / "atomic" / "notes.txt" + + filesystem.write_text_atomic(target, "content") + + temp_prefix = f".{target.name}." 
+ temp_files = [path for path in target.parent.iterdir() if path.name.startswith(temp_prefix)] + assert temp_files == [] + + +def test_read_text_utf8_roundtrip(tmp_path: Path) -> None: + """read_text returns UTF-8 content unchanged.""" + filesystem = LocalFilesystem() + target = tmp_path / "utf8.txt" + content = "café ✓" + + filesystem.write_text(target, content) + + assert filesystem.read_text(target) == content + + +def test_newline_roundtrip(tmp_path: Path) -> None: + """Newline content is preserved across write/read.""" + filesystem = LocalFilesystem() + target = tmp_path / "lines.txt" + content = "first\nsecond\n" + + filesystem.write_text(target, content) + + assert filesystem.read_text(target) == content + + +def test_mkdir_parents_and_exist_ok(tmp_path: Path) -> None: + """mkdir honors parents/exist_ok contract.""" + filesystem = LocalFilesystem() + target = tmp_path / "one" / "two" + + filesystem.mkdir(target, parents=True, exist_ok=True) + filesystem.mkdir(target, parents=True, exist_ok=True) + + assert target.exists() + assert target.is_dir() diff --git a/tests/test_function_sizes.py b/tests/test_function_sizes.py new file mode 100644 index 0000000..ab2a34d --- /dev/null +++ b/tests/test_function_sizes.py @@ -0,0 +1,237 @@ +"""Function size guardrail test. + +Enforces function size limits to maintain readability and modularity. +Functions exceeding 300 lines fail; functions between 200-300 lines produce warnings. + +Metric: Physical lines per function using AST line numbers. 
+""" + +from __future__ import annotations + +import ast +import os +from pathlib import Path +from typing import NamedTuple + +import pytest + +# Thresholds +WARNING_THRESHOLD = 200 +FAIL_THRESHOLD = 300 + +# Source directory to scan +SRC_DIR = Path(__file__).parent.parent / "src" / "scc_cli" + +# Exclusion patterns (directories and file patterns to skip) +EXCLUDED_DIRS = { + "tests", + "migrations", + "vendor", + "schemas", + ".venv", + "build", + "dist", + "__pycache__", +} + +EXCLUDED_FILE_PATTERNS = { + "_pb2.py", # protobuf generated +} + + +class FunctionInfo(NamedTuple): + """Information about a function's size.""" + + path: Path + function_name: str + line_count: int + relative_path: str + lineno: int + + +def should_exclude(file_path: Path) -> bool: + """Check if a file should be excluded from size checks.""" + for part in file_path.parts: + if part in EXCLUDED_DIRS: + return True + + file_name = file_path.name + for pattern in EXCLUDED_FILE_PATTERNS: + if file_name.endswith(pattern): + return True + + return False + + +def iter_functions(tree: ast.AST) -> list[tuple[str, int, int]]: + """Yield function name, start line, and end line.""" + functions: list[tuple[str, int, int]] = [] + + for node in ast.walk(tree): + if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + if not hasattr(node, "end_lineno") or node.end_lineno is None: + continue + functions.append((node.name, node.lineno, node.end_lineno)) + + return functions + + +def get_function_sizes(directory: Path) -> list[FunctionInfo]: + """Get all functions in directory with their line counts.""" + functions: list[FunctionInfo] = [] + + if not directory.exists(): + return functions + + for root, _dirs, filenames in os.walk(directory): + root_path = Path(root) + + for filename in filenames: + if not filename.endswith(".py"): + continue + + file_path = root_path / filename + + if should_exclude(file_path): + continue + + try: + source = file_path.read_text(encoding="utf-8", errors="replace") + 
except OSError: + continue + + try: + tree = ast.parse(source) + except SyntaxError: + continue + + for name, start, end in iter_functions(tree): + line_count = end - start + 1 + try: + relative = file_path.relative_to(SRC_DIR.parent) + except ValueError: + relative = file_path + + functions.append( + FunctionInfo( + path=file_path, + function_name=name, + line_count=line_count, + relative_path=str(relative), + lineno=start, + ) + ) + + return functions + + +def categorize_functions( + functions: list[FunctionInfo], +) -> tuple[list[FunctionInfo], list[FunctionInfo], list[FunctionInfo]]: + """Categorize functions by size.""" + ideal_or_acceptable: list[FunctionInfo] = [] + warning_zone: list[FunctionInfo] = [] + failing: list[FunctionInfo] = [] + + for function_info in functions: + if function_info.line_count > FAIL_THRESHOLD: + failing.append(function_info) + elif function_info.line_count >= WARNING_THRESHOLD: + warning_zone.append(function_info) + else: + ideal_or_acceptable.append(function_info) + + return ideal_or_acceptable, warning_zone, failing + + +def format_warning(function_info: FunctionInfo) -> str: + """Format a warning message for a function in the warning zone.""" + return ( + f"WARNING: {function_info.relative_path}:{function_info.lineno} " + f"{function_info.function_name} ({function_info.line_count} lines)\n" + f" Approaching {FAIL_THRESHOLD}-line limit. Please consider splitting.\n" + f" Current threshold: warning at {WARNING_THRESHOLD}, fail at {FAIL_THRESHOLD}" + ) + + +def format_failure(function_info: FunctionInfo) -> str: + """Format a failure message for a function exceeding the limit.""" + return ( + f"FAIL: {function_info.relative_path}:{function_info.lineno} " + f"{function_info.function_name} ({function_info.line_count} lines)\n" + f" Exceeds {FAIL_THRESHOLD}-line limit. 
Must be split.\n" + f" Current threshold: warning at {WARNING_THRESHOLD}, fail at {FAIL_THRESHOLD}" + ) + + +class TestFunctionSizes: + """Test class for function size guardrails.""" + + @pytest.mark.xfail( + reason=( + "Known large functions exceed guardrail (launch flow and org/reset commands). " + "Tracked in maintainability refactor." + ) + ) + def test_function_size_limits(self) -> None: + """Verify all functions in src/scc_cli/ are within size limits.""" + functions = get_function_sizes(SRC_DIR) + + if not functions: + pytest.skip(f"No functions found in {SRC_DIR}") + + ideal_or_acceptable, warning_zone, failing = categorize_functions(functions) + + print("\n" + "=" * 70) + print("FUNCTION SIZE GUARDRAIL REPORT") + print("=" * 70) + + if warning_zone: + print(f"\n{'=' * 70}") + print(f"WARNING ZONE ({WARNING_THRESHOLD}-{FAIL_THRESHOLD} lines)") + print("=" * 70) + for function_info in sorted(warning_zone, key=lambda x: -x.line_count): + print(f"\n{format_warning(function_info)}") + + if failing: + print(f"\n{'=' * 70}") + print(f"FAILURES (>{FAIL_THRESHOLD} lines)") + print("=" * 70) + for function_info in sorted(failing, key=lambda x: -x.line_count): + print(f"\n{format_failure(function_info)}") + + print(f"\n{'=' * 70}") + print("SUMMARY") + print("=" * 70) + print(f" Total functions scanned: {len(functions)}") + print(f" Ideal/Acceptable (<{WARNING_THRESHOLD} lines): {len(ideal_or_acceptable)}") + print(f" Warning zone ({WARNING_THRESHOLD}-{FAIL_THRESHOLD} lines): {len(warning_zone)}") + print(f" Failing (>{FAIL_THRESHOLD} lines): {len(failing)}") + + print(f"\n{'=' * 70}") + print("TOP 5 LARGEST FUNCTIONS") + print("=" * 70) + sorted_functions = sorted(functions, key=lambda x: -x.line_count)[:5] + for i, function_info in enumerate(sorted_functions, 1): + status = ( + "FAIL" + if function_info.line_count > FAIL_THRESHOLD + else "WARN" + if function_info.line_count >= WARNING_THRESHOLD + else "OK" + ) + print( + " " + f"{i}. 
[{status}] {function_info.relative_path}:" + f"{function_info.lineno} {function_info.function_name}: " + f"{function_info.line_count} lines" + ) + + print("=" * 70 + "\n") + + if failing: + failure_messages = [format_failure(f) for f in failing] + pytest.fail( + f"\n{len(failing)} function(s) exceed {FAIL_THRESHOLD} lines:\n\n" + + "\n\n".join(failure_messages) + ) diff --git a/tests/test_integration.py b/tests/test_integration.py index 4f2bdad..d569b1e 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -12,12 +12,14 @@ import json import subprocess from datetime import datetime -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest from typer.testing import CliRunner from scc_cli.cli import app +from scc_cli.ports.dependency_installer import DependencyInstallResult +from scc_cli.ports.session_models import SessionSummary from tests.fakes import build_fake_adapters runner = CliRunner() @@ -324,13 +326,27 @@ def test_sessions_list_shows_recent(self, full_config_environment): ) # Also patch sessions module - with patch("scc_cli.sessions.config.SESSIONS_FILE", sessions_file): + with patch("scc_cli.config.SESSIONS_FILE", sessions_file): with patch( "scc_cli.commands.worktree.session_commands.sessions.list_recent" ) as mock_list: mock_list.return_value = [ - {"name": "session1", "workspace": "/tmp/proj1", "last_used": "1h ago"}, - {"name": "session2", "workspace": "/tmp/proj2", "last_used": "2h ago"}, + SessionSummary( + name="session1", + workspace="/tmp/proj1", + team=None, + last_used="1h ago", + container_name=None, + branch=None, + ), + SessionSummary( + name="session2", + workspace="/tmp/proj2", + team=None, + last_used="2h ago", + container_name=None, + branch=None, + ), ] result = runner.invoke(app, ["sessions"]) @@ -366,17 +382,36 @@ def test_doctor_checks_all_components(self, full_config_environment): class TestWorktreeWorkflow: """Integration tests for worktree creation workflow.""" - def 
test_worktree_creates_branch_and_worktree(self, full_config_environment, git_workspace): + def test_worktree_creates_branch_and_worktree( + self, full_config_environment, git_workspace, worktree_dependencies + ): """Worktree command should create git worktree and branch.""" + from scc_cli.application.worktree import WorktreeCreateResult + + dependencies, adapters = worktree_dependencies + dependencies.git_client.is_git_repo.return_value = True + dependencies.git_client.has_commits.return_value = True + with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.create_worktree") as mock_create, + patch( + "scc_cli.commands.worktree.worktree_commands._build_worktree_dependencies", + return_value=(dependencies, adapters), + ), + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.create_worktree" + ) as mock_create, patch( "scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=False ), # Don't start claude ): worktree_path = git_workspace.parent / "claude" / "feature-x" - mock_create.return_value = worktree_path + mock_create.return_value = WorktreeCreateResult( + worktree_path=worktree_path, + worktree_name="feature-x", + branch_name="scc/feature-x", + base_branch="main", + dependencies_installed=True, + ) # CLI structure: scc worktree [group-workspace] create runner.invoke( @@ -385,24 +420,40 @@ def test_worktree_creates_branch_and_worktree(self, full_config_environment, git mock_create.assert_called_once() - def test_worktree_with_install_deps(self, full_config_environment, git_workspace): + def test_worktree_with_install_deps( + self, full_config_environment, git_workspace, worktree_dependencies + ): """Worktree with --install-deps should install after creation.""" + from scc_cli.application.worktree import WorktreeCreateResult + worktree_path = git_workspace.parent / "claude" / "feature-x" worktree_path.mkdir(parents=True) + 
dependencies, adapters = worktree_dependencies + dependencies.git_client.is_git_repo.return_value = True + dependencies.git_client.has_commits.return_value = True + dependencies.dependency_installer.install.return_value = DependencyInstallResult( + attempted=True, + success=True, + package_manager="uv", + ) with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), patch( - "scc_cli.commands.worktree.worktree_commands.create_worktree", - return_value=worktree_path, + "scc_cli.commands.worktree.worktree_commands._build_worktree_dependencies", + return_value=(dependencies, adapters), ), patch( - "scc_cli.commands.worktree.worktree_commands.deps.auto_install_dependencies" - ) as mock_deps, + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.create_worktree", + return_value=WorktreeCreateResult( + worktree_path=worktree_path, + worktree_name="feature-x", + branch_name="scc/feature-x", + base_branch="main", + dependencies_installed=True, + ), + ), patch("scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=False), ): - mock_deps.return_value = True - # CLI structure: scc worktree [group-workspace] create runner.invoke( app, @@ -417,7 +468,7 @@ def test_worktree_with_install_deps(self, full_config_environment, git_workspace ], ) - mock_deps.assert_called_once_with(worktree_path) + dependencies.dependency_installer.install.assert_called_once_with(worktree_path) # ═══════════════════════════════════════════════════════════════════════════════ @@ -509,10 +560,31 @@ class TestDepsWorkflow: def test_start_with_install_deps(self, full_config_environment, git_workspace): """--install-deps should trigger dependency installation.""" + from scc_cli.bootstrap import DefaultAdapters + # Create package.json to trigger npm detection (git_workspace / "package.json").write_text("{}") - fake_adapters = build_fake_adapters() + dependency_installer = MagicMock() + dependency_installer.install.return_value = 
DependencyInstallResult( + attempted=True, + success=True, + package_manager="npm", + ) + base_adapters = build_fake_adapters() + adapters = DefaultAdapters( + filesystem=base_adapters.filesystem, + git_client=base_adapters.git_client, + dependency_installer=dependency_installer, + remote_fetcher=base_adapters.remote_fetcher, + clock=base_adapters.clock, + agent_runner=base_adapters.agent_runner, + sandbox_runtime=base_adapters.sandbox_runtime, + personal_profile_service=base_adapters.personal_profile_service, + doctor_runner=base_adapters.doctor_runner, + archive_writer=base_adapters.archive_writer, + config_store=base_adapters.config_store, + ) with ( patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), @@ -522,16 +594,17 @@ def test_start_with_install_deps(self, full_config_environment, git_workspace): ), patch( "scc_cli.commands.launch.flow.get_default_adapters", - return_value=fake_adapters, + return_value=adapters, + ), + patch( + "scc_cli.commands.launch.workspace.get_default_adapters", + return_value=adapters, ), patch("scc_cli.commands.launch.workspace.check_branch_safety"), - patch("scc_cli.commands.launch.workspace.deps.auto_install_dependencies") as mock_deps, ): - mock_deps.return_value = True - runner.invoke(app, ["start", str(git_workspace), "--install-deps"]) - mock_deps.assert_called_once() + dependency_installer.install.assert_called_once() # ═══════════════════════════════════════════════════════════════════════════════ @@ -592,9 +665,9 @@ def test_session_record_persists_correctly(self, full_config_environment): # Patch the sessions config path sessions_file = full_config_environment["config_dir"] / "sessions.json" - with patch("scc_cli.sessions.config.SESSIONS_FILE", sessions_file): - # Record a session - sessions._save_sessions([]) # Initialize + with patch("scc_cli.config.SESSIONS_FILE", sessions_file): + store = sessions.get_session_store() + store.save_sessions([]) sessions.record_session( 
workspace="/tmp/test-proj", team="platform", @@ -602,12 +675,11 @@ def test_session_record_persists_correctly(self, full_config_environment): branch="main", ) - # Retrieve it most_recent = sessions.get_most_recent() assert most_recent is not None - assert most_recent["workspace"] == "/tmp/test-proj" - assert most_recent["team"] == "platform" + assert most_recent.workspace == "/tmp/test-proj" + assert most_recent.team == "platform" def test_config_validation_flows_correctly(self, sample_org_config): """Config validation should properly validate org config.""" diff --git a/tests/test_interaction_requests.py b/tests/test_interaction_requests.py new file mode 100644 index 0000000..13abf37 --- /dev/null +++ b/tests/test_interaction_requests.py @@ -0,0 +1,71 @@ +"""Tests for InteractionRequest schema stability.""" + +from __future__ import annotations + +from scc_cli.application.interaction_requests import ( + BACK_ACTION_HOTKEY, + BACK_ACTION_ID, + BACK_ACTION_LABEL, + CANCEL_ACTION_HOTKEY, + CANCEL_ACTION_ID, + CANCEL_ACTION_LABEL, + CONFIRM_ACTION_HOTKEY, + CONFIRM_ACTION_ID, + CONFIRM_ACTION_LABEL, + ConfirmRequest, + InputRequest, + SelectOption, + SelectRequest, +) + + +def test_back_action_constants() -> None: + """Back action metadata stays stable.""" + assert BACK_ACTION_ID == "back" + assert BACK_ACTION_LABEL == "Back" + assert BACK_ACTION_HOTKEY == "esc" + + +def test_confirm_request_defaults() -> None: + """ConfirmRequest defaults preserve IDs, labels, and hotkeys.""" + request = ConfirmRequest(request_id="confirm-delete", prompt="Delete item?") + + assert request.confirm_id == CONFIRM_ACTION_ID + assert request.confirm_label == CONFIRM_ACTION_LABEL + assert request.confirm_hotkey == CONFIRM_ACTION_HOTKEY + assert request.cancel_id == CANCEL_ACTION_ID + assert request.cancel_label == CANCEL_ACTION_LABEL + assert request.cancel_hotkey == CANCEL_ACTION_HOTKEY + assert request.back_id == BACK_ACTION_ID + assert request.back_label == BACK_ACTION_LABEL + assert 
request.back_hotkey == BACK_ACTION_HOTKEY + assert request.allow_back is False + + +def test_select_request_back_metadata() -> None: + """SelectRequest retains back metadata when enabled.""" + option = SelectOption(option_id="alpha", label="Alpha", hotkey="a") + request = SelectRequest( + request_id="select-alpha", + title="Pick", + options=(option,), + allow_back=True, + ) + + assert request.options[0].option_id == "alpha" + assert request.options[0].label == "Alpha" + assert request.options[0].hotkey == "a" + assert request.allow_back is True + assert request.back_id == BACK_ACTION_ID + assert request.back_label == BACK_ACTION_LABEL + assert request.back_hotkey == BACK_ACTION_HOTKEY + + +def test_input_request_back_metadata() -> None: + """InputRequest retains back metadata when enabled.""" + request = InputRequest(request_id="input-name", prompt="Name", allow_back=True) + + assert request.allow_back is True + assert request.back_id == BACK_ACTION_ID + assert request.back_label == BACK_ACTION_LABEL + assert request.back_hotkey == BACK_ACTION_HOTKEY diff --git a/tests/test_maintenance_tasks.py b/tests/test_maintenance_tasks.py index aba12c3..a694220 100644 --- a/tests/test_maintenance_tasks.py +++ b/tests/test_maintenance_tasks.py @@ -22,6 +22,7 @@ ) from scc_cli.maintenance.types import ResetResult, RiskTier from scc_cli.models.exceptions import AllowTargets, Exception, ExceptionFile +from scc_cli.ports.session_models import SessionRecord from scc_cli.stores.exception_store import RepoStore, UserStore @@ -119,26 +120,28 @@ def _fake_remove(container_id: str) -> None: def test_prune_sessions_removes_old_entries(temp_config_dir: Path) -> None: now = datetime.now(timezone.utc) - sessions._save_sessions( + store = sessions.get_session_store() + store.save_sessions( [ - {"workspace": "one", "last_used": (now - timedelta(days=1)).isoformat()}, - {"workspace": "two", "last_used": (now - timedelta(days=60)).isoformat()}, - {"workspace": "three", "last_used": (now - 
timedelta(days=45)).isoformat()}, + SessionRecord(workspace="one", last_used=(now - timedelta(days=1)).isoformat()), + SessionRecord(workspace="two", last_used=(now - timedelta(days=60)).isoformat()), + SessionRecord(workspace="three", last_used=(now - timedelta(days=45)).isoformat()), ] ) result = prune_sessions(older_than_days=30, keep_n=1, dry_run=False) assert result.removed_count == 2 - remaining = sessions._load_sessions() - assert [session["workspace"] for session in remaining] == ["one"] + remaining = store.load_sessions() + assert [session.workspace for session in remaining] == ["one"] def test_delete_all_sessions_creates_backup(temp_config_dir: Path) -> None: - sessions._save_sessions( + store = sessions.get_session_store() + store.save_sessions( [ - {"workspace": "one", "last_used": "2024-01-01T00:00:00+00:00"}, - {"workspace": "two", "last_used": "2024-01-02T00:00:00+00:00"}, + SessionRecord(workspace="one", last_used="2024-01-01T00:00:00+00:00"), + SessionRecord(workspace="two", last_used="2024-01-02T00:00:00+00:00"), ] ) @@ -147,7 +150,7 @@ def test_delete_all_sessions_creates_backup(temp_config_dir: Path) -> None: assert result.removed_count == 2 assert result.backup_path is not None assert result.backup_path.exists() - assert sessions._load_sessions() == [] + assert store.load_sessions() == [] def test_reset_exceptions_resets_user_and_repo(temp_config_dir: Path, tmp_path: Path) -> None: diff --git a/tests/test_no_root_sprawl.py b/tests/test_no_root_sprawl.py index 22fb1e6..fceee36 100644 --- a/tests/test_no_root_sprawl.py +++ b/tests/test_no_root_sprawl.py @@ -42,6 +42,7 @@ "ports", "adapters", "commands", + "presentation", "ui", } diff --git a/tests/test_quick_resume_behavior.py b/tests/test_quick_resume_behavior.py index 1e96e3c..f2017aa 100644 --- a/tests/test_quick_resume_behavior.py +++ b/tests/test_quick_resume_behavior.py @@ -23,7 +23,7 @@ def test_quick_resume_shows_active_team_in_header() -> None: 
patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[context]), - patch("scc_cli.commands.launch.flow.pick_context_quick_resume") as mock_picker, + patch("scc_cli.ui.wizard.pick_context_quick_resume") as mock_picker, ): mock_picker.side_effect = RuntimeError("stop") try: @@ -52,7 +52,7 @@ def test_quick_resume_back_cancels_at_top_level() -> None: patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[context]), patch( - "scc_cli.commands.launch.flow.pick_context_quick_resume", + "scc_cli.ui.wizard.pick_context_quick_resume", return_value=(QuickResumeResult.BACK, None), ), ): diff --git a/tests/test_select_session_use_case.py b/tests/test_select_session_use_case.py new file mode 100644 index 0000000..6538557 --- /dev/null +++ b/tests/test_select_session_use_case.py @@ -0,0 +1,152 @@ +"""Unit tests for the select_session use case.""" + +from contextlib import AbstractContextManager, nullcontext + +from scc_cli.application.launch import ( + SelectSessionDependencies, + SelectSessionRequest, + SelectSessionResult, + SessionSelectionMode, + SessionSelectionPrompt, + SessionSelectionWarningOutcome, + select_session, +) +from scc_cli.application.sessions import SessionService +from scc_cli.ports.session_models import SessionRecord + + +class FakeSessionStore: + """In-memory SessionStore for select_session tests.""" + + def __init__(self, sessions_list: list[SessionRecord] | None = None) -> None: + self._sessions = list(sessions_list or []) + + def lock(self) -> AbstractContextManager[None]: + return nullcontext() + + def load_sessions(self) -> list[SessionRecord]: + return list(self._sessions) + + def save_sessions(self, sessions_list: list[SessionRecord]) -> None: + self._sessions = list(sessions_list) + + 
+def _build_service(records: list[SessionRecord]) -> SessionService: + return SessionService(FakeSessionStore(records)) + + +def test_select_session_returns_prompt_for_select_mode() -> None: + records = [ + SessionRecord( + workspace="/tmp/proj-a", + team="platform", + last_used="2024-01-02T00:00:00", + ), + SessionRecord( + workspace="/tmp/proj-b", + team="platform", + last_used="2024-01-03T00:00:00", + ), + ] + service = _build_service(records) + deps = SelectSessionDependencies(session_service=service) + + outcome = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.SELECT, + team="platform", + include_all=False, + limit=10, + ), + dependencies=deps, + ) + + assert isinstance(outcome, SessionSelectionPrompt) + assert outcome.request.title == "Select Session" + assert len(outcome.request.options) == 2 + + +def test_select_session_returns_most_recent_for_resume() -> None: + records = [ + SessionRecord( + workspace="/tmp/proj-a", + team="platform", + last_used="2024-01-02T00:00:00", + ), + SessionRecord( + workspace="/tmp/proj-b", + team="platform", + last_used="2024-01-03T00:00:00", + ), + ] + service = _build_service(records) + deps = SelectSessionDependencies(session_service=service) + + outcome = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.RESUME, + team="platform", + include_all=False, + limit=10, + ), + dependencies=deps, + ) + + assert isinstance(outcome, SelectSessionResult) + assert outcome.session.workspace == "/tmp/proj-b" + + +def test_select_session_warns_when_empty() -> None: + service = _build_service([]) + deps = SelectSessionDependencies(session_service=service) + + outcome = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.SELECT, + team="platform", + include_all=False, + limit=10, + ), + dependencies=deps, + ) + + assert isinstance(outcome, SessionSelectionWarningOutcome) + assert outcome.warning.title == "No Recent Sessions" + + +def test_select_session_returns_selection_when_provided() 
-> None: + record = SessionRecord( + workspace="/tmp/proj-a", + team="platform", + last_used="2024-01-02T00:00:00", + ) + service = _build_service([record]) + deps = SelectSessionDependencies(session_service=service) + + first = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.SELECT, + team="platform", + include_all=False, + limit=10, + ), + dependencies=deps, + ) + assert isinstance(first, SessionSelectionPrompt) + + item = first.request.options[0].value + assert item is not None + + outcome = select_session( + SelectSessionRequest( + mode=SessionSelectionMode.SELECT, + team="platform", + include_all=False, + limit=10, + selection=item, + ), + dependencies=deps, + ) + + assert isinstance(outcome, SelectSessionResult) + assert outcome.session.workspace == "/tmp/proj-a" diff --git a/tests/test_session_flags.py b/tests/test_session_flags.py index 6501ba6..f1d42b7 100644 --- a/tests/test_session_flags.py +++ b/tests/test_session_flags.py @@ -8,13 +8,15 @@ """ import re -from unittest.mock import patch +from dataclasses import replace +from unittest.mock import MagicMock, patch import pytest from typer.testing import CliRunner from scc_cli.cli import app from scc_cli.core.exit_codes import EXIT_CANCELLED, EXIT_USAGE +from scc_cli.ports.session_models import SessionListResult, SessionSummary from tests.fakes import build_fake_adapters runner = CliRunner() @@ -32,32 +34,38 @@ def strip_ansi(text: str) -> str: @pytest.fixture -def mock_session(): +def mock_session() -> SessionSummary: """A mock session for testing.""" - return { - "name": "test-session", - "workspace": "/home/user/project", - "team": "platform", - "last_used": "2025-12-22T12:00:00", - } + return SessionSummary( + name="test-session", + workspace="/home/user/project", + team="platform", + last_used="2025-12-22T12:00:00", + container_name=None, + branch=None, + ) @pytest.fixture -def mock_sessions_list(): +def mock_sessions_list() -> list[SessionSummary]: """Multiple mock sessions for 
picker testing.""" return [ - { - "name": "session-1", - "workspace": "/home/user/project1", - "team": "platform", - "last_used": "2025-12-22T12:00:00", - }, - { - "name": "session-2", - "workspace": "/home/user/project2", - "team": "backend", - "last_used": "2025-12-22T11:00:00", - }, + SessionSummary( + name="session-1", + workspace="/home/user/project1", + team="platform", + last_used="2025-12-22T12:00:00", + container_name=None, + branch=None, + ), + SessionSummary( + name="session-2", + workspace="/home/user/project2", + team="backend", + last_used="2025-12-22T11:00:00", + container_name=None, + branch=None, + ), ] @@ -72,15 +80,14 @@ class TestResumeFlag: def test_resume_auto_selects_recent_session(self, mock_session): """--resume without workspace should use most recent session.""" # Mock session with no team (standalone mode) - standalone_session = {**mock_session, "team": None} + standalone_session = replace(mock_session, team=None) fake_adapters = build_fake_adapters() with ( patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), patch( - "scc_cli.commands.launch.flow.sessions.list_recent", - return_value=[standalone_session], - ) as mock_list, + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, patch( "scc_cli.commands.launch.flow.get_default_adapters", return_value=fake_adapters, @@ -90,26 +97,30 @@ def test_resume_auto_selects_recent_session(self, mock_session): patch("os.path.exists", return_value=True), patch("pathlib.Path.exists", return_value=True), ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions( + [standalone_session] + ) + mock_service_factory.return_value = mock_service # Use --standalone flag to bypass team filtering result = runner.invoke(app, ["start", "--resume", "--standalone"]) # Should have called list_recent (new implementation filters 
by team) - mock_list.assert_called_once() + mock_service.list_recent.assert_called_once() # Should indicate resuming assert "Resuming" in result.output or result.exit_code == 0 def test_resume_short_flag_works(self, mock_session): """-r short flag should work like --resume.""" # Mock session with no team (standalone mode) - standalone_session = {**mock_session, "team": None} + standalone_session = replace(mock_session, team=None) fake_adapters = build_fake_adapters() with ( patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), patch( - "scc_cli.commands.launch.flow.sessions.list_recent", - return_value=[standalone_session], - ) as mock_list, + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, patch( "scc_cli.commands.launch.flow.get_default_adapters", return_value=fake_adapters, @@ -119,18 +130,28 @@ def test_resume_short_flag_works(self, mock_session): patch("os.path.exists", return_value=True), patch("pathlib.Path.exists", return_value=True), ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions( + [standalone_session] + ) + mock_service_factory.return_value = mock_service # Use --standalone flag to bypass team filtering _result = runner.invoke(app, ["start", "-r", "--standalone"]) - mock_list.assert_called_once() + mock_service.list_recent.assert_called_once() def test_resume_without_sessions_shows_error(self): """--resume with no sessions should show appropriate error.""" with ( patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), - patch("scc_cli.commands.launch.flow.sessions.list_recent", return_value=[]), + patch( + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, ): + mock_service = MagicMock() + 
mock_service.list_recent.return_value = SessionListResult.from_sessions([]) + mock_service_factory.return_value = mock_service # Use --standalone flag to bypass team filtering result = runner.invoke(app, ["start", "--resume", "--standalone"]) @@ -148,20 +169,17 @@ class TestSelectFlag: def test_select_shows_session_picker(self, mock_sessions_list, mock_session): """--select should trigger the session picker UI.""" # Sessions need team=None for standalone mode filtering - standalone_sessions = [{**s, "team": None} for s in mock_sessions_list] - standalone_session = {**mock_session, "team": None} + standalone_sessions = [replace(s, team=None) for s in mock_sessions_list] + standalone_session = replace(mock_session, team=None) fake_adapters = build_fake_adapters() with ( patch("scc_cli.commands.launch.flow.is_interactive_allowed", return_value=True), patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), patch( - "scc_cli.commands.launch.flow.sessions.list_recent", - return_value=standalone_sessions, - ), - patch( - "scc_cli.commands.launch.flow.select_session", return_value=standalone_session - ) as mock_picker, + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, + patch("scc_cli.commands.launch.flow.pick_session") as mock_picker, patch( "scc_cli.commands.launch.flow.get_default_adapters", return_value=fake_adapters, @@ -171,6 +189,12 @@ def test_select_shows_session_picker(self, mock_sessions_list, mock_session): patch("os.path.exists", return_value=True), patch("pathlib.Path.exists", return_value=True), ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions( + standalone_sessions + ) + mock_service_factory.return_value = mock_service + mock_picker.return_value = standalone_session # Use --standalone flag to bypass team filtering _result = runner.invoke(app, ["start", 
"--select", "--standalone"]) @@ -180,20 +204,17 @@ def test_select_shows_session_picker(self, mock_sessions_list, mock_session): def test_select_short_flag_works(self, mock_sessions_list, mock_session): """-s short flag should work like --select.""" # Sessions need team=None for standalone mode filtering - standalone_sessions = [{**s, "team": None} for s in mock_sessions_list] - standalone_session = {**mock_session, "team": None} + standalone_sessions = [replace(s, team=None) for s in mock_sessions_list] + standalone_session = replace(mock_session, team=None) fake_adapters = build_fake_adapters() with ( patch("scc_cli.commands.launch.flow.is_interactive_allowed", return_value=True), patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), patch( - "scc_cli.commands.launch.flow.sessions.list_recent", - return_value=standalone_sessions, - ), - patch( - "scc_cli.commands.launch.flow.select_session", return_value=standalone_session - ) as mock_picker, + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, + patch("scc_cli.commands.launch.flow.pick_session") as mock_picker, patch( "scc_cli.commands.launch.flow.get_default_adapters", return_value=fake_adapters, @@ -203,6 +224,12 @@ def test_select_short_flag_works(self, mock_sessions_list, mock_session): patch("os.path.exists", return_value=True), patch("pathlib.Path.exists", return_value=True), ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions( + standalone_sessions + ) + mock_service_factory.return_value = mock_service + mock_picker.return_value = standalone_session # Use --standalone flag to bypass team filtering _result = runner.invoke(app, ["start", "-s", "--standalone"]) @@ -214,8 +241,13 @@ def test_select_without_sessions_shows_message(self): patch("scc_cli.commands.launch.flow.is_interactive_allowed", 
return_value=True), patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), - patch("scc_cli.commands.launch.flow.sessions.list_recent", return_value=[]), + patch( + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions([]) + mock_service_factory.return_value = mock_service # Use --standalone flag to bypass team filtering result = runner.invoke(app, ["start", "--select", "--standalone"]) @@ -226,19 +258,21 @@ def test_select_without_sessions_shows_message(self): def test_select_user_cancels_exits_gracefully(self, mock_sessions_list): """--select should exit gracefully when user cancels picker.""" # Sessions need team=None for standalone mode filtering - standalone_sessions = [{**s, "team": None} for s in mock_sessions_list] + standalone_sessions = [replace(s, team=None) for s in mock_sessions_list] with ( patch("scc_cli.commands.launch.flow.is_interactive_allowed", return_value=True), patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), patch( - "scc_cli.commands.launch.flow.sessions.list_recent", - return_value=standalone_sessions, - ), - patch( - "scc_cli.commands.launch.flow.select_session", return_value=None - ), # User cancelled + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, + patch("scc_cli.commands.launch.flow.pick_session", return_value=None), ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions( + standalone_sessions + ) + mock_service_factory.return_value = mock_service # Use --standalone flag to bypass team filtering result = runner.invoke(app, ["start", "--select", "--standalone"]) @@ -265,12 +299,8 @@ def 
test_resume_and_select_are_mutually_exclusive(self, mock_session, mock_sessi return_value={"standalone": True}, ), patch( - "scc_cli.commands.launch.flow.sessions.get_most_recent", return_value=mock_session - ), - patch( - "scc_cli.commands.launch.flow.sessions.list_recent", return_value=mock_sessions_list - ), - patch("scc_cli.commands.launch.flow.select_session", return_value=mock_session), + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, patch( "scc_cli.commands.launch.flow.get_default_adapters", return_value=fake_adapters, @@ -280,6 +310,11 @@ def test_resume_and_select_are_mutually_exclusive(self, mock_session, mock_sessi patch("os.path.exists", return_value=True), patch("pathlib.Path.exists", return_value=True), ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions( + mock_sessions_list + ) + mock_service_factory.return_value = mock_service result = runner.invoke(app, ["start", "--resume", "--select"]) # Either should error OR one should take precedence @@ -310,7 +345,7 @@ def test_auto_detects_workspace_from_git_repo(self, mock_session): # Smart detection returns detected workspace patch( "scc_cli.commands.launch.flow.git.detect_workspace_root", - return_value=(mock_session["workspace"], detected_path), + return_value=(mock_session.workspace, detected_path), ) as mock_detect, patch( "scc_cli.commands.launch.flow.get_default_adapters", @@ -399,18 +434,20 @@ def test_interactive_flag_bypasses_detection(self, mock_sessions_list): @pytest.mark.skip(reason="Phase 3 feature: auto-detection feedback not implemented") def test_detection_feedback_shown_on_success(self, mock_session): """Auto-detected workspace should show brief feedback message.""" - standalone_sessions = [{**mock_session, "team": None}] - standalone_session = {**mock_session, "team": None} + standalone_sessions = [replace(mock_session, team=None)] + standalone_session = replace(mock_session, team=None) 
fake_adapters = build_fake_adapters() with ( patch("scc_cli.commands.launch.flow.is_interactive_allowed", return_value=True), patch("scc_cli.commands.launch.flow.setup.is_setup_needed", return_value=False), patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), patch( - "scc_cli.commands.launch.flow.sessions.list_recent", - return_value=standalone_sessions, + "scc_cli.commands.launch.flow.sessions.get_session_service" + ) as mock_service_factory, + patch( + "scc_cli.commands.launch.flow.pick_session", + return_value=standalone_session, ), - patch("scc_cli.commands.launch.flow.select_session", return_value=standalone_session), patch( "scc_cli.commands.launch.flow.get_default_adapters", return_value=fake_adapters, @@ -420,6 +457,11 @@ def test_detection_feedback_shown_on_success(self, mock_session): patch("os.path.exists", return_value=True), patch("pathlib.Path.exists", return_value=True), ): + mock_service = MagicMock() + mock_service.list_recent.return_value = SessionListResult.from_sessions( + standalone_sessions + ) + mock_service_factory.return_value = mock_service result = runner.invoke(app, ["start"]) # Should show detection feedback (unless --json) diff --git a/tests/test_sessions.py b/tests/test_sessions.py index 5b053d8..8592b6f 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -8,11 +8,16 @@ """ import json +from contextlib import AbstractContextManager, nullcontext from datetime import datetime, timedelta +from unittest.mock import patch +import click import pytest from scc_cli import sessions +from scc_cli.application.sessions import SessionService +from scc_cli.ports.session_models import SessionFilter, SessionRecord, SessionSummary # ═══════════════════════════════════════════════════════════════════════════════ # Fixtures @@ -25,7 +30,7 @@ def sessions_file(tmp_path, monkeypatch): # Point to temp directory sessions_path = tmp_path / "sessions.json" - 
monkeypatch.setattr("scc_cli.sessions.config.SESSIONS_FILE", sessions_path) + monkeypatch.setattr("scc_cli.config.SESSIONS_FILE", sessions_path) return sessions_path @@ -81,8 +86,8 @@ def test_returns_most_recent_session(self, sessions_file, sample_sessions): assert result is not None # session2 has the most recent last_used - assert result["workspace"] == "/tmp/proj2" - assert result["name"] == "session2" + assert result.workspace == "/tmp/proj2" + assert result.name == "session2" def test_returns_none_when_no_sessions(self, sessions_file): """Should return None when no sessions exist.""" @@ -113,7 +118,7 @@ def test_handles_single_session(self, sessions_file): result = sessions.get_most_recent() assert result is not None - assert result["workspace"] == "/tmp/only-one" + assert result.workspace == "/tmp/only-one" # ═══════════════════════════════════════════════════════════════════════════════ @@ -132,9 +137,9 @@ def test_returns_sessions_sorted_by_last_used(self, sessions_file, sample_sessio assert len(result) == 3 # Most recent first (session2) - assert result[0]["workspace"] == "/tmp/proj2" - assert result[1]["workspace"] == "/tmp/proj1" - assert result[2]["workspace"] == "/tmp/proj3" + assert result[0].workspace == "/tmp/proj2" + assert result[1].workspace == "/tmp/proj1" + assert result[2].workspace == "/tmp/proj3" def test_respects_limit(self, sessions_file, sample_sessions): """Should limit number of returned sessions.""" @@ -152,13 +157,14 @@ def test_returns_empty_list_when_no_sessions(self, sessions_file): assert result == [] - def test_formats_relative_time(self, sessions_file): - """Should format last_used as relative time.""" + def test_returns_raw_last_used(self, sessions_file): + """Should return raw last_used values from storage.""" now = datetime.now() + last_used = (now - timedelta(minutes=5)).isoformat() recent_session = [ { "workspace": "/tmp/test", - "last_used": (now - timedelta(minutes=5)).isoformat(), + "last_used": last_used, } ] 
sessions_file.write_text(json.dumps({"sessions": recent_session})) @@ -166,7 +172,7 @@ def test_formats_relative_time(self, sessions_file): result = sessions.list_recent() assert len(result) == 1 - assert "m ago" in result[0]["last_used"] + assert result[0].last_used == last_used def test_generates_name_from_workspace_if_missing(self, sessions_file): """Should generate name from workspace path if name is None.""" @@ -180,7 +186,7 @@ def test_generates_name_from_workspace_if_missing(self, sessions_file): result = sessions.list_recent() - assert result[0]["name"] == "my-project" + assert result[0].name == "my-project" # ═══════════════════════════════════════════════════════════════════════════════ @@ -271,7 +277,7 @@ def test_finds_session_by_workspace(self, sessions_file, sample_sessions): result = sessions.find_session_by_workspace("/tmp/proj1") assert result is not None - assert result["workspace"] == "/tmp/proj1" + assert result.workspace == "/tmp/proj1" def test_returns_none_when_not_found(self, sessions_file, sample_sessions): """Should return None when workspace not found.""" @@ -288,7 +294,7 @@ def test_filters_by_branch(self, sessions_file, sample_sessions): result = sessions.find_session_by_workspace("/tmp/proj2", branch="develop") assert result is not None - assert result["branch"] == "develop" + assert result.branch == "develop" def test_returns_none_when_branch_mismatch(self, sessions_file, sample_sessions): """Should return None when branch doesn't match.""" @@ -436,7 +442,7 @@ def test_migrates_base_team_to_none(self, sessions_file): result = sessions.get_most_recent() assert result is not None - assert result["team"] is None # Migrated from "base" + assert result.team is None # Migrated from "base" def test_preserves_valid_team_names(self, sessions_file): """Sessions with actual team names should not be modified.""" @@ -452,7 +458,7 @@ def test_preserves_valid_team_names(self, sessions_file): result = sessions.get_most_recent() assert result is not 
None - assert result["team"] == "platform" + assert result.team == "platform" def test_preserves_none_team(self, sessions_file): """Sessions with team=None should remain None.""" @@ -468,7 +474,7 @@ def test_preserves_none_team(self, sessions_file): result = sessions.get_most_recent() assert result is not None - assert result["team"] is None + assert result.team is None def test_migration_does_not_persist_without_save(self, sessions_file): """Migration happens in memory; original file unchanged until save.""" @@ -524,9 +530,8 @@ def test_legacy_session_without_schema_version_defaults_to_1(self, sessions_file ] sessions_file.write_text(json.dumps({"sessions": legacy_session})) - # Load via SessionRecord.from_dict - raw = sessions._load_sessions()[0] - record = sessions.SessionRecord.from_dict(raw) + raw = json.loads(sessions_file.read_text())["sessions"][0] + record = SessionRecord.from_dict(raw) assert record.schema_version == 1 @@ -578,8 +583,7 @@ def test_standalone_session_round_trips(self, sessions_file): result = sessions.find_session_by_workspace("/tmp/standalone") assert result is not None - # to_dict() excludes None values, so team key may not exist - assert result.get("team") is None + assert result.team is None def test_list_recent_includes_standalone_sessions(self, sessions_file): """list_recent should include standalone (team=None) sessions.""" @@ -595,4 +599,245 @@ def test_list_recent_includes_standalone_sessions(self, sessions_file): result = sessions.list_recent() assert len(result) == 1 - assert result[0]["team"] is None + assert result[0].team is None + + +class FakeSessionStore: + """In-memory SessionStore for use case tests.""" + + def __init__(self, sessions_list: list[SessionRecord] | None = None) -> None: + self._sessions = list(sessions_list or []) + + def lock(self) -> AbstractContextManager[None]: + return nullcontext() + + def load_sessions(self) -> list[SessionRecord]: + return list(self._sessions) + + def save_sessions(self, 
sessions_list: list[SessionRecord]) -> None: + self._sessions = list(sessions_list) + + +__all__ = ["FakeSessionStore"] + + +class TestSessionService: + """Unit tests for SessionService use cases.""" + + def test_list_recent_filters_and_limits(self) -> None: + records = [ + SessionRecord( + workspace="/tmp/proj1", + team="platform", + last_used="2024-01-01T00:00:00", + ), + SessionRecord( + workspace="/tmp/proj2", + team="platform", + last_used="2024-01-02T00:00:00", + ), + SessionRecord( + workspace="/tmp/proj3", + team="api", + last_used="2024-01-03T00:00:00", + ), + ] + service = SessionService(FakeSessionStore(records)) + + result = service.list_recent(SessionFilter(limit=1, team="platform")) + + assert result.count == 1 + assert result.sessions[0].workspace == "/tmp/proj2" + + def test_list_recent_generates_name_from_workspace(self) -> None: + record = SessionRecord( + workspace="/tmp/my-project", + team=None, + last_used="2024-01-01T00:00:00", + ) + service = SessionService(FakeSessionStore([record])) + + result = service.list_recent(SessionFilter(limit=5, include_all=True)) + + assert result.sessions[0].name == "my-project" + + def test_record_session_updates_existing(self) -> None: + existing = SessionRecord( + workspace="/tmp/proj", + team="old", + branch="main", + last_used="2024-01-02T00:00:00", + created_at="2024-01-01T00:00:00", + schema_version=1, + ) + store = FakeSessionStore([existing]) + service = SessionService(store) + + record = service.record_session( + workspace="/tmp/proj", + team="new", + branch="main", + ) + + assert record.created_at == "2024-01-01T00:00:00" + assert len(store.load_sessions()) == 1 + assert store.load_sessions()[0].team == "new" + + def test_update_session_container_preserves_created_at(self) -> None: + existing = SessionRecord( + workspace="/tmp/proj", + team="platform", + branch="main", + container_name=None, + last_used="2024-01-02T00:00:00", + created_at="2024-01-01T00:00:00", + ) + store = 
FakeSessionStore([existing]) + service = SessionService(store) + + service.update_session_container( + workspace="/tmp/proj", + container_name="scc-proj", + branch="main", + ) + + updated = store.load_sessions()[0] + assert updated.container_name == "scc-proj" + assert updated.created_at == "2024-01-01T00:00:00" + + def test_prune_orphaned_sessions_removes_missing_paths(self, tmp_path) -> None: + existing = tmp_path / "repo" + existing.mkdir() + store = FakeSessionStore( + [ + SessionRecord( + workspace=str(existing), + last_used="2024-01-01T00:00:00", + ), + SessionRecord( + workspace="/missing/path", + last_used="2024-01-01T00:00:00", + ), + ] + ) + service = SessionService(store) + + removed = service.prune_orphaned_sessions() + + assert removed == 1 + assert [record.workspace for record in store.load_sessions()] == [str(existing)] + + +# ═══════════════════════════════════════════════════════════════════════════════ +# Tests for sessions command output +# ═══════════════════════════════════════════════════════════════════════════════ + + +class TestSessionsCommandOutput: + """Characterization tests for scc sessions output.""" + + def test_sessions_cmd_renders_table_rows(self) -> None: + from scc_cli.commands.worktree.session_commands import sessions_cmd + + workspace = "/workspace/" + "a" * 50 + session = SessionSummary( + name="session-1", + workspace=workspace, + team="platform", + last_used="2025-01-01T12:00:00", + container_name="scc-session", + branch="main", + ) + + with ( + patch( + "scc_cli.commands.worktree.session_commands.config.load_user_config", + return_value={"selected_profile": "platform"}, + ), + patch( + "scc_cli.commands.worktree.session_commands.config.is_standalone_mode", + return_value=False, + ), + patch( + "scc_cli.commands.worktree.session_commands.sessions.list_recent", + return_value=[session], + ), + patch( + "scc_cli.commands.worktree.session_commands.sessions.format_relative_time", + return_value="2h ago", + ), + patch( + 
"scc_cli.commands.worktree.session_commands.render_responsive_table" + ) as mock_table, + ): + sessions_cmd( + limit=10, + team="platform", + all_teams=False, + select=False, + json_output=False, + pretty=False, + ) + + mock_table.assert_called_once() + call_kwargs = mock_table.call_args.kwargs + assert call_kwargs["title"] == "Recent Sessions (platform)" + assert call_kwargs["columns"] == [("Session", "cyan"), ("Workspace", "white")] + assert call_kwargs["wide_columns"] == [("Last Used", "yellow"), ("Team", "green")] + assert call_kwargs["rows"] == [["session-1", "..." + "a" * 37, "2h ago", "platform"]] + + def test_sessions_cmd_json_output(self, capsys) -> None: + from scc_cli.commands.worktree.session_commands import sessions_cmd + + sessions_list = [ + SessionSummary( + name="session-1", + workspace="/workspace/one", + team="platform", + last_used="2025-01-01T12:00:00", + container_name="scc-one", + branch="main", + ), + SessionSummary( + name="session-2", + workspace="/workspace/two", + team="backend", + last_used="2025-01-02T09:00:00", + container_name=None, + branch=None, + ), + ] + + with ( + patch( + "scc_cli.commands.worktree.session_commands.config.load_user_config", + return_value={"selected_profile": "platform"}, + ), + patch( + "scc_cli.commands.worktree.session_commands.config.is_standalone_mode", + return_value=False, + ), + patch( + "scc_cli.commands.worktree.session_commands.sessions.list_recent", + return_value=sessions_list, + ), + ): + with pytest.raises(click.exceptions.Exit): + sessions_cmd( + limit=10, + team=None, + all_teams=True, + select=False, + json_output=True, + pretty=False, + ) + + output = json.loads(capsys.readouterr().out) + assert output["kind"] == "SessionList" + assert output["data"]["count"] == 2 + assert output["data"]["team"] is None + assert output["data"]["sessions"][0]["name"] == "session-1" + assert output["data"]["sessions"][0]["workspace"] == "/workspace/one" + assert output["data"]["sessions"][0]["last_used"] == 
"2025-01-01T12:00:00" + assert output["data"]["sessions"][0]["container_name"] == "scc-one" + assert output["data"]["sessions"][0]["branch"] == "main" diff --git a/tests/test_start_cross_team_resume_prompt.py b/tests/test_start_cross_team_resume_prompt.py new file mode 100644 index 0000000..23ef8d3 --- /dev/null +++ b/tests/test_start_cross_team_resume_prompt.py @@ -0,0 +1,79 @@ +"""Characterization tests for cross-team resume confirmation prompts.""" + +from pathlib import Path +from unittest.mock import patch + +from scc_cli.contexts import WorkContext +from scc_cli.ui.picker import QuickResumeResult +from scc_cli.ui.wizard import WorkspaceSource + + +def test_cross_team_resume_prompt_text_top_level() -> None: + from scc_cli.commands.launch import interactive_start + + context = WorkContext( + team="alpha", + repo_root=Path("/repo"), + worktree_path=Path("/repo"), + worktree_name="main", + last_session_id="session-1", + ) + + with ( + patch("scc_cli.commands.launch.flow.config.is_standalone_mode", return_value=False), + patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), + patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), + patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), + patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[context]), + patch( + "scc_cli.ui.wizard.pick_context_quick_resume", + return_value=(QuickResumeResult.SELECTED, context), + ), + patch("scc_cli.ui.wizard.confirm_with_layout", return_value=True) as confirm, + ): + interactive_start(cfg={"selected_profile": "beta"}, allow_back=False) + + assert confirm.call_args is not None + prompt = confirm.call_args.args[1] + assert "Resume session from team 'alpha'?" 
in prompt + assert "use alpha plugins for this session" in prompt + + +def test_cross_team_resume_prompt_text_workspace_scope() -> None: + from scc_cli.commands.launch import interactive_start + + context = WorkContext( + team="alpha", + repo_root=Path("/repo"), + worktree_path=Path("/repo"), + worktree_name="main", + last_session_id="session-1", + ) + + with ( + patch("scc_cli.commands.launch.flow.config.is_standalone_mode", return_value=False), + patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), + patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), + patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), + patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[context]), + patch( + "scc_cli.ui.wizard.pick_context_quick_resume", + side_effect=[ + (QuickResumeResult.NEW_SESSION, None), + (QuickResumeResult.SELECTED, context), + ], + ), + patch("scc_cli.ui.wizard._run_single_select_picker", return_value=WorkspaceSource.RECENT), + patch( + "scc_cli.ui.wizard.pick_recent_workspace", + return_value=str(context.worktree_path), + ), + patch("scc_cli.ui.wizard.confirm_with_layout", return_value=True) as confirm, + patch("scc_cli.ui.wizard.prompt_with_layout", return_value=None), + ): + interactive_start(cfg={"selected_profile": "beta"}, allow_back=False) + + assert confirm.call_args is not None + prompt = confirm.call_args.args[1] + assert "Resume session from team 'alpha'?" 
in prompt + assert "use alpha plugins for this session" in prompt diff --git a/tests/test_start_dryrun.py b/tests/test_start_dryrun.py index c57b61d..5635206 100644 --- a/tests/test_start_dryrun.py +++ b/tests/test_start_dryrun.py @@ -36,7 +36,7 @@ def test_dry_run_does_not_launch_docker(self, tmp_path, monkeypatch): with patch( "scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={} ): - with patch("scc_cli.commands.launch.flow.start_session", mock_start_session): + with patch("scc_cli.commands.launch.flow.finalize_launch", mock_start_session): try: start( workspace=str(tmp_path), diff --git a/tests/test_start_personal_profile.py b/tests/test_start_personal_profile.py index 5c37543..a4cfdc4 100644 --- a/tests/test_start_personal_profile.py +++ b/tests/test_start_personal_profile.py @@ -1,9 +1,19 @@ """Tests for personal profile integration in start flow.""" +from dataclasses import dataclass, field from pathlib import Path +from typing import Any -from scc_cli.commands.launch import flow as launch_flow +from scc_cli.adapters.personal_profile_service_local import LocalPersonalProfileService +from scc_cli.application.launch import ( + ApplyPersonalProfileConfirmation, + ApplyPersonalProfileDependencies, + ApplyPersonalProfileRequest, + ApplyPersonalProfileResult, + apply_personal_profile, +) from scc_cli.core import personal_profiles +from scc_cli.core.personal_profiles import PersonalProfile from scc_cli.marketplace.managed import ManagedState, save_managed_state @@ -12,6 +22,63 @@ def _write_json(path: Path, data: dict) -> None: path.write_text(__import__("json").dumps(data, indent=2)) +@dataclass +class FakePersonalProfileService: + profile: PersonalProfile | None + corrupt_profile: bool = False + drift: bool = False + has_overrides: bool = False + settings_invalid: bool = False + mcp_invalid: bool = False + writes: dict[str, Any] = field(default_factory=dict) + + def load_personal_profile_with_status( + self, workspace: Path + ) -> 
tuple[PersonalProfile | None, bool]: + return self.profile, self.corrupt_profile + + def detect_drift(self, workspace: Path) -> bool: + return self.drift + + def workspace_has_overrides(self, workspace: Path) -> bool: + return self.has_overrides + + def load_workspace_settings_with_status( + self, workspace: Path + ) -> tuple[dict[str, Any] | None, bool]: + return {}, self.settings_invalid + + def load_workspace_mcp_with_status(self, workspace: Path) -> tuple[dict[str, Any] | None, bool]: + return {}, self.mcp_invalid + + def merge_personal_settings( + self, workspace: Path, existing: dict[str, Any], personal: dict[str, Any] + ) -> dict[str, Any]: + return {**existing, **personal} + + def merge_personal_mcp( + self, existing: dict[str, Any], personal: dict[str, Any] + ) -> dict[str, Any]: + return {**existing, **personal} + + def write_workspace_settings(self, workspace: Path, data: dict[str, Any]) -> None: + self.writes["settings"] = data + + def write_workspace_mcp(self, workspace: Path, data: dict[str, Any]) -> None: + self.writes["mcp"] = data + + def save_applied_state( + self, workspace: Path, profile_id: str, fingerprints: dict[str, str] + ) -> None: + self.writes["applied_state"] = { + "profile_id": profile_id, + "fingerprints": fingerprints, + } + + def compute_fingerprints(self, workspace: Path) -> dict[str, str]: + return {"settings.local.json": "hash", ".mcp.json": "hash"} + + def test_apply_personal_profile_applies(tmp_path: Path) -> None: settings_path = tmp_path / ".claude" / "settings.local.json" _write_json(settings_path, {"enabledPlugins": {"team@market": True}}) @@ -27,12 +94,21 @@ def test_apply_personal_profile_applies(tmp_path: Path) -> None: {}, ) - profile_id, applied = launch_flow._apply_personal_profile( - tmp_path, json_mode=True, non_interactive=True + request = ApplyPersonalProfileRequest( + workspace_path=tmp_path, + interactive_allowed=False, + confirm_apply=None, + ) + outcome = apply_personal_profile( + request, + 
dependencies=ApplyPersonalProfileDependencies( + profile_service=LocalPersonalProfileService(), + ), ) - assert applied is True - assert profile_id is not None + assert isinstance(outcome, ApplyPersonalProfileResult) + assert outcome.applied is True + assert outcome.profile_id is not None updated = personal_profiles.load_workspace_settings(tmp_path) or {} assert updated.get("enabledPlugins", {}).get("team@market") is False @@ -40,4 +116,151 @@ def test_apply_personal_profile_applies(tmp_path: Path) -> None: state = personal_profiles.load_applied_state(tmp_path) assert state is not None - assert state.profile_id == profile_id + assert state.profile_id == outcome.profile_id + + +def test_personal_profile_drift_requires_confirmation(tmp_path: Path) -> None: + profile = PersonalProfile( + repo_id="repo", + profile_id="profile-1", + saved_at=None, + settings={"foo": "bar"}, + mcp=None, + path=tmp_path / "profile.json", + ) + service = FakePersonalProfileService( + profile=profile, + drift=True, + has_overrides=True, + ) + + request = ApplyPersonalProfileRequest( + workspace_path=tmp_path, + interactive_allowed=True, + confirm_apply=None, + ) + outcome = apply_personal_profile( + request, + dependencies=ApplyPersonalProfileDependencies(profile_service=service), + ) + + assert isinstance(outcome, ApplyPersonalProfileConfirmation) + assert outcome.default_response is False + assert outcome.profile_id == "profile-1" + + +def test_personal_profile_drift_skipped_when_non_interactive(tmp_path: Path) -> None: + profile = PersonalProfile( + repo_id="repo", + profile_id="profile-2", + saved_at=None, + settings={"foo": "bar"}, + mcp=None, + path=tmp_path / "profile.json", + ) + service = FakePersonalProfileService( + profile=profile, + drift=True, + has_overrides=True, + ) + + request = ApplyPersonalProfileRequest( + workspace_path=tmp_path, + interactive_allowed=False, + confirm_apply=None, + ) + outcome = apply_personal_profile( + request, + 
dependencies=ApplyPersonalProfileDependencies(profile_service=service), + ) + + assert isinstance(outcome, ApplyPersonalProfileResult) + assert outcome.applied is False + assert "Workspace overrides detected" in (outcome.message or "") + + +def test_personal_profile_confirmation_rejects_apply(tmp_path: Path) -> None: + profile = PersonalProfile( + repo_id="repo", + profile_id="profile-3", + saved_at=None, + settings={"foo": "bar"}, + mcp=None, + path=tmp_path / "profile.json", + ) + service = FakePersonalProfileService( + profile=profile, + drift=True, + has_overrides=True, + ) + + request = ApplyPersonalProfileRequest( + workspace_path=tmp_path, + interactive_allowed=True, + confirm_apply=False, + ) + outcome = apply_personal_profile( + request, + dependencies=ApplyPersonalProfileDependencies(profile_service=service), + ) + + assert isinstance(outcome, ApplyPersonalProfileResult) + assert outcome.applied is False + + +def test_personal_profile_invalid_json_settings(tmp_path: Path) -> None: + profile = PersonalProfile( + repo_id="repo", + profile_id="profile-4", + saved_at=None, + settings={"foo": "bar"}, + mcp=None, + path=tmp_path / "profile.json", + ) + service = FakePersonalProfileService( + profile=profile, + settings_invalid=True, + ) + + request = ApplyPersonalProfileRequest( + workspace_path=tmp_path, + interactive_allowed=True, + confirm_apply=True, + ) + outcome = apply_personal_profile( + request, + dependencies=ApplyPersonalProfileDependencies(profile_service=service), + ) + + assert isinstance(outcome, ApplyPersonalProfileResult) + assert outcome.applied is False + assert "Invalid JSON" in (outcome.message or "") + + +def test_personal_profile_invalid_json_mcp(tmp_path: Path) -> None: + profile = PersonalProfile( + repo_id="repo", + profile_id="profile-5", + saved_at=None, + settings={"foo": "bar"}, + mcp={"mcp": "config"}, + path=tmp_path / "profile.json", + ) + service = FakePersonalProfileService( + profile=profile, + mcp_invalid=True, + ) + + 
request = ApplyPersonalProfileRequest( + workspace_path=tmp_path, + interactive_allowed=True, + confirm_apply=True, + ) + outcome = apply_personal_profile( + request, + dependencies=ApplyPersonalProfileDependencies(profile_service=service), + ) + + assert isinstance(outcome, ApplyPersonalProfileResult) + assert outcome.applied is False + assert "Invalid JSON" in (outcome.message or "") diff --git a/tests/test_start_wizard_quick_resume_flow.py b/tests/test_start_wizard_quick_resume_flow.py new file mode 100644 index 0000000..a66f1f3 --- /dev/null +++ b/tests/test_start_wizard_quick_resume_flow.py @@ -0,0 +1,85 @@ +"""Characterization tests for quick resume wizard flows.""" + +from pathlib import Path +from unittest.mock import patch + +from scc_cli.contexts import WorkContext +from scc_cli.ui.picker import QuickResumeResult + + +def test_quick_resume_new_session_moves_to_workspace_source() -> None: + from scc_cli.commands.launch import interactive_start + from scc_cli.ui.wizard import WorkspaceSource + + with ( + patch("scc_cli.commands.launch.flow.config.is_standalone_mode", return_value=True), + patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), + patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), + patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), + patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[]), + patch( + "scc_cli.ui.wizard.pick_context_quick_resume", + return_value=(QuickResumeResult.NEW_SESSION, None), + ), + patch("scc_cli.ui.wizard.pick_workspace_source", return_value=WorkspaceSource.RECENT), + patch("scc_cli.ui.wizard.pick_recent_workspace", return_value="/repo"), + patch("scc_cli.ui.wizard.confirm_with_layout", return_value=False), + patch("scc_cli.ui.wizard.prompt_with_layout", return_value=None), + ): + result = interactive_start(cfg={}, allow_back=False) + + assert result == ("/repo", None, None, None) + + +def 
test_quick_resume_back_returns_cancelled() -> None: + from scc_cli.commands.launch import interactive_start + + with ( + patch("scc_cli.commands.launch.flow.config.is_standalone_mode", return_value=True), + patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), + patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), + patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), + patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[]), + patch( + "scc_cli.ui.wizard.pick_context_quick_resume", + return_value=(QuickResumeResult.BACK, None), + ), + ): + result = interactive_start(cfg={}, allow_back=False) + + assert result == (None, None, None, None) + + +def test_quick_resume_selects_context_returns_immediately() -> None: + from scc_cli.commands.launch import interactive_start + + context = WorkContext( + team=None, + repo_root=Path("/repo"), + worktree_path=Path("/repo"), + worktree_name="main", + last_session_id="session-1", + ) + + with ( + patch("scc_cli.commands.launch.flow.config.is_standalone_mode", return_value=True), + patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), + patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), + patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), + patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[context]), + patch( + "scc_cli.ui.wizard.pick_context_quick_resume", + return_value=(QuickResumeResult.SELECTED, context), + ), + patch("scc_cli.ui.wizard.confirm_with_layout", return_value=True) as confirm, + ): + result = interactive_start(cfg={}, allow_back=False) + + assert confirm.call_args is None + + assert result == ( + str(context.worktree_path), + context.team, + context.last_session_id, + None, + ) diff --git a/tests/test_start_wizard_state_machine.py b/tests/test_start_wizard_state_machine.py new file mode 100644 index 
0000000..e4ed17e --- /dev/null +++ b/tests/test_start_wizard_state_machine.py @@ -0,0 +1,176 @@ +"""Tests for the start wizard state machine.""" + +from scc_cli.application.launch.start_wizard import ( + BackRequested, + CancelRequested, + QuickResumeDismissed, + QuickResumeSelected, + SessionNameEntered, + StartWizardConfig, + StartWizardStep, + TeamSelected, + WorkspaceSelected, + WorkspaceSource, + WorkspaceSourceChosen, + WorktreeSelected, + apply_start_wizard_event, + initialize_start_wizard, +) + + +def test_wizard_starts_in_quick_resume() -> None: + config = StartWizardConfig( + quick_resume_enabled=True, + team_selection_required=False, + allow_back=False, + ) + state = initialize_start_wizard(config) + + assert state.step is StartWizardStep.QUICK_RESUME + + +def test_wizard_skips_quick_resume_when_disabled() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=True, + allow_back=False, + ) + state = initialize_start_wizard(config) + + assert state.step is StartWizardStep.TEAM_SELECTION + + +def test_quick_resume_selection_completes_flow() -> None: + config = StartWizardConfig( + quick_resume_enabled=True, + team_selection_required=True, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event( + state, + QuickResumeSelected(workspace="/work", team="team-a", session_name="session"), + ) + + assert state.step is StartWizardStep.COMPLETE + assert state.context.workspace == "/work" + assert state.context.team == "team-a" + assert state.context.session_name == "session" + + +def test_quick_resume_dismissed_moves_to_team_selection() -> None: + config = StartWizardConfig( + quick_resume_enabled=True, + team_selection_required=True, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, QuickResumeDismissed()) + + assert state.step is StartWizardStep.TEAM_SELECTION + + +def test_team_selection_moves_to_workspace_source() -> 
None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=True, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, TeamSelected(team="team-a")) + + assert state.step is StartWizardStep.WORKSPACE_SOURCE + assert state.context.team == "team-a" + + +def test_workspace_source_moves_to_workspace_picker() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=False, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, WorkspaceSourceChosen(source=WorkspaceSource.RECENT)) + + assert state.step is StartWizardStep.WORKSPACE_PICKER + + +def test_workspace_selected_moves_to_worktree_decision() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=False, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, WorkspaceSourceChosen(source=WorkspaceSource.RECENT)) + state = apply_start_wizard_event(state, WorkspaceSelected(workspace="/work")) + + assert state.step is StartWizardStep.WORKTREE_DECISION + assert state.context.workspace == "/work" + + +def test_worktree_selected_moves_to_session_name() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=False, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, WorkspaceSourceChosen(source=WorkspaceSource.RECENT)) + state = apply_start_wizard_event(state, WorkspaceSelected(workspace="/work")) + state = apply_start_wizard_event(state, WorktreeSelected(worktree_name="feature")) + + assert state.step is StartWizardStep.SESSION_NAME + assert state.context.worktree_name == "feature" + + +def test_session_name_completes_flow() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=False, + allow_back=False, + ) + state = 
initialize_start_wizard(config) + state = apply_start_wizard_event(state, WorkspaceSourceChosen(source=WorkspaceSource.RECENT)) + state = apply_start_wizard_event(state, WorkspaceSelected(workspace="/work")) + state = apply_start_wizard_event(state, WorktreeSelected(worktree_name=None)) + state = apply_start_wizard_event(state, SessionNameEntered(session_name="name")) + + assert state.step is StartWizardStep.COMPLETE + assert state.context.session_name == "name" + + +def test_back_request_returns_back_when_allowed() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=False, + allow_back=True, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, BackRequested()) + + assert state.step is StartWizardStep.BACK + + +def test_back_request_cancels_when_disallowed() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=False, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, BackRequested()) + + assert state.step is StartWizardStep.CANCELLED + + +def test_cancel_request_ends_flow() -> None: + config = StartWizardConfig( + quick_resume_enabled=False, + team_selection_required=False, + allow_back=False, + ) + state = initialize_start_wizard(config) + state = apply_start_wizard_event(state, CancelRequested()) + + assert state.step is StartWizardStep.CANCELLED diff --git a/tests/test_start_wizard_workspace_quick_resume.py b/tests/test_start_wizard_workspace_quick_resume.py new file mode 100644 index 0000000..8711bd8 --- /dev/null +++ b/tests/test_start_wizard_workspace_quick_resume.py @@ -0,0 +1,91 @@ +"""Characterization tests for workspace-scoped quick resume flows.""" + +from pathlib import Path +from unittest.mock import patch + +from scc_cli.contexts import WorkContext +from scc_cli.ui.picker import QuickResumeResult +from scc_cli.ui.wizard import WorkspaceSource + + +def 
test_workspace_quick_resume_returns_selected_context() -> None: + from scc_cli.commands.launch import interactive_start + + context = WorkContext( + team="alpha", + repo_root=Path("/repo"), + worktree_path=Path("/repo"), + worktree_name="main", + last_session_id="session-1", + ) + + with ( + patch("scc_cli.commands.launch.flow.config.is_standalone_mode", return_value=False), + patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), + patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), + patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), + patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[context]), + patch( + "scc_cli.ui.wizard.pick_context_quick_resume", + side_effect=[ + (QuickResumeResult.NEW_SESSION, None), + (QuickResumeResult.SELECTED, context), + ], + ), + patch("scc_cli.ui.wizard._run_single_select_picker", return_value=WorkspaceSource.RECENT), + patch( + "scc_cli.ui.wizard.pick_recent_workspace", + return_value=str(context.worktree_path), + ), + patch("scc_cli.ui.wizard.confirm_with_layout", return_value=True), + ): + result = interactive_start(cfg={"selected_profile": "alpha"}, allow_back=False) + + assert result == ( + str(context.worktree_path), + context.team, + context.last_session_id, + None, + ) + + +def test_workspace_quick_resume_new_session_keeps_workspace() -> None: + from scc_cli.commands.launch import interactive_start + + context = WorkContext( + team="alpha", + repo_root=Path("/repo"), + worktree_path=Path("/repo"), + worktree_name="main", + last_session_id="session-1", + ) + + with ( + patch("scc_cli.commands.launch.flow.config.is_standalone_mode", return_value=False), + patch("scc_cli.commands.launch.flow.config.load_cached_org_config", return_value={}), + patch("scc_cli.commands.launch.flow.config.load_user_config", return_value={}), + patch("scc_cli.commands.launch.flow.teams.list_teams", return_value=[]), + 
patch("scc_cli.commands.launch.flow.load_recent_contexts", return_value=[context]), + patch( + "scc_cli.ui.wizard.pick_context_quick_resume", + side_effect=[ + (QuickResumeResult.NEW_SESSION, None), + (QuickResumeResult.NEW_SESSION, None), + ], + ), + patch("scc_cli.ui.wizard._run_single_select_picker", return_value=WorkspaceSource.RECENT), + patch( + "scc_cli.ui.wizard.pick_recent_workspace", + return_value=str(context.worktree_path), + ), + patch("scc_cli.ui.wizard.confirm_with_layout", return_value=False), + patch("scc_cli.ui.wizard.prompt_with_layout", return_value=None), + ): + result = interactive_start(cfg={"selected_profile": "alpha"}, allow_back=False) + + assert result == ( + str(context.worktree_path), + "alpha", + None, + None, + ) diff --git a/tests/test_support_bundle.py b/tests/test_support_bundle.py index eec4cc1..34d69d1 100644 --- a/tests/test_support_bundle.py +++ b/tests/test_support_bundle.py @@ -244,6 +244,63 @@ def test_build_bundle_data_includes_doctor_output(self) -> None: assert "doctor" in result +# ═══════════════════════════════════════════════════════════════════════════════ +# Tests for Use Case with Fake Dependencies +# ═══════════════════════════════════════════════════════════════════════════════ + + +class TestSupportBundleUseCase: + """Test support bundle use case with fake dependencies.""" + + def test_doctor_failure_produces_error_in_manifest(self, tmp_path: Path) -> None: + """Doctor failure should be captured as error in manifest.""" + from datetime import datetime, timezone + + from scc_cli.application.support_bundle import ( + SupportBundleDependencies, + SupportBundleRequest, + build_support_bundle_manifest, + ) + + class FakeFilesystem: + def exists(self, path: Path) -> bool: + return False + + def read_text(self, path: Path) -> str: + return "{}" + + class FakeClock: + def now(self) -> datetime: + return datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc) + + class FailingDoctorRunner: + def run(self, workspace: str | 
None = None): + raise RuntimeError("Doctor check failed") + + class FakeArchiveWriter: + def write_manifest(self, output_path: str, manifest_json: str) -> None: + pass + + dependencies = SupportBundleDependencies( + filesystem=FakeFilesystem(), # type: ignore[arg-type] + clock=FakeClock(), # type: ignore[arg-type] + doctor_runner=FailingDoctorRunner(), # type: ignore[arg-type] + archive_writer=FakeArchiveWriter(), # type: ignore[arg-type] + ) + + request = SupportBundleRequest( + output_path=tmp_path / "test.zip", + redact_paths=False, + workspace_path=None, + ) + + manifest = build_support_bundle_manifest(request, dependencies=dependencies) + + assert "doctor" in manifest + assert "error" in manifest["doctor"] + assert "Doctor check failed" in manifest["doctor"]["error"] + + # ═══════════════════════════════════════════════════════════════════════════════ # Tests for Bundle File Creation # ═══════════════════════════════════════════════════════════════════════════════ @@ -309,7 +366,10 @@ def test_json_output_does_not_create_file(self, tmp_path: Path, capsys) -> None: """--json flag should output manifest, not create zip.""" from scc_cli.commands.support import support_bundle_cmd - with patch("scc_cli.commands.support.build_bundle_data", return_value={"test": "data"}): + with patch( + "scc_cli.commands.support.build_support_bundle_manifest", + return_value={"test": "data"}, + ): try: support_bundle_cmd( output=None, @@ -340,7 +400,10 @@ def test_custom_output_path_creates_file_at_location(self, tmp_path: Path) -> No output_path = tmp_path / "custom-bundle.zip" - with patch("scc_cli.commands.support.build_bundle_data", return_value={"test": "data"}): + with patch( + "scc_cli.commands.support.build_support_bundle_manifest", + return_value={"test": "data"}, + ): try: support_bundle_cmd( output=str(output_path), diff --git a/tests/test_ui_chrome_quick_resume_hints.py b/tests/test_ui_chrome_quick_resume_hints.py new file mode 100644 index 0000000..ada3739 --- /dev/null 
+++ b/tests/test_ui_chrome_quick_resume_hints.py @@ -0,0 +1,23 @@ +"""Characterization tests for Quick Resume chrome hints.""" + +from scc_cli.ui.chrome import ChromeConfig + + +def test_quick_resume_hints_include_expected_actions() -> None: + config = ChromeConfig.for_quick_resume("Quick Resume") + + hints = [(hint.key, hint.action) for hint in config.footer_hints] + + assert ("n", "new session") in hints + assert ("a", "all teams") in hints + assert ("Esc", "back") in hints + assert ("q", "quit") in hints + + +def test_quick_resume_hints_dim_all_teams_in_standalone() -> None: + config = ChromeConfig.for_quick_resume("Quick Resume", standalone=True) + + all_teams_hint = next(hint for hint in config.footer_hints if hint.key == "a") + + assert all_teams_hint.action == "all teams" + assert all_teams_hint.dimmed is True diff --git a/tests/test_ui_dashboard.py b/tests/test_ui_dashboard.py index 0339234..52a45aa 100644 --- a/tests/test_ui_dashboard.py +++ b/tests/test_ui_dashboard.py @@ -16,6 +16,7 @@ from rich.console import Console from scc_cli.application import dashboard as app_dashboard +from scc_cli.ports.session_models import SessionListResult, SessionSummary from scc_cli.ui.dashboard import ( Dashboard, DashboardState, @@ -30,6 +31,12 @@ from scc_cli.ui.list_screen import ListItem, ListState +def _mock_session_service(summaries: list[SessionSummary]) -> MagicMock: + service = MagicMock() + service.list_recent.return_value = SessionListResult.from_sessions(summaries) + return service + + class TestDashboardTab: """Test DashboardTab enum.""" @@ -434,14 +441,16 @@ class TestLoadStatusTabData: def test_returns_tab_data_with_status_tab(self) -> None: """Returns TabData for STATUS tab.""" with patch("scc_cli.config.load_user_config") as mock_config: - with patch("scc_cli.sessions.list_recent") as mock_sessions: + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): with patch("scc_cli.docker.core.list_scc_containers") as 
mock_docker: mock_config.return_value = { "selected_profile": "team-a", "standalone": False, } mock_docker.return_value = [] - mock_sessions.return_value = [] data = _load_status_tab_data() @@ -451,13 +460,15 @@ def test_returns_tab_data_with_status_tab(self) -> None: def test_includes_team_info_when_selected(self) -> None: """Includes team info when a team is selected.""" with patch("scc_cli.config.load_user_config") as mock_config: - with patch("scc_cli.sessions.list_recent") as mock_sessions: + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): with patch("scc_cli.docker.core.list_scc_containers") as mock_docker: mock_config.return_value = { "selected_profile": "production-team", } mock_docker.return_value = [] - mock_sessions.return_value = [] data = _load_status_tab_data() @@ -477,11 +488,13 @@ def test_includes_team_info_when_selected(self) -> None: def test_handles_no_team_selected(self) -> None: """Shows 'Team: none' when no team configured.""" with patch("scc_cli.config.load_user_config") as mock_config: - with patch("scc_cli.sessions.list_recent") as mock_sessions: + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): with patch("scc_cli.docker.core.list_scc_containers") as mock_docker: mock_config.return_value = {} mock_docker.return_value = [] - mock_sessions.return_value = [] data = _load_status_tab_data() @@ -501,7 +514,10 @@ def test_handles_no_team_selected(self) -> None: def test_includes_container_count(self) -> None: """Includes container count in status.""" with patch("scc_cli.config.load_user_config") as mock_config: - with patch("scc_cli.sessions.list_recent") as mock_sessions: + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): with patch("scc_cli.docker.core.list_scc_containers") as mock_docker: mock_config.return_value = {} # Create mock containers @@ -510,7 +526,6 @@ def 
test_includes_container_count(self) -> None: container2 = MagicMock() container2.status = "Exited" mock_docker.return_value = [container1, container2] - mock_sessions.return_value = [] data = _load_status_tab_data() @@ -531,9 +546,13 @@ def test_includes_container_count(self) -> None: def test_handles_config_error_gracefully(self) -> None: """Shows error message when config fails to load.""" with patch("scc_cli.config.load_user_config") as mock_config: - mock_config.side_effect = Exception("Config error") + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): + mock_config.side_effect = Exception("Config error") - data = _load_status_tab_data() + data = _load_status_tab_data() error_item = next( ( @@ -640,9 +659,10 @@ class TestLoadSessionsTabData: def test_returns_tab_data_with_sessions_tab(self) -> None: """Returns TabData for SESSIONS tab.""" - with patch("scc_cli.sessions.list_recent") as mock_sessions: - mock_sessions.return_value = [] - + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): data = _load_sessions_tab_data() assert data.tab == DashboardTab.SESSIONS @@ -650,17 +670,18 @@ def test_returns_tab_data_with_sessions_tab(self) -> None: def test_lists_recent_sessions(self) -> None: """Lists recent sessions with metadata.""" - with patch("scc_cli.sessions.list_recent") as mock_sessions: - mock_sessions.return_value = [ - { - "name": "feature-work", - "container_name": "scc-feature", - "team": "dev-team", - "branch": "feature/new-ui", - "last_used": "2h ago", - } - ] - + session = SessionSummary( + name="feature-work", + workspace="/workspace/feature", + team="dev-team", + last_used="2h ago", + container_name="scc-feature", + branch="feature/new-ui", + ) + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([session]), + ): data = _load_sessions_tab_data() assert len(data.items) == 1 @@ -669,9 +690,10 @@ def 
test_lists_recent_sessions(self) -> None: def test_shows_no_sessions_message(self) -> None: """Shows message when no sessions exist.""" - with patch("scc_cli.sessions.list_recent") as mock_sessions: - mock_sessions.return_value = [] - + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): data = _load_sessions_tab_data() assert len(data.items) == 1 @@ -682,9 +704,9 @@ def test_shows_no_sessions_message(self) -> None: def test_handles_sessions_error_gracefully(self) -> None: """Shows error message when sessions fail to load.""" - with patch("scc_cli.sessions.list_recent") as mock_sessions: - mock_sessions.side_effect = Exception("Sessions error") - + mock_service = MagicMock() + mock_service.list_recent.side_effect = Exception("Sessions error") + with patch("scc_cli.sessions.get_session_service", return_value=mock_service): data = _load_sessions_tab_data() assert len(data.items) == 1 @@ -769,41 +791,45 @@ class TestLoadAllTabData: def test_returns_dict_with_all_tabs(self) -> None: """Returns data for all dashboard tabs.""" - with patch("scc_cli.application.dashboard.load_all_tab_data") as mock_all: - mock_all.return_value = { - DashboardTab.STATUS: app_dashboard.DashboardTabData( - tab=DashboardTab.STATUS, - title="Status", - items=[], - count_active=0, - count_total=0, - ), - DashboardTab.CONTAINERS: app_dashboard.DashboardTabData( - tab=DashboardTab.CONTAINERS, - title="Containers", - items=[], - count_active=0, - count_total=0, - ), - DashboardTab.SESSIONS: app_dashboard.DashboardTabData( - tab=DashboardTab.SESSIONS, - title="Sessions", - items=[], - count_active=0, - count_total=0, - ), - DashboardTab.WORKTREES: app_dashboard.DashboardTabData( - tab=DashboardTab.WORKTREES, - title="Worktrees", - items=[], - count_active=0, - count_total=0, - ), - } + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): + with patch("scc_cli.application.dashboard.load_all_tab_data") as 
mock_all: + mock_all.return_value = { + DashboardTab.STATUS: app_dashboard.DashboardTabData( + tab=DashboardTab.STATUS, + title="Status", + items=[], + count_active=0, + count_total=0, + ), + DashboardTab.CONTAINERS: app_dashboard.DashboardTabData( + tab=DashboardTab.CONTAINERS, + title="Containers", + items=[], + count_active=0, + count_total=0, + ), + DashboardTab.SESSIONS: app_dashboard.DashboardTabData( + tab=DashboardTab.SESSIONS, + title="Sessions", + items=[], + count_active=0, + count_total=0, + ), + DashboardTab.WORKTREES: app_dashboard.DashboardTabData( + tab=DashboardTab.WORKTREES, + title="Worktrees", + items=[], + count_active=0, + count_total=0, + ), + } - data = _load_all_tab_data() + data = _load_all_tab_data() - assert DashboardTab.STATUS in data - assert DashboardTab.CONTAINERS in data - assert DashboardTab.SESSIONS in data - assert DashboardTab.WORKTREES in data + assert DashboardTab.STATUS in data + assert DashboardTab.CONTAINERS in data + assert DashboardTab.SESSIONS in data + assert DashboardTab.WORKTREES in data diff --git a/tests/test_ui_formatters.py b/tests/test_ui_formatters.py index 4198787..a6bf10a 100644 --- a/tests/test_ui_formatters.py +++ b/tests/test_ui_formatters.py @@ -20,6 +20,7 @@ from scc_cli.contexts import WorkContext from scc_cli.docker.core import ContainerInfo from scc_cli.git import WorktreeInfo +from scc_cli.ports.session_models import SessionSummary from scc_cli.ui.formatters import ( _format_relative_time, _shorten_docker_status, @@ -31,6 +32,25 @@ ) +def _session_summary( + *, + name: str = "my-session", + workspace: str = "/tmp/project", + team: str | None = None, + last_used: str | None = None, + container_name: str | None = None, + branch: str | None = None, +) -> SessionSummary: + return SessionSummary( + name=name, + workspace=workspace, + team=team, + last_used=last_used, + container_name=container_name, + branch=branch, + ) + + class TestFormatTeam: """Test format_team function.""" @@ -172,7 +192,7 @@ class 
TestFormatSession: def test_formats_basic_session(self) -> None: """Format a basic session.""" - session = {"name": "my-session"} + session = _session_summary() item = format_session(session) assert item.label == "my-session" @@ -180,42 +200,36 @@ def test_formats_basic_session(self) -> None: def test_includes_team_in_description(self) -> None: """Include team name in description.""" - session = {"name": "my-session", "team": "platform"} + session = _session_summary(team="platform") item = format_session(session) assert "platform" in item.description def test_includes_branch_in_description(self) -> None: """Include branch name in description.""" - session = {"name": "my-session", "branch": "feature/auth"} + session = _session_summary(branch="feature/auth") item = format_session(session) assert "feature/auth" in item.description def test_includes_last_used_in_description(self) -> None: """Include last used time in description.""" - session = {"name": "my-session", "last_used": "2 hours ago"} - item = format_session(session) - - assert "2 hours ago" in item.description - - def test_exception_warning_governance_status(self) -> None: - """Session with exception warning has warning governance status.""" - session = {"name": "my-session", "has_exception_warning": True} + last_used = (datetime.now(timezone.utc) - timedelta(hours=2)).isoformat() + session = _session_summary(last_used=last_used) item = format_session(session) - assert item.governance_status == "warning" + assert "2h ago" in item.description - def test_no_exception_warning_no_governance_status(self) -> None: - """Session without exception warning has no governance status.""" - session = {"name": "my-session", "has_exception_warning": False} + def test_governance_status_is_none(self) -> None: + """Session format has no governance status.""" + session = _session_summary() item = format_session(session) assert item.governance_status is None def test_missing_name_uses_unnamed(self) -> None: """Missing name 
defaults to 'Unnamed'.""" - session = {} + session = _session_summary(name="") item = format_session(session) assert item.label == "Unnamed" diff --git a/tests/test_ui_integration.py b/tests/test_ui_integration.py index a597d41..310091e 100644 --- a/tests/test_ui_integration.py +++ b/tests/test_ui_integration.py @@ -9,14 +9,15 @@ from __future__ import annotations from io import StringIO -from typing import Any from unittest.mock import MagicMock, patch import pytest from rich.console import Console, RenderableType from scc_cli.application import dashboard as app_dashboard +from scc_cli.application.workspace import WorkspaceContext from scc_cli.docker.core import ContainerInfo +from scc_cli.ports.session_models import SessionListResult, SessionSummary from scc_cli.ui.dashboard import ( Dashboard, DashboardState, @@ -36,13 +37,19 @@ def _render_to_str(renderable: RenderableType) -> str: return console.file.getvalue() # type: ignore[union-attr] +def _mock_session_service(summaries: list[SessionSummary]) -> MagicMock: + service = MagicMock() + service.list_recent.return_value = SessionListResult.from_sessions(summaries) + return service + + def _status_item( label: str, description: str = "", *, action: app_dashboard.StatusAction | None = None, action_tab: DashboardTab | None = None, - session: dict[str, Any] | None = None, + session: SessionSummary | None = None, ) -> ListItem[app_dashboard.DashboardItem]: item = app_dashboard.StatusItem( label=label, @@ -69,9 +76,16 @@ def _container_item( def _session_item( label: str, description: str, - session: dict[str, object] | None = None, + session: SessionSummary | None = None, ) -> ListItem[app_dashboard.DashboardItem]: - session_data = session or {"name": label} + session_data = session or SessionSummary( + name=label, + workspace="", + team=None, + last_used=None, + container_name=None, + branch=None, + ) item = app_dashboard.SessionItem(label=label, description=description, session=session_data) return 
ListItem(value=item, label=label, description=description) @@ -139,7 +153,14 @@ def mock_tab_data(self) -> dict[DashboardTab, TabData]: _session_item( "session-1", "platform", - session={"name": "session-1", "team": "platform"}, + session=SessionSummary( + name="session-1", + workspace="", + team="platform", + last_used=None, + container_name=None, + branch=None, + ), ), ], count_active=1, @@ -410,6 +431,7 @@ def _reset_cli_module(self) -> None: modules_to_reset = [ "scc_cli.cli", "scc_cli.commands.launch.flow", + "scc_cli.application.workspace", "scc_cli.services.workspace", ] for module in modules_to_reset: @@ -419,9 +441,9 @@ def _reset_cli_module(self) -> None: @pytest.mark.xfail(reason="Test isolation issue - passes individually but fails in full suite") def test_cli_shows_dashboard_when_no_workspace_detected(self) -> None: """CLI shows dashboard when NOT in a valid workspace (e.g., $HOME).""" - # Mock resolve_launch_context to return None (no strong signal found) + # Mock resolve_workspace to return None (no strong signal found) # This simulates being outside a git repo and without .scc.yaml - with patch("scc_cli.services.workspace.resolve_launch_context", return_value=None): + with patch("scc_cli.application.workspace.resolve_workspace", return_value=None): with patch("scc_cli.ui.gate.is_interactive_allowed", return_value=True): with patch("scc_cli.ui.dashboard.run_dashboard") as mock_dashboard: # Import after patching @@ -454,7 +476,10 @@ def test_cli_invokes_start_when_workspace_detected(self) -> None: is_suspicious=False, reason="git repo detected", ) - with patch("scc_cli.services.workspace.resolve_launch_context", return_value=mock_result): + with patch( + "scc_cli.application.workspace.resolve_workspace", + return_value=WorkspaceContext(mock_result), + ): with patch("scc_cli.ui.gate.is_interactive_allowed", return_value=True): # Import after patching from scc_cli.cli import main_callback @@ -699,13 +724,15 @@ class TestTabDataLoading: def 
test_load_all_tab_data_returns_all_tabs(self) -> None: """_load_all_tab_data returns data for all tabs.""" with patch("scc_cli.config.load_user_config") as mock_config: - with patch("scc_cli.sessions.list_recent") as mock_sessions: + with patch( + "scc_cli.sessions.get_session_service", + return_value=_mock_session_service([]), + ): with patch("scc_cli.docker.core.list_scc_containers") as mock_docker: with patch( "scc_cli.services.git.worktree.get_worktrees_data" ) as mock_worktrees: mock_config.return_value = {} - mock_sessions.return_value = [] mock_docker.return_value = [] mock_worktrees.return_value = [] @@ -754,7 +781,14 @@ def resource_tab_data(self) -> dict[DashboardTab, TabData]: _session_item( "session-1", "platform", - session={"name": "session-1", "team": "platform"}, + session=SessionSummary( + name="session-1", + workspace="", + team="platform", + last_used=None, + container_name=None, + branch=None, + ), ) ], count_active=1, @@ -863,7 +897,14 @@ def test_enter_on_session_tab_raises_resume( _session_item( "session-1", "platform", - session={"id": "s1", "name": "session-1"}, + session=SessionSummary( + name="session-1", + workspace="", + team=None, + last_used=None, + container_name=None, + branch=None, + ), ) ], count_active=1, diff --git a/tests/test_ui_picker.py b/tests/test_ui_picker.py index 013e690..5f8a6c0 100644 --- a/tests/test_ui_picker.py +++ b/tests/test_ui_picker.py @@ -13,6 +13,7 @@ from unittest.mock import MagicMock, patch +from scc_cli.ports.session_models import SessionSummary from scc_cli.ui.keys import Action, ActionType from scc_cli.ui.list_screen import ListItem from scc_cli.ui.picker import ( @@ -27,6 +28,22 @@ ) +def _session_summary( + *, + name: str, + team: str | None = None, + branch: str | None = None, +) -> SessionSummary: + return SessionSummary( + name=name, + workspace=f"/workspace/{name}", + team=team, + last_used=None, + container_name=None, + branch=branch, + ) + + class TestPickTeam: """Test pick_team() function.""" 
@@ -292,8 +309,8 @@ def test_empty_sessions_returns_none(self) -> None: def test_sessions_converted_to_list_items(self) -> None: """Sessions are converted using format_session formatter.""" sessions = [ - {"name": "session-1", "team": "platform", "branch": "main"}, - {"name": "session-2", "team": "backend", "branch": "feature"}, + _session_summary(name="session-1", team="platform", branch="main"), + _session_summary(name="session-2", team="backend", branch="feature"), ] with patch("scc_cli.ui.picker._run_single_select_picker") as mock_picker: @@ -307,7 +324,7 @@ def test_sessions_converted_to_list_items(self) -> None: def test_default_subtitle_uses_count(self) -> None: """Default subtitle shows session count.""" - sessions = [{"name": "s1"}] + sessions = [_session_summary(name="s1")] with patch("scc_cli.ui.picker._run_single_select_picker") as mock_picker: mock_picker.return_value = None diff --git a/tests/test_ui_wizard.py b/tests/test_ui_wizard.py index 5a04845..453a373 100644 --- a/tests/test_ui_wizard.py +++ b/tests/test_ui_wizard.py @@ -24,6 +24,8 @@ from pathlib import Path from unittest.mock import patch +from scc_cli.application.launch.start_wizard import TeamRepoOption +from scc_cli.ports.session_models import SessionSummary from scc_cli.ui.wizard import ( BACK, WorkspaceSource, @@ -36,6 +38,17 @@ ) +def _session_summary(workspace: str, last_used: str) -> SessionSummary: + return SessionSummary( + name=Path(workspace).name or "session", + workspace=workspace, + team=None, + last_used=last_used, + container_name=None, + branch=None, + ) + + class TestBackSentinel: """Test BACK sentinel behavior.""" @@ -400,7 +413,7 @@ class TestPickRecentWorkspaceSubScreen: def test_escape_returns_back(self) -> None: """Esc on sub-screen returns BACK (go back to previous screen).""" - recent = [{"workspace": "/project", "last_used": "2025-01-01T00:00:00Z"}] + recent = [_session_summary("/project", "2025-01-01T00:00:00Z")] with 
patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: # With allow_back=True, underlying picker returns BACK for Esc @@ -413,7 +426,7 @@ def test_escape_returns_back(self) -> None: def test_quit_returns_none(self) -> None: """Q on sub-screen returns None (quit app entirely).""" - recent = [{"workspace": "/project", "last_used": "2025-01-01T00:00:00Z"}] + recent = [_session_summary("/project", "2025-01-01T00:00:00Z")] with patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: # With allow_back=True, underlying picker returns None for q @@ -425,7 +438,7 @@ def test_quit_returns_none(self) -> None: def test_back_menu_item_returns_back(self) -> None: """Selecting '← Back' menu item returns BACK.""" - recent = [{"workspace": "/project", "last_used": "2025-01-01T00:00:00Z"}] + recent = [_session_summary("/project", "2025-01-01T00:00:00Z")] with patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: mock_picker.return_value = BACK # User selected "← Back" @@ -436,7 +449,7 @@ def test_back_menu_item_returns_back(self) -> None: def test_selection_returns_workspace_path(self) -> None: """Valid selection returns workspace path string.""" - recent = [{"workspace": "/project/myapp", "last_used": "2025-01-01T00:00:00Z"}] + recent = [_session_summary("/project/myapp", "2025-01-01T00:00:00Z")] with patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: mock_picker.return_value = "/project/myapp" @@ -448,7 +461,7 @@ def test_selection_returns_workspace_path(self) -> None: def test_includes_back_as_first_item(self) -> None: """Back item is first in the list.""" - recent = [{"workspace": "/project", "last_used": "2025-01-01T00:00:00Z"}] + recent = [_session_summary("/project", "2025-01-01T00:00:00Z")] with patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: mock_picker.return_value = None @@ -540,7 +553,12 @@ def test_existing_local_path_returns_path(self) -> None: repos = [{"name": "api", "url": 
"https://github.com/org/api", "local_path": "/tmp"}] with patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: - mock_picker.return_value = repos[0] # User selected repo dict + mock_picker.return_value = TeamRepoOption( + name="api", + description="", + url="https://github.com/org/api", + local_path="/tmp", + ) result = pick_team_repo(repos) @@ -709,7 +727,7 @@ def test_top_level_cancel_is_none_not_back(self) -> None: def test_subscreen_escape_returns_back(self) -> None: """CRITICAL: Sub-screen Esc must return BACK (go back to previous).""" - recent = [{"workspace": "/tmp", "last_used": "2025-01-01T00:00:00Z"}] + recent = [_session_summary("/tmp", "2025-01-01T00:00:00Z")] with patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: # With allow_back=True, underlying picker returns BACK for Esc @@ -722,7 +740,7 @@ def test_subscreen_escape_returns_back(self) -> None: def test_subscreen_quit_returns_none(self) -> None: """CRITICAL: Sub-screen q must return None (quit app entirely).""" - recent = [{"workspace": "/tmp", "last_used": "2025-01-01T00:00:00Z"}] + recent = [_session_summary("/tmp", "2025-01-01T00:00:00Z")] with patch("scc_cli.ui.wizard._run_single_select_picker") as mock_picker: # With allow_back=True, underlying picker returns None for q @@ -750,7 +768,7 @@ def wizard_step() -> str | None: # Simulate Esc on sub-screen → BACK mock_picker.return_value = BACK result = pick_recent_workspace( - [{"workspace": "/tmp", "last_used": "2025-01-01T00:00:00Z"}] + [_session_summary("/tmp", "2025-01-01T00:00:00Z")] ) if result is None: return None # User pressed q - quit app diff --git a/tests/test_workspace_resolution.py b/tests/test_workspace_resolution.py new file mode 100644 index 0000000..27988e3 --- /dev/null +++ b/tests/test_workspace_resolution.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from pathlib import Path +from unittest.mock import patch + +from scc_cli.application.workspace import ResolveWorkspaceRequest, 
resolve_workspace + + +def test_resolve_explicit_workspace_arg(tmp_path: Path) -> None: + workspace = tmp_path / "project" + workspace.mkdir() + + result = resolve_workspace(ResolveWorkspaceRequest(cwd=tmp_path, workspace_arg="project")) + + assert result is not None + assert result.workspace_root == workspace.resolve() + assert result.entry_dir == tmp_path.resolve() + assert result.is_auto_detected is False + assert result.reason.startswith("Explicit --workspace") + + +def test_resolve_prefers_git_over_scc_yaml(tmp_path: Path) -> None: + project = tmp_path / "project" + project.mkdir() + (project / ".scc.yaml").write_text("# config") + + with patch("scc_cli.services.workspace.resolver._detect_git_root", return_value=project): + result = resolve_workspace(ResolveWorkspaceRequest(cwd=project, workspace_arg=None)) + + assert result is not None + assert result.workspace_root == project + assert result.reason.startswith("Git repository detected") + + +def test_resolve_uses_scc_yaml_when_no_git(tmp_path: Path) -> None: + project = tmp_path / "project" + project.mkdir() + (project / ".scc.yaml").write_text("# config") + + with patch("scc_cli.services.workspace.resolver._detect_git_root", return_value=None): + result = resolve_workspace(ResolveWorkspaceRequest(cwd=project, workspace_arg=None)) + + assert result is not None + assert result.workspace_root == project + assert result.reason.startswith(".scc.yaml found") + + +def test_resolve_returns_none_without_workspace(tmp_path: Path) -> None: + with patch("scc_cli.services.workspace.resolver._detect_git_root", return_value=None): + result = resolve_workspace(ResolveWorkspaceRequest(cwd=tmp_path, workspace_arg=None)) + + assert result is None diff --git a/tests/test_workspace_validation.py b/tests/test_workspace_validation.py new file mode 100644 index 0000000..6ac4fb4 --- /dev/null +++ b/tests/test_workspace_validation.py @@ -0,0 +1,127 @@ +from __future__ import annotations + +from pathlib import Path +from unittest.mock 
import patch + +import pytest + +from scc_cli.application.workspace import validate_workspace +from scc_cli.core.errors import UsageError +from scc_cli.ports.platform_probe import PlatformProbe + + +class FakePlatformProbe(PlatformProbe): + def __init__(self, is_wsl2: bool, is_optimal: bool) -> None: + self._is_wsl2 = is_wsl2 + self._is_optimal = is_optimal + + def is_wsl2(self) -> bool: + return self._is_wsl2 + + def check_path_performance(self, path: Path) -> tuple[bool, str | None]: + if self._is_optimal: + return True, None + return False, "warning" + + +def test_validate_workspace_returns_none_when_unset() -> None: + result = validate_workspace( + None, + allow_suspicious=False, + interactive_allowed=False, + platform_probe=FakePlatformProbe(is_wsl2=False, is_optimal=True), + ) + + assert result is None + + +def test_validate_workspace_suspicious_interactive(tmp_path: Path) -> None: + workspace = tmp_path / "project" + workspace.mkdir() + + with ( + patch("scc_cli.application.workspace.use_cases.is_suspicious_directory", return_value=True), + patch( + "scc_cli.application.workspace.use_cases.get_suspicious_reason", return_value="Reason" + ), + ): + result = validate_workspace( + str(workspace), + allow_suspicious=False, + interactive_allowed=True, + platform_probe=FakePlatformProbe(is_wsl2=False, is_optimal=True), + ) + + assert result is not None + assert result.workspace_path == workspace.resolve() + assert len(result.steps) == 1 + step = result.steps[0] + assert step.warning.title == "Suspicious Workspace" + assert step.warning.message == "Reason" + assert step.warning.emit_stderr is False + assert step.confirm_request is not None + assert step.confirm_request.prompt == "Continue anyway?" 
+ + +def test_validate_workspace_suspicious_allow_flag(tmp_path: Path) -> None: + workspace = tmp_path / "project" + workspace.mkdir() + + with ( + patch("scc_cli.application.workspace.use_cases.is_suspicious_directory", return_value=True), + patch( + "scc_cli.application.workspace.use_cases.get_suspicious_reason", return_value="Reason" + ), + ): + result = validate_workspace( + str(workspace), + allow_suspicious=True, + interactive_allowed=True, + platform_probe=FakePlatformProbe(is_wsl2=False, is_optimal=True), + ) + + assert result is not None + assert len(result.steps) == 1 + step = result.steps[0] + assert step.warning.emit_stderr is True + assert step.confirm_request is None + + +def test_validate_workspace_suspicious_non_interactive_raises(tmp_path: Path) -> None: + workspace = tmp_path / "project" + workspace.mkdir() + + with ( + patch("scc_cli.application.workspace.use_cases.is_suspicious_directory", return_value=True), + patch( + "scc_cli.application.workspace.use_cases.get_suspicious_reason", return_value="Reason" + ), + ): + with pytest.raises(UsageError) as excinfo: + validate_workspace( + str(workspace), + allow_suspicious=False, + interactive_allowed=False, + platform_probe=FakePlatformProbe(is_wsl2=False, is_optimal=True), + ) + + assert "Refusing to start in suspicious directory" in str(excinfo.value) + + +def test_validate_workspace_wsl_warning_interactive(tmp_path: Path) -> None: + workspace = tmp_path / "project" + workspace.mkdir() + + result = validate_workspace( + str(workspace), + allow_suspicious=False, + interactive_allowed=True, + platform_probe=FakePlatformProbe(is_wsl2=True, is_optimal=False), + ) + + assert result is not None + assert len(result.steps) == 1 + step = result.steps[0] + assert step.warning.title == "Performance Warning" + assert step.warning.emit_stderr is True + assert step.confirm_request is not None diff --git a/tests/test_worktree_cli.py b/tests/test_worktree_cli.py index 5014ddd..8bfa3e3 100644 --- 
a/tests/test_worktree_cli.py +++ b/tests/test_worktree_cli.py @@ -19,6 +19,41 @@ from scc_cli.git import WorktreeInfo from scc_cli.ui import render_worktrees + +@pytest.fixture +def worktree_command_dependencies(worktree_dependencies, monkeypatch): + """Patch worktree command dependencies for CLI tests.""" + dependencies, adapters = worktree_dependencies + dependencies.git_client.is_git_repo.return_value = True + dependencies.git_client.has_commits.return_value = True + dependencies.git_client.list_worktrees.return_value = [] + dependencies.git_client.find_worktree_by_query.return_value = (None, []) + dependencies.git_client.list_branches_without_worktrees.return_value = [] + dependencies.git_client.find_main_worktree.return_value = None + dependencies.git_client.get_default_branch.return_value = "main" + monkeypatch.setattr( + "scc_cli.commands.worktree.worktree_commands._build_worktree_dependencies", + lambda: (dependencies, adapters), + ) + return dependencies, adapters + + +def _summary(path: Path, branch: str = "main", status: str = "clean"): + from scc_cli.application.worktree import WorktreeSummary + + return WorktreeSummary( + path=path, + branch=branch, + status=status, + is_current=False, + has_changes=False, + staged_count=0, + modified_count=0, + untracked_count=0, + status_timed_out=False, + ) + + # ═══════════════════════════════════════════════════════════════════════════════ # Tests for Worktree CLI Structure # ═══════════════════════════════════════════════════════════════════════════════ @@ -63,17 +98,26 @@ def test_worktree_app_has_remove_command(self) -> None: class TestWorktreeCreate: """Test scc worktree create command.""" - def test_create_calls_ui_create_worktree(self, tmp_path: Path) -> None: - """create should call ui.create_worktree with correct args.""" + def test_create_calls_ui_create_worktree( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: + """create should call the worktree use case with correct args.""" + from 
scc_cli.application.worktree import WorktreeCreateResult from scc_cli.commands.worktree import worktree_create_cmd with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.git.has_commits", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.create_worktree") as mock_create, + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.create_worktree" + ) as mock_create, patch("scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=False), ): - mock_create.return_value = tmp_path / "worktrees" / "feature" + mock_create.return_value = WorktreeCreateResult( + worktree_path=tmp_path / "worktrees" / "feature", + worktree_name="feature", + branch_name="scc/feature", + base_branch="main", + dependencies_installed=True, + ) try: worktree_create_cmd( workspace=str(tmp_path), @@ -86,20 +130,29 @@ def test_create_calls_ui_create_worktree(self, tmp_path: Path) -> None: pass mock_create.assert_called_once() - call_args = mock_create.call_args - assert call_args[0][1] == "feature" - - def test_create_with_base_branch(self, tmp_path: Path) -> None: - """create with --base should pass branch to ui.create_worktree.""" + request = mock_create.call_args.args[0] + assert request.name == "feature" + assert request.base_branch is None + assert request.workspace_path == tmp_path + + def test_create_with_base_branch(self, tmp_path: Path, worktree_command_dependencies) -> None: + """create with --base should pass branch to use case.""" + from scc_cli.application.worktree import WorktreeCreateResult from scc_cli.commands.worktree import worktree_create_cmd with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.git.has_commits", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.create_worktree") as mock_create, + patch( + 
"scc_cli.commands.worktree.worktree_commands.worktree_use_cases.create_worktree" + ) as mock_create, patch("scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=False), ): - mock_create.return_value = tmp_path / "worktrees" / "feature" + mock_create.return_value = WorktreeCreateResult( + worktree_path=tmp_path / "worktrees" / "feature", + worktree_name="feature", + branch_name="scc/feature", + base_branch="develop", + dependencies_installed=True, + ) try: worktree_create_cmd( workspace=str(tmp_path), @@ -111,19 +164,19 @@ def test_create_with_base_branch(self, tmp_path: Path) -> None: except click.exceptions.Exit: pass - call_args = mock_create.call_args - assert call_args[0][2] == "develop" + request = mock_create.call_args.args[0] + assert request.base_branch == "develop" - def test_create_raises_for_non_repo(self, tmp_path: Path) -> None: + def test_create_raises_for_non_repo( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: """create should exit with error for non-git directories in non-interactive mode.""" from scc_cli.commands.worktree import worktree_create_cmd - with ( - patch( - "scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=False - ), - patch("scc_cli.cli_helpers.is_interactive", return_value=False), - ): + dependencies = worktree_command_dependencies[0] + dependencies.git_client.is_git_repo.return_value = False + + with patch("scc_cli.cli_helpers.is_interactive", return_value=False): # @handle_errors decorator converts NotAGitRepoError to typer.Exit(4) with pytest.raises(click.exceptions.Exit) as exc_info: worktree_create_cmd( @@ -144,17 +197,23 @@ def test_create_raises_for_non_repo(self, tmp_path: Path) -> None: class TestWorktreeList: """Test scc worktree list command.""" - def test_list_calls_ui_list_worktrees(self, tmp_path: Path) -> None: - """list should call ui.list_worktrees.""" + def test_list_calls_ui_list_worktrees( + self, tmp_path: Path, worktree_command_dependencies + ) -> 
None: + """list should call the worktree list use case.""" + from scc_cli.application.worktree import WorktreeListResult from scc_cli.commands.worktree import worktree_list_cmd with ( - patch("scc_cli.commands.worktree.worktree_commands.list_worktrees") as mock_list, + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.list_worktrees" + ) as mock_list, patch("scc_cli.commands.worktree.worktree_commands.render_worktrees"), ): - mock_list.return_value = [ - WorktreeInfo(path=str(tmp_path), branch="main", status="clean") - ] + mock_list.return_value = WorktreeListResult( + workspace_path=tmp_path, + worktrees=(_summary(tmp_path, branch="main", status="clean"),), + ) try: worktree_list_cmd( workspace=str(tmp_path), @@ -165,18 +224,26 @@ def test_list_calls_ui_list_worktrees(self, tmp_path: Path) -> None: pass mock_list.assert_called_once() + request = mock_list.call_args.args[0] + assert request.workspace_path == tmp_path - def test_list_json_has_correct_kind(self, tmp_path: Path, capsys) -> None: + def test_list_json_has_correct_kind( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """list --json should output JSON with kind=WorktreeList.""" + from scc_cli.application.worktree import WorktreeListResult from scc_cli.commands.worktree import worktree_list_cmd with ( - patch("scc_cli.commands.worktree.worktree_commands.list_worktrees") as mock_list, + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.list_worktrees" + ) as mock_list, patch("scc_cli.commands.worktree.worktree_commands.render_worktrees"), ): - mock_list.return_value = [ - WorktreeInfo(path=str(tmp_path), branch="main", status="clean") - ] + mock_list.return_value = WorktreeListResult( + workspace_path=tmp_path, + worktrees=(_summary(tmp_path, branch="main", status="clean"),), + ) try: worktree_list_cmd( workspace=str(tmp_path), @@ -191,20 +258,28 @@ def test_list_json_has_correct_kind(self, tmp_path: Path, capsys) -> None: assert 
output["kind"] == "WorktreeList" assert output["apiVersion"] == "scc.cli/v1" - def test_list_json_contains_worktrees(self, tmp_path: Path, capsys) -> None: + def test_list_json_contains_worktrees( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """list --json should contain worktree data.""" + from scc_cli.application.worktree import WorktreeListResult from scc_cli.commands.worktree import worktree_list_cmd - worktrees = [ - WorktreeInfo(path=str(tmp_path), branch="main", status="clean"), - WorktreeInfo(path=str(tmp_path / "feature"), branch="feature/x", status="clean"), - ] + worktrees = ( + _summary(tmp_path, branch="main", status="clean"), + _summary(tmp_path / "feature", branch="feature/x", status="clean"), + ) with ( - patch("scc_cli.commands.worktree.worktree_commands.list_worktrees") as mock_list, + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.list_worktrees" + ) as mock_list, patch("scc_cli.commands.worktree.worktree_commands.render_worktrees"), ): - mock_list.return_value = worktrees + mock_list.return_value = WorktreeListResult( + workspace_path=tmp_path, + worktrees=worktrees, + ) try: worktree_list_cmd( workspace=str(tmp_path), @@ -219,15 +294,23 @@ def test_list_json_contains_worktrees(self, tmp_path: Path, capsys) -> None: assert "worktrees" in output["data"] assert len(output["data"]["worktrees"]) == 2 - def test_list_json_empty_worktrees(self, tmp_path: Path, capsys) -> None: + def test_list_json_empty_worktrees( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """list --json with no worktrees should return empty array.""" + from scc_cli.application.worktree import WorktreeListResult from scc_cli.commands.worktree import worktree_list_cmd with ( - patch("scc_cli.commands.worktree.worktree_commands.list_worktrees") as mock_list, + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.list_worktrees" + ) as mock_list, 
patch("scc_cli.commands.worktree.worktree_commands.render_worktrees"), ): - mock_list.return_value = [] + mock_list.return_value = WorktreeListResult( + workspace_path=tmp_path, + worktrees=(), + ) try: worktree_list_cmd( workspace=str(tmp_path), @@ -367,102 +450,85 @@ def test_switch_app_has_switch_command(self) -> None: command_names = [cmd.name for cmd in worktree_app.registered_commands] assert "switch" in command_names - def test_switch_dash_uses_oldpwd(self, tmp_path: Path, capsys) -> None: + def test_switch_dash_uses_oldpwd( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """switch - should print $OLDPWD.""" from scc_cli.commands.worktree import worktree_switch_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch.dict("os.environ", {"OLDPWD": "/previous/path"}), - ): + with patch.dict("os.environ", {"OLDPWD": "/previous/path"}): worktree_switch_cmd(target="-", workspace=str(tmp_path)) captured = capsys.readouterr() assert captured.out.strip() == "/previous/path" - def test_switch_dash_without_oldpwd_exits(self, tmp_path: Path) -> None: + def test_switch_dash_without_oldpwd_exits( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: """switch - without $OLDPWD should exit with error.""" from scc_cli.commands.worktree import worktree_switch_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch.dict("os.environ", {}, clear=True), - ): + with patch.dict("os.environ", {}, clear=True): with pytest.raises(click.exceptions.Exit) as exc_info: worktree_switch_cmd(target="-", workspace=str(tmp_path)) assert exc_info.value.exit_code == 1 - def test_switch_caret_uses_main_worktree(self, tmp_path: Path, capsys) -> None: + def test_switch_caret_uses_main_worktree( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """switch ^ should print main branch worktree path.""" from 
scc_cli.commands.worktree import worktree_switch_cmd - main_wt = WorktreeInfo(path="/repo/main", branch="main", status="clean") - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_main_worktree", - return_value=main_wt, - ), - ): - worktree_switch_cmd(target="^", workspace=str(tmp_path)) + dependencies = worktree_command_dependencies[0] + dependencies.git_client.find_main_worktree.return_value = WorktreeInfo( + path="/repo/main", branch="main", status="clean" + ) + + worktree_switch_cmd(target="^", workspace=str(tmp_path)) captured = capsys.readouterr() assert captured.out.strip() == "/repo/main" - def test_switch_caret_without_main_worktree_exits(self, tmp_path: Path) -> None: + def test_switch_caret_without_main_worktree_exits( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: """switch ^ without main worktree should exit with error.""" from scc_cli.commands.worktree import worktree_switch_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_main_worktree", - return_value=None, - ), - patch( - "scc_cli.commands.worktree.worktree_commands.git.get_default_branch", - return_value="main", - ), - ): - with pytest.raises(click.exceptions.Exit) as exc_info: - worktree_switch_cmd(target="^", workspace=str(tmp_path)) - assert exc_info.value.exit_code == 1 + dependencies = worktree_command_dependencies[0] + dependencies.git_client.find_main_worktree.return_value = None + dependencies.git_client.get_default_branch.return_value = "main" - def test_switch_fuzzy_match_exact(self, tmp_path: Path, capsys) -> None: + with pytest.raises(click.exceptions.Exit) as exc_info: + worktree_switch_cmd(target="^", workspace=str(tmp_path)) + assert exc_info.value.exit_code == 1 + + def test_switch_fuzzy_match_exact( + self, tmp_path: Path, capsys, 
worktree_command_dependencies + ) -> None: """switch with exact match should print worktree path.""" from scc_cli.commands.worktree import worktree_switch_cmd + dependencies = worktree_command_dependencies[0] feature_wt = WorktreeInfo(path="/repo/feature", branch="feature", status="clean") - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_worktree_by_query", - return_value=(feature_wt, [feature_wt]), - ), - ): - worktree_switch_cmd(target="feature", workspace=str(tmp_path)) + dependencies.git_client.find_worktree_by_query.return_value = (feature_wt, [feature_wt]) + + worktree_switch_cmd(target="feature", workspace=str(tmp_path)) captured = capsys.readouterr() assert captured.out.strip() == "/repo/feature" - def test_switch_no_match_exits(self, tmp_path: Path) -> None: + def test_switch_no_match_exits(self, tmp_path: Path, worktree_command_dependencies) -> None: """switch with no match should exit with error.""" from scc_cli.commands.worktree import worktree_switch_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_worktree_by_query", - return_value=(None, []), - ), - patch( - "scc_cli.commands.worktree.worktree_commands.git.list_branches_without_worktrees", - return_value=[], - ), - ): - with pytest.raises(click.exceptions.Exit) as exc_info: - worktree_switch_cmd(target="nonexistent", workspace=str(tmp_path)) - assert exc_info.value.exit_code == 1 + dependencies = worktree_command_dependencies[0] + dependencies.git_client.find_worktree_by_query.return_value = (None, []) + dependencies.git_client.list_branches_without_worktrees.return_value = [] + + with pytest.raises(click.exceptions.Exit) as exc_info: + worktree_switch_cmd(target="nonexistent", workspace=str(tmp_path)) + assert exc_info.value.exit_code == 1 # 
═══════════════════════════════════════════════════════════════════════════════ @@ -479,19 +545,17 @@ class TestWorktreeSwitchStdoutPurity: - Cancel: stdout = empty """ - def test_success_stdout_is_exactly_path(self, tmp_path: Path, capsys) -> None: + def test_success_stdout_is_exactly_path( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """On success, stdout should be exactly the path with one newline.""" from scc_cli.commands.worktree import worktree_switch_cmd + dependencies = worktree_command_dependencies[0] feature_wt = WorktreeInfo(path="/repo/feature", branch="feature", status="clean") - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_worktree_by_query", - return_value=(feature_wt, [feature_wt]), - ), - ): - worktree_switch_cmd(target="feature", workspace=str(tmp_path)) + dependencies.git_client.find_worktree_by_query.return_value = (feature_wt, [feature_wt]) + + worktree_switch_cmd(target="feature", workspace=str(tmp_path)) captured = capsys.readouterr() # stdout should be exactly the path with single newline (from print()) @@ -499,36 +563,30 @@ def test_success_stdout_is_exactly_path(self, tmp_path: Path, capsys) -> None: # No extra lines, no trailing spaces assert captured.out.count("\n") == 1 - def test_error_stdout_is_empty(self, tmp_path: Path, capsys) -> None: + def test_error_stdout_is_empty( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """On error (not found), stdout should be empty.""" from scc_cli.commands.worktree import worktree_switch_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_worktree_by_query", - return_value=(None, []), - ), - patch( - "scc_cli.commands.worktree.worktree_commands.git.list_branches_without_worktrees", - return_value=[], - ), - ): - 
with pytest.raises(click.exceptions.Exit): - worktree_switch_cmd(target="nonexistent", workspace=str(tmp_path)) + dependencies = worktree_command_dependencies[0] + dependencies.git_client.find_worktree_by_query.return_value = (None, []) + dependencies.git_client.list_branches_without_worktrees.return_value = [] + + with pytest.raises(click.exceptions.Exit): + worktree_switch_cmd(target="nonexistent", workspace=str(tmp_path)) captured = capsys.readouterr() # stdout must be empty - all error output goes to stderr via console assert captured.out == "" - def test_dash_shortcut_stdout_is_exactly_path(self, tmp_path: Path, capsys) -> None: + def test_dash_shortcut_stdout_is_exactly_path( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """The '-' shortcut should print OLDPWD to stdout.""" from scc_cli.commands.worktree import worktree_switch_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch.dict("os.environ", {"OLDPWD": "/previous/path"}), - ): + with patch.dict("os.environ", {"OLDPWD": "/previous/path"}): worktree_switch_cmd(target="-", workspace=str(tmp_path)) captured = capsys.readouterr() @@ -542,11 +600,17 @@ class TestWorktreeSelectStdoutPurity: In non-interactive mode, error messages go to stderr, stdout stays clean. 
""" - def test_select_no_worktrees_has_actionable_error(self, tmp_path: Path, capsys) -> None: + def test_select_no_worktrees_has_actionable_error( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """On error (no worktrees), output should have actionable message.""" from scc_cli.commands.worktree import worktree_select_cmd from scc_cli.ui.gate import InteractivityContext, InteractivityMode + dependencies = worktree_command_dependencies[0] + dependencies.git_client.list_worktrees.return_value = [] + dependencies.git_client.list_branches_without_worktrees.return_value = [] + # Create a context that disallows prompts (non-interactive mode) mock_ctx = InteractivityContext( mode=InteractivityMode.NON_INTERACTIVE, @@ -554,15 +618,7 @@ def test_select_no_worktrees_has_actionable_error(self, tmp_path: Path, capsys) force_yes=False, ) - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.list_worktrees", return_value=[]), - patch( - "scc_cli.commands.worktree.worktree_commands.git.list_branches_without_worktrees", - return_value=[], - ), - patch("scc_cli.ui.gate.InteractivityContext.create", return_value=mock_ctx), - ): + with patch("scc_cli.ui.gate.InteractivityContext.create", return_value=mock_ctx): with pytest.raises((click.exceptions.Exit, SystemExit)) as exc_info: worktree_select_cmd(workspace=str(tmp_path), branches=False) # Should exit with error @@ -581,24 +637,22 @@ def test_select_no_worktrees_has_actionable_error(self, tmp_path: Path, capsys) class TestWorktreeListJsonContract: """Ensure worktree list --json outputs valid JSON.""" - def test_list_json_output_is_valid(self, tmp_path: Path) -> None: + def test_list_json_output_is_valid(self, tmp_path: Path, worktree_command_dependencies) -> None: """--json flag should output valid JSON.""" from typer.testing import CliRunner + from scc_cli.application.worktree import WorktreeListResult from 
scc_cli.cli import app runner = CliRunner() - worktrees = [ - WorktreeInfo(path=str(tmp_path / "main"), branch="main", status="clean"), - WorktreeInfo(path=str(tmp_path / "feature"), branch="feature", status="clean"), - ] - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.list_worktrees", - return_value=worktrees, - ), + worktrees = ( + _summary(tmp_path / "main", branch="main", status="clean"), + _summary(tmp_path / "feature", branch="feature", status="clean"), + ) + with patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.list_worktrees", + return_value=WorktreeListResult(workspace_path=tmp_path, worktrees=worktrees), ): # CLI structure: scc worktree [group-workspace] list [options] result = runner.invoke(app, ["worktree", str(tmp_path), "list", "--json"]) @@ -610,21 +664,21 @@ def test_list_json_output_is_valid(self, tmp_path: Path) -> None: data = json.loads(result.output) # Should parse without error assert "WorktreeList" in data.get("kind", "") or "worktree" in data.get("kind", "").lower() - def test_list_json_contains_worktree_data(self, tmp_path: Path) -> None: + def test_list_json_contains_worktree_data( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: """--json output should contain worktree data.""" from typer.testing import CliRunner + from scc_cli.application.worktree import WorktreeListResult from scc_cli.cli import app runner = CliRunner() - worktrees = [WorktreeInfo(path=str(tmp_path / "main"), branch="main", status="")] - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.list_worktrees", - return_value=worktrees, - ), + worktrees = (_summary(tmp_path / "main", branch="main", status=""),) + with patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.list_worktrees", + 
return_value=WorktreeListResult(workspace_path=tmp_path, worktrees=worktrees), ): # CLI structure: scc worktree [group-workspace] list [options] result = runner.invoke(app, ["worktree", str(tmp_path), "list", "--json"]) @@ -651,17 +705,16 @@ def test_select_app_has_select_command(self) -> None: command_names = [cmd.name for cmd in worktree_app.registered_commands] assert "select" in command_names - def test_select_no_worktrees_exits(self, tmp_path: Path) -> None: + def test_select_no_worktrees_exits(self, tmp_path: Path, worktree_command_dependencies) -> None: """select with no worktrees should exit with error.""" from scc_cli.commands.worktree import worktree_select_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.list_worktrees", return_value=[]), - ): - with pytest.raises(click.exceptions.Exit) as exc_info: - worktree_select_cmd(workspace=str(tmp_path), branches=False) - assert exc_info.value.exit_code == 1 + dependencies = worktree_command_dependencies[0] + dependencies.git_client.list_worktrees.return_value = [] + + with pytest.raises(click.exceptions.Exit) as exc_info: + worktree_select_cmd(workspace=str(tmp_path), branches=False) + assert exc_info.value.exit_code == 1 # ═══════════════════════════════════════════════════════════════════════════════ @@ -820,17 +873,21 @@ class TestWorktreeListVerbose: This prevents the flag from becoming a no-op during refactoring. 
""" - def test_list_passes_verbose_to_ui(self, tmp_path: Path) -> None: - """list --verbose should pass verbose=True to ui.list_worktrees.""" + def test_list_passes_verbose_to_ui(self, tmp_path: Path, worktree_command_dependencies) -> None: + """list --verbose should pass verbose=True to worktree use case.""" + from scc_cli.application.worktree import WorktreeListResult from scc_cli.commands.worktree import worktree_list_cmd with ( - patch("scc_cli.commands.worktree.worktree_commands.list_worktrees") as mock_list, + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.list_worktrees" + ) as mock_list, patch("scc_cli.commands.worktree.worktree_commands.render_worktrees"), ): - mock_list.return_value = [ - WorktreeInfo(path=str(tmp_path), branch="main", status="clean") - ] + mock_list.return_value = WorktreeListResult( + workspace_path=tmp_path, + worktrees=(_summary(tmp_path, branch="main", status="clean"),), + ) try: worktree_list_cmd( workspace=str(tmp_path), @@ -842,8 +899,8 @@ def test_list_passes_verbose_to_ui(self, tmp_path: Path) -> None: pass mock_list.assert_called_once() - call_kwargs = mock_list.call_args[1] - assert call_kwargs.get("verbose") is True + request = mock_list.call_args.args[0] + assert request.verbose is True def test_verbose_triggers_get_worktree_status(self, tmp_path: Path) -> None: """list --verbose MUST call get_worktree_status() for each worktree. 
@@ -1015,23 +1072,22 @@ def test_enter_command_exists(self) -> None: commands = {cmd.name for cmd in worktree_app.registered_commands} assert "enter" in commands - def test_enter_opens_subshell_in_worktree(self, tmp_path: Path, capsys) -> None: + def test_enter_opens_subshell_in_worktree( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """Enter should open subshell in the worktree directory.""" from scc_cli.commands.worktree import worktree_enter_cmd + dependencies = worktree_command_dependencies[0] worktree = WorktreeInfo( path=str(tmp_path / "feature-auth"), branch="scc/feature-auth", status="", ) (tmp_path / "feature-auth").mkdir() + dependencies.git_client.find_worktree_by_query.return_value = (worktree, [worktree]) with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_worktree_by_query", - return_value=(worktree, [worktree]), - ), patch("subprocess.run") as mock_run, patch.dict("os.environ", {"SHELL": "/bin/bash"}), ): @@ -1042,21 +1098,20 @@ def test_enter_opens_subshell_in_worktree(self, tmp_path: Path, capsys) -> None: call_kwargs = mock_run.call_args[1] assert call_kwargs["cwd"] == str(tmp_path / "feature-auth") - def test_enter_sets_scc_worktree_env_var(self, tmp_path: Path) -> None: + def test_enter_sets_scc_worktree_env_var( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: """Enter should set $SCC_WORKTREE environment variable.""" + dependencies = worktree_command_dependencies[0] worktree = WorktreeInfo( path=str(tmp_path / "feature-auth"), branch="scc/feature-auth", status="", ) (tmp_path / "feature-auth").mkdir() + dependencies.git_client.find_worktree_by_query.return_value = (worktree, [worktree]) with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_worktree_by_query", - 
return_value=(worktree, [worktree]), - ), patch("subprocess.run") as mock_run, patch.dict("os.environ", {"SHELL": "/bin/bash"}), ): @@ -1070,21 +1125,20 @@ def test_enter_sets_scc_worktree_env_var(self, tmp_path: Path) -> None: assert "SCC_WORKTREE" in env assert env["SCC_WORKTREE"] == "scc/feature-auth" - def test_enter_prints_to_stderr_not_stdout(self, tmp_path: Path, capsys) -> None: + def test_enter_prints_to_stderr_not_stdout( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """Enter should print info to stderr, keeping stdout clean.""" + dependencies = worktree_command_dependencies[0] worktree = WorktreeInfo( path=str(tmp_path / "feature-auth"), branch="scc/feature-auth", status="", ) (tmp_path / "feature-auth").mkdir() + dependencies.git_client.find_worktree_by_query.return_value = (worktree, [worktree]) with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.find_worktree_by_query", - return_value=(worktree, [worktree]), - ), patch("subprocess.run"), patch.dict("os.environ", {"SHELL": "/bin/bash"}), ): @@ -1098,13 +1152,12 @@ def test_enter_prints_to_stderr_not_stdout(self, tmp_path: Path, capsys) -> None # stderr should contain informative messages assert "Entering" in captured.err or "worktree" in captured.err.lower() - def test_enter_dash_uses_oldpwd(self, tmp_path: Path) -> None: + def test_enter_dash_uses_oldpwd(self, tmp_path: Path, worktree_command_dependencies) -> None: """Enter '-' should use $OLDPWD as target.""" previous_dir = tmp_path / "previous" previous_dir.mkdir() with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), patch("subprocess.run") as mock_run, patch.dict("os.environ", {"SHELL": "/bin/bash", "OLDPWD": str(previous_dir)}), ): @@ -1116,25 +1169,21 @@ def test_enter_dash_uses_oldpwd(self, tmp_path: Path) -> None: call_kwargs = mock_run.call_args[1] assert 
call_kwargs["cwd"] == str(previous_dir) - def test_enter_caret_uses_main_worktree(self, tmp_path: Path) -> None: + def test_enter_caret_uses_main_worktree( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: """Enter '^' should enter the main branch worktree.""" + dependencies = worktree_command_dependencies[0] main_worktree = WorktreeInfo( path=str(tmp_path / "main"), branch="main", status="", ) (tmp_path / "main").mkdir() + dependencies.git_client.get_default_branch.return_value = "main" + dependencies.git_client.list_worktrees.return_value = [main_worktree] with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.get_default_branch", - return_value="main", - ), - patch( - "scc_cli.commands.worktree.worktree_commands.list_worktrees", - return_value=[main_worktree], - ), patch("subprocess.run") as mock_run, patch.dict("os.environ", {"SHELL": "/bin/bash"}), ): @@ -1146,21 +1195,20 @@ def test_enter_caret_uses_main_worktree(self, tmp_path: Path) -> None: call_kwargs = mock_run.call_args[1] assert call_kwargs["cwd"] == str(tmp_path / "main") - def test_enter_no_target_would_show_picker(self, tmp_path: Path) -> None: + def test_enter_no_target_would_show_picker( + self, tmp_path: Path, worktree_command_dependencies + ) -> None: """Enter with no target should show interactive picker.""" + dependencies = worktree_command_dependencies[0] worktree = WorktreeInfo( path=str(tmp_path / "feature"), branch="feature", status="", ) (tmp_path / "feature").mkdir() + dependencies.git_client.list_worktrees.return_value = [worktree] with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.list_worktrees", - return_value=[worktree], - ), patch("scc_cli.commands.worktree.worktree_commands.pick_worktree") as mock_picker, patch("subprocess.run"), patch.dict("os.environ", 
{"SHELL": "/bin/bash"}), @@ -1173,24 +1221,24 @@ def test_enter_no_target_would_show_picker(self, tmp_path: Path) -> None: # Verify picker was called mock_picker.assert_called_once() - def test_enter_non_git_repo_fails(self, tmp_path: Path, capsys) -> None: + def test_enter_non_git_repo_fails( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """Enter in non-git directory should fail with appropriate error.""" from scc_cli.commands.worktree import worktree_enter_cmd from scc_cli.core.exit_codes import EXIT_TOOL - with ( - patch( - "scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=False - ), - ): - with pytest.raises((click.exceptions.Exit, SystemExit)) as exc_info: - worktree_enter_cmd(target="feature", workspace=str(tmp_path)) + dependencies = worktree_command_dependencies[0] + dependencies.git_client.is_git_repo.return_value = False - # Should exit with EXIT_TOOL (4) for not a git repo - exit_code = getattr(exc_info.value, "code", None) or getattr( - exc_info.value, "exit_code", None - ) - assert exit_code == EXIT_TOOL + with pytest.raises((click.exceptions.Exit, SystemExit)) as exc_info: + worktree_enter_cmd(target="feature", workspace=str(tmp_path)) + + # Should exit with EXIT_TOOL (4) for not a git repo + exit_code = getattr(exc_info.value, "code", None) or getattr( + exc_info.value, "exit_code", None + ) + assert exit_code == EXIT_TOOL captured = capsys.readouterr() # stdout should be empty @@ -1210,16 +1258,16 @@ class TestWorktreeCreateInteractiveInit: Phase 4: CLI git init prompts mirror dashboard behavior. 
""" - def test_non_git_repo_non_interactive_raises_error(self, tmp_path: Path, capsys) -> None: + def test_non_git_repo_non_interactive_raises_error( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """Non-git repo in non-interactive mode should raise NotAGitRepoError via handle_errors.""" from scc_cli.commands.worktree import worktree_create_cmd - with ( - patch( - "scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=False - ), - patch("scc_cli.cli_helpers.is_interactive", return_value=False), - ): + dependencies = worktree_command_dependencies[0] + dependencies.git_client.is_git_repo.return_value = False + + with patch("scc_cli.cli_helpers.is_interactive", return_value=False): with pytest.raises((click.exceptions.Exit, SystemExit)) as exc_info: worktree_create_cmd(workspace=str(tmp_path), name="feature-x") @@ -1233,14 +1281,16 @@ def test_non_git_repo_non_interactive_raises_error(self, tmp_path: Path, capsys) captured = capsys.readouterr() assert "Not a git repository" in captured.err - def test_non_git_repo_interactive_prompts_init(self, tmp_path: Path, capsys) -> None: + def test_non_git_repo_interactive_prompts_init( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """Non-git repo in interactive mode should prompt for init.""" from scc_cli.commands.worktree import worktree_create_cmd + dependencies = worktree_command_dependencies[0] + dependencies.git_client.is_git_repo.return_value = False + with ( - patch( - "scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=False - ), patch("scc_cli.cli_helpers.is_interactive", return_value=True), patch( "scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=False @@ -1258,24 +1308,34 @@ def test_non_git_repo_interactive_prompts_init(self, tmp_path: Path, capsys) -> captured = capsys.readouterr() assert "Skipped git initialization" in captured.err - def test_non_git_repo_interactive_accepts_init(self, 
tmp_path: Path, capsys) -> None: + def test_non_git_repo_interactive_accepts_init( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """Non-git repo in interactive mode should init when user accepts.""" + from scc_cli.application.worktree import WorktreeCreateResult from scc_cli.commands.worktree import worktree_create_cmd + dependencies = worktree_command_dependencies[0] + dependencies.git_client.is_git_repo.side_effect = [False, True] + dependencies.git_client.init_repo.return_value = True + dependencies.git_client.has_commits.return_value = True + with ( - patch( - "scc_cli.commands.worktree.worktree_commands.git.is_git_repo", - side_effect=[False, True], - ), patch("scc_cli.cli_helpers.is_interactive", return_value=True), patch( "scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=True ), # User accepts init - patch("scc_cli.commands.worktree.worktree_commands.git.init_repo", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.git.has_commits", return_value=True), - patch("scc_cli.commands.worktree.worktree_commands.create_worktree") as mock_create, + patch( + "scc_cli.commands.worktree.worktree_commands.worktree_use_cases.create_worktree" + ) as mock_create, ): - mock_create.return_value = tmp_path / "feature-x" + mock_create.return_value = WorktreeCreateResult( + worktree_path=tmp_path / "feature-x", + worktree_name="feature-x", + branch_name="scc/feature-x", + base_branch="main", + dependencies_installed=True, + ) try: worktree_create_cmd(workspace=str(tmp_path), name="feature-x", start_claude=False) except (click.exceptions.Exit, SystemExit): @@ -1285,18 +1345,15 @@ def test_non_git_repo_interactive_accepts_init(self, tmp_path: Path, capsys) -> assert "Git repository initialized" in captured.err def test_no_commits_non_interactive_shows_actionable_error( - self, tmp_path: Path, capsys + self, tmp_path: Path, capsys, worktree_command_dependencies ) -> None: """No commits in non-interactive mode 
should show actionable error.""" from scc_cli.commands.worktree import worktree_create_cmd - with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), - patch("scc_cli.cli_helpers.is_interactive", return_value=False), - patch( - "scc_cli.commands.worktree.worktree_commands.git.has_commits", return_value=False - ), - ): + dependencies = worktree_command_dependencies[0] + dependencies.git_client.has_commits.return_value = False + + with patch("scc_cli.cli_helpers.is_interactive", return_value=False): with pytest.raises((click.exceptions.Exit, SystemExit)) as exc_info: worktree_create_cmd(workspace=str(tmp_path), name="feature-x") @@ -1310,16 +1367,17 @@ def test_no_commits_non_interactive_shows_actionable_error( # Should show actionable command assert "git commit --allow-empty" in captured.err - def test_no_commits_interactive_prompts_create(self, tmp_path: Path, capsys) -> None: + def test_no_commits_interactive_prompts_create( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """No commits in interactive mode should prompt for initial commit.""" from scc_cli.commands.worktree import worktree_create_cmd + dependencies = worktree_command_dependencies[0] + dependencies.git_client.has_commits.return_value = False + with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), patch("scc_cli.cli_helpers.is_interactive", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.has_commits", return_value=False - ), patch( "scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=False ), # User declines @@ -1337,10 +1395,15 @@ def test_no_commits_interactive_prompts_create(self, tmp_path: Path, capsys) -> # Should show how to create commit manually assert "git commit --allow-empty" in captured.err - def test_git_identity_failure_shows_actionable_message(self, tmp_path: Path, capsys) -> None: + def 
test_git_identity_failure_shows_actionable_message( + self, tmp_path: Path, capsys, worktree_command_dependencies + ) -> None: """Git identity failure should show actionable message.""" from scc_cli.commands.worktree import worktree_create_cmd + dependencies = worktree_command_dependencies[0] + dependencies.git_client.has_commits.return_value = False + identity_error = ( "Git identity not configured. Run:\n" " git config --global user.name 'Your Name'\n" @@ -1348,19 +1411,15 @@ def test_git_identity_failure_shows_actionable_message(self, tmp_path: Path, cap ) with ( - patch("scc_cli.commands.worktree.worktree_commands.git.is_git_repo", return_value=True), patch("scc_cli.cli_helpers.is_interactive", return_value=True), - patch( - "scc_cli.commands.worktree.worktree_commands.git.has_commits", return_value=False - ), patch( "scc_cli.commands.worktree.worktree_commands.Confirm.ask", return_value=True ), # User accepts - patch( - "scc_cli.commands.worktree.worktree_commands.git.create_empty_initial_commit", - return_value=(False, identity_error), - ), ): + dependencies.git_client.create_empty_initial_commit.return_value = ( + False, + identity_error, + ) with pytest.raises((click.exceptions.Exit, SystemExit)) as exc_info: worktree_create_cmd(workspace=str(tmp_path), name="feature-x") diff --git a/tests/test_worktree_use_cases.py b/tests/test_worktree_use_cases.py new file mode 100644 index 0000000..f5d76d4 --- /dev/null +++ b/tests/test_worktree_use_cases.py @@ -0,0 +1,207 @@ +"""Tests for worktree application use cases.""" + +from __future__ import annotations + +from pathlib import Path +from unittest.mock import MagicMock + +from scc_cli.application.worktree import ( + WorktreeConfirmation, + WorktreeCreateResult, + WorktreeDependencies, + WorktreeEnterRequest, + WorktreeResolution, + WorktreeSelectionItem, + WorktreeSelectRequest, + WorktreeShellResult, + WorktreeSwitchRequest, + WorktreeWarningOutcome, + enter_worktree_shell, + select_worktree, + switch_worktree, 
+) +from scc_cli.core.exit_codes import EXIT_CANCELLED +from scc_cli.ports.dependency_installer import DependencyInstallResult +from scc_cli.ports.git_client import GitClient +from scc_cli.services.git.worktree import WorktreeInfo + + +def _make_dependencies() -> WorktreeDependencies: + git_client = MagicMock(spec=GitClient) + git_client.is_git_repo.return_value = True + dependency_installer = MagicMock() + dependency_installer.install.return_value = DependencyInstallResult( + attempted=False, + success=False, + ) + return WorktreeDependencies( + git_client=git_client, + dependency_installer=dependency_installer, + ) + + +def test_switch_dash_returns_oldpwd(tmp_path: Path) -> None: + dependencies = _make_dependencies() + outcome = switch_worktree( + WorktreeSwitchRequest( + workspace_path=tmp_path, + target="-", + oldpwd=str(tmp_path / "previous"), + interactive_allowed=True, + current_dir=tmp_path, + ), + dependencies=dependencies, + ) + assert isinstance(outcome, WorktreeResolution) + assert outcome.worktree_path == tmp_path / "previous" + + +def test_switch_caret_without_main_warns(tmp_path: Path) -> None: + dependencies = _make_dependencies() + dependencies.git_client.find_main_worktree.return_value = None + dependencies.git_client.get_default_branch.return_value = "main" + + outcome = switch_worktree( + WorktreeSwitchRequest( + workspace_path=tmp_path, + target="^", + oldpwd=None, + interactive_allowed=True, + current_dir=tmp_path, + ), + dependencies=dependencies, + ) + + assert isinstance(outcome, WorktreeWarningOutcome) + assert outcome.warning.title == "No Main Worktree" + + +def test_switch_branch_without_worktree_prompts_and_creates(tmp_path: Path) -> None: + dependencies = _make_dependencies() + dependencies.git_client.find_worktree_by_query.return_value = (None, []) + dependencies.git_client.list_branches_without_worktrees.return_value = ["feature-x"] + dependencies.git_client.has_remote.return_value = False + + repo = tmp_path / "repo" + repo.mkdir() 
+ + first = switch_worktree( + WorktreeSwitchRequest( + workspace_path=repo, + target="feature-x", + oldpwd=None, + interactive_allowed=True, + current_dir=repo, + ), + dependencies=dependencies, + ) + assert isinstance(first, WorktreeConfirmation) + + dependencies.git_client.add_worktree.return_value = None + + second = switch_worktree( + WorktreeSwitchRequest( + workspace_path=repo, + target="feature-x", + oldpwd=None, + interactive_allowed=True, + current_dir=repo, + confirm_create=True, + ), + dependencies=dependencies, + ) + assert isinstance(second, WorktreeCreateResult) + assert second.branch_name.endswith("feature-x") + + +def test_switch_ambiguous_matches_noninteractive(tmp_path: Path) -> None: + dependencies = _make_dependencies() + matches = [ + WorktreeInfo(path=str(tmp_path / "feature"), branch="feature/auth", status=""), + WorktreeInfo(path=str(tmp_path / "feature2"), branch="feature/login", status=""), + ] + dependencies.git_client.find_worktree_by_query.return_value = (None, matches) + dependencies.git_client.list_branches_without_worktrees.return_value = [] + + outcome = switch_worktree( + WorktreeSwitchRequest( + workspace_path=tmp_path, + target="feature", + oldpwd=None, + interactive_allowed=False, + current_dir=tmp_path, + ), + dependencies=dependencies, + ) + + assert isinstance(outcome, WorktreeWarningOutcome) + assert outcome.warning.title == "Ambiguous Match" + assert "best match" in (outcome.warning.suggestion or "") + + +def test_select_branch_requires_confirmation(tmp_path: Path) -> None: + dependencies = _make_dependencies() + selection = WorktreeSelectionItem( + item_id="branch:feature-x", + branch="feature-x", + worktree=None, + is_branch_only=True, + ) + + outcome = select_worktree( + WorktreeSelectRequest( + workspace_path=tmp_path, + include_branches=True, + current_dir=tmp_path, + selection=selection, + ), + dependencies=dependencies, + ) + + assert isinstance(outcome, WorktreeConfirmation) + assert 
outcome.request.prompt.startswith("Create worktree") + + +def test_enter_dash_builds_shell_command(tmp_path: Path) -> None: + dependencies = _make_dependencies() + previous = tmp_path / "previous" + previous.mkdir() + + outcome = enter_worktree_shell( + WorktreeEnterRequest( + workspace_path=tmp_path, + target="-", + oldpwd=str(previous), + interactive_allowed=True, + current_dir=tmp_path, + env={"SHELL": "/bin/bash"}, + platform_system="Linux", + ), + dependencies=dependencies, + ) + + assert isinstance(outcome, WorktreeShellResult) + assert outcome.worktree_path == previous + assert outcome.shell_command.workdir == previous + assert outcome.shell_command.env["SCC_WORKTREE"] == "previous" + + +def test_switch_branch_cancel_returns_exit_cancelled(tmp_path: Path) -> None: + dependencies = _make_dependencies() + dependencies.git_client.find_worktree_by_query.return_value = (None, []) + dependencies.git_client.list_branches_without_worktrees.return_value = ["feature-x"] + + outcome = switch_worktree( + WorktreeSwitchRequest( + workspace_path=tmp_path, + target="feature-x", + oldpwd=None, + interactive_allowed=True, + current_dir=tmp_path, + confirm_create=False, + ), + dependencies=dependencies, + ) + + assert isinstance(outcome, WorktreeWarningOutcome) + assert outcome.exit_code == EXIT_CANCELLED diff --git a/tests/test_wsl2_warning.py b/tests/test_wsl2_warning.py index bae1469..0fd1200 100644 --- a/tests/test_wsl2_warning.py +++ b/tests/test_wsl2_warning.py @@ -12,9 +12,9 @@ def test_wsl2_warning_emitted_in_non_interactive(tmp_path: Path) -> None: workspace.mkdir() with ( - patch("scc_cli.commands.launch.workspace.platform_module.is_wsl2", return_value=True), + patch("scc_cli.commands.launch.workspace.LocalPlatformProbe.is_wsl2", return_value=True), patch( - "scc_cli.commands.launch.workspace.platform_module.check_path_performance", + "scc_cli.commands.launch.workspace.LocalPlatformProbe.check_path_performance", return_value=(False, "warning"), ), 
patch("scc_cli.commands.launch.workspace.is_interactive_allowed", return_value=False),