From 002a05e65396ada0e1f840222e264037b78b35fb Mon Sep 17 00:00:00 2001 From: VooDisss <41582720+VooDisss@users.noreply.github.com> Date: Tue, 21 Apr 2026 06:34:51 +0300 Subject: [PATCH 1/7] feat: add universal cross-root MCP support Introduce root-scoped runtime state and extend the project_root contract across the rpg-mcp tool surface so calls can target an effective root without mutating the active session root. set_project_root remains the explicit mechanism for switching the default root for later calls. This Stage 1 functionality commit folds the validated Rust changes into one cross-root checkpoint. It includes the root-aware server and tool plumbing, blank-parameter and empty-batch hardening needed for real MCP clients, reset semantics for transient runtime state on project-root switches, centralized compatibility-state syncing for the default root, the large-repository sharded hierarchy deadlock fix, and the shared embedding model cache needed for stable cross-root semantic search behavior. Live validation confirmed the resulting surface on healthy external roots, including search, fetch, explore, build, reload, update, lifting submission, routing submission, file synthesis submission, hierarchy submission, and large-root hierarchy workflows. Documentation remains separate from this Stage 1 functionality commit. 
--- crates/rpg-mcp/src/main.rs | 39 +- crates/rpg-mcp/src/params.rs | 93 +- crates/rpg-mcp/src/server.rs | 510 ++++++++--- crates/rpg-mcp/src/tools.rs | 1358 +++++++++++++++++++----------- crates/rpg-nav/src/embeddings.rs | 117 ++- 5 files changed, 1508 insertions(+), 609 deletions(-) diff --git a/crates/rpg-mcp/src/main.rs b/crates/rpg-mcp/src/main.rs index abd1a7b..9e36a5d 100644 --- a/crates/rpg-mcp/src/main.rs +++ b/crates/rpg-mcp/src/main.rs @@ -51,11 +51,13 @@ async fn main() -> Result<()> { // Auto-update graph on startup if stale (structural-only, no LLM) { - let mut lock = server.graph.write().await; - if let Some(ref mut graph) = *lock + let project_root = server.project_root().await; + let root_state = server.root_state(&project_root).await; + let mut root_state = root_state.write().await; + if let Some(ref mut graph) = root_state.graph && let (Some(base), Ok(head)) = ( &graph.base_commit.clone(), - rpg_encoder::evolution::get_head_sha(&server.project_root().await), + rpg_encoder::evolution::get_head_sha(&project_root), ) { if *base != head { @@ -71,7 +73,7 @@ async fn main() -> Result<()> { let qcache_result = rpg_parser::paradigms::query_engine::QueryCache::compile_all(¶digm_defs); let active_defs = rpg_parser::paradigms::detect_paradigms_toml( - &server.project_root().await, + &project_root, &detected_langs, ¶digm_defs, ); @@ -86,27 +88,21 @@ async fn main() -> Result<()> { }); match rpg_encoder::evolution::run_update( graph, - &server.project_root().await, + &project_root, None, pipeline.as_ref(), ) { Ok(s) => { graph.metadata.paradigms = paradigm_names; - let _ = storage::save(&server.project_root().await, graph); - // Persist stale entity IDs from the startup sync so - // lifting_status sees them on the first query. Every - // other path that produces a summary feeds - // `modified_entity_ids` into `stale_entity_ids` - // (`auto_sync_if_stale`, `update_rpg`). 
The startup - // path is the one exception — without this, modified - // entities from between the last lift and this startup - // are silently dropped across the session boundary. + let _ = storage::save(&project_root, graph); + let existing_ids: std::collections::HashSet = + graph.entities.keys().cloned().collect(); { - let mut stale = server.stale_entity_ids.write().await; + let stale = &mut root_state.stale_entity_ids; for id in &s.modified_entity_ids { stale.insert(id.clone()); } - stale.retain(|id| graph.entities.contains_key(id)); + stale.retain(|id| existing_ids.contains(id)); } eprintln!( " Auto-update complete: +{} -{} ~{}", @@ -123,12 +119,15 @@ async fn main() -> Result<()> { // changeset) match instead of redundantly re-running the // workdir diff. Must use the real empty-workdir changeset // hash (not an empty string) for the match to fire. - let project_root = server.project_root().await; - *server.last_auto_sync_head.write().await = + root_state.last_auto_sync_head = rpg_encoder::evolution::get_head_sha(&project_root).ok(); - *server.last_auto_sync_changeset.write().await = + root_state.last_auto_sync_changeset = Some(RpgServer::compute_changeset_hash(&[], &project_root)); - *server.last_auto_sync_workdir_paths.write().await = std::collections::HashSet::new(); + root_state.last_auto_sync_workdir_paths = std::collections::HashSet::new(); + drop(root_state); + server + .sync_default_root_compat_from_state(&project_root) + .await; } } diff --git a/crates/rpg-mcp/src/params.rs b/crates/rpg-mcp/src/params.rs index c4a6b00..fdd9b9d 100644 --- a/crates/rpg-mcp/src/params.rs +++ b/crates/rpg-mcp/src/params.rs @@ -8,6 +8,8 @@ use serde::Deserialize; pub(crate) struct SearchNodeParams { /// The search query describing what you're looking for pub(crate) query: String, + /// Optional project root override for this call only. 
+ pub(crate) project_root: Option, /// Search mode: 'features', 'snippets', or 'auto' (default: 'auto') pub(crate) mode: Option, /// Optional hierarchy scope to restrict search (e.g., 'Security/auth'). Comma-separated for multiple scopes. @@ -26,7 +28,9 @@ pub(crate) struct SearchNodeParams { #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct FetchNodeParams { /// The entity ID to fetch (e.g., 'src/auth.rs:validate_token') - pub(crate) entity_id: String, + pub(crate) entity_id: Option, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Multiple entity IDs to fetch in batch (overrides entity_id when provided) pub(crate) entity_ids: Option>, /// Comma-separated fields to include: "features", "source", "deps", "hierarchy". Omit for all fields. @@ -39,7 +43,9 @@ pub(crate) struct FetchNodeParams { #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct ExploreRpgParams { /// The entity ID to start exploration from - pub(crate) entity_id: String, + pub(crate) entity_id: Option, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Multiple entity IDs to explore from in batch (overrides entity_id when provided) pub(crate) entity_ids: Option>, /// Traversal direction: 'upstream', 'downstream', or 'both' @@ -56,6 +62,12 @@ pub(crate) struct ExploreRpgParams { pub(crate) max_results: Option, } +#[derive(Debug, Default, Deserialize, JsonSchema)] +pub(crate) struct RpgInfoParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, +} + /// Parameters for the `set_project_root` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct SetProjectRootParams { @@ -66,6 +78,8 @@ pub(crate) struct SetProjectRootParams { /// Parameters for the `build_rpg` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct BuildRpgParams { + /// Optional project root override for this call only. 
+ pub(crate) project_root: Option, /// Primary language override (auto-detected if not specified) pub(crate) language: Option, /// Glob pattern to include files (e.g., "src/**/*.rs") @@ -77,15 +91,33 @@ pub(crate) struct BuildRpgParams { /// Parameters for the `update_rpg` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct UpdateRpgParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Base commit SHA to diff from (defaults to RPG's stored base_commit) pub(crate) since: Option, } +/// Parameters for the `reload_rpg` tool. +#[derive(Debug, Default, Deserialize, JsonSchema)] +pub(crate) struct ReloadRpgParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, +} + +/// Parameters for the `lifting_status` tool. +#[derive(Debug, Default, Deserialize, JsonSchema)] +pub(crate) struct LiftingStatusParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, +} + /// Parameters for the `get_entities_for_lifting` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct GetEntitiesForLiftingParams { /// Scope specifier: file glob ("src/auth/**"), hierarchy path, entity IDs, or "*"/"all". pub(crate) scope: String, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Batch index to retrieve (0-based). Omit or 0 for first batch. pub(crate) batch_index: Option, } @@ -93,6 +125,8 @@ pub(crate) struct GetEntitiesForLiftingParams { /// Parameters for the `submit_lift_results` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct SubmitLiftResultsParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// JSON object mapping function names to feature arrays. 
/// Example: {"my_func": ["validate input", "return result"], "other": ["compute hash"]} pub(crate) features: String, @@ -101,6 +135,8 @@ pub(crate) struct SubmitLiftResultsParams { /// Parameters for the `submit_hierarchy` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct SubmitHierarchyParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// JSON object mapping file paths to 3-level hierarchy paths. /// Example: {"src/auth/login.rs": "Authentication/manage sessions/handle login"} pub(crate) assignments: String, @@ -109,30 +145,51 @@ pub(crate) struct SubmitHierarchyParams { /// Parameters for the `get_files_for_synthesis` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct GetFilesForSynthesisParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Batch index to retrieve (0-based). Omit or 0 for first batch. pub(crate) batch_index: Option, } +/// Parameters for the `finalize_lifting` tool. +#[derive(Debug, Default, Deserialize, JsonSchema)] +pub(crate) struct FinalizeLiftingParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, +} + +/// Parameters for the `get_routing_candidates` tool. +#[derive(Debug, Default, Deserialize, JsonSchema)] +pub(crate) struct GetRoutingCandidatesParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, + /// Batch index to retrieve (0-based). For large sets, returns paginated candidates. + pub(crate) batch_index: Option, +} + /// Parameters for the `submit_file_syntheses` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct SubmitFileSynthesesParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// JSON object mapping file paths to comma-separated feature strings. 
/// Example: {"src/auth/login.rs": "handle user authentication, manage session tokens", /// "src/db/query.rs": "build SQL queries, execute database operations"} pub(crate) syntheses: String, } -/// Parameters for the `get_routing_candidates` tool. -#[derive(Debug, Deserialize, JsonSchema)] -pub(crate) struct GetRoutingCandidatesParams { - /// Batch index to retrieve (0-based). For large sets, returns paginated candidates. - #[serde(default)] - pub(crate) batch_index: Option, +/// Parameters for the `build_semantic_hierarchy` tool. +#[derive(Debug, Default, Deserialize, JsonSchema)] +pub(crate) struct BuildSemanticHierarchyParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, } /// Parameters for the `reconstruct_plan` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct ReconstructPlanParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Maximum number of entities per execution batch (default: 8). pub(crate) max_batch_size: Option, /// Include file-level Module entities in the schedule (default: false). @@ -144,6 +201,8 @@ pub(crate) struct ReconstructPlanParams { pub(crate) struct ContextPackParams { /// The search query describing what context you need pub(crate) query: String, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Optional hierarchy scope to restrict search (e.g., 'Security/auth') pub(crate) scope: Option, /// Target token budget for the packed context (default: 4000) @@ -159,6 +218,8 @@ pub(crate) struct ContextPackParams { pub(crate) struct ImpactRadiusParams { /// The entity ID to compute impact from pub(crate) entity_id: String, + /// Optional project root override for this call only. 
+ pub(crate) project_root: Option, /// Traversal direction: 'upstream' (what depends on this), 'downstream' (what this depends on), or 'both' pub(crate) direction: Option, /// Maximum traversal depth (default: 3). Use -1 for unlimited. @@ -172,6 +233,8 @@ pub(crate) struct ImpactRadiusParams { /// Parameters for the `submit_routing_decisions` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct SubmitRoutingDecisionsParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// JSON object mapping entity IDs to routing action. /// Value is a hierarchy path to route there, or "keep" to confirm current position. /// Example: {"src/auth.rs:validate_token": "Security/auth/validate", "src/db.rs:query": "keep"} @@ -186,6 +249,8 @@ pub(crate) struct SubmitRoutingDecisionsParams { pub(crate) struct PlanChangeParams { /// The goal or intent of the change (e.g., "add rate limiting to API endpoints") pub(crate) goal: String, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Optional hierarchy scope to restrict search (e.g., 'Security/auth') pub(crate) scope: Option, /// Maximum number of relevant entities to include (default: 15) @@ -197,6 +262,8 @@ pub(crate) struct PlanChangeParams { pub(crate) struct FindPathsParams { /// Source entity ID pub(crate) source: String, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Target entity ID pub(crate) target: String, /// Maximum path length (default: 5). Use -1 for unlimited. @@ -212,6 +279,8 @@ pub(crate) struct FindPathsParams { pub(crate) struct SliceBetweenParams { /// Entity IDs to connect (minimum 2) pub(crate) entity_ids: Vec, + /// Optional project root override for this call only. 
+ pub(crate) project_root: Option, /// Maximum path length when searching for connections (default: 3) pub(crate) max_depth: Option, /// Include entity metadata (name, file, features) in output @@ -223,6 +292,8 @@ pub(crate) struct SliceBetweenParams { pub(crate) struct AnalyzeHealthParams { /// Instability threshold above which entities are flagged as highly unstable (default: 0.7). pub(crate) instability_threshold: Option, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Minimum total degree for god object detection (default: 10). pub(crate) god_object_threshold: Option, /// Run Rabin-Karp token-based clone detection (reads source files from disk, slower). Default: false. @@ -237,6 +308,8 @@ pub(crate) struct AnalyzeHealthParams { /// Parameters for the `semantic_snapshot` tool. #[derive(Debug, Deserialize, JsonSchema)] pub(crate) struct SemanticSnapshotParams { + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Target token budget (default: 30000). Controls how much detail is included. pub(crate) token_budget: Option, /// Include dependency skeleton (default: true). Set to false to save tokens. @@ -248,6 +321,8 @@ pub(crate) struct SemanticSnapshotParams { pub(crate) struct DetectCyclesParams { /// Maximum number of cycles to return (default: all). Use to limit output. pub(crate) max_cycles: Option, + /// Optional project root override for this call only. + pub(crate) project_root: Option, /// Minimum cycle length to report (default: 2). Use 3+ to skip trivial 2-cycles. pub(crate) min_cycle_length: Option, /// Maximum cycle length to detect (default: 20, prevents exponential blowup) @@ -273,6 +348,8 @@ pub(crate) struct DetectCyclesParams { /// Parameters for the `auto_lift` tool. #[derive(Deserialize, JsonSchema)] pub(crate) struct AutoLiftParams { + /// Optional project root override for this call only. 
+ pub(crate) project_root: Option, /// LLM provider: "anthropic", "openai", or any OpenAI-compatible endpoint. pub(crate) provider: String, /// API key for the provider. Use this OR api_key_env (not both). Prefer api_key_env to avoid exposing keys in tool call transcripts. diff --git a/crates/rpg-mcp/src/server.rs b/crates/rpg-mcp/src/server.rs index 9132753..4509113 100644 --- a/crates/rpg-mcp/src/server.rs +++ b/crates/rpg-mcp/src/server.rs @@ -10,6 +10,34 @@ use tokio::sync::RwLock; use crate::types::{HierarchySession, LiftingSession, PendingRouting, load_pending_routing}; +type RootKey = PathBuf; + +#[derive(Default)] +#[allow(dead_code)] +pub(crate) struct RootRuntimeState { + pub(crate) graph: Option, + pub(crate) config: RpgConfig, + pub(crate) lifting_session: Option, + pub(crate) hierarchy_session: Option, + pub(crate) pending_routing: Vec, + pub(crate) stale_entity_ids: std::collections::HashSet, + #[cfg(feature = "embeddings")] + pub(crate) embedding_index: Option, + #[cfg(feature = "embeddings")] + pub(crate) embedding_init_failed: bool, + pub(crate) last_auto_sync_head: Option, + pub(crate) last_auto_sync_changeset: Option, + pub(crate) last_auto_sync_workdir_paths: std::collections::HashSet, +} + +#[derive(Clone)] +#[allow(dead_code)] +pub(crate) struct EffectiveContext { + pub(crate) root: RootKey, + pub(crate) state: Arc>, + pub(crate) cross_root_notice: String, +} + /// Cached protocol prompt versions (SHA256 hashes) for deduplication. #[derive(Clone)] pub(crate) struct PromptVersions { @@ -65,10 +93,15 @@ impl PromptVersions { /// cycle on the inner locks. #[derive(Clone)] pub(crate) struct RpgServer { - /// Active project root. Mutable at runtime via the `set_project_root` tool - /// so a single long-lived session can switch between projects without - /// restart. Tools acquire a snapshot via [`RpgServer::project_root`]. - pub(crate) project_root_cell: Arc>, + /// Default project root. 
Mutable at runtime via the `set_project_root` tool + /// so a single long-lived session can switch the default project without + /// restart. Tools may override it per-call via `project_root`. + pub(crate) default_root: Arc>, + /// Root-scoped runtime state keyed by canonical project root. + pub(crate) roots: + Arc>>>>, + /// Compatibility fields used by the current tool implementation while the + /// tool surface is migrated incrementally to root-scoped state. pub(crate) graph: Arc>>, pub(crate) config: Arc>, pub(crate) lifting_session: Arc>>, @@ -76,27 +109,14 @@ pub(crate) struct RpgServer { pub(crate) pending_routing: Arc>>, #[cfg(feature = "embeddings")] pub(crate) embedding_index: Arc>>, - /// Set to true after first failed init to avoid retrying every search. #[cfg(feature = "embeddings")] pub(crate) embedding_init_failed: Arc, pub(crate) tool_router: rmcp::handler::server::router::tool::ToolRouter, /// Protocol prompt versions for deduplication. pub(crate) prompt_versions: PromptVersions, - /// Last git HEAD SHA at which auto-sync ran. Prevents redundant updates. pub(crate) last_auto_sync_head: Arc>>, - /// Entity IDs whose source was modified after their features were lifted. - /// Populated by `auto_sync_if_stale` (from `summary.modified_entity_ids`), - /// drained by `submit_lift_results` as entities get re-lifted. Lets - /// `lifting_status` surface stale-feature drift even though those entities - /// still appear "lifted" in the coverage count. pub(crate) stale_entity_ids: Arc>>, - /// Hash of the last-synced workdir changeset (dirty files + their stat). - /// Combined with `last_auto_sync_head` to detect when a re-sync is needed - /// for uncommitted/staged/unstaged changes. pub(crate) last_auto_sync_changeset: Arc>>, - /// Paths that were dirty at the last successful auto-sync. Lets us detect - /// reverts: when a previously-dirty file returns to clean, the workdir - /// diff no longer lists it — we must re-parse it to restore HEAD content. 
pub(crate) last_auto_sync_workdir_paths: Arc>>, /// Guard: true while auto_lift is running. Rejects concurrent lift calls. @@ -106,16 +126,256 @@ pub(crate) struct RpgServer { impl std::fmt::Debug for RpgServer { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("RpgServer") - .field("project_root", &"") - .field("lifting_session", &"...") + .field("default_root", &"") + .field("roots", &"") .finish() } } impl RpgServer { + pub(crate) fn cross_root_notice( + active_root: &std::path::Path, + project_root: &std::path::Path, + ) -> String { + if active_root == project_root { + return String::new(); + } + format!( + "[cross-root: queried {}; active root unchanged]\n\n", + project_root.display() + ) + } + + pub(crate) fn expand_project_root_path(path: &str) -> PathBuf { + if let Some(rest) = path.strip_prefix("~/") { + match std::env::var("HOME") { + Ok(home) => PathBuf::from(home).join(rest), + Err(_) => PathBuf::from(path), + } + } else if path == "~" { + PathBuf::from(std::env::var("HOME").unwrap_or_else(|_| "/".into())) + } else { + PathBuf::from(path) + } + } + + pub(crate) fn canonicalize_project_root_path(path: &str) -> Result { + let expanded = Self::expand_project_root_path(path); + let canonical = expanded.canonicalize().map_err(|e| { + format!( + "Path does not exist or is not accessible: {}: {}", + expanded.display(), + e + ) + })?; + + if !canonical.is_dir() { + return Err(format!("Path is not a directory: {}", canonical.display())); + } + + Ok(canonical) + } + /// Snapshot of the active project root. Cheap — locks for a single clone. 
pub(crate) async fn project_root(&self) -> PathBuf { - self.project_root_cell.read().await.clone() + self.default_root.read().await.clone() + } + + pub(crate) fn normalize_optional_project_root(input: Option<&str>) -> Option<&str> { + input.and_then(|value| { + let trimmed = value.trim(); + if trimmed.is_empty() { + None + } else { + Some(trimmed) + } + }) + } + + #[allow(dead_code)] + pub(crate) async fn effective_root( + &self, + override_root: Option<&str>, + ) -> Result { + match Self::normalize_optional_project_root(override_root) { + Some(path) => Self::canonicalize_project_root_path(path), + None => Ok(self.project_root().await), + } + } + + #[allow(dead_code)] + pub(crate) async fn root_state(&self, root: &std::path::Path) -> Arc> { + if let Some(existing) = self.roots.read().await.get(root).cloned() { + return existing; + } + + let graph = storage::load(root).ok(); + let initial_head = graph.as_ref().and_then(|g| g.base_commit.clone()); + let config = Self::load_request_config(root); + let pending_routing = load_pending_routing(root) + .map(|s| s.entries) + .unwrap_or_default(); + + let state = Arc::new(RwLock::new(RootRuntimeState { + graph, + config, + lifting_session: None, + hierarchy_session: None, + pending_routing, + stale_entity_ids: std::collections::HashSet::new(), + #[cfg(feature = "embeddings")] + embedding_index: None, + #[cfg(feature = "embeddings")] + embedding_init_failed: false, + last_auto_sync_head: initial_head, + last_auto_sync_changeset: None, + last_auto_sync_workdir_paths: std::collections::HashSet::new(), + })); + + let mut roots = self.roots.write().await; + roots + .entry(root.to_path_buf()) + .or_insert_with(|| state.clone()) + .clone() + } + + #[allow(dead_code)] + pub(crate) async fn effective_context( + &self, + override_root: Option<&str>, + ) -> Result { + let active_root = self.project_root().await; + let root = self.effective_root(override_root).await?; + let state = self.root_state(&root).await; + Ok(EffectiveContext { + 
cross_root_notice: Self::cross_root_notice(&active_root, &root), + root, + state, + }) + } + + pub(crate) async fn sync_default_root_compat_from_state(&self, root: &std::path::Path) { + let active_root = self.project_root().await; + if root != active_root { + return; + } + + let state = self.root_state(root).await; + let state = state.read().await; + + *self.graph.write().await = state.graph.clone(); + *self.config.write().await = state.config.clone(); + *self.pending_routing.write().await = state.pending_routing.clone(); + *self.last_auto_sync_head.write().await = state.last_auto_sync_head.clone(); + *self.last_auto_sync_changeset.write().await = state.last_auto_sync_changeset.clone(); + *self.last_auto_sync_workdir_paths.write().await = + state.last_auto_sync_workdir_paths.clone(); + *self.stale_entity_ids.write().await = state.stale_entity_ids.clone(); + } + + pub(crate) fn reset_transient_runtime_state( + state: &mut RootRuntimeState, + root: &std::path::Path, + ) { + state.lifting_session = None; + state.hierarchy_session = None; + state.pending_routing = load_pending_routing(root) + .map(|s| s.entries) + .unwrap_or_default(); + state.last_auto_sync_head = state.graph.as_ref().and_then(|g| g.base_commit.clone()); + state.last_auto_sync_changeset = None; + state.last_auto_sync_workdir_paths = std::collections::HashSet::new(); + state.stale_entity_ids = std::collections::HashSet::new(); + + #[cfg(feature = "embeddings")] + { + state.embedding_index = None; + state.embedding_init_failed = false; + } + } + + pub(crate) fn staleness_detail_for_root( + project_root: &std::path::Path, + graph: &RPGraph, + ) -> Option { + let changes = rpg_encoder::evolution::detect_workdir_changes(project_root, graph).ok()?; + let changes = rpg_encoder::evolution::filter_rpgignore_changes(project_root, changes); + let languages = Self::resolve_languages(&graph.metadata); + let changes = rpg_encoder::evolution::filter_source_changes(changes, &languages); + + if changes.is_empty() { + 
return None; + } + + let graph_commit = graph.base_commit.as_deref().unwrap_or("unknown"); + let mut out = format!( + "STALE ({} source file(s) changed since {})\n", + changes.len(), + &graph_commit[..8.min(graph_commit.len())], + ); + for change in changes.iter().take(10) { + let (label, path) = match change { + rpg_encoder::evolution::FileChange::Added(p) => ("added", p.display().to_string()), + rpg_encoder::evolution::FileChange::Modified(p) => { + ("modified", p.display().to_string()) + } + rpg_encoder::evolution::FileChange::Deleted(p) => { + ("deleted", p.display().to_string()) + } + rpg_encoder::evolution::FileChange::Renamed { from, to } => { + ("renamed", format!("{} -> {}", from.display(), to.display())) + } + }; + out.push_str(&format!(" {}: {}\n", label, path)); + } + if changes.len() > 10 { + out.push_str(&format!(" ... and {} more\n", changes.len() - 10)); + } + Some(out) + } + + pub(crate) fn load_request_config(project_root: &std::path::Path) -> RpgConfig { + match RpgConfig::load(project_root) { + Ok(cfg) => cfg, + Err(e) => { + eprintln!( + "rpg: failed to parse {} ({}); using defaults for this request", + project_root.join(".rpg/config.toml").display(), + e + ); + RpgConfig::default() + } + } + } + + pub(crate) fn load_graph_from_root(project_root: &std::path::Path) -> Result { + storage::load(project_root).map_err(|_| { + format!( + "No RPG found at {}. 
Use the build_rpg tool to index this repository first.", + project_root.display() + ) + }) + } + + pub(crate) fn staleness_notice_for(project_root: &std::path::Path, graph: &RPGraph) -> String { + let Ok(changes) = rpg_encoder::evolution::detect_workdir_changes(project_root, graph) + else { + return String::new(); + }; + let changes = rpg_encoder::evolution::filter_rpgignore_changes(project_root, changes); + let languages = Self::resolve_languages(&graph.metadata); + let source_changes = if languages.is_empty() { + changes + } else { + rpg_encoder::evolution::filter_source_changes(changes, &languages) + }; + if source_changes.is_empty() { + return String::new(); + } + format!( + "[stale: {} source file(s) changed since graph was built — call update_rpg to sync]\n\n", + source_changes.len(), + ) } /// Reload `.rpg/config.toml` into the given config slot. @@ -149,27 +409,52 @@ impl RpgServer { /// Create a new server, loading graph and config from `project_root` if present. pub(crate) fn new(project_root: PathBuf) -> Self { + let project_root = project_root.canonicalize().unwrap_or(project_root); let graph = storage::load(&project_root).ok(); let initial_head = graph.as_ref().and_then(|g| g.base_commit.clone()); - let config = RpgConfig::load(&project_root).unwrap_or_default(); + let config = Self::load_request_config(&project_root); // Restore pending routing from disk if present let pending = load_pending_routing(&project_root) .map(|s| s.entries) .unwrap_or_default(); + let root_key = project_root.clone(); + let compat_graph = graph.clone(); + let compat_config = config.clone(); + let compat_pending = pending.clone(); + let compat_head = initial_head.clone(); + let root_state = RootRuntimeState { + graph, + config, + lifting_session: None, + hierarchy_session: None, + pending_routing: pending, + stale_entity_ids: std::collections::HashSet::new(), + #[cfg(feature = "embeddings")] + embedding_index: None, + #[cfg(feature = "embeddings")] + embedding_init_failed: 
false, + last_auto_sync_head: initial_head, + last_auto_sync_changeset: None, + last_auto_sync_workdir_paths: std::collections::HashSet::new(), + }; + let mut roots = std::collections::HashMap::new(); + roots.insert(root_key, Arc::new(RwLock::new(root_state))); + Self { - project_root_cell: Arc::new(RwLock::new(project_root)), - graph: Arc::new(RwLock::new(graph)), - config: Arc::new(RwLock::new(config)), + default_root: Arc::new(RwLock::new(project_root)), + roots: Arc::new(RwLock::new(roots)), + graph: Arc::new(RwLock::new(compat_graph)), + config: Arc::new(RwLock::new(compat_config)), lifting_session: Arc::new(RwLock::new(None)), hierarchy_session: Arc::new(RwLock::new(None)), - pending_routing: Arc::new(RwLock::new(pending)), + pending_routing: Arc::new(RwLock::new(compat_pending)), #[cfg(feature = "embeddings")] embedding_index: Arc::new(RwLock::new(None)), #[cfg(feature = "embeddings")] embedding_init_failed: Arc::new(std::sync::atomic::AtomicBool::new(false)), tool_router: Self::create_tool_router(), prompt_versions: PromptVersions::new(), - last_auto_sync_head: Arc::new(RwLock::new(initial_head)), + last_auto_sync_head: Arc::new(RwLock::new(compat_head)), stale_entity_ids: Arc::new(RwLock::new(std::collections::HashSet::new())), last_auto_sync_changeset: Arc::new(RwLock::new(None)), last_auto_sync_workdir_paths: Arc::new(RwLock::new(std::collections::HashSet::new())), @@ -184,25 +469,7 @@ impl RpgServer { let Some(graph) = guard.as_ref() else { return String::new(); }; - // Detect workdir changes (committed + staged + unstaged) - let Ok(changes) = rpg_encoder::evolution::detect_workdir_changes(&project_root, graph) - else { - return String::new(); - }; - let changes = rpg_encoder::evolution::filter_rpgignore_changes(&project_root, changes); - let languages = Self::resolve_languages(&graph.metadata); - let source_changes = if languages.is_empty() { - changes - } else { - rpg_encoder::evolution::filter_source_changes(changes, &languages) - }; - if 
source_changes.is_empty() { - return String::new(); - } - format!( - "[stale: {} source file(s) changed since graph was built — call update_rpg to sync]\n\n", - source_changes.len(), - ) + Self::staleness_notice_for(&project_root, graph) } /// Auto-sync the graph if stale, returning a notice string. @@ -532,58 +799,29 @@ impl RpgServer { drop(read); let project_root = self.project_root().await; - match storage::load(&project_root) { + match Self::load_graph_from_root(&project_root) { Ok(g) => { *self.graph.write().await = Some(g); Ok(()) } - Err(_) => { - Err("No RPG found. Use the build_rpg tool to index this repository first.".into()) - } + Err(e) => Err(e), } } /// Detailed staleness info: which source files changed (committed + staged + unstaged). + #[allow(dead_code)] pub(crate) async fn staleness_detail(&self, graph: &RPGraph) -> Option { let project_root = self.project_root().await; - let changes = rpg_encoder::evolution::detect_workdir_changes(&project_root, graph).ok()?; - let changes = rpg_encoder::evolution::filter_rpgignore_changes(&project_root, changes); - let languages = Self::resolve_languages(&graph.metadata); - let changes = rpg_encoder::evolution::filter_source_changes(changes, &languages); - - if changes.is_empty() { - return None; - } - - let graph_commit = graph.base_commit.as_deref().unwrap_or("unknown"); - let mut out = format!( - "STALE ({} source file(s) changed since {})\n", - changes.len(), - &graph_commit[..8.min(graph_commit.len())], - ); - for change in changes.iter().take(10) { - let (label, path) = match change { - rpg_encoder::evolution::FileChange::Added(p) => ("added", p.display().to_string()), - rpg_encoder::evolution::FileChange::Modified(p) => { - ("modified", p.display().to_string()) - } - rpg_encoder::evolution::FileChange::Deleted(p) => { - ("deleted", p.display().to_string()) - } - rpg_encoder::evolution::FileChange::Renamed { from, to } => { - ("renamed", format!("{} -> {}", from.display(), to.display())) - } - }; - 
out.push_str(&format!(" {}: {}\n", label, path)); - } - if changes.len() > 10 { - out.push_str(&format!(" ... and {} more\n", changes.len() - 10)); - } - Some(out) + Self::staleness_detail_for_root(&project_root, graph) } /// Format the full lifting status dashboard (coverage, areas, session, next step). - pub(crate) async fn format_lifting_status(&self, graph: &RPGraph) -> Result { + pub(crate) async fn format_lifting_status( + &self, + graph: &RPGraph, + project_root: &std::path::Path, + root_state: &RootRuntimeState, + ) -> Result { let (lifted, total) = graph.lifting_coverage(); let coverage_pct = if total > 0 { lifted as f64 / total as f64 * 100.0 @@ -598,7 +836,7 @@ impl RpgServer { }; // Check staleness - let stale_detail = self.staleness_detail(graph).await; + let stale_detail = Self::staleness_detail_for_root(project_root, graph); let graph_line = match &stale_detail { Some(detail) => format!( "graph: {} ({} entities, {} files)", @@ -617,9 +855,8 @@ impl RpgServer { // the source was modified after lifting. Tracked across syncs by // auto_sync_if_stale. let stale_features_count = { - let stale = self.stale_entity_ids.read().await; - // Filter to entities still present in the graph - stale + root_state + .stale_entity_ids .iter() .filter(|id| graph.entities.contains_key(*id)) .count() @@ -662,8 +899,7 @@ impl RpgServer { } // Lifting session info - let session = self.lifting_session.read().await; - match session.as_ref() { + match root_state.lifting_session.as_ref() { Some(s) => { out.push_str(&format!( "\nLifting session: active (scope=\"{}\", {} batches, {} entities cached)\n", @@ -676,7 +912,6 @@ impl RpgServer { out.push_str("\nLifting session: inactive\n"); } } - drop(session); // Unlifted file breakdown (if any) if lifted < total { @@ -819,10 +1054,66 @@ impl RpgServer { Ok(out) } - /// Load the RPG config for the active project. 
- pub(crate) async fn load_config(&self) -> RpgConfig { - let project_root = self.project_root().await; - RpgConfig::load(&project_root).unwrap_or_default() + #[cfg(feature = "embeddings")] + pub(crate) async fn query_embedding_scores( + &self, + graph: &RPGraph, + project_root: &std::path::Path, + query: &str, + use_session_cache: bool, + ) -> ( + Option>, + Option, + ) { + if use_session_cache { + self.try_init_embeddings(graph).await; + let mut emb_guard = self.embedding_index.write().await; + return if let Some(ref mut idx) = *emb_guard { + ( + idx.score_all(query) + .ok() + .filter(|scores| !scores.is_empty()), + None, + ) + } else { + (None, None) + }; + } + + let updated_at = graph.updated_at.to_rfc3339(); + match rpg_nav::embeddings::EmbeddingIndex::load_or_init(project_root, &updated_at) { + Ok(mut idx) => { + let sync_notice = match idx.sync(graph) { + Ok(stats) if stats.added > 0 || stats.changed > 0 || stats.pruned > 0 => { + Some(format!( + "[cross-root: refreshed target embedding cache in {} (.rpg/embeddings.*) for semantic search quality]\n\n", + project_root.display() + )) + } + Ok(_) => None, + Err(e) => { + eprintln!( + "rpg: embedding sync failed for {}: {e}", + project_root.display() + ); + None + } + }; + ( + idx.score_all(query) + .ok() + .filter(|scores| !scores.is_empty()), + sync_notice, + ) + } + Err(e) => { + eprintln!( + "rpg: embedding init failed for {}: {e} — using lexical search", + project_root.display() + ); + (None, None) + } + } } /// Lazy-initialize the embedding index on first semantic search. 
@@ -977,4 +1268,33 @@ mod tests { "same path + different size/mtime must yield different hashes" ); } + + #[tokio::test] + async fn test_effective_context_initializes_root_state() { + let root_a = tempfile::tempdir().unwrap(); + let root_b = tempfile::tempdir().unwrap(); + let root_a_canonical = root_a.path().canonicalize().unwrap(); + let root_b_canonical = root_b.path().canonicalize().unwrap(); + + let server = RpgServer::new(root_a.path().to_path_buf()); + + let default_ctx = server.effective_context(None).await.unwrap(); + assert_eq!(default_ctx.root, root_a_canonical); + assert!(default_ctx.cross_root_notice.is_empty()); + + let override_ctx = server + .effective_context(Some(root_b.path().to_str().unwrap())) + .await + .unwrap(); + assert_eq!(override_ctx.root, root_b_canonical); + assert!( + override_ctx + .cross_root_notice + .contains("active root unchanged") + ); + + let roots = server.roots.read().await; + assert!(roots.contains_key(&root_a_canonical)); + assert!(roots.contains_key(&root_b_canonical)); + } } diff --git a/crates/rpg-mcp/src/tools.rs b/crates/rpg-mcp/src/tools.rs index 74d8190..d54cd04 100644 --- a/crates/rpg-mcp/src/tools.rs +++ b/crates/rpg-mcp/src/tools.rs @@ -12,8 +12,86 @@ use rpg_core::storage; use crate::server::RpgServer; use crate::types::*; +fn non_empty_str(value: Option<&str>) -> Option<&str> { + value.and_then(|s| { + let trimmed = s.trim(); + if trimmed.is_empty() { + None + } else { + Some(trimmed) + } + }) +} + +fn non_empty_string(value: Option<&String>) -> Option<&str> { + non_empty_str(value.map(String::as_str)) +} + +fn normalized_entity_ids<'a>( + batch: Option<&'a Vec>, + fallback: Option<&'a String>, +) -> Vec<&'a str> { + let batch_ids: Vec<&'a str> = batch + .filter(|items| !items.is_empty()) + .map(|items| { + items + .iter() + .filter_map(|s| non_empty_str(Some(s.as_str()))) + .collect() + }) + .unwrap_or_default(); + + if !batch_ids.is_empty() { + return batch_ids; + } + + 
non_empty_string(fallback).into_iter().collect() +} + #[tool_router] impl RpgServer { + async fn resolve_query_context( + &self, + override_root: Option<&str>, + ) -> Result< + ( + std::path::PathBuf, + RPGraph, + rpg_core::config::RpgConfig, + String, + bool, + ), + String, + > { + let active_root = self.project_root().await; + if let Some(path) = non_empty_str(override_root) { + let project_root = Self::canonicalize_project_root_path(path)?; + if project_root == active_root { + self.ensure_graph().await?; + let notice = self.auto_sync_if_stale().await; + let graph = self.graph.read().await.as_ref().cloned().unwrap(); + let config = self.config.read().await.clone(); + return Ok((project_root, graph, config, notice, true)); + } + + let graph = Self::load_graph_from_root(&project_root)?; + let config = Self::load_request_config(&project_root); + let notice = format!( + "{}{}", + Self::cross_root_notice(&active_root, &project_root), + Self::staleness_notice_for(&project_root, &graph) + ); + return Ok((project_root, graph, config, notice, false)); + } + + let project_root = active_root; + self.ensure_graph().await?; + let notice = self.auto_sync_if_stale().await; + let graph = self.graph.read().await.as_ref().cloned().unwrap(); + let config = self.config.read().await.clone(); + Ok((project_root, graph, config, notice, true)) + } + #[tool( description = "PREFER THIS OVER grep/rg FOR ANY QUESTION ABOUT CODE BEHAVIOR OR NAMES. Search for code entities by intent or keywords. Returns entities with file paths, line numbers, and relevance scores. Use mode='features' for semantic intent search (e.g., 'validate user input') — finds code by what it DOES even when names don't match. Use mode='snippets' for name/path matching (e.g., 'FilterGroupManager' or 'src/auth/'). Use mode='auto' (default) to try both. 
This replaces grep/rg for every structural query.", annotations(read_only_hint = true, open_world_hint = false) @@ -22,13 +100,11 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); - let config = self.config.read().await; + let (project_root, graph, config, notice, use_session_cache) = self + .resolve_query_context(non_empty_string(params.project_root.as_ref())) + .await?; - let search_mode = match params.mode.as_deref() { + let search_mode = match non_empty_string(params.mode.as_ref()) { Some("features") => rpg_nav::search::SearchMode::Features, Some("snippets") => rpg_nav::search::SearchMode::Snippets, _ => rpg_nav::search::SearchMode::Auto, @@ -45,6 +121,7 @@ impl RpgServer { let entity_type_filter = params .entity_type_filter .as_deref() + .and_then(|value| non_empty_str(Some(value))) .map(parse_entity_type_filter) .filter(|v| !v.is_empty()); @@ -57,37 +134,33 @@ impl RpgServer { let mut search_mode_label = "lexical"; #[cfg(feature = "embeddings")] - let embedding_scores = if use_embeddings { - self.try_init_embeddings(graph).await; - let mut emb_guard = self.embedding_index.write().await; - if let Some(ref mut idx) = *emb_guard { - match idx.score_all(¶ms.query) { - Ok(scores) if !scores.is_empty() => { - search_mode_label = "hybrid"; - Some(scores) - } - _ => None, + let (embedding_scores, embedding_notice) = if use_embeddings { + match self + .query_embedding_scores(&graph, &project_root, ¶ms.query, use_session_cache) + .await + { + (Some(scores), notice) => { + search_mode_label = "hybrid"; + (Some(scores), notice) } - } else { - None + (None, notice) => (None, notice), } } else { - None + (None, None) }; #[cfg(not(feature = "embeddings"))] - let embedding_scores: Option> = { + let (embedding_scores, embedding_notice): ( + Option>, + Option, + ) = { let _ = use_embeddings; - None + 
(None, None) }; // Compute diff-aware search context if since_commit is provided let mut diff_warning = String::new(); - let diff_context = if let Some(ref commit) = params.since_commit { - match rpg_encoder::evolution::detect_changes( - &self.project_root().await, - graph, - Some(commit), - ) { + let diff_context = if let Some(commit) = non_empty_string(params.since_commit.as_ref()) { + match rpg_encoder::evolution::detect_changes(&project_root, &graph, Some(commit)) { Ok(changes) => { let mut changed_entities = std::collections::HashSet::new(); for change in &changes { @@ -103,7 +176,7 @@ impl RpgServer { } if !changed_entities.is_empty() { Some(rpg_nav::diff::compute_change_proximity( - graph, + &graph, changed_entities, )) } else { @@ -127,14 +200,14 @@ impl RpgServer { }; let results = rpg_nav::search::search_with_params( - graph, + &graph, &rpg_nav::search::SearchParams { query: ¶ms.query, mode: search_mode, - scope: params.scope.as_deref(), + scope: non_empty_string(params.scope.as_ref()), limit: config.navigation.search_result_limit, line_nums, - file_pattern: params.file_pattern.as_deref(), + file_pattern: non_empty_string(params.file_pattern.as_ref()), entity_type_filter, embedding_scores: embedding_scores.as_ref(), diff_context: diff_context.as_ref(), @@ -143,14 +216,19 @@ impl RpgServer { if results.is_empty() { return Ok(format!( - "{}{}No results found for: {} (search_mode: {})", - notice, diff_warning, params.query, search_mode_label, + "{}{}{}No results found for: {} (search_mode: {})", + notice, + embedding_notice.unwrap_or_default(), + diff_warning, + params.query, + search_mode_label, )); } Ok(format!( - "{}{}{}\n\nsearch_mode: {}", + "{}{}{}{}\n\nsearch_mode: {}", notice, + embedding_notice.unwrap_or_default(), diff_warning, rpg_nav::toon::format_search_results(&results), search_mode_label, @@ -165,25 +243,23 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = 
self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(non_empty_string(params.project_root.as_ref())) + .await?; - let ids: Vec<&str> = if let Some(ref batch) = params.entity_ids { - batch.iter().map(|s| s.as_str()).collect() - } else { - vec![params.entity_id.as_str()] - }; + let ids = normalized_entity_ids(params.entity_ids.as_ref(), params.entity_id.as_ref()); + if ids.is_empty() { + return Err("Provide entity_id or at least one non-blank entity_ids entry.".into()); + } let projection = rpg_nav::toon::FetchProjection::from_params( - params.fields.as_deref(), + non_empty_string(params.fields.as_ref()), params.source_max_lines, )?; let mut outputs = Vec::new(); for id in &ids { - match rpg_nav::fetch::fetch(graph, id, &self.project_root().await) { + match rpg_nav::fetch::fetch(&graph, id, &project_root) { Ok(output) => outputs.push(rpg_nav::toon::format_fetch_output_projected( &output, &projection, @@ -203,12 +279,11 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (_project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(non_empty_string(params.project_root.as_ref())) + .await?; - let dir = match params.direction.as_deref() { + let dir = match non_empty_string(params.direction.as_ref()) { Some("upstream" | "up") => rpg_nav::explore::Direction::Upstream, Some("both") => rpg_nav::explore::Direction::Both, _ => rpg_nav::explore::Direction::Downstream, @@ -220,26 +295,26 @@ impl RpgServer { _ => 2, // Default }; - let edge_filter = params.edge_filter.as_deref().and_then(parse_edge_filter); + let edge_filter = non_empty_string(params.edge_filter.as_ref()).and_then(parse_edge_filter); let 
entity_type_filter = params .entity_type_filter .as_deref() + .and_then(|value| non_empty_str(Some(value))) .map(parse_entity_type_filter) .filter(|v| !v.is_empty()); - let ids: Vec<&str> = if let Some(ref batch) = params.entity_ids { - batch.iter().map(|s| s.as_str()).collect() - } else { - vec![params.entity_id.as_str()] - }; + let ids = normalized_entity_ids(params.entity_ids.as_ref(), params.entity_id.as_ref()); + if ids.is_empty() { + return Err("Provide entity_id or at least one non-blank entity_ids entry.".into()); + } - let use_compact = matches!(params.format.as_deref(), Some("compact")); + let use_compact = matches!(non_empty_string(params.format.as_ref()), Some("compact")); let mut outputs = Vec::new(); for id in &ids { match rpg_nav::explore::explore_filtered( - graph, + &graph, id, dir, max_depth, @@ -284,32 +359,54 @@ impl RpgServer { description = "PREFER THIS OVER wc/find/tree FOR CODEBASE OVERVIEW. RPG statistics: entity count, file count, functional areas, dependency edges, containment edges, inter-area connectivity, hierarchy overview. 
Call this first on any new codebase to orient yourself before searching.", annotations(read_only_hint = true, open_world_hint = false) )] - async fn rpg_info(&self) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + async fn rpg_info( + &self, + Parameters(params): Parameters, + ) -> Result { + let (project_root, graph, _config, notice, use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; #[cfg(feature = "embeddings")] let emb_status = { - let emb_guard = self.embedding_index.read().await; - if let Some(ref idx) = *emb_guard { - format!( - "\nembedding_index: {} entities indexed (BGE-small-en-v1.5)", - idx.entity_count() - ) - } else if self - .embedding_init_failed - .load(std::sync::atomic::Ordering::Relaxed) - { - "\nembedding_index: init failed (lexical-only search)".to_string() + if use_session_cache { + let emb_guard = self.embedding_index.read().await; + if let Some(ref idx) = *emb_guard { + format!( + "\nembedding_index: {} entities indexed (BGE-small-en-v1.5)", + idx.entity_count() + ) + } else if self + .embedding_init_failed + .load(std::sync::atomic::Ordering::Relaxed) + { + "\nembedding_index: init failed (lexical-only search)".to_string() + } else { + "\nembedding_index: not initialized (will load on first semantic search)" + .to_string() + } } else { - "\nembedding_index: not initialized (will load on first semantic search)" - .to_string() + let updated_at = graph.updated_at.to_rfc3339(); + match rpg_nav::embeddings::EmbeddingIndex::load_or_init(&project_root, &updated_at) + { + Ok(mut idx) => { + if let Err(e) = idx.sync(&graph) { + eprintln!( + "rpg: embedding sync failed for {}: {e}", + project_root.display() + ); + } + format!( + "\nembedding_index: {} entities indexed (BGE-small-en-v1.5)", + idx.entity_count() + ) + } + Err(_) => "\nembedding_index: init failed (lexical-only 
search)".to_string(), + } } }; #[cfg(not(feature = "embeddings"))] let emb_status = "\nembedding_index: disabled (compiled without embeddings feature)"; - let area_invocations = rpg_nav::dataflow::compute_area_invocations(graph); + let area_invocations = rpg_nav::dataflow::compute_area_invocations(&graph); let area_text = rpg_nav::dataflow::format_area_invocations(&area_invocations); let area_section = if area_text.is_empty() { String::new() @@ -319,7 +416,7 @@ impl RpgServer { Ok(format!( "{}{}{}{}", notice, - rpg_nav::toon::format_rpg_info(graph), + rpg_nav::toon::format_rpg_info(&graph), emb_status, area_section, )) @@ -333,10 +430,9 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (_project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; let request = rpg_nav::snapshot::SnapshotRequest { token_budget: params.token_budget.unwrap_or(30_000), @@ -345,7 +441,7 @@ impl RpgServer { }; let token_budget = params.token_budget.unwrap_or(30_000); - let result = rpg_nav::snapshot::build_semantic_snapshot(graph, &request); + let result = rpg_nav::snapshot::build_semantic_snapshot(&graph, &request); let (lifted, total) = graph.lifting_coverage(); let mut output = format!( @@ -378,9 +474,9 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (_project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; if !graph.metadata.semantic_hierarchy { return Err( @@ -392,13 +488,14 @@ impl RpgServer { max_batch_size: params.max_batch_size.unwrap_or(8).max(1), include_modules: params.include_modules.unwrap_or(false), }; - let 
plan = rpg_encoder::reconstruction::schedule_reconstruction(graph, options); + let plan = rpg_encoder::reconstruction::schedule_reconstruction(&graph, options); let json = serde_json::to_string_pretty(&plan) .map_err(|e| format!("Failed to serialize plan: {}", e))?; Ok(format!( - "Reconstruction plan: {} entities, {} batches (max_batch_size: {})\n\n{}", + "{}Reconstruction plan: {} entities, {} batches (max_batch_size: {})\n\n{}", + notice, plan.topological_order.len(), plan.batches.len(), options.max_batch_size, @@ -418,43 +515,18 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - // Expand ~ and canonicalize - let expanded = if let Some(rest) = params.path.strip_prefix("~/") { - match std::env::var("HOME") { - Ok(home) => std::path::PathBuf::from(home).join(rest), - Err(_) => std::path::PathBuf::from(¶ms.path), - } - } else if params.path == "~" { - std::path::PathBuf::from(std::env::var("HOME").unwrap_or_else(|_| "/".into())) - } else { - std::path::PathBuf::from(¶ms.path) - }; + let canonical = Self::canonicalize_project_root_path(¶ms.path)?; - let canonical = expanded.canonicalize().map_err(|e| { - format!( - "Path does not exist or is not accessible: {}: {}", - expanded.display(), - e - ) - })?; + // Swap the root + *self.default_root.write().await = canonical.clone(); - if !canonical.is_dir() { - return Err(format!("Path is not a directory: {}", canonical.display())); - } + // Root-scoped state is authoritative. Refresh persisted state for the + // new default root, then reset transient runtime/session state. + let state = self.root_state(&canonical).await; + let graph_note = { + let mut root_state = state.write().await; - // Swap the root - *self.project_root_cell.write().await = canonical.clone(); - - // Load the NEW project's config. 
Unlike `reload_rpg` (same-project - // reload, where keeping the previous in-memory config on parse - // error is the right call because the old config was project- - // valid), a project switch must *not* inherit the old project's - // config — that would silently cross-contaminate encoding/batch - // settings across unrelated codebases. So on parse failure we - // fall back to `RpgConfig::default()` and emit a warning, and on - // "file absent" we likewise use defaults. - { - let new_config = match rpg_core::config::RpgConfig::load(&canonical) { + root_state.config = match rpg_core::config::RpgConfig::load(&canonical) { Ok(cfg) => cfg, Err(e) => { eprintln!( @@ -464,19 +536,27 @@ impl RpgServer { rpg_core::config::RpgConfig::default() } }; - *self.config.write().await = new_config; - } + root_state.graph = rpg_core::storage::load(&canonical).ok(); + Self::reset_transient_runtime_state(&mut root_state, &canonical); + + match &root_state.graph { + Some(g) => format!( + "Loaded existing graph: {} entities, {} files, {}", + g.metadata.total_entities, + g.metadata.total_files, + if g.metadata.semantic_hierarchy { + "semantic hierarchy" + } else { + "structural hierarchy" + } + ), + None => "No .rpg/graph.json at this root — call build_rpg to index it.".to_string(), + } + }; - // Reset all session + sync state — everything is project-scoped + self.sync_default_root_compat_from_state(&canonical).await; *self.lifting_session.write().await = None; *self.hierarchy_session.write().await = None; - *self.pending_routing.write().await = load_pending_routing(&canonical) - .map(|s| s.entries) - .unwrap_or_default(); - *self.last_auto_sync_head.write().await = None; - *self.last_auto_sync_changeset.write().await = None; - *self.last_auto_sync_workdir_paths.write().await = std::collections::HashSet::new(); - *self.stale_entity_ids.write().await = std::collections::HashSet::new(); #[cfg(feature = "embeddings")] { *self.embedding_index.write().await = None; @@ -484,23 +564,6 @@ impl 
RpgServer { .store(false, std::sync::atomic::Ordering::Relaxed); } - // Load graph from the new root (if one exists there) - let loaded = rpg_core::storage::load(&canonical).ok(); - let graph_note = match &loaded { - Some(g) => format!( - "Loaded existing graph: {} entities, {} files, {}", - g.metadata.total_entities, - g.metadata.total_files, - if g.metadata.semantic_hierarchy { - "semantic hierarchy" - } else { - "structural hierarchy" - } - ), - None => "No .rpg/graph.json at this root — call build_rpg to index it.".to_string(), - }; - *self.graph.write().await = loaded; - Ok(format!( "Project root set to: {}\n{}", canonical.display(), @@ -522,17 +585,25 @@ impl RpgServer { ) -> Result { use rpg_parser::languages::Language; - let project_root = &self.project_root().await; + let ctx = self + .effective_context(non_empty_string(params.project_root.as_ref())) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + let language_override = non_empty_string(params.language.as_ref()); + let include_pattern = non_empty_string(params.include.as_ref()); + let exclude_pattern = non_empty_string(params.exclude.as_ref()); // Detect languages (multi-language support) - let languages: Vec = if let Some(ref l) = params.language { + let languages: Vec = if let Some(l) = language_override { // User specified a single language override let lang = Language::from_name(l) .or_else(|| Language::from_extension(l)) .ok_or_else(|| format!("unsupported language: {}", l))?; vec![lang] } else { - let detected = Language::detect_all(project_root); + let detected = Language::detect_all(&project_root); if detected.is_empty() { return Err( "could not detect any supported language; specify the 'language' parameter" @@ -544,11 +615,11 @@ impl RpgServer { // Auto-preserve: load existing graph if it has lifted features let mut backup_failed = false; - let old_graph: Option = if 
storage::rpg_exists(project_root) { - match storage::load(project_root) { + let old_graph: Option = if storage::rpg_exists(&project_root) { + match storage::load(&project_root) { Ok(g) if g.metadata.lifted_entities > 0 => { // Backup before overwriting - if let Err(e) = storage::create_backup(project_root) { + if let Err(e) = storage::create_backup(&project_root) { eprintln!("rpg: WARNING: backup failed: {e}"); backup_failed = true; } @@ -579,20 +650,16 @@ impl RpgServer { // Detect paradigms using TOML-driven engine let active_defs = - rpg_parser::paradigms::detect_paradigms_toml(project_root, &languages, ¶digm_defs); + rpg_parser::paradigms::detect_paradigms_toml(&project_root, &languages, ¶digm_defs); graph.metadata.paradigms = active_defs.iter().map(|d| d.name.clone()).collect(); // Parse code entities (all detected languages) - let include_glob = params - .include - .as_deref() - .and_then(|p| globset::Glob::new(p).ok().map(|g| g.compile_matcher())); - let exclude_glob = params - .exclude - .as_deref() - .and_then(|p| globset::Glob::new(p).ok().map(|g| g.compile_matcher())); + let include_glob = + include_pattern.and_then(|p| globset::Glob::new(p).ok().map(|g| g.compile_matcher())); + let exclude_glob = + exclude_pattern.and_then(|p| globset::Glob::new(p).ok().map(|g| g.compile_matcher())); - let walker = ignore::WalkBuilder::new(project_root) + let walker = ignore::WalkBuilder::new(&project_root) .hidden(true) .git_ignore(true) .add_custom_ignore_filename(".rpgignore") @@ -610,7 +677,7 @@ impl RpgServer { if !languages.contains(&file_lang) { continue; } - let rel_path_for_glob = path.strip_prefix(project_root).unwrap_or(path); + let rel_path_for_glob = path.strip_prefix(&project_root).unwrap_or(path); if let Some(ref inc) = include_glob && !inc.is_match(rel_path_for_glob) { @@ -626,7 +693,7 @@ impl RpgServer { continue; }; - let rel_path = path.strip_prefix(project_root).unwrap_or(path); + let rel_path = path.strip_prefix(&project_root).unwrap_or(path); let 
mut raw_entities = rpg_parser::entities::extract_entities(rel_path, &source, file_lang); @@ -670,14 +737,14 @@ impl RpgServer { graph.materialize_containment_edges(); // Artifact grounding + dependency resolution - let cfg = self.load_config().await; + let cfg = state.read().await.config.clone(); let paradigm_ctx = rpg_encoder::grounding::ParadigmContext { active_defs: active_defs.clone(), qcache: &qcache, }; rpg_encoder::grounding::populate_entity_deps( &mut graph, - project_root, + &project_root, cfg.encoding.broadcast_imports, None, Some(¶digm_ctx), @@ -686,7 +753,7 @@ impl RpgServer { rpg_encoder::grounding::resolve_dependencies(&mut graph); // Set git commit - if let Ok(sha) = rpg_encoder::evolution::get_head_sha(project_root) { + if let Ok(sha) = rpg_encoder::evolution::get_head_sha(&project_root) { graph.base_commit = Some(sha); } @@ -713,8 +780,8 @@ impl RpgServer { // Refresh metadata and save graph.refresh_metadata(); - storage::save(project_root, &graph).map_err(|e| format!("Failed to save RPG: {}", e))?; - let _ = storage::ensure_gitignore(project_root); + storage::save(&project_root, &graph).map_err(|e| format!("Failed to save RPG: {}", e))?; + let _ = storage::ensure_gitignore(&project_root); // Capture lifting coverage BEFORE the graph moves into `self.graph`. 
// `lifting_coverage()` excludes `Module` entities (they get features @@ -726,32 +793,35 @@ impl RpgServer { // Update in-memory state let meta = graph.metadata.clone(); - *self.graph.write().await = Some(graph); - - // Clear sessions — graph structure changed - *self.lifting_session.write().await = None; - *self.hierarchy_session.write().await = None; + { + let mut root_state = state.write().await; + root_state.graph = Some(graph.clone()); + root_state.lifting_session = None; + root_state.hierarchy_session = None; + } + if is_default_root { + self.sync_default_root_compat_from_state(&project_root) + .await; + } // Sync embedding index incrementally (fingerprints detect what changed) #[cfg(feature = "embeddings")] { - let graph_guard = self.graph.read().await; - if let Some(ref graph) = *graph_guard { - let mut emb_guard = self.embedding_index.write().await; - if let Some(ref mut idx) = *emb_guard - && let Err(e) = idx.sync(graph) - { - eprintln!("rpg: embedding sync failed: {e}"); - *emb_guard = None; - } + let mut root_state = state.write().await; + if let Some(ref mut idx) = root_state.embedding_index + && let Err(e) = idx.sync(&graph) + { + eprintln!("rpg: embedding sync failed: {e}"); + root_state.embedding_index = None; } + root_state.embedding_init_failed = false; } - #[cfg(feature = "embeddings")] - self.embedding_init_failed - .store(false, std::sync::atomic::Ordering::Relaxed); // Clear stale pending routing (graph was fully replaced) - self.pending_routing.write().await.clear(); - clear_pending_routing(&self.project_root().await); + { + let mut root_state = state.write().await; + root_state.pending_routing.clear(); + } + clear_pending_routing(&project_root); // Prune the drift-tracking set against the new graph. build_rpg // preserves features for entities whose IDs survive the rebuild, @@ -759,11 +829,14 @@ impl RpgServer { // But IDs removed in the rebuild should drop out of the set so it // doesn't accumulate dead references over many rebuilds. 
{ - let graph_guard = self.graph.read().await; - if let Some(ref g) = *graph_guard { - let mut stale = self.stale_entity_ids.write().await; - stale.retain(|id| g.entities.contains_key(id)); - } + let mut root_state = state.write().await; + root_state + .stale_entity_ids + .retain(|id| graph.entities.contains_key(id)); + } + if is_default_root { + self.sync_default_root_compat_from_state(&project_root) + .await; } let lang_display = if languages.len() == 1 { @@ -845,7 +918,7 @@ impl RpgServer { "\n\nNEXT STEP: Graph is fully lifted. Semantic tools (search_node, context_pack, plan_change, explore_rpg) are ready — prefer them over grep/cat/find for any structural question.", ); } else if unlifted >= crate::LARGE_SCOPE_ENTITIES { - let batch_tokens = self.config.read().await.encoding.max_batch_tokens; + let batch_tokens = state.read().await.config.encoding.max_batch_tokens; result.push_str(&format!( "\n\nNEXT STEP: {} entities unlifted (of {}). Dispatch a sub-agent now to run the lift loop — don't wait for the user to ask. 
Each batch is ~{}K tokens of source, so running the loop here would exhaust caller context before any real work begins.\n\ \nLOOP (sub-agent runs this in its own context):\n \ @@ -859,6 +932,9 @@ impl RpgServer { unlifted, total_non_module, )); } + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } Ok(result) } @@ -889,7 +965,22 @@ impl RpgServer { } } - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if is_default_root { + *self.graph.write().await = Some(loaded); + } + } + } // Reject concurrent lift calls if self @@ -938,24 +1029,26 @@ impl RpgServer { // Dry run: estimate cost without lifting if dry_run { - let project_root = self.project_root().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let guard = state.read().await; + let graph = guard.graph.as_ref().unwrap(); let estimate = rpg_lift::estimate_cost(graph, provider.as_ref(), &project_root); - return Ok(format!( + let mut out = format!( "Cost estimate for lifting with {} ({}):\n\n{}", params.provider, provider.model_name(), estimate, - )); + ); + if !ctx.cross_root_notice.is_empty() { + out = format!("{}{}", ctx.cross_root_notice, out); + } + return Ok(out); } // Hold the write lock for the entire pipeline. The graph never leaves // shared state, so cancellation or concurrent tools can't corrupt it. 
- let mut guard = self.graph.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; - - let project_root = self.project_root().await.clone(); + let mut state_guard = state.write().await; + let mut graph_slot = state_guard.graph.take(); + let graph = graph_slot.as_mut().ok_or("No RPG loaded")?; let scope_owned = scope.to_string(); // Compute the in-scope entity IDs up front so we can drain @@ -994,24 +1087,51 @@ impl RpgServer { }) .map_err(|e| format!("Lift failed: {}", e))?; - drop(guard); + let final_graph = graph_slot.take().unwrap(); + state_guard.graph = Some(final_graph.clone()); + drop(state_guard); + if is_default_root { + *self.graph.write().await = Some(final_graph); + } // Drain stale tracking for every in-scope ID. After the // pipeline, those entities have authoritative features (LLM // or auto-lift), regardless of whether the features changed. if !in_scope_ids.is_empty() { + let mut state_guard = state.write().await; + state_guard + .stale_entity_ids + .retain(|id| !in_scope_ids.contains(id)); + } + if is_default_root && !in_scope_ids.is_empty() { let mut stale = self.stale_entity_ids.write().await; stale.retain(|id| !in_scope_ids.contains(id)); } // Clear sessions — entity list changed - *self.lifting_session.write().await = None; - *self.hierarchy_session.write().await = None; + { + let mut state_guard = state.write().await; + state_guard.lifting_session = None; + state_guard.hierarchy_session = None; + } + if is_default_root { + *self.lifting_session.write().await = None; + *self.hierarchy_session.write().await = None; + } // Update auto-sync markers — force re-evaluation on next query - *self.last_auto_sync_head.write().await = - rpg_encoder::evolution::get_head_sha(&self.project_root().await).ok(); - *self.last_auto_sync_changeset.write().await = None; + let new_head = rpg_encoder::evolution::get_head_sha(&project_root).ok(); + { + let mut state_guard = state.write().await; + state_guard.last_auto_sync_head = new_head.clone(); + 
state_guard.last_auto_sync_changeset = None; + state_guard.last_auto_sync_workdir_paths = std::collections::HashSet::new(); + } + if is_default_root { + *self.last_auto_sync_head.write().await = new_head; + *self.last_auto_sync_changeset.write().await = None; + *self.last_auto_sync_workdir_paths.write().await = std::collections::HashSet::new(); + } let mut out = format!( "Lifting complete ({}, {}).\n\ @@ -1047,6 +1167,9 @@ impl RpgServer { out.push_str( "\n\nNEXT STEP: Call semantic_snapshot to see the full repo understanding.", ); + if !ctx.cross_root_notice.is_empty() { + out = format!("{}{}", ctx.cross_root_notice, out); + } Ok(out) } // #[cfg(feature = "auto-lift")] } @@ -1054,21 +1177,36 @@ impl RpgServer { #[tool( description = "Check lifting progress: coverage per area, unlifted files, active session, and NEXT STEP. Call this at any point to see where you are in the lifting flow. Reads state from the persisted graph — works across sessions." )] - async fn lifting_status(&self) -> Result { - // Check if graph is loaded - let guard = self.graph.read().await; - if guard.is_some() { - let graph = guard.as_ref().unwrap(); - return self.format_lifting_status(graph).await; + async fn lifting_status( + &self, + Parameters(params): Parameters, + ) -> Result { + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let state = ctx.state.clone(); + + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph && storage::rpg_exists(&project_root) { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if project_root == self.project_root().await { + *self.graph.write().await = Some(loaded); + } + } } - drop(guard); - // Try loading from disk - if storage::rpg_exists(&self.project_root().await) { - self.ensure_graph().await?; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); - return 
self.format_lifting_status(graph).await; + let state_guard = state.read().await; + if let Some(ref graph) = state_guard.graph { + let mut out = self + .format_lifting_status(graph, &project_root, &state_guard) + .await?; + if !ctx.cross_root_notice.is_empty() { + out = format!("{}{}", ctx.cross_root_notice, out); + } + return Ok(out); } Ok( @@ -1084,7 +1222,21 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if project_root == self.project_root().await { + *self.graph.write().await = Some(loaded); + } + } + } let batch_index = params.batch_index.unwrap_or(0); @@ -1095,8 +1247,8 @@ impl RpgServer { { // Check rebuild need with a brief read (no graph lock held) let needs_rebuild = { - let session = self.lifting_session.read().await; - match session.as_ref() { + let state_guard = state.read().await; + match state_guard.lifting_session.as_ref() { None => true, Some(s) => s.scope_key != params.scope || batch_index == 0, } @@ -1109,14 +1261,13 @@ impl RpgServer { // still exist but are outdated, so they should be treated as // "needs LLM work" alongside unlifted entities. let stale_snapshot: HashSet = { - let stale = self.stale_entity_ids.read().await; - stale.iter().cloned().collect() + let state_guard = state.read().await; + state_guard.stale_entity_ids.iter().cloned().collect() }; - // Lock order: graph first, then session (consistent with lifting_status) - let mut guard = self.graph.write().await; - let mut session = self.lifting_session.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; + // Lock the root state while we inspect and update the graph. 
+ let mut state_guard = state.write().await; + let graph = state_guard.graph.as_mut().ok_or("No RPG loaded")?; let mut scope = rpg_encoder::lift::resolve_scope(graph, ¶ms.scope); @@ -1147,22 +1298,19 @@ impl RpgServer { } if scope.entity_ids.is_empty() { - *session = None; + state_guard.lifting_session = None; return Ok(format!( "No entities matched scope: {}\nTry a file glob like 'src/**' or '*' for all.", params.scope )); } - let all_raw_entities = rpg_encoder::lift::collect_raw_entities( - graph, - &scope, - &self.project_root().await, - ) - .map_err(|e| format!("Failed to collect entities: {}", e))?; + let all_raw_entities = + rpg_encoder::lift::collect_raw_entities(graph, &scope, &project_root) + .map_err(|e| format!("Failed to collect entities: {}", e))?; if all_raw_entities.is_empty() { - *session = None; + state_guard.lifting_session = None; return Ok("No source code found for matched entities.".into()); } @@ -1231,7 +1379,7 @@ impl RpgServer { // Save if we auto-lifted anything if auto_lifted > 0 { graph.refresh_metadata(); - if let Err(e) = rpg_core::storage::save(&self.project_root().await, graph) { + if let Err(e) = rpg_core::storage::save(&project_root, graph) { eprintln!("Warning: failed to persist auto-lifted features: {e}"); } } @@ -1241,25 +1389,24 @@ impl RpgServer { // counting them as stale forever because the auto-lift path // skips submit_lift_results entirely. if !auto_relifted_stale.is_empty() { - let mut stale = self.stale_entity_ids.write().await; + let mut state_guard = state.write().await; + let stale = &mut state_guard.stale_entity_ids; for id in &auto_relifted_stale { stale.remove(id); } } if needs_llm.is_empty() { - *session = None; let (lifted, total) = graph.lifting_coverage(); + state_guard.lifting_session = None; return Ok(format!( "AUTO-LIFTED: {} trivial entities. 
No entities need LLM analysis.\ncoverage: {}/{}\nNEXT: Call finalize_lifting.", auto_lifted, lifted, total, )); } - let config = self.config.read().await; - let batch_size = config.encoding.batch_size; - let max_batch_tokens = config.encoding.max_batch_tokens; - drop(config); + let batch_size = state_guard.config.encoding.batch_size; + let max_batch_tokens = state_guard.config.encoding.max_batch_tokens; let mcp_batch_size = batch_size.min(25); let batch_ranges = rpg_encoder::lift::build_token_aware_batches( @@ -1271,7 +1418,7 @@ impl RpgServer { // Store auto-lift count for batch 0 output let auto_lift_count = auto_lifted; - *session = Some(LiftingSession { + state_guard.lifting_session = Some(LiftingSession { scope_key: params.scope.clone(), raw_entities: needs_llm, batch_ranges, @@ -1282,10 +1429,9 @@ impl RpgServer { } // Lock order: graph first, then session (consistent with rebuild block above) - let guard = self.graph.read().await; - let graph = guard.as_ref().ok_or("No RPG loaded")?; - let session = self.lifting_session.read().await; - let Some(session) = session.as_ref() else { + let state_guard = state.read().await; + let graph = state_guard.graph.as_ref().ok_or("No RPG loaded")?; + let Some(session) = state_guard.lifting_session.as_ref() else { return Err("Lifting session expired. Call get_entities_for_lifting with batch_index=0 to restart.".into()); }; @@ -1325,7 +1471,7 @@ impl RpgServer { // lives (kept there to avoid duplicating detail in the per-batch // response, which ships with every batch's source payload). if total_batches >= crate::LARGE_SCOPE_BATCHES { - let batch_tokens = self.config.read().await.encoding.max_batch_tokens; + let batch_tokens = state_guard.config.encoding.max_batch_tokens; let approx_total_k = (total_batches * batch_tokens).div_ceil(1000); output.push_str(&format!( "\nNOTE: {} batches queued (~{}K tokens of source total). 
If your runtime supports sub-agent dispatch or a cheaper model, stop here — do not request further batches in this context — and invoke `lifting_status` for the delegation pattern. Continue the sequential loop only if no dispatch is available.\n\n", @@ -1344,8 +1490,7 @@ impl RpgServer { output.push_str(&crate::types::format_review_candidates( &session.review_candidates, )); - let root = self.project_root().await; - let project_name = root + let project_name = project_root .file_name() .and_then(|n| n.to_str()) .unwrap_or("unknown"); @@ -1411,7 +1556,22 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if is_default_root { + *self.graph.write().await = Some(loaded); + } + } + } let mut features: std::collections::HashMap> = serde_json::from_str(¶ms.features) @@ -1420,12 +1580,12 @@ impl RpgServer { // Normalize per paper: trim, lowercase, dedup rpg_encoder::semantic_lifting::normalize_features(&mut features); - let mut guard = self.graph.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; + let mut state_guard = state.write().await; + let mut graph_slot = state_guard.graph.take(); + let graph = graph_slot.as_mut().ok_or("No RPG loaded")?; - let config = self.load_config().await; - let drift_ignore = config.encoding.drift_ignore_threshold; - let drift_auto = config.encoding.drift_auto_threshold; + let drift_ignore = state_guard.config.encoding.drift_ignore_threshold; + let drift_auto = state_guard.config.encoding.drift_auto_threshold; let mut updated = 0usize; let mut unmatched = 0usize; @@ 
-1517,7 +1677,7 @@ impl RpgServer { let routing_count; if needs_routing { - let mut pending = self.pending_routing.write().await; + let pending = &mut state_guard.pending_routing; for eid in &auto_route_ids { let original_path = graph @@ -1584,7 +1744,7 @@ impl RpgServer { graph_revision: revision, entries: pending.clone(), }; - if let Err(e) = save_pending_routing(&self.project_root().await, &state) { + if let Err(e) = save_pending_routing(&project_root, &state) { eprintln!("rpg: failed to persist pending routing: {e}"); } } else { @@ -1596,27 +1756,44 @@ impl RpgServer { // Re-lifted entities are no longer stale — drain them from the set // tracked by auto-sync so lifting_status reports accurate drift. if !resolved_features.is_empty() { + let stale = &mut state_guard.stale_entity_ids; + for id in resolved_features.keys() { + stale.remove(id); + } + } + if is_default_root && !resolved_features.is_empty() { let mut stale = self.stale_entity_ids.write().await; for id in resolved_features.keys() { stale.remove(id); } } - storage::save(&self.project_root().await, graph) - .map_err(|e| format!("Failed to save RPG: {}", e))?; + storage::save(&project_root, graph).map_err(|e| format!("Failed to save RPG: {}", e))?; // Update embedding index for newly-lifted entities (non-blocking on failure) #[cfg(feature = "embeddings")] { let graph_ts = graph.updated_at.to_rfc3339(); - drop(guard); // Release graph write lock before async embedding update - self.update_embeddings(&resolved_features, &graph_ts).await; + drop(state_guard); // Release graph write lock before async embedding update + if is_default_root { + self.update_embeddings(&resolved_features, &graph_ts).await; + } } #[cfg(not(feature = "embeddings"))] - drop(guard); + drop(state_guard); - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let final_graph = graph_slot.take().unwrap(); + { + let mut state_guard = state.write().await; + state_guard.graph = Some(final_graph.clone()); + } + if 
is_default_root { + *self.graph.write().await = Some(final_graph.clone()); + *self.pending_routing.write().await = state.read().await.pending_routing.clone(); + } + + let graph_guard = state.read().await; + let graph = graph_guard.graph.as_ref().unwrap(); let (lifted, total) = graph.lifting_coverage(); let coverage_pct = if total > 0 { lifted as f64 / total as f64 * 100.0 @@ -1720,8 +1897,8 @@ impl RpgServer { // alone would tell a stale-only re-lift loop to stop after batch 1 // while later batches are still queued. let stale_remaining = { - let stale = self.stale_entity_ids.read().await; - stale + graph_guard + .stale_entity_ids .iter() .filter(|id| graph.entities.contains_key(*id)) .count() @@ -1745,6 +1922,9 @@ impl RpgServer { } else { result.push_str("\nNEXT: continue with get_entities_for_lifting, then call finalize_lifting when done."); } + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } Ok(result) } @@ -1755,13 +1935,27 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if project_root == self.project_root().await { + *self.graph.write().await = Some(loaded); + } + } + } - let guard = self.graph.read().await; - let graph = guard.as_ref().ok_or("No RPG loaded")?; + let state_guard = state.read().await; + let graph = state_guard.graph.as_ref().ok_or("No RPG loaded")?; let revision = graph_revision(graph); - let pending = self.pending_routing.read().await; + let pending = &state_guard.pending_routing; if pending.is_empty() { return Ok("No entities pending routing.".into()); } @@ -1890,6 +2084,10 @@ impl RpgServer { 
revision, )); + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + Ok(result) } @@ -1900,14 +2098,30 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if is_default_root { + *self.graph.write().await = Some(loaded); + } + } + } let decisions: std::collections::HashMap = serde_json::from_str(¶ms.decisions) .map_err(|e| format!("Invalid decisions JSON: {}", e))?; - let mut guard = self.graph.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; + let mut state_guard = state.write().await; + let mut graph_slot = state_guard.graph.take(); + let graph = graph_slot.as_mut().ok_or("No RPG loaded")?; // Validate graph revision let current_revision = graph_revision(graph); @@ -1918,7 +2132,7 @@ impl RpgServer { )); } - let mut pending = self.pending_routing.write().await; + let pending = &mut state_guard.pending_routing; let mut routed = 0usize; let mut kept = 0usize; let mut reports: Vec = Vec::new(); @@ -2020,23 +2234,30 @@ impl RpgServer { } graph.refresh_metadata(); - storage::save(&self.project_root().await, graph) - .map_err(|e| format!("Failed to save RPG: {}", e))?; + storage::save(&project_root, graph).map_err(|e| format!("Failed to save RPG: {}", e))?; // Update or clear persisted pending state if pending.is_empty() { - clear_pending_routing(&self.project_root().await); + clear_pending_routing(&project_root); } else { let state = PendingRoutingState { graph_revision: current_revision, entries: pending.clone(), }; - if let Err(e) = 
save_pending_routing(&self.project_root().await, &state) { + if let Err(e) = save_pending_routing(&project_root, &state) { eprintln!("rpg: failed to persist pending routing: {e}"); } } let remaining = pending.len(); + let final_graph = graph_slot.take().unwrap(); + state_guard.graph = Some(final_graph.clone()); + let pending_clone = state_guard.pending_routing.clone(); + drop(state_guard); + if is_default_root { + *self.graph.write().await = Some(final_graph); + *self.pending_routing.write().await = pending_clone; + } let mut result = format!("Routed {} entities, kept {} in place.\n", routed, kept); for report in &reports { result.push_str(report); @@ -2052,6 +2273,10 @@ impl RpgServer { result.push_str("\nAll routing complete.\n"); } + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + Ok(result) } @@ -2063,9 +2288,25 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if is_default_root { + *self.graph.write().await = Some(loaded); + } + } + } - let mut graph = self.graph.write().await; + let mut root_state = state.write().await; + let mut graph = root_state.graph.take(); let g = graph.as_mut().ok_or("No RPG loaded")?; // Detect paradigms BEFORE running update so entities get classified @@ -2075,7 +2316,7 @@ impl RpgServer { let qcache = rpg_parser::paradigms::query_engine::QueryCache::compile_all(¶digm_defs) .map_err(|errs| format!("query compile errors: {}", errs.join("; ")))?; let active_defs = rpg_parser::paradigms::detect_paradigms_toml( - 
&self.project_root().await, + &project_root, &detected_langs, ¶digm_defs, ); @@ -2091,33 +2332,28 @@ impl RpgServer { let summary = if let Some(since) = params.since.as_deref() { rpg_encoder::evolution::run_update( g, - &self.project_root().await, + &project_root, Some(since), Some(¶digm_pipeline), ) } else { - rpg_encoder::evolution::run_update_workdir( - g, - &self.project_root().await, - Some(¶digm_pipeline), - ) + rpg_encoder::evolution::run_update_workdir(g, &project_root, Some(¶digm_pipeline)) } .map_err(|e| format!("Update failed: {}", e))?; - storage::save(&self.project_root().await, g) - .map_err(|e| format!("Failed to save RPG: {}", e))?; + storage::save(&project_root, g).map_err(|e| format!("Failed to save RPG: {}", e))?; // Update auto-sync markers — force re-evaluation on next query - *self.last_auto_sync_head.write().await = - rpg_encoder::evolution::get_head_sha(&self.project_root().await).ok(); - *self.last_auto_sync_changeset.write().await = None; + root_state.last_auto_sync_head = rpg_encoder::evolution::get_head_sha(&project_root).ok(); + root_state.last_auto_sync_changeset = None; + root_state.last_auto_sync_workdir_paths = std::collections::HashSet::new(); // Track modified entities so lifting_status and // get_entities_for_lifting(scope="*") surface them as re-lift work. // Without this, the "needs_relift: N" value we report below would // point the caller at a path that returns zero entities. 
{ - let mut stale = self.stale_entity_ids.write().await; + let stale = &mut root_state.stale_entity_ids; for id in &summary.modified_entity_ids { stale.insert(id.clone()); } @@ -2125,12 +2361,22 @@ impl RpgServer { } // Clear sessions — entity list changed - *self.lifting_session.write().await = None; - *self.hierarchy_session.write().await = None; + root_state.lifting_session = None; + root_state.hierarchy_session = None; // Sync embedding index incrementally — entities changed #[cfg(feature = "embeddings")] { + if let Some(ref mut idx) = root_state.embedding_index + && let Err(e) = idx.sync(g) + { + eprintln!("rpg: embedding sync failed: {e}"); + root_state.embedding_index = None; + } + root_state.embedding_init_failed = false; + } + #[cfg(feature = "embeddings")] + if is_default_root { let mut emb_guard = self.embedding_index.write().await; if let Some(ref mut idx) = *emb_guard && let Err(e) = idx.sync(g) @@ -2138,17 +2384,15 @@ impl RpgServer { eprintln!("rpg: embedding sync failed: {e}"); *emb_guard = None; } + self.embedding_init_failed + .store(false, std::sync::atomic::Ordering::Relaxed); } - #[cfg(feature = "embeddings")] - self.embedding_init_failed - .store(false, std::sync::atomic::Ordering::Relaxed); // Reconcile pending routing against the updated graph: // preserve entries whose entities still exist and have features, drop the rest. 
let mut pending_preserved = 0usize; let mut pending_dropped = 0usize; { - let mut pending = self.pending_routing.write().await; - let previous = std::mem::take(&mut *pending); + let previous = std::mem::take(&mut root_state.pending_routing); if g.metadata.semantic_hierarchy { let mut preserved = Vec::new(); for mut entry in previous { @@ -2165,29 +2409,32 @@ impl RpgServer { } } pending_preserved = preserved.len(); - *pending = preserved.clone(); - if pending.is_empty() { - clear_pending_routing(&self.project_root().await); + root_state.pending_routing = preserved.clone(); + if root_state.pending_routing.is_empty() { + clear_pending_routing(&project_root); } else { let state = PendingRoutingState { graph_revision: graph_revision(g), entries: preserved, }; - if let Err(e) = save_pending_routing(&self.project_root().await, &state) { + if let Err(e) = save_pending_routing(&project_root, &state) { eprintln!("rpg: failed to persist pending routing: {e}"); } } } else { pending_dropped = previous.len(); - clear_pending_routing(&self.project_root().await); + clear_pending_routing(&project_root); } } - if summary.entities_added == 0 + let result = if summary.entities_added == 0 && summary.entities_modified == 0 && summary.entities_removed == 0 { - Ok("RPG is up to date. No source changes detected.".into()) + format!( + "{}RPG is up to date. 
No source changes detected.", + ctx.cross_root_notice + ) } else { // Count entities needing lifting (new/modified with empty features) let (lifted, total) = g.lifting_coverage(); @@ -2230,21 +2477,48 @@ impl RpgServer { )); } - Ok(result) + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + result + }; + + let final_graph = graph.take().unwrap(); + root_state.graph = Some(final_graph.clone()); + drop(root_state); + if is_default_root { + self.sync_default_root_compat_from_state(&project_root) + .await; } + + Ok(result) } #[tool( description = "Reload the RPG graph and config from disk. Use after external changes to .rpg/graph.json or .rpg/config.toml — for example, after the CLI ran `rpg-encoder lift` or after editing batch-size settings." )] - async fn reload_rpg(&self) -> Result { - let project_root = self.project_root().await; + async fn reload_rpg( + &self, + Parameters(params): Parameters, + ) -> Result { + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); // Refresh config from disk — if the user edited .rpg/config.toml // or the lifter wrote new settings, pick them up here. Logs a // warning if the file exists but failed to parse, then keeps the // existing config (don't clobber a working in-memory config over // a temporarily broken edit). 
- Self::reload_config_with_warning(&self.config, &project_root).await; + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let config_slot = + std::sync::Arc::new(tokio::sync::RwLock::new(state.read().await.config.clone())); + Self::reload_config_with_warning(&config_slot, &project_root).await; + let new_config = config_slot.read().await.clone(); + state.write().await.config = new_config.clone(); + } match storage::load(&project_root) { Ok(g) => { let entities = g.metadata.total_entities; @@ -2258,38 +2532,44 @@ impl RpgServer { // keeps that backlog visible while dropping IDs that were // removed in the new graph. { - let mut stale = self.stale_entity_ids.write().await; - stale.retain(|id| g.entities.contains_key(id)); + let mut root_state = state.write().await; + root_state + .stale_entity_ids + .retain(|id| g.entities.contains_key(id)); + root_state.graph = Some(g.clone()); } - *self.graph.write().await = Some(g); // Sync embedding index incrementally #[cfg(feature = "embeddings")] { - let graph_guard = self.graph.read().await; - if let Some(ref graph) = *graph_guard { - let mut emb_guard = self.embedding_index.write().await; - if let Some(ref mut idx) = *emb_guard - && let Err(e) = idx.sync(graph) - { - eprintln!("rpg: embedding sync failed: {e}"); - *emb_guard = None; - } + let mut root_state = state.write().await; + if let Some(ref mut idx) = root_state.embedding_index + && let Err(e) = idx.sync(&g) + { + eprintln!("rpg: embedding sync failed: {e}"); + root_state.embedding_index = None; } + root_state.embedding_init_failed = false; } - #[cfg(feature = "embeddings")] - self.embedding_init_failed - .store(false, std::sync::atomic::Ordering::Relaxed); - // Clear sessions — graph reloaded - *self.lifting_session.write().await = None; - *self.hierarchy_session.write().await = None; + { + let mut root_state = state.write().await; + root_state.lifting_session = None; + root_state.hierarchy_session = None; - // 
Reload pending routing from disk (may have changed externally) - let pending = load_pending_routing(&self.project_root().await) - .map(|s| s.entries) - .unwrap_or_default(); - *self.pending_routing.write().await = pending; - Ok(format!("RPG reloaded. {} entities loaded.", entities)) + // Reload pending routing from disk (may have changed externally) + root_state.pending_routing = load_pending_routing(&project_root) + .map(|s| s.entries) + .unwrap_or_default(); + } + if is_default_root { + self.sync_default_root_compat_from_state(&project_root) + .await; + } + + Ok(format!( + "{}RPG reloaded. {} entities loaded.", + ctx.cross_root_notice, entities + )) } Err(e) => Err(format!("Failed to reload RPG: {}", e)), } @@ -2298,11 +2578,30 @@ impl RpgServer { #[tool( description = "Finalize the lifting process: aggregate file-level features onto Module entities and re-ground artifacts. Call this AFTER all entities have been lifted via submit_lift_results. No LLM needed — uses dedup-aggregation of already-lifted entity features. After finalizing, proceed to get_files_for_synthesis for holistic file-level features, then build_semantic_hierarchy + submit_hierarchy." 
)] - async fn finalize_lifting(&self) -> Result { - self.ensure_graph().await?; + async fn finalize_lifting( + &self, + Parameters(params): Parameters, + ) -> Result { + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if is_default_root { + *self.graph.write().await = Some(loaded); + } + } + } - let mut guard = self.graph.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; + let mut state_guard = state.write().await; + let mut graph_slot = state_guard.graph.take(); + let graph = graph_slot.as_mut().ok_or("No RPG loaded")?; let (lifted, _total) = graph.lifting_coverage(); if lifted == 0 { @@ -2312,7 +2611,7 @@ impl RpgServer { // Drain pending routing via Jaccard fallback if agent didn't route explicitly let mut fallback_routed = 0usize; { - let mut pending = self.pending_routing.write().await; + let pending = &mut state_guard.pending_routing; if !pending.is_empty() && graph.metadata.semantic_hierarchy { for p in pending.drain(..) 
{ let feats = graph @@ -2348,11 +2647,11 @@ impl RpgServer { graph.assign_hierarchy_ids(); graph.materialize_containment_edges(); } - clear_pending_routing(&self.project_root().await); + clear_pending_routing(&project_root); } // Clear lifting session cache - *self.lifting_session.write().await = None; + state_guard.lifting_session = None; let mut steps: Vec = Vec::new(); @@ -2376,8 +2675,7 @@ impl RpgServer { graph.refresh_metadata(); // Save - storage::save(&self.project_root().await, graph) - .map_err(|e| format!("Failed to save RPG: {}", e))?; + storage::save(&project_root, graph).map_err(|e| format!("Failed to save RPG: {}", e))?; let (final_lifted, final_total) = graph.lifting_coverage(); let coverage_pct = if final_total > 0 { @@ -2432,6 +2730,20 @@ impl RpgServer { ); } + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + + let final_graph = graph_slot.take().unwrap(); + state_guard.graph = Some(final_graph.clone()); + let pending_clone = state_guard.pending_routing.clone(); + drop(state_guard); + if is_default_root { + *self.graph.write().await = Some(final_graph); + *self.lifting_session.write().await = None; + *self.pending_routing.write().await = pending_clone; + } + Ok(result) } @@ -2442,10 +2754,24 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if project_root == self.project_root().await { + *self.graph.write().await = Some(loaded); + } + } + } - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let guard = state.read().await; + let graph = guard.graph.as_ref().unwrap(); // 
Collect files that have lifted child entities #[allow(clippy::type_complexity)] @@ -2538,6 +2864,10 @@ impl RpgServer { output.push_str("DONE — last batch. After submitting, call `build_semantic_hierarchy` to construct domain areas.\n"); } + if !ctx.cross_root_notice.is_empty() { + output = format!("{}{}", ctx.cross_root_notice, output); + } + Ok(output) } @@ -2548,14 +2878,30 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if is_default_root { + *self.graph.write().await = Some(loaded); + } + } + } let syntheses: std::collections::HashMap = serde_json::from_str(¶ms.syntheses) .map_err(|e| format!("Invalid syntheses JSON: {}", e))?; - let mut guard = self.graph.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; + let mut state_guard = state.write().await; + let mut graph_slot = state_guard.graph.take(); + let graph = graph_slot.as_mut().ok_or("No RPG loaded")?; let mut updated = 0usize; let mut unmatched = Vec::new(); @@ -2609,8 +2955,7 @@ impl RpgServer { graph.aggregate_hierarchy_features(); graph.refresh_metadata(); - storage::save(&self.project_root().await, graph) - .map_err(|e| format!("Failed to save RPG: {}", e))?; + storage::save(&project_root, graph).map_err(|e| format!("Failed to save RPG: {}", e))?; let total_modules = graph .entities @@ -2642,17 +2987,45 @@ impl RpgServer { result.push_str("\nNEXT STEP: Call build_semantic_hierarchy to construct domain areas, then submit_hierarchy to apply them.\n"); + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", 
ctx.cross_root_notice, result); + } + + let final_graph = graph_slot.take().unwrap(); + state_guard.graph = Some(final_graph.clone()); + drop(state_guard); + if is_default_root { + *self.graph.write().await = Some(final_graph); + } + Ok(result) } #[tool( description = "Get file-level features and instructions for building a semantic hierarchy. Returns Module (file) entities with their aggregated features, plus the domain discovery and hierarchy assignment prompts. YOU (the LLM) analyze the features, identify functional domains, and assign each file to a 3-level hierarchy path. Then call submit_hierarchy with your assignments." )] - async fn build_semantic_hierarchy(&self) -> Result { - self.ensure_graph().await?; + async fn build_semantic_hierarchy( + &self, + Parameters(params): Parameters, + ) -> Result { + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if project_root == self.project_root().await { + *self.graph.write().await = Some(loaded); + } + } + } - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let guard = state.read().await; + let graph = guard.graph.as_ref().unwrap(); let (lifted, total) = graph.lifting_coverage(); let coverage_pct = if total > 0 { @@ -2743,18 +3116,18 @@ impl RpgServer { }, } - let graph_guard = self.graph.read().await; - let graph = graph_guard.as_ref().unwrap(); + let graph_guard = state.read().await; + let graph = graph_guard.graph.as_ref().unwrap(); let action = { - let mut session_guard = self.hierarchy_session.write().await; + let mut session_guard = state.write().await; // Initialize if absent — fresh or cleared-out-from-under-us. 
- if session_guard.is_none() { + if session_guard.hierarchy_session.is_none() { let new_clusters = rpg_encoder::hierarchy::cluster_files_for_hierarchy(graph, 70); let snapshot = new_clusters.clone(); - *session_guard = Some(HierarchySession { + session_guard.hierarchy_session = Some(HierarchySession { clusters: new_clusters, functional_areas: None, assignments: std::collections::HashMap::new(), @@ -2762,13 +3135,13 @@ impl RpgServer { }); Action::EmitBatch0(snapshot) } else { - let session = session_guard.as_mut().unwrap(); + let session = session_guard.hierarchy_session.as_mut().unwrap(); let total_batches = session.clusters.len() + 1; if session.batches_completed == 0 { Action::EmitBatch0(session.clusters.clone()) } else if session.batches_completed > session.clusters.len() { - *session_guard = None; + session_guard.hierarchy_session = None; Action::AllDone { total_batches } } else { let batch_idx = session.batches_completed - 1; @@ -2816,16 +3189,15 @@ impl RpgServer { } // Non-sharded workflow (≤100 files) - original single-shot behavior - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let guard = state.read().await; + let graph = guard.graph.as_ref().unwrap(); let domain_prompt = include_str!("../../../crates/rpg-encoder/src/prompts/domain_discovery.md"); let hierarchy_prompt = include_str!("../../../crates/rpg-encoder/src/prompts/hierarchy_construction.md"); - let root = self.project_root().await; - let repo_name = root + let repo_name = project_root .file_name() .and_then(|n| n.to_str()) .unwrap_or("unknown"); @@ -2868,6 +3240,10 @@ impl RpgServer { output.push_str("\n\n"); output.push_str(include_str!("prompts/hierarchy_instructions.md")); + if !ctx.cross_root_notice.is_empty() { + output = format!("{}{}", ctx.cross_root_notice, output); + } + Ok(output) } @@ -2879,24 +3255,21 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - 
let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (project_root, graph, _config, notice, use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; // Attempt hybrid embedding search #[cfg(feature = "embeddings")] - let embedding_scores = { - self.try_init_embeddings(graph).await; - let mut emb_guard = self.embedding_index.write().await; - if let Some(ref mut idx) = *emb_guard { - idx.score_all(¶ms.query).ok().filter(|s| !s.is_empty()) - } else { - None - } + let (embedding_scores, embedding_notice) = { + self.query_embedding_scores(&graph, &project_root, ¶ms.query, use_session_cache) + .await }; #[cfg(not(feature = "embeddings"))] - let embedding_scores: Option> = None; + let (embedding_scores, embedding_notice): ( + Option>, + Option, + ) = (None, None); let request = rpg_nav::context::ContextPackRequest { query: ¶ms.query, @@ -2907,19 +3280,25 @@ impl RpgServer { }; let result = rpg_nav::context::build_context_pack( - graph, - &self.project_root().await, + &graph, + &project_root, &request, embedding_scores.as_ref(), ); if result.primary_entities.is_empty() { - return Ok(format!("{}No entities found for: {}", notice, params.query,)); + return Ok(format!( + "{}{}No entities found for: {}", + notice, + embedding_notice.unwrap_or_default(), + params.query, + )); } Ok(format!( - "{}{}", + "{}{}{}", notice, + embedding_notice.unwrap_or_default(), rpg_nav::toon::format_context_pack(&result), )) } @@ -2931,10 +3310,9 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (_project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; let dir = match params.direction.as_deref() { Some("downstream" | "down") => rpg_nav::explore::Direction::Downstream, @@ 
-2953,7 +3331,7 @@ impl RpgServer { let max_results = params.max_results.or(Some(100)); match rpg_nav::impact::compute_impact_radius( - graph, + &graph, ¶ms.entity_id, dir, max_depth, @@ -2976,10 +3354,9 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (_project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; // Parse max_hops (default: 5, use -1 for unlimited) let max_hops = match params.max_hops { @@ -3006,7 +3383,7 @@ impl RpgServer { let max_paths = params.max_paths.unwrap_or(3); let paths = rpg_nav::paths::find_paths( - graph, + &graph, ¶ms.source, ¶ms.target, max_hops, @@ -3055,16 +3432,15 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (_project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; let max_depth = params.max_depth.unwrap_or(3); let include_metadata = params.include_metadata.unwrap_or(false); let result = - rpg_nav::slice::slice_between(graph, ¶ms.entity_ids, max_depth, include_metadata)?; + rpg_nav::slice::slice_between(&graph, ¶ms.entity_ids, max_depth, include_metadata)?; let mut output = format!( "{}Minimal subgraph connecting {} entities:\n\n", @@ -3115,24 +3491,21 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (project_root, graph, _config, notice, use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; 
// Attempt hybrid embedding search #[cfg(feature = "embeddings")] - let embedding_scores = { - self.try_init_embeddings(graph).await; - let mut emb_guard = self.embedding_index.write().await; - if let Some(ref mut idx) = *emb_guard { - idx.score_all(¶ms.goal).ok().filter(|s| !s.is_empty()) - } else { - None - } + let (embedding_scores, embedding_notice) = { + self.query_embedding_scores(&graph, &project_root, ¶ms.goal, use_session_cache) + .await }; #[cfg(not(feature = "embeddings"))] - let embedding_scores: Option> = None; + let (embedding_scores, embedding_notice): ( + Option>, + Option, + ) = (None, None); let request = rpg_nav::planner::PlanChangeRequest { goal: ¶ms.goal, @@ -3140,18 +3513,21 @@ impl RpgServer { max_entities: params.max_entities.unwrap_or(15), }; - let plan = rpg_nav::planner::plan_change(graph, &request, embedding_scores.as_ref()); + let plan = rpg_nav::planner::plan_change(&graph, &request, embedding_scores.as_ref()); if plan.relevant_entities.is_empty() { return Ok(format!( - "{}No relevant entities found for: {}", - notice, params.goal + "{}{}No relevant entities found for: {}", + notice, + embedding_notice.unwrap_or_default(), + params.goal )); } Ok(format!( - "{}{}", + "{}{}{}", notice, + embedding_notice.unwrap_or_default(), rpg_nav::planner::format_change_plan(&plan), )) } @@ -3163,12 +3539,27 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; + let ctx = self + .effective_context(params.project_root.as_deref()) + .await?; + let project_root = ctx.root.clone(); + let is_default_root = project_root == self.project_root().await; + let state = ctx.state.clone(); + { + let needs_graph = state.read().await.graph.is_none(); + if needs_graph { + let loaded = Self::load_graph_from_root(&project_root)?; + state.write().await.graph = Some(loaded.clone()); + if is_default_root { + *self.graph.write().await = Some(loaded); + } + } + } // Check if we have an active hierarchy session - let mut 
session_guard = self.hierarchy_session.write().await; + let mut session_guard = state.write().await; - if let Some(session) = session_guard.as_mut() { + if let Some(session) = session_guard.hierarchy_session.as_mut() { // BATCHED WORKFLOW // Batch 0: Domain discovery (functional areas registration) @@ -3191,10 +3582,14 @@ impl RpgServer { session.functional_areas = Some(payload.areas.clone()); session.batches_completed += 1; - return Ok(format!( + let mut result = format!( "Functional areas registered: {}\n\nCall build_semantic_hierarchy to get batch 1 for file assignment.", payload.areas.join(", ") - )); + ); + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + return Ok(result); } // Batch 1+: File assignments @@ -3247,12 +3642,13 @@ impl RpgServer { let clusters_count = session.clusters.len(); // Drop the session before acquiring graph write lock - *session_guard = None; + session_guard.hierarchy_session = None; drop(session_guard); // Now apply the hierarchy - let mut guard = self.graph.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; + let mut state_guard = state.write().await; + let mut graph_slot = state_guard.graph.take(); + let graph = graph_slot.as_mut().ok_or("No RPG loaded")?; // Convert file paths to Module entity IDs for apply_hierarchy let mut entity_assignments: std::collections::HashMap = @@ -3315,9 +3711,17 @@ impl RpgServer { graph.refresh_metadata(); // Save - storage::save(&self.project_root().await, graph) + storage::save(&project_root, graph) .map_err(|e| format!("Failed to save RPG: {}", e))?; + let final_graph = graph_slot.take().unwrap(); + state_guard.graph = Some(final_graph.clone()); + drop(state_guard); + if is_default_root { + *self.graph.write().await = Some(final_graph); + *self.hierarchy_session.write().await = None; + } + let mut result = format!( "Hierarchy applied (batched workflow, {} file batches).\nfiles_matched: {}\nfiles_unmatched: {}\nhierarchy_type: 
semantic\n", clusters_count, @@ -3338,6 +3742,8 @@ impl RpgServer { } // Show hierarchy summary + let state_guard = state.read().await; + let graph = state_guard.graph.as_ref().unwrap(); result.push_str("\nHierarchy areas:\n"); for (area_name, area_node) in &graph.hierarchy { result.push_str(&format!( @@ -3347,14 +3753,22 @@ impl RpgServer { )); } + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + return Ok(result); } // More batches remaining - return Ok(format!( + let mut result = format!( "Batch {}/{} complete. Call build_semantic_hierarchy for next batch.", session.batches_completed, total_batches - )); + ); + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + return Ok(result); } // NO SESSION: Single-shot mode (backward compatibility) @@ -3394,8 +3808,9 @@ impl RpgServer { )); } - let mut guard = self.graph.write().await; - let graph = guard.as_mut().ok_or("No RPG loaded")?; + let mut state_guard = state.write().await; + let mut graph_slot = state_guard.graph.take(); + let graph = graph_slot.as_mut().ok_or("No RPG loaded")?; // Convert file paths to Module entity IDs for apply_hierarchy // Module entities use the file path as their ID in the format "path:filename_stem" @@ -3461,8 +3876,7 @@ impl RpgServer { graph.refresh_metadata(); // Save - storage::save(&self.project_root().await, graph) - .map_err(|e| format!("Failed to save RPG: {}", e))?; + storage::save(&project_root, graph).map_err(|e| format!("Failed to save RPG: {}", e))?; let mut result = format!( "Hierarchy applied.\nfiles_matched: {}\nfiles_unmatched: {}\nhierarchy_type: semantic\n", @@ -3492,6 +3906,18 @@ impl RpgServer { )); } + if !ctx.cross_root_notice.is_empty() { + result = format!("{}{}", ctx.cross_root_notice, result); + } + + let final_graph = graph_slot.take().unwrap(); + state_guard.graph = Some(final_graph.clone()); + drop(state_guard); + if is_default_root { + 
*self.graph.write().await = Some(final_graph); + *self.hierarchy_session.write().await = None; + } + Ok(result) } @@ -3503,10 +3929,9 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; let config = rpg_nav::health::HealthConfig { instability_threshold: params.instability_threshold.unwrap_or(0.7), @@ -3520,8 +3945,7 @@ impl RpgServer { ..Default::default() }; - let report = - rpg_nav::health::compute_health_full(graph, &self.project_root().await, &config); + let report = rpg_nav::health::compute_health_full(&graph, &project_root, &config); Ok(format!( "{}{}", @@ -3538,10 +3962,9 @@ impl RpgServer { &self, Parameters(params): Parameters, ) -> Result { - self.ensure_graph().await?; - let notice = self.auto_sync_if_stale().await; - let guard = self.graph.read().await; - let graph = guard.as_ref().unwrap(); + let (project_root, graph, _config, notice, _use_session_cache) = self + .resolve_query_context(params.project_root.as_deref()) + .await?; let cross_file_only = params.cross_file_only.unwrap_or(false); let cross_area_only = params.cross_area_only.unwrap_or(false); @@ -3553,7 +3976,7 @@ impl RpgServer { .unwrap_or_else(|| "length".to_string()); let excluded_paths = if !params.ignore_rpgignore.unwrap_or(false) { - let ignore_path = self.project_root().await.join(".rpgignore"); + let ignore_path = project_root.join(".rpgignore"); let (gitignore, err) = ignore::gitignore::Gitignore::new(&ignore_path); if err.is_none() || ignore_path.exists() { Some(gitignore) @@ -3576,7 +3999,7 @@ impl RpgServer { excluded_paths, }; - let report = rpg_nav::cycles::detect_cycles(graph, &config); + let report = rpg_nav::cycles::detect_cycles(&graph, &config); let has_filters = 
params.max_cycles.is_some() || params.min_cycle_length.unwrap_or(2) > 2 @@ -3609,7 +4032,7 @@ impl RpgServer { Ok(format!( "{}{}", notice, - rpg_nav::toon::format_cycle_report(&report, graph, &opts) + rpg_nav::toon::format_cycle_report(&report, &graph, &opts) )) } } @@ -3638,32 +4061,3 @@ pub fn parse_edge_filter(filter: &str) -> Option { _ => None, } } - -#[cfg(test)] -mod tests { - use super::*; - use rpg_core::graph::EdgeKind; - - #[test] - fn test_parse_edge_filter_data_flow() { - assert_eq!(parse_edge_filter("data_flow"), Some(EdgeKind::DataFlow)); - } - - #[test] - fn test_parse_edge_filter_all_kinds() { - assert_eq!(parse_edge_filter("imports"), Some(EdgeKind::Imports)); - assert_eq!(parse_edge_filter("invokes"), Some(EdgeKind::Invokes)); - assert_eq!(parse_edge_filter("inherits"), Some(EdgeKind::Inherits)); - assert_eq!(parse_edge_filter("composes"), Some(EdgeKind::Composes)); - assert_eq!(parse_edge_filter("renders"), Some(EdgeKind::Renders)); - assert_eq!(parse_edge_filter("reads_state"), Some(EdgeKind::ReadsState)); - assert_eq!( - parse_edge_filter("writes_state"), - Some(EdgeKind::WritesState) - ); - assert_eq!(parse_edge_filter("dispatches"), Some(EdgeKind::Dispatches)); - assert_eq!(parse_edge_filter("data_flow"), Some(EdgeKind::DataFlow)); - assert_eq!(parse_edge_filter("contains"), Some(EdgeKind::Contains)); - assert_eq!(parse_edge_filter("unknown"), None); - } -} diff --git a/crates/rpg-nav/src/embeddings.rs b/crates/rpg-nav/src/embeddings.rs index 08ad761..39889a1 100644 --- a/crates/rpg-nav/src/embeddings.rs +++ b/crates/rpg-nav/src/embeddings.rs @@ -69,7 +69,7 @@ impl EmbeddingIndex { /// Fingerprints are loaded from meta for incremental sync support. 
pub fn load_or_init(project_root: &Path, graph_updated_at: &str) -> Result { let rpg_dir = project_root.join(".rpg"); - let model = init_model(&rpg_dir)?; + let model = init_model()?; let embeddings_path = rpg_dir.join("embeddings.bin"); let meta_path = rpg_dir.join("embeddings.meta.json"); @@ -328,11 +328,46 @@ fn compute_fingerprint(features: &[String]) -> String { format!("{:016x}", hasher.finish()) } -/// Initialize the fastembed model with cache in .rpg/models/. -fn init_model(rpg_dir: &Path) -> Result { - let cache_dir = rpg_dir.join("models"); +fn resolve_shared_model_cache_dir_from_env( + rpg_model_cache_dir: Option<&std::ffi::OsStr>, + xdg_cache_home: Option<&std::ffi::OsStr>, + home: Option<&std::ffi::OsStr>, + userprofile: Option<&std::ffi::OsStr>, +) -> Result { + if let Some(path) = rpg_model_cache_dir { + return Ok(PathBuf::from(path)); + } + + let home_dir = home + .map(PathBuf::from) + .or_else(|| userprofile.map(PathBuf::from)) + .context("could not determine home directory for shared model cache")?; + + let base = if cfg!(target_os = "linux") { + xdg_cache_home + .map(PathBuf::from) + .unwrap_or_else(|| home_dir.join(".cache")) + } else { + home_dir.join(".cache") + }; + + Ok(base.join("rpg-encoder").join("models").join("fastembed")) +} + +fn shared_model_cache_dir() -> Result { + let cache_dir = resolve_shared_model_cache_dir_from_env( + std::env::var_os("RPG_MODEL_CACHE_DIR").as_deref(), + std::env::var_os("XDG_CACHE_HOME").as_deref(), + std::env::var_os("HOME").as_deref(), + std::env::var_os("USERPROFILE").as_deref(), + )?; std::fs::create_dir_all(&cache_dir)?; + Ok(cache_dir) +} +/// Initialize the fastembed model with a shared machine-level cache. 
+fn init_model() -> Result { + let cache_dir = shared_model_cache_dir()?; let options = fastembed::TextInitOptions::new(EmbeddingModel::BGESmallENV15) .with_show_download_progress(true) .with_cache_dir(cache_dir); @@ -530,6 +565,80 @@ pub fn hybrid_blend( mod tests { use super::*; + #[test] + fn test_resolve_shared_model_cache_dir_prefers_explicit_override() { + let path = resolve_shared_model_cache_dir_from_env( + Some(std::ffi::OsStr::new("D:/cache/rpg-models")), + Some(std::ffi::OsStr::new("/tmp/xdg-cache")), + Some(std::ffi::OsStr::new("/home/tester")), + Some(std::ffi::OsStr::new("C:/Users/tester")), + ) + .unwrap(); + assert_eq!(path, PathBuf::from("D:/cache/rpg-models")); + } + + #[test] + fn test_resolve_shared_model_cache_dir_uses_xdg_on_linux() { + let path = resolve_shared_model_cache_dir_from_env( + None, + Some(std::ffi::OsStr::new("/tmp/xdg-cache")), + Some(std::ffi::OsStr::new("/home/tester")), + None, + ) + .unwrap(); + + if cfg!(target_os = "linux") { + assert_eq!( + path, + PathBuf::from("/tmp/xdg-cache/rpg-encoder/models/fastembed") + ); + } else { + assert_eq!( + path, + PathBuf::from("/home/tester/.cache/rpg-encoder/models/fastembed") + ); + } + } + + #[test] + fn test_resolve_shared_model_cache_dir_falls_back_to_home_cache() { + let path = resolve_shared_model_cache_dir_from_env( + None, + None, + Some(std::ffi::OsStr::new("/home/tester")), + None, + ) + .unwrap(); + assert_eq!( + path, + PathBuf::from("/home/tester/.cache/rpg-encoder/models/fastembed") + ); + } + + #[test] + fn test_resolve_shared_model_cache_dir_uses_userprofile_when_home_missing() { + let path = resolve_shared_model_cache_dir_from_env( + None, + None, + None, + Some(std::ffi::OsStr::new("C:/Users/tester")), + ) + .unwrap(); + assert_eq!( + path, + PathBuf::from("C:/Users/tester/.cache/rpg-encoder/models/fastembed") + ); + } + + #[test] + fn test_resolve_shared_model_cache_dir_errors_without_home_or_override() { + let err = resolve_shared_model_cache_dir_from_env(None, None, 
None, None).unwrap_err(); + assert!( + err.to_string() + .contains("could not determine home directory") + ); + } + #[test] fn test_cosine_similarity_identical() { let a = vec![1.0, 0.0, 0.0]; From 1f7144716d01eea99264b2b7b77ea69aa063bfaf Mon Sep 17 00:00:00 2001 From: VooDisss <41582720+VooDisss@users.noreply.github.com> Date: Tue, 21 Apr 2026 17:52:07 +0300 Subject: [PATCH 2/7] fix: improve root state handling Document the expanded per-call project_root behavior in the README and harden path expansion in the MCP server so '~' resolution works across Windows and Unix-style environments. This keeps the follow-up focused only on files touched after the original cross-root feature commit. The README clarifies the additional tool surface and override behavior, while server.rs now resolves the home directory from USERPROFILE/HOMEDRIVE+HOMEPATH before falling back to HOME so project_root overrides behave correctly on Windows hosts. --- README.md | 57 +++++++++++++++++++----------------- crates/rpg-mcp/src/server.rs | 31 +++++++++++++++++--- 2 files changed, 57 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index 4823aa3..6f9177f 100644 --- a/README.md +++ b/README.md @@ -132,21 +132,24 @@ Seven Rust crates, one MCP server binary, one CLI binary: | `rpg-nav` | Search, fetch, explore, snapshot, TOON serialization | | `rpg-lift` | Autonomous LLM lifting (Anthropic, OpenAI, OpenRouter, Gemini) | | `rpg-cli` | CLI binary (`rpg-encoder`) | -| `rpg-mcp` | MCP server binary (`rpg-mcp-server`) with 27 tools | +| `rpg-mcp` | MCP server binary (`rpg-mcp-server`) with 28 tools | --- -## MCP Tools (27) +## MCP Tools (28) + +Per-call `project_root` overrides target another repository without changing the active root. Semantic or embedding-backed queries may refresh the target root's `.rpg/embeddings.*` cache files to keep results up to date.
-Build & Maintain (4 tools) +Build & Maintain (5 tools) | Tool | Description | |------|-------------| +| `set_project_root` | Switch the active repository root at runtime without restarting the MCP server | | `build_rpg` | Index the codebase (run once, instant) | | `update_rpg` | Incremental update from git changes | | `reload_rpg` | Reload graph from disk after external changes | -| `rpg_info` | Graph statistics, hierarchy overview, per-area lifting coverage | +| `rpg_info` | Graph statistics, hierarchy overview, per-area lifting coverage. Supports optional per-call `project_root` override |
@@ -155,11 +158,11 @@ Seven Rust crates, one MCP server binary, one CLI binary: | Tool | Description | |------|-------------| -| `semantic_snapshot` | Whole-repo semantic understanding in one call (~25K tokens for 1000 entities) | -| `search_node` | Search entities by intent or keywords (hybrid embedding + lexical scoring) | -| `fetch_node` | Get entity metadata, source code, dependencies, and hierarchy context | -| `explore_rpg` | Traverse dependency graph (upstream, downstream, or both) | -| `context_pack` | Single-call search + fetch + explore with token budget | +| `semantic_snapshot` | Whole-repo semantic understanding in one call (~25K tokens for 1000 entities). Supports optional per-call `project_root` override | +| `search_node` | Search entities by intent or keywords (hybrid embedding + lexical scoring). Supports optional per-call `project_root` override | +| `fetch_node` | Get entity metadata, source code, dependencies, and hierarchy context. Supports optional per-call `project_root` override | +| `explore_rpg` | Traverse dependency graph (upstream, downstream, or both). Supports optional per-call `project_root` override | +| `context_pack` | Single-call search + fetch + explore with token budget. Supports optional per-call `project_root` override | @@ -168,13 +171,13 @@ Seven Rust crates, one MCP server binary, one CLI binary: | Tool | Description | |------|-------------| -| `impact_radius` | BFS reachability analysis — "what depends on X?" 
| -| `plan_change` | Change planning — find relevant entities, modification order, blast radius | -| `find_paths` | K-shortest dependency paths between two entities | -| `slice_between` | Extract minimal connecting subgraph between entities | -| `analyze_health` | Code health: coupling, instability, god objects, clone detection | -| `detect_cycles` | Find circular dependencies and architectural cycles | -| `reconstruct_plan` | Dependency-safe reconstruction execution plan | +| `impact_radius` | BFS reachability analysis — "what depends on X?" Supports optional per-call `project_root` override | +| `plan_change` | Change planning — find relevant entities, modification order, blast radius. Supports optional per-call `project_root` override | +| `find_paths` | K-shortest dependency paths between two entities. Supports optional per-call `project_root` override | +| `slice_between` | Extract minimal connecting subgraph between entities. Supports optional per-call `project_root` override | +| `analyze_health` | Code health: coupling, instability, god objects, clone detection. Supports optional per-call `project_root` override | +| `detect_cycles` | Find circular dependencies and architectural cycles. Supports optional per-call `project_root` override | +| `reconstruct_plan` | Dependency-safe reconstruction execution plan. 
Supports optional per-call `project_root` override | @@ -183,17 +186,17 @@ Seven Rust crates, one MCP server binary, one CLI binary: | Tool | Description | |------|-------------| -| `auto_lift` | One-call autonomous lifting via cheap LLM API (Haiku, GPT-4o-mini, OpenRouter, Gemini) | -| `lifting_status` | Dashboard — coverage, per-area progress, NEXT STEP | -| `get_entities_for_lifting` | Get entity source code for your agent to analyze | -| `submit_lift_results` | Submit the agent's semantic features back to the graph | -| `finalize_lifting` | Aggregate file-level features, rebuild hierarchy metadata | -| `get_files_for_synthesis` | Get file-level entity features for holistic synthesis | -| `submit_file_syntheses` | Submit holistic file-level summaries | -| `build_semantic_hierarchy` | Get domain discovery + hierarchy assignment prompts | -| `submit_hierarchy` | Apply hierarchy assignments to the graph | -| `get_routing_candidates` | Get entities needing semantic routing (drifted or newly lifted) | -| `submit_routing_decisions` | Submit routing decisions (hierarchy path or "keep") | +| `auto_lift` | One-call autonomous lifting via cheap LLM API (Haiku, GPT-4o-mini, OpenRouter, Gemini). Supports optional per-call `project_root` override | +| `lifting_status` | Dashboard — coverage, per-area progress, NEXT STEP. Supports optional per-call `project_root` override | +| `get_entities_for_lifting` | Get entity source code for your agent to analyze. Supports optional per-call `project_root` override | +| `submit_lift_results` | Submit the agent's semantic features back to the graph. Supports optional per-call `project_root` override | +| `finalize_lifting` | Aggregate file-level features, rebuild hierarchy metadata. Supports optional per-call `project_root` override | +| `get_files_for_synthesis` | Get file-level entity features for holistic synthesis. Supports optional per-call `project_root` override | +| `submit_file_syntheses` | Submit holistic file-level summaries. 
Supports optional per-call `project_root` override | +| `build_semantic_hierarchy` | Get domain discovery + hierarchy assignment prompts. Supports optional per-call `project_root` override | +| `submit_hierarchy` | Apply hierarchy assignments to the graph. Supports optional per-call `project_root` override | +| `get_routing_candidates` | Get entities needing semantic routing (drifted or newly lifted). Supports optional per-call `project_root` override | +| `submit_routing_decisions` | Submit routing decisions (hierarchy path or "keep"). Supports optional per-call `project_root` override | diff --git a/crates/rpg-mcp/src/server.rs b/crates/rpg-mcp/src/server.rs index 4509113..cea6db5 100644 --- a/crates/rpg-mcp/src/server.rs +++ b/crates/rpg-mcp/src/server.rs @@ -147,13 +147,36 @@ impl RpgServer { } pub(crate) fn expand_project_root_path(path: &str) -> PathBuf { + fn home_dir() -> Option { + std::env::var("USERPROFILE") + .ok() + .filter(|s| !s.trim().is_empty()) + .map(PathBuf::from) + .or_else(|| { + let drive = std::env::var("HOMEDRIVE").ok()?; + let home_path = std::env::var("HOMEPATH").ok()?; + let joined = format!("{}{}", drive, home_path); + if joined.trim().is_empty() { + None + } else { + Some(PathBuf::from(joined)) + } + }) + .or_else(|| { + std::env::var("HOME") + .ok() + .filter(|s| !s.trim().is_empty()) + .map(PathBuf::from) + }) + } + if let Some(rest) = path.strip_prefix("~/") { - match std::env::var("HOME") { - Ok(home) => PathBuf::from(home).join(rest), - Err(_) => PathBuf::from(path), + match home_dir() { + Some(home) => home.join(rest), + None => PathBuf::from(path), } } else if path == "~" { - PathBuf::from(std::env::var("HOME").unwrap_or_else(|_| "/".into())) + home_dir().unwrap_or_else(|| PathBuf::from(path)) } else { PathBuf::from(path) } From b645856bfc6a4c229e307e4658c4f6e3b309081a Mon Sep 17 00:00:00 2001 From: VooDisss <41582720+VooDisss@users.noreply.github.com> Date: Tue, 21 Apr 2026 17:50:13 +0300 Subject: [PATCH 3/7] fix: unblock 
current CI drift Keep the cross-root PR surface separate from baseline maintenance needed to satisfy today's CI environment. This commit updates the lockfile for the current RustSec advisories and applies the minimal lint-driven cleanup now required by stable Clippy across the workspace. The source changes are intentionally mechanical: remove unnecessary trailing commas, prefer sort_by_key where Clippy requires it, simplify one Rust parser match arm, iterate map values directly, and use clearer Duration units. Together with the rustls-webpki lockfile bump to 0.103.12, this restores green results for fmt, clippy, test, and audit without mixing feature behavior changes into the maintenance commit. --- Cargo.lock | 4 ++-- crates/rpg-core/src/graph.rs | 2 +- crates/rpg-encoder/src/semantic_lifting.rs | 4 ++-- crates/rpg-lift/src/provider.rs | 4 ++-- crates/rpg-mcp/src/tools.rs | 4 ++-- crates/rpg-nav/src/cycles.rs | 2 +- crates/rpg-nav/src/snapshot.rs | 2 +- crates/rpg-parser/build.rs | 8 +++---- crates/rpg-parser/src/deps.rs | 27 +++++++++++----------- 9 files changed, 28 insertions(+), 29 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3e069de..9310df0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2860,9 +2860,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.11" +version = "0.103.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20a6af516fea4b20eccceaf166e8aa666ac996208e8a644ce3ef5aa783bc7cd4" +checksum = "8279bb85272c9f10811ae6a6c547ff594d6a7f3c6c6b02ee9726d1d0dcfcdd06" dependencies = [ "ring", "rustls-pki-types", diff --git a/crates/rpg-core/src/graph.rs b/crates/rpg-core/src/graph.rs index 0d40758..1d35d14 100644 --- a/crates/rpg-core/src/graph.rs +++ b/crates/rpg-core/src/graph.rs @@ -526,7 +526,7 @@ impl RPGraph { } } let mut result: Vec<(String, Vec)> = by_file.into_iter().collect(); - result.sort_by(|a, b| b.1.len().cmp(&a.1.len())); + result.sort_by_key(|entry| std::cmp::Reverse(entry.1.len())); 
result } diff --git a/crates/rpg-encoder/src/semantic_lifting.rs b/crates/rpg-encoder/src/semantic_lifting.rs index 317e4a8..228f850 100644 --- a/crates/rpg-encoder/src/semantic_lifting.rs +++ b/crates/rpg-encoder/src/semantic_lifting.rs @@ -119,8 +119,8 @@ pub fn apply_features( pub fn aggregate_module_features(graph: &mut RPGraph) -> usize { let module_data: Vec<(String, Vec)> = graph .file_index - .iter() - .filter_map(|(_, ids)| { + .values() + .filter_map(|ids| { let module_id = ids.iter().find(|id| { graph .entities diff --git a/crates/rpg-lift/src/provider.rs b/crates/rpg-lift/src/provider.rs index 59ded06..0137c65 100644 --- a/crates/rpg-lift/src/provider.rs +++ b/crates/rpg-lift/src/provider.rs @@ -67,7 +67,7 @@ impl AnthropicProvider { model: model.unwrap_or_else(|| Self::DEFAULT_MODEL.to_string()), agent: ureq::Agent::new_with_config( ureq::config::Config::builder() - .timeout_global(Some(std::time::Duration::from_secs(120))) + .timeout_global(Some(std::time::Duration::from_mins(2))) .build(), ), } @@ -193,7 +193,7 @@ impl OpenAiProvider { base_url: base_url.unwrap_or_else(|| Self::DEFAULT_BASE_URL.to_string()), agent: ureq::Agent::new_with_config( ureq::config::Config::builder() - .timeout_global(Some(std::time::Duration::from_secs(120))) + .timeout_global(Some(std::time::Duration::from_mins(2))) .build(), ), } diff --git a/crates/rpg-mcp/src/tools.rs b/crates/rpg-mcp/src/tools.rs index d54cd04..d206f7d 100644 --- a/crates/rpg-mcp/src/tools.rs +++ b/crates/rpg-mcp/src/tools.rs @@ -2061,10 +2061,10 @@ impl RpgServer { } else { cat_node.semantic_features.join(", ") }; - result.push_str(&format!(" - {}/{}: {}\n", area_name, cat_name, cat_feats,)); + result.push_str(&format!(" - {}/{}: {}\n", area_name, cat_name, cat_feats)); for sub_name in cat_node.children.keys() { result - .push_str(&format!(" - {}/{}/{}\n", area_name, cat_name, sub_name,)); + .push_str(&format!(" - {}/{}/{}\n", area_name, cat_name, sub_name)); } } } diff --git 
a/crates/rpg-nav/src/cycles.rs b/crates/rpg-nav/src/cycles.rs index 1e1cd0b..eb599ba 100644 --- a/crates/rpg-nav/src/cycles.rs +++ b/crates/rpg-nav/src/cycles.rs @@ -423,7 +423,7 @@ pub fn detect_cycles(graph: &RPGraph, config: &CycleConfig) -> CycleReport { stats }) .collect(); - area_breakdown.sort_by(|a, b| b.cycle_count.cmp(&a.cycle_count)); + area_breakdown.sort_by_key(|entry| std::cmp::Reverse(entry.cycle_count)); let areas_in_cycles = area_breakdown.len(); diff --git a/crates/rpg-nav/src/snapshot.rs b/crates/rpg-nav/src/snapshot.rs index d345af2..f488a00 100644 --- a/crates/rpg-nav/src/snapshot.rs +++ b/crates/rpg-nav/src/snapshot.rs @@ -299,7 +299,7 @@ fn build_hot_spots(graph: &RPGraph, top_n: usize) -> Vec { (id.as_str(), connections) }) .collect(); - scored.sort_by(|a, b| b.1.cmp(&a.1)); + scored.sort_by_key(|entry| std::cmp::Reverse(entry.1)); scored .into_iter() diff --git a/crates/rpg-parser/build.rs b/crates/rpg-parser/build.rs index 93385da..0f3d754 100644 --- a/crates/rpg-parser/build.rs +++ b/crates/rpg-parser/build.rs @@ -291,7 +291,7 @@ fn generate_lang_registry(defs: &[LangToml]) -> String { \x20 .filter(|(_, count)| **count > 0)\n\ \x20 .map(|(idx, count)| (Self::from_index(idx), *count))\n\ \x20 .collect();\n\ - \x20 langs.sort_by(|a, b| b.1.cmp(&a.1));\n\ + \x20 langs.sort_by_key(|entry| std::cmp::Reverse(entry.1));\n\ \x20 langs.into_iter().map(|(lang, _)| lang).collect()\n\ \x20 }\n\n\ \x20 /// Count files per language in the project.\n\ @@ -373,7 +373,7 @@ fn generate_lang_registry(defs: &[LangToml]) -> String { ); for def in defs { if !def.grammar.aliases.is_empty() { - code.push_str(&format!(" \"{}\" => match file_ext {{\n", def.name,)); + code.push_str(&format!(" \"{}\" => match file_ext {{\n", def.name)); for alias in &def.grammar.aliases { let ext_patterns: Vec = alias .for_extensions @@ -429,7 +429,7 @@ fn generate_lang_registry(defs: &[LangToml]) -> String { if let Some(ref builtin) = def.builtin && let Some(ref extractor) = 
builtin.dep_extractor { - code.push_str(&format!(" {} => Some(\"{}\"),\n", i, extractor,)); + code.push_str(&format!(" {} => Some(\"{}\"),\n", i, extractor)); } } code.push_str(" _ => None,\n }\n}\n\n"); @@ -444,7 +444,7 @@ fn generate_lang_registry(defs: &[LangToml]) -> String { if let Some(ref builtin) = def.builtin && let Some(ref extractor) = builtin.entity_extractor { - code.push_str(&format!(" {} => Some(\"{}\"),\n", i, extractor,)); + code.push_str(&format!(" {} => Some(\"{}\"),\n", i, extractor)); } } code.push_str(" _ => None,\n }\n}\n"); diff --git a/crates/rpg-parser/src/deps.rs b/crates/rpg-parser/src/deps.rs index e277d8b..120260e 100644 --- a/crates/rpg-parser/src/deps.rs +++ b/crates/rpg-parser/src/deps.rs @@ -360,25 +360,24 @@ fn collect_rust_calls( } } } - "method_call_expression" | "field_expression" => { + "method_call_expression" => { // obj.method() — tree-sitter-rust uses "method_call_expression" for x.foo() // but some versions nest it differently - if child.kind() == "method_call_expression" { - // The method name is in the "name" field - if let Some(method_node) = child.child_by_field_name("name") { - let callee = source[method_node.byte_range()].to_string(); - if !callee.is_empty() { - let call_row = child.start_position().row; - let caller = find_enclosing_scope(scopes, call_row) - .unwrap_or_else(|| "".to_string()); - calls.push(CallDep { - caller_entity: caller, - callee, - }); - } + // The method name is in the "name" field + if let Some(method_node) = child.child_by_field_name("name") { + let callee = source[method_node.byte_range()].to_string(); + if !callee.is_empty() { + let call_row = child.start_position().row; + let caller = find_enclosing_scope(scopes, call_row) + .unwrap_or_else(|| "".to_string()); + calls.push(CallDep { + caller_entity: caller, + callee, + }); } } } + "field_expression" => {} _ => {} } collect_rust_calls(&child, source, scopes, calls); From 2ba6cf56e8685fdfa791f827d42207f5ffcee47d Mon Sep 17 00:00:00 2001 
From: VooDisss <41582720+VooDisss@users.noreply.github.com> Date: Tue, 21 Apr 2026 21:52:45 +0300 Subject: [PATCH 4/7] fix: ignore blank since in update_rpg Sanitize whitespace-only MCP since values before dispatching update_rpg. The handler previously forwarded Some("") into the committed-diff path, which caused git OID parsing to fail with 'invalid base_commit SHA' instead of falling back to the default workdir update behavior. Treating blank input as unset preserves the intended API contract for optional since values and fixes cross-root callers that serialize an empty string rather than omitting the field. --- crates/rpg-mcp/src/tools.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/rpg-mcp/src/tools.rs b/crates/rpg-mcp/src/tools.rs index d206f7d..9a78907 100644 --- a/crates/rpg-mcp/src/tools.rs +++ b/crates/rpg-mcp/src/tools.rs @@ -2329,7 +2329,8 @@ impl RpgServer { // Default: sync from current working tree (committed + staged + unstaged). // If `since` is provided, fall back to committed-only diff from that commit. - let summary = if let Some(since) = params.since.as_deref() { + let since = params.since.as_deref().filter(|s| !s.trim().is_empty()); + let summary = if let Some(since) = since { rpg_encoder::evolution::run_update( g, &project_root, From f17c72ecbb2e5f47b43562f4689284acda5ba6b0 Mon Sep 17 00:00:00 2001 From: VooDisss <41582720+VooDisss@users.noreply.github.com> Date: Sat, 25 Apr 2026 21:46:38 +0300 Subject: [PATCH 5/7] fix: avoid sharded hierarchy deadlock Large repositories enter the sharded build_semantic_hierarchy flow, where the server needs both a graph snapshot and a hierarchy session update. The previous implementation held a read lock on the root-scoped runtime state and then tried to acquire a write lock on that same RwLock, which can block forever and surface as a timeout or aborted MCP call. 
Take the graph-derived cluster snapshot in a short read section, release that guard, then mutate hierarchy_session under the write lock and reacquire a read guard only for rendering the batch output. This preserves the existing sharded workflow and prompt behavior while removing the self-deadlock in the large-repository hierarchy path. --- crates/rpg-mcp/src/tools.rs | 36 ++++++++++++++++-------------------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/crates/rpg-mcp/src/tools.rs b/crates/rpg-mcp/src/tools.rs index 9a78907..c017bb2 100644 --- a/crates/rpg-mcp/src/tools.rs +++ b/crates/rpg-mcp/src/tools.rs @@ -3095,15 +3095,10 @@ impl RpgServer { // Handle sharded workflow if needs_sharding { - // Lock order invariant (see RpgServer doc): graph before - // hierarchy_session. A concurrent build_rpg/update_rpg/ - // reload_rpg/set_project_root can clear the session at any - // moment before we hold its write lock, so decide whether to - // initialize only while holding the write lock — never by - // re-trusting an earlier peek. We take graph.read() FIRST - // (ordering) so that if we need to initialize, we can compute - // clusters from a stable graph and install under the - // session.write() that's about to follow. + // Avoid holding a root-state read lock while acquiring the write + // lock on the same RwLock. Sharded rendering only needs immutable + // snapshots, so compute any graph-derived data in a short read + // section, then release it before mutating hierarchy_session. 
enum Action { EmitBatch0(Vec), EmitBatchN { @@ -3117,24 +3112,24 @@ impl RpgServer { }, } - let graph_guard = state.read().await; - let graph = graph_guard.graph.as_ref().unwrap(); + let initial_clusters = { + let graph_guard = state.read().await; + let graph = graph_guard.graph.as_ref().unwrap(); + rpg_encoder::hierarchy::cluster_files_for_hierarchy(graph, 70) + }; let action = { let mut session_guard = state.write().await; // Initialize if absent — fresh or cleared-out-from-under-us. if session_guard.hierarchy_session.is_none() { - let new_clusters = - rpg_encoder::hierarchy::cluster_files_for_hierarchy(graph, 70); - let snapshot = new_clusters.clone(); session_guard.hierarchy_session = Some(HierarchySession { - clusters: new_clusters, + clusters: initial_clusters.clone(), functional_areas: None, assignments: std::collections::HashMap::new(), batches_completed: 0, }); - Action::EmitBatch0(snapshot) + Action::EmitBatch0(initial_clusters) } else { let session = session_guard.hierarchy_session.as_mut().unwrap(); let total_batches = session.clusters.len() + 1; @@ -3156,10 +3151,11 @@ impl RpgServer { } }; - // Keep `graph_guard` held across rendering. Both helpers now - // take `&RPGraph` so they don't re-read `self.graph`, which - // would otherwise expose us to a concurrent `set_project_root` - // that could swap the graph to `None` mid-render. + let graph_guard = state.read().await; + let graph = graph_guard.graph.as_ref().unwrap(); + + // Hold the read guard only while rendering so the helpers never + // re-read mutable compatibility state during batch emission. 
match action { Action::EmitBatch0(clusters) => { return self.build_batch_0_domain_discovery(graph, &clusters).await; From fc70880c3c68ca4fdb8e2d6d098e10ecc09accb4 Mon Sep 17 00:00:00 2001 From: VooDisss <41582720+VooDisss@users.noreply.github.com> Date: Sun, 26 Apr 2026 00:07:17 +0300 Subject: [PATCH 6/7] fix: respect startup cwd for MCP root OpenCode can launch the release rpg-mcp-server binary with logging flags before any positional project path. The server previously read argv[1] directly, so a flag like --log was misinterpreted as the project root and the default session started against the wrong directory until set_project_root was called manually. Parse startup arguments so only a real positional path overrides the default root, while known logging flags are skipped. When no positional project path is present, the server now falls back to the directory it was started from. Keep the npm wrapper aligned by explicitly launching the binary from the current process directory, and document that startup-directory behavior in the README files. Add regression tests for both startup-root fallback and flags-only launch configurations. --- README.md | 2 +- crates/rpg-mcp/src/main.rs | 10 ++-- crates/rpg-mcp/src/server.rs | 95 ++++++++++++++++++++++++++++++++++++ npm/README.md | 10 +++- npm/bin/run-mcp.js | 2 +- 5 files changed, 110 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 6f9177f..8d7ee34 100644 --- a/README.md +++ b/README.md @@ -244,7 +244,7 @@ claude mcp add rpg -- npx -y -p rpg-encoder rpg-mcp-server } ``` -The server auto-detects the project root from the current working directory — no path argument needed. +The server auto-detects the project root from the startup directory by default. When no positional project path is provided, the binary falls back to its current working directory.
CLI diff --git a/crates/rpg-mcp/src/main.rs b/crates/rpg-mcp/src/main.rs index 9e36a5d..a262952 100644 --- a/crates/rpg-mcp/src/main.rs +++ b/crates/rpg-mcp/src/main.rs @@ -32,16 +32,14 @@ pub(crate) const LARGE_SCOPE_BATCHES: usize = 10; use anyhow::Result; use rmcp::ServiceExt; use rpg_core::storage; -use std::path::PathBuf; - use server::RpgServer; #[tokio::main] async fn main() -> Result<()> { - let project_root = std::env::args() - .nth(1) - .map(PathBuf::from) - .unwrap_or_else(|| std::env::current_dir().expect("failed to get current directory")); + let cli_args: Vec = std::env::args().collect(); + let cli_root = RpgServer::startup_project_root_arg(cli_args.iter().map(String::as_str)); + let cwd = std::env::current_dir().expect("failed to get current directory"); + let project_root = RpgServer::resolve_startup_project_root(cli_root.as_deref(), cwd); eprintln!("RPG MCP server starting for: {}", project_root.display()); diff --git a/crates/rpg-mcp/src/server.rs b/crates/rpg-mcp/src/server.rs index cea6db5..a7ec8c9 100644 --- a/crates/rpg-mcp/src/server.rs +++ b/crates/rpg-mcp/src/server.rs @@ -133,6 +133,49 @@ impl std::fmt::Debug for RpgServer { } impl RpgServer { + fn canonicalize_startup_root_candidate(path: PathBuf) -> PathBuf { + path.canonicalize().unwrap_or(path) + } + + pub(crate) fn startup_project_root_arg(args: I) -> Option + where + I: IntoIterator, + I::Item: AsRef, + { + let mut iter = args.into_iter().map(|arg| arg.as_ref().to_string()); + let _program = iter.next(); + + while let Some(arg) = iter.next() { + if arg == "--log" || arg == "--log-append" { + continue; + } + + if arg == "--log-level" { + let _ = iter.next(); + continue; + } + + if arg.starts_with("--") { + continue; + } + + return Some(arg); + } + + None + } + + pub(crate) fn resolve_startup_project_root( + explicit_arg: Option<&str>, + current_dir: PathBuf, + ) -> PathBuf { + if let Some(path) = Self::normalize_optional_project_root(explicit_arg) { + return 
Self::canonicalize_startup_root_candidate(Self::expand_project_root_path(path)); + } + + Self::canonicalize_startup_root_candidate(current_dir) + } + pub(crate) fn cross_root_notice( active_root: &std::path::Path, project_root: &std::path::Path, @@ -1292,6 +1335,58 @@ mod tests { ); } + #[test] + fn test_resolve_startup_project_root_prefers_explicit_arg() { + let cwd = tempfile::tempdir().unwrap(); + let explicit_root = tempfile::tempdir().unwrap(); + + let resolved = RpgServer::resolve_startup_project_root( + Some(explicit_root.path().to_str().unwrap()), + cwd.path().to_path_buf(), + ); + + assert_eq!(resolved, explicit_root.path().canonicalize().unwrap()); + } + + #[test] + fn test_resolve_startup_project_root_falls_back_to_current_dir() { + let cwd = tempfile::tempdir().unwrap(); + + let resolved = RpgServer::resolve_startup_project_root(None, cwd.path().to_path_buf()); + + assert_eq!(resolved, cwd.path().canonicalize().unwrap()); + } + + #[test] + fn test_startup_project_root_arg_skips_logging_flags() { + let root = tempfile::tempdir().unwrap(); + let root_str = root.path().to_str().unwrap().to_string(); + + let parsed = RpgServer::startup_project_root_arg([ + "rpg-mcp-server", + "--log", + "--log-append", + "--log-level", + "debug", + root_str.as_str(), + ]); + + assert_eq!(parsed.as_deref(), Some(root_str.as_str())); + } + + #[test] + fn test_startup_project_root_arg_none_for_flags_only() { + let parsed = RpgServer::startup_project_root_arg([ + "rpg-mcp-server", + "--log", + "--log-append", + "--log-level", + "debug", + ]); + + assert!(parsed.is_none()); + } + #[tokio::test] async fn test_effective_context_initializes_root_state() { let root_a = tempfile::tempdir().unwrap(); diff --git a/npm/README.md b/npm/README.md index 911de9b..87a60ea 100644 --- a/npm/README.md +++ b/npm/README.md @@ -14,12 +14,14 @@ Add to your MCP config: "mcpServers": { "rpg": { "command": "npx", - "args": ["-y", "-p", "rpg-encoder", "rpg-mcp-server", "/path/to/your/project"] + 
"args": ["-y", "-p", "rpg-encoder", "rpg-mcp-server"] } } } ``` +The MCP server uses the startup directory by default when no positional project path is provided. + ## CLI ```bash @@ -33,6 +35,12 @@ Or install globally: ```bash npm install -g rpg-encoder rpg-encoder build +rpg-mcp-server +``` + +Or pass an explicit path when you want a different startup root: + +```bash rpg-mcp-server /path/to/project ``` diff --git a/npm/bin/run-mcp.js b/npm/bin/run-mcp.js index 2c63498..a155b62 100644 --- a/npm/bin/run-mcp.js +++ b/npm/bin/run-mcp.js @@ -7,7 +7,7 @@ const ext = process.platform === "win32" ? ".exe" : ""; const bin = path.join(__dirname, `rpg-mcp-server${ext}`); try { - execFileSync(bin, process.argv.slice(2), { stdio: "inherit" }); + execFileSync(bin, process.argv.slice(2), { stdio: "inherit", cwd: process.cwd() }); } catch (err) { if (err.status != null) process.exit(err.status); throw err; From 8676d9fa254fd347aa8b55b4f1be8406c5abcd46 Mon Sep 17 00:00:00 2001 From: VooDisss <41582720+VooDisss@users.noreply.github.com> Date: Sun, 26 Apr 2026 00:20:08 +0300 Subject: [PATCH 7/7] fix: refresh embedding deps for cargo audit Resolve the new rustls-webpki advisory with a lockfile refresh and remove the unmaintained core2 chain that was coming in through fastembed's default image-model stack. The repo only uses text embeddings, so keeping the image-related defaults pulled in unnecessary transitive dependencies and caused cargo audit to fail. Pin fastembed to the current 5.13.x line, disable its default features, and enable only the text-embedding features this workspace actually uses. Switch ONNX runtime loading to the dynamic mode so verification does not depend on downloading prebuilt binaries during local builds. The lockfile refresh updates hf-hub, ort, and rustls-webpki accordingly while preserving the existing embedding API usage. 
--- Cargo.lock | 707 +++-------------------------------------------------- Cargo.toml | 2 +- 2 files changed, 32 insertions(+), 677 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9310df0..3ffd2a4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -31,24 +31,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "aligned" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee4508988c62edf04abd8d92897fca0c2995d907ce1dfeaf369dac3716a40685" -dependencies = [ - "as-slice", -] - -[[package]] -name = "aligned-vec" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" -dependencies = [ - "equator", -] - [[package]] name = "alloca" version = "0.4.0" @@ -135,38 +117,6 @@ version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" -[[package]] -name = "arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" - -[[package]] -name = "arg_enum_proc_macro" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" - -[[package]] -name = "as-slice" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "516b6b4f0e40d50dcda9365d53964ec74560ad4284da2e7fc97122cd83174516" -dependencies = [ - "stable_deref_trait", -] - [[package]] name = "async-trait" version = "0.1.89" @@ -190,49 +140,6 @@ version = "1.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" -[[package]] -name = "av-scenechange" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f321d77c20e19b92c39e7471cf986812cbb46659d2af674adc4331ef3f18394" -dependencies = [ - "aligned", - "anyhow", - "arg_enum_proc_macro", - "arrayvec", - "log", - "num-rational", - "num-traits", - "pastey 0.1.1", - "rayon", - "thiserror", - "v_frame", - "y4m", -] - -[[package]] -name = "av1-grain" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cfddb07216410377231960af4fcab838eaa12e013417781b78bd95ee22077f8" -dependencies = [ - "anyhow", - "arrayvec", - "log", - "nom 8.0.0", - "num-rational", - "v_frame", -] - -[[package]] -name = "avif-serialize" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47c8fbc0f831f4519fe8b810b6a7a91410ec83031b8233f730a0480029f6a23f" -dependencies = [ - "arrayvec", -] - [[package]] name = "base64" version = "0.13.1" @@ -251,27 +158,12 @@ version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" -[[package]] -name = "bit_field" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e4b40c7323adcfc0a41c4b88143ed58346ff65a288fc144329c5c45e05d70c6" - [[package]] name = "bitflags" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" -[[package]] -name = "bitstream-io" -version = "4.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60d4bd9d1db2c6bdf285e223a7fa369d5ce98ec767dec949c6ca62863ce61757" -dependencies = [ - "core2", -] - [[package]] name = "block-buffer" version = "0.10.4" @@ -291,36 
+183,18 @@ dependencies = [ "serde", ] -[[package]] -name = "built" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4ad8f11f288f48ca24471bbd51ac257aaeaaa07adae295591266b792902ae64" - [[package]] name = "bumpalo" version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" -[[package]] -name = "bytemuck" -version = "1.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" - [[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" -[[package]] -name = "byteorder-lite" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" - [[package]] name = "bytes" version = "1.11.1" @@ -441,12 +315,6 @@ version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" -[[package]] -name = "color_quant" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" - [[package]] name = "colorchoice" version = "1.0.4" @@ -468,19 +336,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "console" -version = "0.15.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" -dependencies = [ - "encode_unicode", - "libc", - "once_cell", - "unicode-width", - "windows-sys 0.59.0", -] - [[package]] name = "console" version = "0.16.2" @@ -539,15 +394,6 @@ version = "0.8.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" -[[package]] -name = "core2" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" -dependencies = [ - "memchr", -] - [[package]] name = "cpufeatures" version = "0.2.17" @@ -848,26 +694,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "equator" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" -dependencies = [ - "equator-macro", -] - -[[package]] -name = "equator-macro" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "equivalent" version = "1.0.2" @@ -890,30 +716,14 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d817e038c30374a4bcb22f94d0a8a0e216958d4c3dcde369b1439fec4bdda6e6" -[[package]] -name = "exr" -version = "1.74.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4300e043a56aa2cb633c01af81ca8f699a321879a7854d3896a0ba89056363be" -dependencies = [ - "bit_field", - "half", - "lebe", - "miniz_oxide", - "rayon-core", - "smallvec", - "zune-inflate", -] - [[package]] name = "fastembed" -version = "5.8.1" +version = "5.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a3f841f27a44bcc32214f8df75cc9b6cea55dbbebbfe546735690eab5bb2d2" +checksum = "58d74247f8cb93f94459e6f3599391f30c3f434f167f7109bd01a288db1bbe67" dependencies = [ "anyhow", "hf-hub", - "image", "ndarray", "ort", "safetensors", @@ -928,35 +738,6 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" -[[package]] -name = "fax" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f05de7d48f37cd6730705cbca900770cab77a89f413d23e100ad7fad7795a0ab" -dependencies = [ - "fax_derive", -] - -[[package]] -name = "fax_derive" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "fdeflate" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" -dependencies = [ - "simd-adler32", -] - [[package]] name = "find-msvc-tools" version = "0.1.9" @@ -1131,16 +912,6 @@ dependencies = [ "wasip2", ] -[[package]] -name = "gif" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5df2ba84018d80c213569363bdcd0c64e6933c67fe4c1d60ecf822971a3c35e" -dependencies = [ - "color_quant", - "weezl", -] - [[package]] name = "git2" version = "0.20.4" @@ -1220,13 +991,13 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hf-hub" -version = "0.4.3" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "629d8f3bbeda9d148036d6b0de0a3ab947abd08ce90626327fc3547a49d59d97" +checksum = "aef3982638978efa195ff11b305f51f1f22f4f0a6cabee7af79b383ebee6a213" dependencies = [ "dirs", "http", - "indicatif 0.17.11", + "indicatif", "libc", "log", "native-tls", @@ -1235,16 +1006,10 @@ dependencies = [ "serde", "serde_json", "thiserror", - "ureq 2.12.1", - "windows-sys 0.60.2", + "ureq", + "windows-sys 0.61.2", ] -[[package]] -name = "hmac-sha256" -version = "1.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d0f0ae375a85536cac3a243e3a9cda80a47910348abdea7e2c22f8ec556d586d" - [[package]] name = "http" version = "1.4.0" @@ -1511,46 +1276,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "image" -version = "0.25.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6506c6c10786659413faa717ceebcb8f70731c0a60cbae39795fdf114519c1a" -dependencies = [ - "bytemuck", - "byteorder-lite", - "color_quant", - "exr", - "gif", - "image-webp", - "moxcms", - "num-traits", - "png", - "qoi", - "ravif", - "rayon", - "rgb", - "tiff", - "zune-core 0.5.1", - "zune-jpeg 0.5.12", -] - -[[package]] -name = "image-webp" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3" -dependencies = [ - "byteorder-lite", - "quick-error", -] - -[[package]] -name = "imgref" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c5cedc30da3a610cac6b4ba17597bdf7152cf974e8aab3afb3d54455e371c8" - [[package]] name = "indexmap" version = "2.13.0" @@ -1561,43 +1286,19 @@ dependencies = [ "hashbrown", ] -[[package]] -name = "indicatif" -version = "0.17.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" -dependencies = [ - "console 0.15.11", - "number_prefix", - "portable-atomic", - "unicode-width", - "web-time", -] - [[package]] name = "indicatif" version = "0.18.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88" dependencies = [ - "console 0.16.2", + "console", "portable-atomic", "unicode-width", "unit-prefix", "web-time", ] -[[package]] -name = "interpolate_name" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" 
-dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "ipnet" version = "2.11.0" @@ -1670,28 +1371,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" -[[package]] -name = "lebe" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8" - [[package]] name = "libc" version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" -[[package]] -name = "libfuzzer-sys" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5037190e1f70cbeef565bd267599242926f724d3b8a9f510fd7e0b540cfa4404" -dependencies = [ - "arbitrary", - "cc", -] - [[package]] name = "libgit2-sys" version = "0.18.3+1.9.2" @@ -1706,6 +1391,16 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "libloading" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "754ca22de805bb5744484a5b151a9e1a8e837d5dc232c2d7d8c2e3492edc8b60" +dependencies = [ + "cfg-if", + "windows-link", +] + [[package]] name = "libredox" version = "0.1.12" @@ -1775,21 +1470,6 @@ version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" -[[package]] -name = "loop9" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fae87c125b03c1d2c0150c90365d7d6bcc53fb73a9acaef207d2d065860f062" -dependencies = [ - "imgref", -] - -[[package]] -name = "lzma-rust2" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1670343e58806300d87950e3401e820b519b9384281bbabfb15e3636689ffd69" - [[package]] name = "macro_rules_attribute" 
version = "0.2.2" @@ -1825,16 +1505,6 @@ dependencies = [ "rawpointer", ] -[[package]] -name = "maybe-rayon" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" -dependencies = [ - "cfg-if", - "rayon", -] - [[package]] name = "memchr" version = "2.7.6" @@ -1896,16 +1566,6 @@ dependencies = [ "syn", ] -[[package]] -name = "moxcms" -version = "0.7.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac9557c559cd6fc9867e122e20d2cbefc9ca29d80d027a8e39310920ed2f0a97" -dependencies = [ - "num-traits", - "pxfm", -] - [[package]] name = "native-tls" version = "0.2.14" @@ -1938,12 +1598,6 @@ dependencies = [ "rawpointer", ] -[[package]] -name = "new_debug_unreachable" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" - [[package]] name = "nom" version = "7.1.3" @@ -1954,21 +1608,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "nom" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" -dependencies = [ - "memchr", -] - -[[package]] -name = "noop_proc_macro" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" - [[package]] name = "nu-ansi-term" version = "0.50.3" @@ -1978,16 +1617,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - [[package]] name = "num-complex" version = "0.4.6" @@ -2003,17 +1632,6 @@ version = "0.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" -[[package]] -name = "num-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "num-integer" version = "0.1.46" @@ -2023,17 +1641,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-rational" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits", -] - [[package]] name = "num-traits" version = "0.2.19" @@ -2043,12 +1650,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "number_prefix" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" - [[package]] name = "once_cell" version = "1.21.3" @@ -2141,27 +1742,22 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ort" -version = "2.0.0-rc.11" +version = "2.0.0-rc.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5df903c0d2c07b56950f1058104ab0c8557159f2741782223704de9be73c3c" +checksum = "d7de3af33d24a745ffb8fab904b13478438d1cd52868e6f17735ef6e1f8bf133" dependencies = [ + "libloading", "ndarray", "ort-sys", "smallvec", "tracing", - "ureq 3.2.0", ] [[package]] name = "ort-sys" -version = "2.0.0-rc.11" +version = "2.0.0-rc.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06503bb33f294c5f1ba484011e053bfa6ae227074bdb841e9863492dc5960d4b" -dependencies = [ - "hmac-sha256", - "lzma-rust2", - "ureq 3.2.0", -] +checksum = "d7b497d21a8b6fbb4b5a544f8fadb77e801a09ae0add9e411d31c6f89e3c1e90" 
[[package]] name = "page_size" @@ -2202,12 +1798,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "pastey" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" - [[package]] name = "pastey" version = "0.2.1" @@ -2275,19 +1865,6 @@ dependencies = [ "plotters-backend", ] -[[package]] -name = "png" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0" -dependencies = [ - "bitflags", - "crc32fast", - "fdeflate", - "flate2", - "miniz_oxide", -] - [[package]] name = "portable-atomic" version = "1.13.1" @@ -2336,49 +1913,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "profiling" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773" -dependencies = [ - "profiling-procmacros", -] - -[[package]] -name = "profiling-procmacros" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" -dependencies = [ - "quote", - "syn", -] - -[[package]] -name = "pxfm" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7186d3822593aa4393561d186d1393b3923e9d6163d3fbfd6e825e3e6cf3e6a8" -dependencies = [ - "num-traits", -] - -[[package]] -name = "qoi" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" -dependencies = [ - "bytemuck", -] - -[[package]] -name = "quick-error" -version = "2.0.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - [[package]] name = "quote" version = "1.0.44" @@ -2423,56 +1957,6 @@ dependencies = [ "getrandom 0.3.4", ] -[[package]] -name = "rav1e" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43b6dd56e85d9483277cde964fd1bdb0428de4fec5ebba7540995639a21cb32b" -dependencies = [ - "aligned-vec", - "arbitrary", - "arg_enum_proc_macro", - "arrayvec", - "av-scenechange", - "av1-grain", - "bitstream-io", - "built", - "cfg-if", - "interpolate_name", - "itertools 0.14.0", - "libc", - "libfuzzer-sys", - "log", - "maybe-rayon", - "new_debug_unreachable", - "noop_proc_macro", - "num-derive", - "num-traits", - "paste", - "profiling", - "rand", - "rand_chacha", - "simd_helpers", - "thiserror", - "v_frame", - "wasm-bindgen", -] - -[[package]] -name = "ravif" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef69c1990ceef18a116855938e74793a5f7496ee907562bd0857b6ac734ab285" -dependencies = [ - "avif-serialize", - "imgref", - "loop9", - "quick-error", - "rav1e", - "rayon", - "rgb", -] - [[package]] name = "rawpointer" version = "0.2.1" @@ -2622,12 +2106,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "rgb" -version = "0.8.52" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" - [[package]] name = "ring" version = "0.17.14" @@ -2652,7 +2130,7 @@ dependencies = [ "base64 0.22.1", "chrono", "futures", - "pastey 0.2.1", + "pastey", "pin-project-lite", "rmcp-macros", "schemars", @@ -2686,7 +2164,7 @@ dependencies = [ "clap", "globset", "ignore", - "indicatif 0.18.3", + "indicatif", "rpg-core", "rpg-encoder", "rpg-lift", @@ -2739,7 +2217,7 @@ name = "rpg-lift" version = "0.8.3" dependencies = [ "globset", - "indicatif 0.18.3", + "indicatif", "rpg-core", 
"rpg-encoder", "rpg-parser", @@ -2748,7 +2226,7 @@ dependencies = [ "tempfile", "thiserror", "tracing", - "ureq 3.2.0", + "ureq", ] [[package]] @@ -2860,9 +2338,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.12" +version = "0.103.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8279bb85272c9f10811ae6a6c547ff594d6a7f3c6c6b02ee9726d1d0dcfcdd06" +checksum = "61c429a8649f110dddef65e2a5ad240f747e85f7758a6bccc7e5777bd33f756e" dependencies = [ "ring", "rustls-pki-types", @@ -3089,15 +2567,6 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" -[[package]] -name = "simd_helpers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6" -dependencies = [ - "quote", -] - [[package]] name = "slab" version = "0.4.12" @@ -3138,7 +2607,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5851699c4033c63636f7ea4cf7b7c1f1bf06d0cc03cfb42e711de5a5c46cf326" dependencies = [ "base64 0.13.1", - "nom 7.1.3", + "nom", "serde", "unicode-segmentation", ] @@ -3267,20 +2736,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "tiff" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af9605de7fee8d9551863fd692cce7637f548dbd9db9180fcc07ccc6d26c336f" -dependencies = [ - "fax", - "flate2", - "half", - "quick-error", - "weezl", - "zune-jpeg 0.4.21", -] - [[package]] name = "time" version = "0.3.47" @@ -3810,26 +3265,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" -[[package]] -name = "ureq" -version = "2.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" -dependencies = [ - "base64 0.22.1", - "flate2", - "log", - "native-tls", - "once_cell", - "rustls", - "rustls-pki-types", - "serde", - "serde_json", - "socks", - "url", - "webpki-roots 0.26.11", -] - [[package]] name = "ureq" version = "3.2.0" @@ -3851,7 +3286,7 @@ dependencies = [ "ureq-proto", "utf-8", "webpki-root-certs", - "webpki-roots 1.0.6", + "webpki-roots", ] [[package]] @@ -3896,17 +3331,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" -[[package]] -name = "v_frame" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "666b7727c8875d6ab5db9533418d7c764233ac9c0cff1d469aec8fa127597be2" -dependencies = [ - "aligned-vec", - "num-traits", - "wasm-bindgen", -] - [[package]] name = "valuable" version = "0.1.1" @@ -4060,15 +3484,6 @@ dependencies = [ "rustls-pki-types", ] -[[package]] -name = "webpki-roots" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" -dependencies = [ - "webpki-roots 1.0.6", -] - [[package]] name = "webpki-roots" version = "1.0.6" @@ -4078,12 +3493,6 @@ dependencies = [ "rustls-pki-types", ] -[[package]] -name = "weezl" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88" - [[package]] name = "winapi" version = "0.3.9" @@ -4194,15 +3603,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.60.2" @@ 
-4368,12 +3768,6 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" -[[package]] -name = "y4m" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5a4b21e1a62b67a2970e6831bc091d7b87e119e7f9791aef9702e3bef04448" - [[package]] name = "yoke" version = "0.8.1" @@ -4510,42 +3904,3 @@ dependencies = [ "cc", "pkg-config", ] - -[[package]] -name = "zune-core" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" - -[[package]] -name = "zune-core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb8a0807f7c01457d0379ba880ba6322660448ddebc890ce29bb64da71fb40f9" - -[[package]] -name = "zune-inflate" -version = "0.2.54" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02" -dependencies = [ - "simd-adler32", -] - -[[package]] -name = "zune-jpeg" -version = "0.4.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29ce2c8a9384ad323cf564b67da86e21d3cfdff87908bc1223ed5c99bc792713" -dependencies = [ - "zune-core 0.4.12", -] - -[[package]] -name = "zune-jpeg" -version = "0.5.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "410e9ecef634c709e3831c2cfdb8d9c32164fae1c67496d5b68fff728eec37fe" -dependencies = [ - "zune-core 0.5.1", -] diff --git a/Cargo.toml b/Cargo.toml index 2e12004..0a1aef2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -143,7 +143,7 @@ zstd = "0.13.3" ureq = "3" # Embeddings -fastembed = "5.8.1" +fastembed = { version = "5.13.3", default-features = false, features = ["ort-load-dynamic", "hf-hub-native-tls"] } # TOON format toon-format = { version = "0.4.1", default-features = false }