diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1b37f895..4f59786a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -38,7 +38,7 @@ jobs:
with:
components: clippy
- uses: Swatinem/rust-cache@v2
- - run: cargo clippy -- -D warnings
+ - run: cargo clippy -- -W clippy::all
test:
name: Test
diff --git a/Cargo.toml b/Cargo.toml
index 0a02dd88..31d4b065 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,7 +11,7 @@ documentation = "https://docs.rs/vectorless"
keywords = ["rag", "document", "retrieval", "indexing", "llm"]
categories = ["text-processing", "data-structures", "algorithms"]
readme = "README.md"
-exclude = ["samples/", "docs/", "benches/", ".*"]
+exclude = ["samples/", "docs/", ".*"]
[dependencies]
# Async runtime
@@ -72,13 +72,8 @@ rand = "0.8"
[dev-dependencies]
tempfile = "3.10"
-criterion = { version = "0.5", features = ["async_tokio"] }
tokio-test = "0.4"
-[[bench]]
-name = "bench"
-harness = false
-
[profile.release]
opt-level = 3
lto = "thin"
diff --git a/README.md b/README.md
index 93fa0129..300818ff 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
-
+
[](https://crates.io/crates/vectorless)
[](https://crates.io/crates/vectorless)
@@ -8,11 +8,14 @@
[](LICENSE)
[](https://www.rust-lang.org/)
-**A hierarchical, reasoning-native document intelligence engine.**
-
+Ultra-performant document intelligence engine for RAG, with its core written in **Rust**. Zero vector database, zero embedding model — just LLM-powered tree navigation. Incremental indexing and multi-format support out of the box.
+
+⭐ **Drop a star to help us grow!**
+
+
## Why Vectorless?
Traditional RAG systems have a fundamental problem: **they lose document structure.**
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..4b66d90b
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,44 @@
+# Security Policy
+
+## Reporting a Vulnerability
+
+If you discover a security vulnerability in Vectorless, please report it by emailing:
+
+**beautifularea@gmail.com**
+
+**Do NOT create a public GitHub issue for security vulnerabilities.**
+
+## What to Include
+
+Please include the following in your report:
+
+- Description of the vulnerability
+- Steps to reproduce
+- Affected versions (if known)
+- Potential impact
+
+## Response Timeline
+
+| Stage | Timeframe |
+|-------|-----------|
+| Initial response | Within 48 hours |
+| Vulnerability confirmation | Within 7 days |
+| Fix development | Depends on severity |
+| Security advisory | After fix is released |
+
+## Disclosure Policy
+
+- Vulnerabilities will be disclosed after a fix is available
+- We will credit reporters (unless you prefer to remain anonymous)
+- We request a reasonable amount of time to develop a fix before public disclosure
+
+## Supported Versions
+
+| Version | Supported |
+| ------- | --------- |
+| 0.1.x | ✅ |
+| < 0.1 | ❌ |
+
+---
+
+Thank you for helping keep Vectorless secure!
diff --git a/benches/bench.rs b/benches/bench.rs
deleted file mode 100644
index b33e507f..00000000
--- a/benches/bench.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-//! Benchmark runner placeholder.
-//!
-//! Run `cargo bench` to execute benchmarks.
-
-#![allow(missing_docs)]
-
-fn main() {
- println!("Run `cargo bench` to execute benchmarks");
-}
\ No newline at end of file
diff --git a/docs/assets/brand/icon.svg b/docs/assets/brand/icon.svg
deleted file mode 100644
index 6899600c..00000000
--- a/docs/assets/brand/icon.svg
+++ /dev/null
@@ -1,24 +0,0 @@
-
diff --git a/docs/assets/brand/logo-dark.svg b/docs/assets/brand/logo-dark.svg
deleted file mode 100644
index 646bad97..00000000
--- a/docs/assets/brand/logo-dark.svg
+++ /dev/null
@@ -1,27 +0,0 @@
-
diff --git a/docs/assets/brand/logo.svg b/docs/assets/brand/logo.svg
deleted file mode 100644
index 6879f501..00000000
--- a/docs/assets/brand/logo.svg
+++ /dev/null
@@ -1,24 +0,0 @@
-
diff --git a/docs/assets/brand/logo-horizontal.svg b/docs/design/logo-horizontal.svg
similarity index 100%
rename from docs/assets/brand/logo-horizontal.svg
rename to docs/design/logo-horizontal.svg
diff --git a/examples/index.rs b/examples/index.rs
index fb1686e1..cbb318b1 100644
--- a/examples/index.rs
+++ b/examples/index.rs
@@ -15,7 +15,7 @@
//! cargo run --example index
//! ```
-use vectorless::index::{PipelineExecutor, PipelineOptions, IndexInput};
+use vectorless::index::{IndexInput, PipelineExecutor, PipelineOptions};
#[tokio::main]
async fn main() -> vectorless::Result<()> {
@@ -89,8 +89,15 @@ fn print_tree_structure(
if let Some(node) = tree.get(node_id) {
let children = tree.children(node_id);
- let marker = if children.is_empty() { "└─" } else { "├─" };
- println!("{}{} {} (depth: {})", indent, marker, node.title, node.depth);
+ let marker = if children.is_empty() {
+ "└─"
+ } else {
+ "├─"
+ };
+ println!(
+ "{}{} {} (depth: {})",
+ indent, marker, node.title, node.depth
+ );
for child_id in children {
print_tree_structure(tree, child_id, current_depth + 1, max_depth);
diff --git a/examples/markdownflow.rs b/examples/markdownflow.rs
index 7854fab1..4cde85f9 100644
--- a/examples/markdownflow.rs
+++ b/examples/markdownflow.rs
@@ -86,9 +86,7 @@ async fn main() -> Result<(), Box> {
// Step 4: Query the document
println!("Step 4: Querying the document...");
- let queries = vec![
- "What is this project about?",
- ];
+ let queries = vec!["What is this project about?"];
for query in queries {
println!(" Query: \"{}\"", query);
diff --git a/examples/retrieve.rs b/examples/retrieve.rs
index 036a3b3f..f3ed1751 100644
--- a/examples/retrieve.rs
+++ b/examples/retrieve.rs
@@ -16,12 +16,12 @@
//! ```
use std::sync::Arc;
+use vectorless::domain::DocumentTree;
use vectorless::retrieval::{
- PipelineRetriever, Retriever, RetrieveOptions, StrategyPreference,
+ PipelineRetriever, RetrieveOptions, Retriever, StrategyPreference,
pipeline::RetrievalOrchestrator,
stages::{AnalyzeStage, JudgeStage, PlanStage, SearchStage},
};
-use vectorless::domain::{DocumentTree, NodeId};
#[tokio::main]
async fn main() -> vectorless::Result<()> {
@@ -29,7 +29,10 @@ async fn main() -> vectorless::Result<()> {
// 1. Create a sample document tree
let tree = create_sample_tree();
- println!("✓ Created sample document tree ({} nodes)\n", tree.node_count());
+ println!(
+ "✓ Created sample document tree ({} nodes)\n",
+ tree.node_count()
+ );
// 2. Method A: Use PipelineRetriever (simple API)
println!("--- Method A: PipelineRetriever (Simple API) ---\n");
@@ -79,7 +82,9 @@ async fn demo_pipeline_retriever(tree: &DocumentTree) -> vectorless::Result<()>
let query = "What is the main architecture?";
println!("Query: \"{}\"\n", query);
- let response = retriever.retrieve(tree, query, &options).await
+ let response = retriever
+ .retrieve(tree, query, &options)
+ .await
.map_err(|e| vectorless::Error::Retrieval(e.to_string()))?;
// Display results
@@ -93,7 +98,12 @@ async fn demo_pipeline_retriever(tree: &DocumentTree) -> vectorless::Result<()>
if !response.results.is_empty() {
println!("\n Top results:");
for (i, result) in response.results.iter().take(3).enumerate() {
- println!(" {}. {} (score: {:.2})", i + 1, result.title, result.score);
+ println!(
+ " {}. {} (score: {:.2})",
+ i + 1,
+ result.title,
+ result.score
+ );
}
}
@@ -123,8 +133,17 @@ async fn demo_orchestrator(tree: &DocumentTree) -> vectorless::Result<()> {
if let Ok(groups) = orchestrator.get_execution_groups() {
println!("Execution groups: {} groups", groups.len());
for (i, group) in groups.iter().enumerate() {
- let parallel = if group.parallel { " (can parallelize)" } else { "" };
- println!(" Group {}: {} stages{}", i, group.stage_indices.len(), parallel);
+ let parallel = if group.parallel {
+ " (can parallelize)"
+ } else {
+ ""
+ };
+ println!(
+ " Group {}: {} stages{}",
+ i,
+ group.stage_indices.len(),
+ parallel
+ );
}
}
println!();
@@ -135,7 +154,9 @@ async fn demo_orchestrator(tree: &DocumentTree) -> vectorless::Result<()> {
let options = RetrieveOptions::default();
let tree_arc = Arc::new(tree.clone());
- let response = orchestrator.execute(tree_arc, query, options).await
+ let response = orchestrator
+ .execute(tree_arc, query, options)
+ .await
.map_err(|e| vectorless::Error::Retrieval(e.to_string()))?;
println!("Results:");
@@ -162,39 +183,77 @@ fn create_sample_tree() -> DocumentTree {
);
// Add sections using the correct API
- let _intro = tree.add_child(tree.root(), "Introduction",
- "Vectorless is a document intelligence engine written in Rust.");
-
- let arch = tree.add_child(tree.root(), "Architecture",
- "The system consists of three main components: indexer, retriever, and storage.");
-
- let index_section = tree.add_child(arch, "Index Pipeline",
- "The index pipeline processes documents into a tree structure with summaries.");
- let retrieve_section = tree.add_child(arch, "Retrieval Pipeline",
- "The retrieval pipeline finds relevant content using multi-stage processing.");
-
- tree.add_child(index_section, "Parse Stage",
- "Parses documents (Markdown, PDF, DOCX) into structured content.");
- tree.add_child(index_section, "Build Stage",
- "Builds the document tree with metadata like page numbers and indices.");
- tree.add_child(index_section, "Enrich Stage",
- "Generates AI summaries for tree nodes using LLM.");
-
- tree.add_child(retrieve_section, "Analyze Stage",
- "Analyzes query complexity and extracts keywords for matching.");
- tree.add_child(retrieve_section, "Plan Stage",
- "Selects retrieval strategy (keyword/semantic/LLM) and search algorithm.");
- tree.add_child(retrieve_section, "Search Stage",
- "Executes tree traversal (greedy/beam/MCTS) to find relevant content.");
- tree.add_child(retrieve_section, "Judge Stage",
- "Evaluates sufficiency of collected content, can trigger backtracking.");
-
- let usage = tree.add_child(tree.root(), "Usage",
- "How to use the vectorless library.");
- tree.add_child(usage, "Basic Example",
- "Simple usage with default configuration and workspace.");
- tree.add_child(usage, "Advanced Example",
- "Custom pipeline configuration with LLM and custom stages.");
+ let _intro = tree.add_child(
+ tree.root(),
+ "Introduction",
+ "Vectorless is a document intelligence engine written in Rust.",
+ );
+
+ let arch = tree.add_child(
+ tree.root(),
+ "Architecture",
+ "The system consists of three main components: indexer, retriever, and storage.",
+ );
+
+ let index_section = tree.add_child(
+ arch,
+ "Index Pipeline",
+ "The index pipeline processes documents into a tree structure with summaries.",
+ );
+ let retrieve_section = tree.add_child(
+ arch,
+ "Retrieval Pipeline",
+ "The retrieval pipeline finds relevant content using multi-stage processing.",
+ );
+
+ tree.add_child(
+ index_section,
+ "Parse Stage",
+ "Parses documents (Markdown, PDF, DOCX) into structured content.",
+ );
+ tree.add_child(
+ index_section,
+ "Build Stage",
+ "Builds the document tree with metadata like page numbers and indices.",
+ );
+ tree.add_child(
+ index_section,
+ "Enrich Stage",
+ "Generates AI summaries for tree nodes using LLM.",
+ );
+
+ tree.add_child(
+ retrieve_section,
+ "Analyze Stage",
+ "Analyzes query complexity and extracts keywords for matching.",
+ );
+ tree.add_child(
+ retrieve_section,
+ "Plan Stage",
+ "Selects retrieval strategy (keyword/semantic/LLM) and search algorithm.",
+ );
+ tree.add_child(
+ retrieve_section,
+ "Search Stage",
+ "Executes tree traversal (greedy/beam/MCTS) to find relevant content.",
+ );
+ tree.add_child(
+ retrieve_section,
+ "Judge Stage",
+ "Evaluates sufficiency of collected content, can trigger backtracking.",
+ );
+
+ let usage = tree.add_child(tree.root(), "Usage", "How to use the vectorless library.");
+ tree.add_child(
+ usage,
+ "Basic Example",
+ "Simple usage with default configuration and workspace.",
+ );
+ tree.add_child(
+ usage,
+ "Advanced Example",
+ "Custom pipeline configuration with LLM and custom stages.",
+ );
tree
}
diff --git a/src/client/builder.rs b/src/client/builder.rs
index c36aa86d..243e047e 100644
--- a/src/client/builder.rs
+++ b/src/client/builder.rs
@@ -6,8 +6,8 @@
use std::path::PathBuf;
use crate::config::{Config, ConfigLoader, RetrievalConfig};
-use crate::storage::Workspace;
use crate::retrieval::PipelineRetriever;
+use crate::storage::Workspace;
use super::Engine;
@@ -140,10 +140,9 @@ impl EngineBuilder {
.map_err(|e| BuildError::Config(e.to_string()))?
} else if let Some(config_path) = Self::find_config_file() {
// Auto-detect config file
- ConfigLoader::new()
- .file(&config_path)
- .load()
- .map_err(|e| BuildError::Config(format!("Failed to load {}: {}", config_path.display(), e)))?
+ ConfigLoader::new().file(&config_path).load().map_err(|e| {
+ BuildError::Config(format!("Failed to load {}: {}", config_path.display(), e))
+ })?
} else {
// Use defaults
Config::default()
@@ -154,8 +153,10 @@ impl EngineBuilder {
Some(Workspace::open(path).map_err(|e| BuildError::Workspace(e.to_string()))?)
} else {
// Use workspace_dir from config
- Some(Workspace::open(&config.storage.workspace_dir)
- .map_err(|e| BuildError::Workspace(e.to_string()))?)
+ Some(
+ Workspace::open(&config.storage.workspace_dir)
+ .map_err(|e| BuildError::Workspace(e.to_string()))?,
+ )
};
// Create pipeline executor with LLM client if API key is available
@@ -174,9 +175,11 @@ impl EngineBuilder {
};
// Create pipeline retriever with config
- let retrieval_config = self.retrieval_config.unwrap_or_else(|| config.retrieval.clone());
- let mut retriever = PipelineRetriever::new()
- .with_max_iterations(retrieval_config.search.max_iterations);
+ let retrieval_config = self
+ .retrieval_config
+ .unwrap_or_else(|| config.retrieval.clone());
+ let mut retriever =
+ PipelineRetriever::new().with_max_iterations(retrieval_config.search.max_iterations);
// Add LLM client if API key is available in retrieval config
if let Some(ref api_key) = retrieval_config.api_key {
@@ -188,7 +191,9 @@ impl EngineBuilder {
retriever = retriever.with_llm_client(llm_client);
}
- Ok(Engine::with_components(config, workspace, retriever, executor))
+ Ok(Engine::with_components(
+ config, workspace, retriever, executor,
+ ))
}
}
@@ -222,8 +227,7 @@ mod tests {
#[test]
fn test_builder_with_workspace() {
- let builder = EngineBuilder::new()
- .with_workspace("./test_workspace");
+ let builder = EngineBuilder::new().with_workspace("./test_workspace");
assert_eq!(builder.workspace, Some(PathBuf::from("./test_workspace")));
}
diff --git a/src/client/engine.rs b/src/client/engine.rs
index a9cd693c..aeaa87b5 100644
--- a/src/client/engine.rs
+++ b/src/client/engine.rs
@@ -48,17 +48,17 @@
use std::path::Path;
use std::sync::{Arc, Mutex, RwLock};
-use uuid::Uuid;
use tracing::info;
+use uuid::Uuid;
use crate::config::Config;
-use crate::domain::{DocumentTree, Result, Error};
+use crate::domain::{DocumentTree, Error, Result};
+use crate::index::{IndexInput, PipelineExecutor, PipelineOptions, SummaryStrategy};
use crate::parser::DocumentFormat;
-use crate::storage::{Workspace, PersistedDocument, DocumentMeta as StorageMeta};
use crate::retrieval::{PipelineRetriever, Retriever};
-use crate::index::{PipelineExecutor, PipelineOptions, IndexInput, SummaryStrategy};
+use crate::storage::{DocumentMeta as StorageMeta, PersistedDocument, Workspace};
-use super::types::{IndexMode, IndexOptions, DocumentInfo, QueryResult};
+use super::types::{DocumentInfo, IndexMode, IndexOptions, QueryResult};
/// The main Engine client.
///
@@ -168,10 +168,7 @@ impl Engine {
},
generate_ids: options.generate_ids,
summary_strategy: if options.generate_summaries {
- SummaryStrategy::selective(
- self.config.indexer.min_summary_tokens,
- false,
- )
+ SummaryStrategy::selective(self.config.indexer.min_summary_tokens, false)
} else {
SummaryStrategy::none()
},
@@ -182,16 +179,17 @@ impl Engine {
// Create pipeline input and execute (with mutex lock)
let input = IndexInput::file(&path);
let result = {
- let mut executor = self.executor.lock().map_err(|_| {
- Error::Other("Pipeline executor lock poisoned".to_string())
- })?;
+ let mut executor = self
+ .executor
+ .lock()
+ .map_err(|_| Error::Other("Pipeline executor lock poisoned".to_string()))?;
executor.execute(input, pipeline_options).await?
};
// Build persisted document
- let tree = result.tree.ok_or_else(|| {
- Error::Parse("Document tree not generated".to_string())
- })?;
+ let tree = result
+ .tree
+ .ok_or_else(|| Error::Parse("Document tree not generated".to_string()))?;
let meta = StorageMeta::new(&doc_id, &result.name, format.extension())
.with_source_path(path.to_string_lossy().to_string())
@@ -208,9 +206,9 @@ impl Engine {
// Save to workspace if configured
if let Some(ref workspace) = self.workspace {
- let mut ws = workspace.write().map_err(|_| {
- Error::Other("Workspace lock poisoned".to_string())
- })?;
+ let mut ws = workspace
+ .write()
+ .map_err(|_| Error::Other("Workspace lock poisoned".to_string()))?;
ws.add(&doc)?;
info!("Saved document {} to workspace", doc_id);
}
@@ -223,9 +221,7 @@ impl Engine {
fn detect_format(&self, path: &Path, options: &IndexOptions) -> Result {
match options.mode {
IndexMode::Auto => {
- let ext = path.extension()
- .and_then(|e| e.to_str())
- .unwrap_or("");
+ let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
DocumentFormat::from_extension(ext)
.ok_or_else(|| Error::Parse(format!("Unknown format: {}", ext)))
}
@@ -274,15 +270,18 @@ impl Engine {
/// - No workspace is configured
/// - The document is not found
pub fn get_structure(&self, doc_id: &str) -> Result {
- let workspace = self.workspace.as_ref()
+ let workspace = self
+ .workspace
+ .as_ref()
.ok_or_else(|| Error::Config("No workspace configured".to_string()))?;
// Use read lock - Workspace::load now uses interior mutability for cache
- let ws = workspace.read().map_err(|_| {
- Error::Other("Workspace lock poisoned".to_string())
- })?;
+ let ws = workspace
+ .read()
+ .map_err(|_| Error::Other("Workspace lock poisoned".to_string()))?;
- let doc = ws.load(doc_id)?
+ let doc = ws
+ .load(doc_id)?
.ok_or_else(|| Error::DocumentNotFound(format!("Document not found: {}", doc_id)))?;
Ok(doc.tree)
@@ -297,15 +296,18 @@ impl Engine {
/// - The document is not found
/// - No page content is available
pub fn get_page_content(&self, doc_id: &str, pages: &str) -> Result {
- let workspace = self.workspace.as_ref()
+ let workspace = self
+ .workspace
+ .as_ref()
.ok_or_else(|| Error::Config("No workspace configured".to_string()))?;
// Use read lock - Workspace::load now uses interior mutability for cache
- let ws = workspace.read().map_err(|_| {
- Error::Other("Workspace lock poisoned".to_string())
- })?;
+ let ws = workspace
+ .read()
+ .map_err(|_| Error::Other("Workspace lock poisoned".to_string()))?;
- let doc = ws.load(doc_id)?
+ let doc = ws
+ .load(doc_id)?
.ok_or_else(|| Error::DocumentNotFound(format!("Document not found: {}", doc_id)))?;
if doc.pages.is_empty() {
@@ -335,16 +337,19 @@ impl Engine {
if part.contains('-') {
let range: Vec<&str> = part.split('-').collect();
if range.len() == 2 {
- let start: usize = range[0].parse()
+ let start: usize = range[0]
+ .parse()
.map_err(|_| Error::Parse(format!("Invalid page number: {}", range[0])))?;
- let end: usize = range[1].parse()
+ let end: usize = range[1]
+ .parse()
.map_err(|_| Error::Parse(format!("Invalid page number: {}", range[1])))?;
for p in start..=end {
result.push(p);
}
}
} else if !part.is_empty() {
- let page: usize = part.parse()
+ let page: usize = part
+ .parse()
.map_err(|_| Error::Parse(format!("Invalid page number: {}", part)))?;
result.push(page);
}
@@ -373,15 +378,22 @@ impl Engine {
.with_include_summaries(true);
// Use adaptive retriever
- let response = self.retriever.retrieve(&tree, question, &retrieve_options).await
+ let response = self
+ .retriever
+ .retrieve(&tree, question, &retrieve_options)
+ .await
.map_err(|e| Error::Retrieval(e.to_string()))?;
// Extract node IDs and build content from results
- let node_ids: Vec = response.results.iter()
+ let node_ids: Vec = response
+ .results
+ .iter()
.filter_map(|r| r.node_id.clone())
.collect();
- let content_parts: Vec = response.results.iter()
+ let content_parts: Vec = response
+ .results
+ .iter()
.map(|r| {
let mut parts = vec![format!("## {}", r.title)];
@@ -423,13 +435,15 @@ impl Engine {
///
/// Returns an error if no workspace is configured.
pub fn load(&self, doc_id: &str) -> Result {
- let workspace = self.workspace.as_ref()
+ let workspace = self
+ .workspace
+ .as_ref()
.ok_or_else(|| Error::Config("No workspace configured".to_string()))?;
// Use read lock - Workspace::load now uses interior mutability for cache
- let ws = workspace.read().map_err(|_| {
- Error::Other("Workspace lock poisoned".to_string())
- })?;
+ let ws = workspace
+ .read()
+ .map_err(|_| Error::Other("Workspace lock poisoned".to_string()))?;
if !ws.contains(doc_id) {
return Ok(false);
@@ -445,12 +459,14 @@ impl Engine {
///
/// Returns an error if no workspace is configured.
pub fn remove(&self, doc_id: &str) -> Result {
- let workspace = self.workspace.as_ref()
+ let workspace = self
+ .workspace
+ .as_ref()
.ok_or_else(|| Error::Config("No workspace configured".to_string()))?;
- let mut ws = workspace.write().map_err(|_| {
- Error::Other("Workspace lock poisoned".to_string())
- })?;
+ let mut ws = workspace
+ .write()
+ .map_err(|_| Error::Other("Workspace lock poisoned".to_string()))?;
ws.remove(doc_id)
}
@@ -460,12 +476,14 @@ impl Engine {
///
/// Returns an error if no workspace is configured.
pub fn exists(&self, doc_id: &str) -> Result {
- let workspace = self.workspace.as_ref()
+ let workspace = self
+ .workspace
+ .as_ref()
.ok_or_else(|| Error::Config("No workspace configured".to_string()))?;
- let ws = workspace.read().map_err(|_| {
- Error::Other("Workspace lock poisoned".to_string())
- })?;
+ let ws = workspace
+ .read()
+ .map_err(|_| Error::Other("Workspace lock poisoned".to_string()))?;
Ok(ws.contains(doc_id))
}
@@ -475,12 +493,14 @@ impl Engine {
///
/// Returns an error if no workspace is configured.
pub fn get_metadata(&self, doc_id: &str) -> Result