diff --git a/Cargo.toml b/Cargo.toml index d912c66..8813557 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,6 +44,8 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } dialoguer = "0.12" console = "0.15" ctrlc = "3" +tracing-appender = "0.2.4" +log = "0.4.29" [dev-dependencies] tempfile = "3" diff --git a/src/cli/commands.rs b/src/cli/commands.rs index f128499..490c49f 100644 --- a/src/cli/commands.rs +++ b/src/cli/commands.rs @@ -40,6 +40,14 @@ pub struct Cli { /// Server port to use #[arg(short = 'p', long, global = true, env = "VGREP_PORT")] port: Option<u16>, + + /// Log level (error, warn, info, debug, trace) + #[arg(long, global = true, env = "VGREP_LOG_LEVEL")] + log_level: Option<String>, + + /// Log file path + #[arg(long, global = true, env = "VGREP_LOG_FILE")] + log_file: Option<PathBuf>, } #[derive(Subcommand)] @@ -242,6 +250,8 @@ enum ConfigKey { impl Cli { pub fn run(self) -> Result<()> { + crate::logging::init_logging(self.log_level.clone(), self.log_file.clone()); + let mut config = Config::load()?; // Apply global overrides diff --git a/src/core/indexer.rs b/src/core/indexer.rs index 469c120..8549f04 100644 --- a/src/core/indexer.rs +++ b/src/core/indexer.rs @@ -5,6 +5,7 @@ use indicatif::{ProgressBar, ProgressStyle}; use sha2::{Digest, Sha256}; use std::fs; use std::path::{Path, PathBuf}; +use tracing::{info, warn}; use super::db::Database; use super::embeddings::EmbeddingEngine; @@ -45,6 +46,7 @@ impl Indexer { pub fn index_directory(&self, path: &Path, force: bool) -> Result<()> { let abs_path = fs::canonicalize(path).context("Failed to resolve path")?; + info!("Scanning directory: {}", abs_path.display()); println!( " {}Scanning: {}", ui::FOLDER, @@ -64,14 +66,17 @@ impl Indexer { .collect(); if files.is_empty() { + warn!("No files to index in {}", abs_path.display()); println!(" {}No files to index.", ui::WARN); return Ok(()); } + info!("Found {} files to index", files.len()); println!(" {}Found {} files", ui::FILE, style(files.len()).cyan()); 
println!(); // Phase 1: Collect all chunks from all files + info!("Phase 1: Reading files"); println!(" {}Phase 1: Reading files...", style("→").dim()); let pb = ProgressBar::new(files.len() as u64); @@ -108,6 +113,7 @@ impl Indexer { } pb.finish_and_clear(); + info!("Read {} files, {} chunks", pending_files.len(), total_chunks); println!( " {}Read {} files, {} chunks", ui::CHECK, @@ -117,11 +123,13 @@ impl Indexer { if pending_files.is_empty() { println!(); + info!("All files up to date ({} skipped)", skipped); println!(" {}All files up to date ({} skipped)", ui::CHECK, skipped); return Ok(()); } // Phase 2: Generate embeddings for all chunks at once + info!("Phase 2: Generating embeddings"); println!(" {}Phase 2: Generating embeddings...", style("→").dim()); let all_chunks: Vec<&str> = pending_files @@ -139,6 +147,7 @@ impl Indexer { let all_embeddings = self.engine.embed_batch(&all_chunks)?; pb.finish_and_clear(); + info!("Generated {} embeddings", all_embeddings.len()); println!( " {}Generated {} embeddings", ui::CHECK, @@ -146,6 +155,7 @@ impl Indexer { ); // Phase 3: Store in database + info!("Phase 3: Storing in database"); println!(" {}Phase 3: Storing in database...", style("→").dim()); let pb = ProgressBar::new(pending_files.len() as u64); @@ -181,9 +191,11 @@ impl Indexer { } pb.finish_and_clear(); + info!("Stored {} files", indexed); println!(" {}Stored {} files", ui::CHECK, indexed); println!(); + info!("Indexing complete. Files: {}, Skipped: {}, Chunks: {}", indexed, skipped, total_chunks); println!(" {}Indexing complete!", ui::SPARKLES); println!( " {} {} indexed, {} skipped", @@ -558,9 +570,11 @@ impl ServerIndexer { } pb.finish_and_clear(); + info!("Stored {} files", indexed); println!(" {}Stored {} files", ui::CHECK, indexed); println!(); + info!("Indexing complete. 
Files: {}, Skipped: {}, Chunks: {}", indexed, skipped, total_chunks); println!(" {}Indexing complete!", ui::SPARKLES); println!( " {} {} indexed, {} skipped", diff --git a/src/lib.rs b/src/lib.rs index 6f688f4..77f20da 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -18,6 +18,7 @@ pub mod core; pub mod server; pub mod ui; pub mod watcher; +pub mod logging; pub use config::Config; pub use core::{Database, EmbeddingEngine, Indexer, SearchEngine, ServerIndexer}; diff --git a/src/logging.rs b/src/logging.rs new file mode 100644 index 0000000..d7c9c4d --- /dev/null +++ b/src/logging.rs @@ -0,0 +1,44 @@ +use std::path::PathBuf; +use std::str::FromStr; +use tracing::level_filters::LevelFilter; +use tracing_subscriber::fmt::format::FmtSpan; +use tracing_subscriber::{fmt, EnvFilter, Layer}; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::util::SubscriberInitExt; + +pub fn init_logging(log_level: Option<String>, log_file: Option<PathBuf>) { + let env_filter = EnvFilter::try_from_default_env() + .or_else(|_| EnvFilter::try_new(log_level.unwrap_or_else(|| "info".to_string()))) + .unwrap_or_else(|_| EnvFilter::new("info")); + + let stdout_layer = fmt::layer() + .with_target(false) + .with_thread_ids(false) + .with_file(false) + .with_line_number(false) + .with_level(false) + .compact() + .with_filter(env_filter.clone()); + + let registry = tracing_subscriber::registry(); + + if let Some(path) = log_file { + let file_appender = tracing_appender::rolling::never( + path.parent().unwrap_or(&PathBuf::from(".")), + path.file_name().unwrap_or(&std::ffi::OsString::from("vgrep.log")), + ); + let file_layer = fmt::layer() + .with_ansi(false) + .with_file(true) + .with_line_number(true) + .with_thread_ids(true) + .with_target(true) + .with_span_events(FmtSpan::CLOSE) + .with_writer(file_appender) + .with_filter(env_filter); + + registry.with(stdout_layer).with(file_layer).init(); + } else { + registry.with(stdout_layer).init(); + } +} diff --git a/src/server/api.rs b/src/server/api.rs 
index bd5cc8a..9de3c33 100644 --- a/src/server/api.rs +++ b/src/server/api.rs @@ -11,6 +11,7 @@ use std::net::SocketAddr; use std::path::PathBuf; use std::sync::{Arc, Mutex}; use tower_http::cors::{Any, CorsLayer}; +use tracing::{error, info}; use crate::config::Config; use crate::core::{Database, EmbeddingEngine}; @@ -93,8 +94,10 @@ pub async fn run_server(config: &Config, host: &str, port: u16) -> Result<()> { crate::ui::print_banner(); + info!("Loading embedding model..."); println!(" {}Loading embedding model...", crate::ui::BRAIN); let engine = EmbeddingEngine::new(&config)?; + info!("Model loaded successfully"); println!(" {}Model loaded successfully!", crate::ui::CHECK); println!(); @@ -123,6 +126,7 @@ pub async fn run_server(config: &Config, host: &str, port: u16) -> Result<()> { crate::ui::print_server_banner(host, port); let listener = tokio::net::TcpListener::bind(addr).await?; + info!("Server listening on {}:{}", host, port); axum::serve(listener, app).await?; Ok(()) @@ -146,6 +150,7 @@ async fn status(State(state): State) -> impl IntoResponse { let db = match Database::new(&state.config.db_path().unwrap_or_default()) { Ok(db) => db, Err(e) => { + error!("Failed to open database: {}", e); return ( StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ @@ -159,6 +164,7 @@ async fn status(State(state): State) -> impl IntoResponse { let stats = match db.get_stats() { Ok(stats) => stats, Err(e) => { + error!("Failed to get stats: {}", e); return ( StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ @@ -216,6 +222,7 @@ async fn search( match engine.embed(&req.query) { Ok(emb) => emb, Err(e) => { + error!("Failed to generate embedding: {}", e); return ( StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ @@ -231,6 +238,7 @@ async fn search( let db = match Database::new(&state.config.db_path().unwrap_or_default()) { Ok(db) => db, Err(e) => { + error!("Failed to open database: {}", e); return ( StatusCode::INTERNAL_SERVER_ERROR, 
Json(serde_json::json!({ @@ -244,6 +252,7 @@ async fn search( let candidates = match db.search_similar(&query_embedding, &abs_path, req.max_results * 3) { Ok(c) => c, Err(e) => { + error!("Search failed: {}", e); return ( StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({ diff --git a/src/watcher.rs b/src/watcher.rs index 816219f..dac70f1 100644 --- a/src/watcher.rs +++ b/src/watcher.rs @@ -9,6 +9,7 @@ use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::mpsc::channel; use std::sync::{Arc, Mutex}; use std::time::{Duration, Instant}; +use tracing::{info, warn}; use crate::config::Config; use crate::core::{Database, Indexer, ServerIndexer}; @@ -65,6 +66,7 @@ impl FileWatcher { .expect("Error setting Ctrl+C handler"); // Initial index + info!("Starting initial indexing"); println!(" {} Initial indexing...", style(">>").dim()); self.index_all()?; @@ -85,6 +87,7 @@ impl FileWatcher { watcher.watch(&abs_path, RecursiveMode::Recursive)?; + info!("Watching for changes in {}", abs_path.display()); println!(" {} Watching for changes...", style("[~]").cyan()); println!(); @@ -125,6 +128,7 @@ impl FileWatcher { } } + info!("Watcher stopped"); println!(); println!(" {} Watcher stopped", style("[x]").yellow()); println!(); @@ -302,6 +306,7 @@ impl FileWatcher { // File was deleted if let Some(entry) = db.get_file_by_path(path)? { db.delete_file(entry.id)?; + info!("File removed: {}", filename); println!( " {} {} {}", style("[-]").red(), @@ -332,6 +337,7 @@ impl FileWatcher { Mode::Server => { let client = Client::new(&self.config.server_host, self.config.server_port); self.index_file_server(&db, &client, path, &content)?; + info!("File indexed: {}", filename); println!( " {} {} {}", style("[+]").green(), @@ -340,6 +346,7 @@ impl FileWatcher { ); } Mode::Local => { + info!("File modified (pending): {}", filename); println!( " {} {} {} {}", style("[~]").yellow(),