diff --git a/agent-support/kilo-code/.gitignore b/agent-support/kilo-code/.gitignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/agent-support/kilo-code/.gitignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/agent-support/kilo-code/README.md b/agent-support/kilo-code/README.md
new file mode 100644
index 000000000..42bdfac52
--- /dev/null
+++ b/agent-support/kilo-code/README.md
@@ -0,0 +1,54 @@
+# git-ai Plugin for Kilo Code
+
+A plugin that integrates [git-ai](https://github.com/git-ai-project/git-ai) with [Kilo Code](https://kilo.ai) to automatically track AI-generated code.
+
+## Overview
+
+This plugin hooks into Kilo Code's tool execution lifecycle to create checkpoints that mark code changes as either human or AI-authored. It uses the `tool.execute.before` and `tool.execute.after` events to:
+
+1. Create a human checkpoint before AI edits (marking any intermediate changes as human-authored)
+2. Create an AI checkpoint after AI edits (marking the changes as AI-authored with model information)
+
+## Installation
+
+The plugin is automatically installed by `git-ai install-hooks`.
+
+Build `git-ai` (`cargo build`) and then run the `git-ai install-hooks` or `cargo run -- install-hooks` command to test the entire flow of installing and using the plugin.
+
+## Requirements
+
+- [git-ai](https://github.com/git-ai-project/git-ai) must be installed and available in PATH
+- [Kilo Code](https://kilo.ai) with plugin support
+
+## How It Works
+
+The plugin intercepts file editing operations (`edit` and `write` tools) and:
+
+1. **Before AI edit**: Creates a human checkpoint to mark any changes since the last checkpoint as human-authored
+2. **After AI edit**: Creates an AI checkpoint with:
+   - Model information (provider/model ID)
+   - Session/conversation ID
+   - List of edited file paths
+
+If `git-ai` is not installed or the file is not in a git repository, the plugin gracefully skips checkpoint creation without breaking Kilo Code functionality.
+ +## Development + +### Type Checking + +Run type checking: +```bash +yarn type-check +``` + +### Dependencies + +Install dependencies: +```bash +yarn install +``` + +## See Also + +- [git-ai Documentation](https://github.com/git-ai-project/git-ai) +- [Kilo Code](https://kilo.ai) diff --git a/agent-support/kilo-code/git-ai.ts b/agent-support/kilo-code/git-ai.ts new file mode 100644 index 000000000..c15d5c3bc --- /dev/null +++ b/agent-support/kilo-code/git-ai.ts @@ -0,0 +1,136 @@ +/** + * git-ai plugin for Kilo Code + * + * This plugin integrates git-ai with Kilo Code to track AI-generated code. + * It uses the tool.execute.before and tool.execute.after events to create + * checkpoints that mark code changes as human or AI-authored. + * + * Installation: + * - Automatically installed by `git-ai install-hooks` + * - Or manually copy to ~/.config/kilo/plugins/git-ai.ts (global) + * - Or to .kilo/plugins/git-ai.ts (project-local) + * + * Requirements: + * - git-ai must be installed (path is injected at install time) + * + * @see https://github.com/git-ai-project/git-ai + * @see https://kilo.ai + */ + +import type { Plugin } from "@kilocode/plugin" +import { dirname } from "path" + +// Absolute path to git-ai binary, replaced at install time by `git-ai install-hooks` +const GIT_AI_BIN = "__GIT_AI_BINARY_PATH__" + +// Tools that modify files and should be tracked +const FILE_EDIT_TOOLS = ["edit", "write", "patch", "multiedit"] + +export const GitAiPlugin: Plugin = async (ctx) => { + const { $ } = ctx + + // Check if git-ai is installed + let gitAiInstalled = false + try { + await $`${GIT_AI_BIN} --version`.quiet() + gitAiInstalled = true + } catch { + // git-ai not installed, plugin will be a no-op + } + + if (!gitAiInstalled) { + return {} + } + + // Track pending edits by callID so we can reference them in the after hook + // Stores { filePath, repoDir, sessionID } for each pending edit + const pendingEdits = new Map() + + // Helper to find git repo root from a file 
path + const findGitRepo = async (filePath: string): Promise => { + try { + const dir = dirname(filePath) + const result = await $`git -C ${dir} rev-parse --show-toplevel`.quiet() + const repoRoot = result.stdout.toString().trim() + return repoRoot || null + } catch { + // Not a git repo or git not available + return null + } + } + + return { + "tool.execute.before": async (input, output) => { + // Only intercept file editing tools + if (!FILE_EDIT_TOOLS.includes(input.tool)) { + return + } + + // Extract file path from tool arguments (args are in output, not input) + const filePath = output.args?.filePath as string | undefined + if (!filePath) { + return + } + + // Find the git repo for this file + const repoDir = await findGitRepo(filePath) + if (!repoDir) { + // File is not in a git repo, skip silently + return + } + + // Store filePath, repoDir, and sessionID for the after hook + pendingEdits.set(input.callID, { filePath, repoDir, sessionID: input.sessionID }) + + try { + // Create human checkpoint before AI edit + // This marks any changes since the last checkpoint as human-authored + const hookInput = JSON.stringify({ + hook_event_name: "PreToolUse", + session_id: input.sessionID, + cwd: repoDir, + tool_input: { filePath }, + }) + + await $`echo ${hookInput} | ${GIT_AI_BIN} checkpoint kilo-code --hook-input stdin`.quiet() + } catch (error) { + // Log to stderr for debugging, but don't throw - git-ai errors shouldn't break the agent + console.error("[git-ai] Failed to create human checkpoint:", String(error)) + } + }, + + "tool.execute.after": async (input, _output) => { + // Only intercept file editing tools + if (!FILE_EDIT_TOOLS.includes(input.tool)) { + return + } + + // Get the filePath and repoDir we stored in the before hook + const editInfo = pendingEdits.get(input.callID) + pendingEdits.delete(input.callID) + + if (!editInfo) { + return + } + + const { filePath, repoDir, sessionID } = editInfo + + try { + // Create AI checkpoint after edit + // This 
marks the changes made by this tool call as AI-authored + // Transcript is fetched from Kilo Code's local storage by the preset + const hookInput = JSON.stringify({ + hook_event_name: "PostToolUse", + session_id: sessionID, + cwd: repoDir, + tool_input: { filePath }, + }) + + await $`echo ${hookInput} | ${GIT_AI_BIN} checkpoint kilo-code --hook-input stdin`.quiet() + } catch (error) { + // Log to stderr for debugging, but don't throw - git-ai errors shouldn't break the agent + console.error("[git-ai] Failed to create AI checkpoint:", String(error)) + } + }, + } +} diff --git a/agent-support/kilo-code/package.json b/agent-support/kilo-code/package.json new file mode 100644 index 000000000..750dcfed1 --- /dev/null +++ b/agent-support/kilo-code/package.json @@ -0,0 +1,17 @@ +{ + "name": "git-ai-kilo-code-plugin", + "version": "0.1.0", + "description": "git-ai plugin for Kilo Code", + "license": "Apache-2.0", + "type": "module", + "scripts": { + "type-check": "tsc --noEmit" + }, + "dependencies": { + "@kilocode/plugin": "^7.0.50" + }, + "devDependencies": { + "@types/node": "^25.x", + "typescript": "^5.8.3" + } +} diff --git a/agent-support/kilo-code/tsconfig.json b/agent-support/kilo-code/tsconfig.json new file mode 100644 index 000000000..d3b0210b5 --- /dev/null +++ b/agent-support/kilo-code/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "bundler", + "lib": ["ES2022"], + "types": ["node"], + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "noEmit": true + }, + "include": ["*.ts"], + "exclude": ["node_modules"] +} diff --git a/src/authorship/prompt_utils.rs b/src/authorship/prompt_utils.rs index 0b90ba0d2..650fcb6c3 100644 --- a/src/authorship/prompt_utils.rs +++ b/src/authorship/prompt_utils.rs @@ -6,6 +6,7 @@ use crate::commands::checkpoint_agent::agent_presets::{ GithubCopilotPreset, 
WindsurfPreset, }; use crate::commands::checkpoint_agent::amp_preset::AmpPreset; +use crate::commands::checkpoint_agent::kilo_code_preset::KiloCodePreset; use crate::commands::checkpoint_agent::opencode_preset::OpenCodePreset; use crate::error::GitAiError; use crate::git::refs::{get_authorship, grep_ai_notes}; @@ -178,6 +179,7 @@ pub fn update_prompt_from_tool( "droid" => update_droid_prompt(agent_metadata, current_model), "amp" => update_amp_prompt(external_thread_id, agent_metadata, current_model), "opencode" => update_opencode_prompt(external_thread_id, agent_metadata, current_model), + "kilo-code" => update_kilo_code_prompt(external_thread_id, agent_metadata, current_model), "windsurf" => update_windsurf_prompt(agent_metadata, current_model), _ => { debug_log(&format!("Unknown tool: {}", tool)); @@ -580,6 +582,49 @@ fn update_opencode_prompt( } } +/// Update Kilo Code prompt by fetching latest transcript from storage +fn update_kilo_code_prompt( + session_id: &str, + metadata: Option<&HashMap>, + current_model: &str, +) -> PromptUpdateResult { + // Check for test storage path override in metadata or env var + let storage_path = if let Ok(env_path) = std::env::var("GIT_AI_KILO_CODE_STORAGE_PATH") { + Some(std::path::PathBuf::from(env_path)) + } else { + metadata + .and_then(|m| m.get("__test_storage_path")) + .map(std::path::PathBuf::from) + }; + + let result = if let Some(path) = storage_path { + KiloCodePreset::transcript_and_model_from_storage(&path, session_id) + } else { + KiloCodePreset::transcript_and_model_from_session(session_id) + }; + + match result { + Ok((transcript, model)) => PromptUpdateResult::Updated( + transcript, + model.unwrap_or_else(|| current_model.to_string()), + ), + Err(e) => { + debug_log(&format!( + "Failed to fetch Kilo Code transcript for session {}: {}", + session_id, e + )); + log_error( + &e, + Some(serde_json::json!({ + "agent_tool": "kilo-code", + "operation": "transcript_and_model_from_storage" + })), + ); + 
PromptUpdateResult::Failed(e) + } + } +} + /// Update Windsurf prompt from transcript JSONL file fn update_windsurf_prompt( metadata: Option<&HashMap>, diff --git a/src/commands/checkpoint_agent/kilo_code_preset.rs b/src/commands/checkpoint_agent/kilo_code_preset.rs new file mode 100644 index 000000000..859668acb --- /dev/null +++ b/src/commands/checkpoint_agent/kilo_code_preset.rs @@ -0,0 +1,786 @@ +use crate::{ + authorship::{ + transcript::{AiTranscript, Message}, + working_log::{AgentId, CheckpointKind}, + }, + commands::checkpoint_agent::agent_presets::{ + AgentCheckpointFlags, AgentCheckpointPreset, AgentRunResult, + }, + error::GitAiError, + observability::log_error, +}; +use chrono::DateTime; +use rusqlite::{Connection, OpenFlags}; +use serde::Deserialize; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +pub struct KiloCodePreset; + +/// Hook input from Kilo Code plugin +#[derive(Debug, Deserialize)] +struct KiloCodeHookInput { + hook_event_name: String, + session_id: String, + cwd: String, + tool_input: Option, +} + +#[derive(Debug, Deserialize)] +struct ToolInput { + #[serde(rename = "filePath")] + file_path: Option, +} + +/// Message metadata from legacy file storage message/{session_id}/{msg_id}.json +#[derive(Debug, Deserialize)] +struct KiloCodeMessage { + id: String, + #[serde(rename = "sessionID", default)] + #[allow(dead_code)] + session_id: String, + role: String, // "user" | "assistant" + time: KiloCodeTime, + #[serde(rename = "modelID")] + model_id: Option, + #[serde(rename = "providerID")] + provider_id: Option, +} + +#[derive(Debug, Deserialize)] +struct KiloCodeTime { + created: i64, + #[allow(dead_code)] + completed: Option, +} + +/// SQLite message payload from message.data +#[derive(Debug, Deserialize)] +struct KiloCodeDbMessageData { + role: String, + #[serde(default)] + time: Option, + #[serde(rename = "modelID")] + model_id: Option, + #[serde(rename = "providerID")] + provider_id: Option, +} + +#[derive(Debug)] 
+struct TranscriptSourceMessage { + id: String, + role: String, + created: i64, + model_id: Option, + provider_id: Option, +} + +/// Tool state object containing status and nested data +#[derive(Debug, Deserialize)] +struct ToolState { + #[allow(dead_code)] + status: Option, + input: Option, + #[allow(dead_code)] + output: Option, + #[allow(dead_code)] + title: Option, + #[allow(dead_code)] + metadata: Option, + time: Option, +} + +/// Part content from either legacy part/{msg_id}/{prt_id}.json or sqlite part.data +#[derive(Debug, Deserialize)] +#[serde(tag = "type", rename_all = "kebab-case")] +#[allow(clippy::large_enum_variant)] +enum KiloCodePart { + Text { + #[serde(rename = "messageID", default)] + #[allow(dead_code)] + message_id: Option, + text: String, + time: Option, + #[allow(dead_code)] + synthetic: Option, + #[allow(dead_code)] + id: Option, + }, + Tool { + #[serde(rename = "messageID", default)] + #[allow(dead_code)] + message_id: Option, + tool: String, + #[serde(rename = "callID")] + #[allow(dead_code)] + call_id: String, + state: Option, + input: Option, + #[allow(dead_code)] + output: Option, + time: Option, + #[allow(dead_code)] + id: Option, + }, + StepStart { + #[serde(rename = "messageID", default)] + #[allow(dead_code)] + message_id: Option, + #[allow(dead_code)] + time: Option, + #[allow(dead_code)] + id: Option, + }, + StepFinish { + #[serde(rename = "messageID", default)] + #[allow(dead_code)] + message_id: Option, + #[allow(dead_code)] + time: Option, + #[allow(dead_code)] + id: Option, + }, + #[serde(other)] + Unknown, +} + +#[derive(Debug, Deserialize)] +struct KiloCodePartTime { + start: i64, + #[allow(dead_code)] + end: Option, +} + +impl AgentCheckpointPreset for KiloCodePreset { + fn run(&self, flags: AgentCheckpointFlags) -> Result { + let hook_input_json = flags.hook_input.ok_or_else(|| { + GitAiError::PresetError("hook_input is required for Kilo Code preset".to_string()) + })?; + + let hook_input: KiloCodeHookInput = 
serde_json::from_str(&hook_input_json) + .map_err(|e| GitAiError::PresetError(format!("Invalid JSON in hook_input: {}", e)))?; + + let KiloCodeHookInput { + hook_event_name, + session_id, + cwd, + tool_input, + } = hook_input; + + // Extract file_path from tool_input if present + let file_path_as_vec = tool_input + .and_then(|ti| ti.file_path) + .map(|path| vec![path]); + + // Determine Kilo Code path (test override can point to either root or legacy storage path) + let kilo_code_path = if let Ok(test_path) = std::env::var("GIT_AI_KILO_CODE_STORAGE_PATH") { + PathBuf::from(test_path) + } else { + Self::kilo_code_data_path()? + }; + + // Fetch transcript and model from sqlite first, then fallback to legacy storage + let (transcript, model) = + match Self::transcript_and_model_from_storage(&kilo_code_path, &session_id) { + Ok((transcript, model)) => (transcript, model), + Err(e) => { + eprintln!("[Warning] Failed to parse Kilo Code storage: {e}"); + log_error( + &e, + Some(serde_json::json!({ + "agent_tool": "kilo-code", + "operation": "transcript_and_model_from_storage" + })), + ); + (AiTranscript::new(), None) + } + }; + + let agent_id = AgentId { + tool: "kilo-code".to_string(), + id: session_id.clone(), + model: model.unwrap_or_else(|| "unknown".to_string()), + }; + + // Store session_id in metadata for post-commit refetch + let mut agent_metadata = HashMap::new(); + agent_metadata.insert("session_id".to_string(), session_id); + // Store test path if set, for subprocess access in tests + if let Ok(test_path) = std::env::var("GIT_AI_KILO_CODE_STORAGE_PATH") { + agent_metadata.insert("__test_storage_path".to_string(), test_path); + } + + // Check if this is a PreToolUse event (human checkpoint) + if hook_event_name == "PreToolUse" { + return Ok(AgentRunResult { + agent_id, + agent_metadata: None, + checkpoint_kind: CheckpointKind::Human, + transcript: None, + repo_working_dir: Some(cwd), + edited_filepaths: None, + will_edit_filepaths: file_path_as_vec, + 
dirty_files: None, + }); + } + + // PostToolUse event - AI checkpoint + Ok(AgentRunResult { + agent_id, + agent_metadata: Some(agent_metadata), + checkpoint_kind: CheckpointKind::AiAgent, + transcript: Some(transcript), + repo_working_dir: Some(cwd), + edited_filepaths: file_path_as_vec, + will_edit_filepaths: None, + dirty_files: None, + }) + } +} + +impl KiloCodePreset { + /// Get the Kilo Code data directory based on platform. + /// Expected layout: {data_dir}/kilo.db and {data_dir}/storage + pub fn kilo_code_data_path() -> Result { + #[cfg(target_os = "macos")] + { + let home = dirs::home_dir().ok_or_else(|| { + GitAiError::Generic("Could not determine home directory".to_string()) + })?; + Ok(home.join(".local").join("share").join("kilo")) + } + + #[cfg(target_os = "linux")] + { + // Try XDG_DATA_HOME first, then fall back to ~/.local/share + if let Ok(xdg_data) = std::env::var("XDG_DATA_HOME") { + Ok(PathBuf::from(xdg_data).join("kilo")) + } else { + let home = dirs::home_dir().ok_or_else(|| { + GitAiError::Generic("Could not determine home directory".to_string()) + })?; + Ok(home.join(".local").join("share").join("kilo")) + } + } + + #[cfg(target_os = "windows")] + { + // Kilo Code uses ~/.local/share/kilo on all platforms (including Windows) + let home = dirs::home_dir().ok_or_else(|| { + GitAiError::Generic("Could not determine home directory".to_string()) + })?; + let unix_style_path = home.join(".local").join("share").join("kilo"); + if unix_style_path.exists() { + return Ok(unix_style_path); + } + + // Fallback to standard Windows paths + if let Ok(app_data) = std::env::var("APPDATA") { + Ok(PathBuf::from(app_data).join("kilo")) + } else if let Ok(local_app_data) = std::env::var("LOCALAPPDATA") { + Ok(PathBuf::from(local_app_data).join("kilo")) + } else { + Err(GitAiError::Generic( + "Neither APPDATA nor LOCALAPPDATA is set".to_string(), + )) + } + } + + #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))] + { + 
Err(GitAiError::PresetError( + "Kilo Code storage path not supported on this platform".to_string(), + )) + } + } + + /// Public API for fetching transcript from session_id (uses default Kilo Code data path) + pub fn transcript_and_model_from_session( + session_id: &str, + ) -> Result<(AiTranscript, Option), GitAiError> { + let kilo_code_path = Self::kilo_code_data_path()?; + Self::transcript_and_model_from_storage(&kilo_code_path, session_id) + } + + /// Fetch transcript and model from Kilo Code path (sqlite first, fallback to legacy storage) + /// + /// `kilo_code_path` may be one of: + /// - Kilo Code data dir (contains `kilo.db` and optional `storage/`) + /// - Legacy storage dir (contains `message/` and `part/`) + /// - Direct path to `kilo.db` + pub fn transcript_and_model_from_storage( + kilo_code_path: &Path, + session_id: &str, + ) -> Result<(AiTranscript, Option), GitAiError> { + if !kilo_code_path.exists() { + return Err(GitAiError::PresetError(format!( + "Kilo Code path does not exist: {:?}", + kilo_code_path + ))); + } + + let mut sqlite_empty_result: Option<(AiTranscript, Option)> = None; + let mut sqlite_error: Option = None; + + if let Some(db_path) = Self::resolve_sqlite_db_path(kilo_code_path) { + match Self::transcript_and_model_from_sqlite(&db_path, session_id) { + Ok((transcript, model)) => { + if !transcript.messages().is_empty() || model.is_some() { + return Ok((transcript, model)); + } + sqlite_empty_result = Some((transcript, model)); + } + Err(e) => { + eprintln!( + "[Warning] Failed to parse Kilo Code sqlite db {:?}: {}", + db_path, e + ); + sqlite_error = Some(e); + } + } + } + + if let Some(storage_path) = Self::resolve_legacy_storage_path(kilo_code_path) { + match Self::transcript_and_model_from_legacy_storage(&storage_path, session_id) { + Ok((transcript, model)) => { + if !transcript.messages().is_empty() || model.is_some() { + return Ok((transcript, model)); + } + if let Some(result) = sqlite_empty_result.take() { + return 
Ok(result); + } + return Ok((transcript, model)); + } + Err(e) => { + if let Some(result) = sqlite_empty_result.take() { + return Ok(result); + } + if let Some(sqlite_err) = sqlite_error { + return Err(sqlite_err); + } + return Err(e); + } + } + } + + if let Some(result) = sqlite_empty_result { + return Ok(result); + } + + if let Some(sqlite_err) = sqlite_error { + return Err(sqlite_err); + } + + Err(GitAiError::PresetError(format!( + "No Kilo Code sqlite database or legacy storage found under {:?}", + kilo_code_path + ))) + } + + fn resolve_sqlite_db_path(path: &Path) -> Option { + if path.is_file() { + return path + .file_name() + .and_then(|name| name.to_str()) + .filter(|name| *name == "kilo.db") + .map(|_| path.to_path_buf()); + } + + if !path.is_dir() { + return None; + } + + let direct_db = path.join("kilo.db"); + if direct_db.exists() { + return Some(direct_db); + } + + // If caller passed legacy storage path, check sibling kilo.db + if path + .file_name() + .and_then(|name| name.to_str()) + .is_some_and(|name| name == "storage") + { + let sibling_db = path.parent()?.join("kilo.db"); + if sibling_db.exists() { + return Some(sibling_db); + } + } + + None + } + + fn resolve_legacy_storage_path(path: &Path) -> Option { + if path.is_file() { + if path + .file_name() + .and_then(|name| name.to_str()) + .is_some_and(|name| name == "kilo.db") + { + let storage = path.parent()?.join("storage"); + if storage.exists() { + return Some(storage); + } + } + return None; + } + + if !path.is_dir() { + return None; + } + + if path.join("message").exists() || path.join("part").exists() { + return Some(path.to_path_buf()); + } + + let nested_storage = path.join("storage"); + if nested_storage.exists() { + return Some(nested_storage); + } + + None + } + + fn open_sqlite_readonly(path: &Path) -> Result { + Connection::open_with_flags(path, OpenFlags::SQLITE_OPEN_READ_ONLY) + .map_err(|e| GitAiError::Generic(format!("Failed to open {:?}: {}", path, e))) + } + + fn 
transcript_and_model_from_sqlite( + db_path: &Path, + session_id: &str, + ) -> Result<(AiTranscript, Option), GitAiError> { + let conn = Self::open_sqlite_readonly(db_path)?; + let messages = Self::read_session_messages_from_sqlite(&conn, session_id)?; + + if messages.is_empty() { + return Ok((AiTranscript::new(), None)); + } + + Self::build_transcript_from_messages(messages, |message_id| { + Self::read_message_parts_from_sqlite(&conn, session_id, message_id) + }) + } + + fn transcript_and_model_from_legacy_storage( + storage_path: &Path, + session_id: &str, + ) -> Result<(AiTranscript, Option), GitAiError> { + if !storage_path.exists() { + return Err(GitAiError::PresetError(format!( + "Kilo Code legacy storage path does not exist: {:?}", + storage_path + ))); + } + + let messages = Self::read_session_messages(storage_path, session_id)?; + if messages.is_empty() { + return Ok((AiTranscript::new(), None)); + } + + Self::build_transcript_from_messages(messages, |message_id| { + Self::read_message_parts(storage_path, message_id) + }) + } + + fn build_transcript_from_messages( + mut messages: Vec, + mut read_parts: F, + ) -> Result<(AiTranscript, Option), GitAiError> + where + F: FnMut(&str) -> Result, GitAiError>, + { + messages.sort_by_key(|m| m.created); + + let mut transcript = AiTranscript::new(); + let mut model: Option = None; + + for message in &messages { + // Extract model from first assistant message + if model.is_none() && message.role == "assistant" { + if let (Some(provider_id), Some(model_id)) = + (&message.provider_id, &message.model_id) + { + model = Some(format!("{}/{}", provider_id, model_id)); + } else if let Some(model_id) = &message.model_id { + model = Some(model_id.clone()); + } + } + + let parts = read_parts(&message.id)?; + + // Convert Unix ms to RFC3339 timestamp + let timestamp = + DateTime::from_timestamp_millis(message.created).map(|dt| dt.to_rfc3339()); + + for part in parts { + match part { + KiloCodePart::Text { text, .. 
} => { + let trimmed = text.trim(); + if !trimmed.is_empty() { + if message.role == "user" { + transcript.add_message(Message::User { + text: trimmed.to_string(), + timestamp: timestamp.clone(), + }); + } else if message.role == "assistant" { + transcript.add_message(Message::Assistant { + text: trimmed.to_string(), + timestamp: timestamp.clone(), + }); + } + } + } + KiloCodePart::Tool { + tool, input, state, .. + } => { + // Only include tool calls from assistant messages + if message.role == "assistant" { + // Try part input first, then state.input as fallback + let tool_input = input + .or_else(|| state.and_then(|s| s.input)) + .unwrap_or(serde_json::Value::Object(serde_json::Map::new())); + transcript.add_message(Message::ToolUse { + name: tool, + input: tool_input, + timestamp: timestamp.clone(), + }); + } + } + KiloCodePart::StepStart { .. } | KiloCodePart::StepFinish { .. } => { + // Skip step markers - they don't contribute to the transcript + } + KiloCodePart::Unknown => { + // Skip unknown part types + } + } + } + } + + Ok((transcript, model)) + } + + fn part_created_for_sort(part: &KiloCodePart, fallback: i64) -> i64 { + match part { + KiloCodePart::Text { time, .. } => time.as_ref().map(|t| t.start).unwrap_or(fallback), + KiloCodePart::Tool { time, state, .. } => time + .as_ref() + .map(|t| t.start) + .or_else(|| { + state + .as_ref() + .and_then(|s| s.time.as_ref()) + .map(|t| t.start) + }) + .unwrap_or(fallback), + KiloCodePart::StepStart { time, .. } => { + time.as_ref().map(|t| t.start).unwrap_or(fallback) + } + KiloCodePart::StepFinish { time, .. 
} => { + time.as_ref().map(|t| t.start).unwrap_or(fallback) + } + KiloCodePart::Unknown => fallback, + } + } + + /// Read all legacy message files for a session + fn read_session_messages( + storage_path: &Path, + session_id: &str, + ) -> Result, GitAiError> { + let message_dir = storage_path.join("message").join(session_id); + if !message_dir.exists() { + return Ok(Vec::new()); + } + + let mut messages = Vec::new(); + + let entries = std::fs::read_dir(&message_dir).map_err(GitAiError::IoError)?; + + for entry in entries { + let entry = entry.map_err(GitAiError::IoError)?; + let path = entry.path(); + + if path.extension().is_some_and(|ext| ext == "json") { + match std::fs::read_to_string(&path) { + Ok(content) => match serde_json::from_str::(&content) { + Ok(message) => messages.push(TranscriptSourceMessage { + id: message.id, + role: message.role, + created: message.time.created, + model_id: message.model_id, + provider_id: message.provider_id, + }), + Err(e) => { + eprintln!( + "[Warning] Failed to parse Kilo Code message file {:?}: {}", + path, e + ); + } + }, + Err(e) => { + eprintln!( + "[Warning] Failed to read Kilo Code message file {:?}: {}", + path, e + ); + } + } + } + } + + Ok(messages) + } + + /// Read all legacy part files for a message + fn read_message_parts( + storage_path: &Path, + message_id: &str, + ) -> Result, GitAiError> { + let part_dir = storage_path.join("part").join(message_id); + if !part_dir.exists() { + return Ok(Vec::new()); + } + + let mut parts: Vec<(i64, KiloCodePart)> = Vec::new(); + let entries = std::fs::read_dir(&part_dir).map_err(GitAiError::IoError)?; + + for entry in entries { + let entry = entry.map_err(GitAiError::IoError)?; + let path = entry.path(); + + if path.extension().is_some_and(|ext| ext == "json") { + match std::fs::read_to_string(&path) { + Ok(content) => match serde_json::from_str::(&content) { + Ok(part) => { + let created = Self::part_created_for_sort(&part, 0); + parts.push((created, part)); + } + Err(e) => 
{ + eprintln!( + "[Warning] Failed to parse Kilo Code part file {:?}: {}", + path, e + ); + } + }, + Err(e) => { + eprintln!( + "[Warning] Failed to read Kilo Code part file {:?}: {}", + path, e + ); + } + } + } + } + + // Sort parts by creation time + parts.sort_by_key(|(created, _)| *created); + Ok(parts.into_iter().map(|(_, part)| part).collect()) + } + + fn read_session_messages_from_sqlite( + conn: &Connection, + session_id: &str, + ) -> Result, GitAiError> { + let mut stmt = conn + .prepare( + "SELECT id, time_created, data FROM message WHERE session_id = ? ORDER BY time_created ASC, id ASC", + ) + .map_err(|e| GitAiError::Generic(format!("SQLite query prepare failed: {}", e)))?; + + let mut rows = stmt + .query([session_id]) + .map_err(|e| GitAiError::Generic(format!("SQLite query failed: {}", e)))?; + + let mut messages = Vec::new(); + + while let Some(row) = rows + .next() + .map_err(|e| GitAiError::Generic(format!("SQLite row read failed: {}", e)))? + { + let id: String = row + .get(0) + .map_err(|e| GitAiError::Generic(format!("SQLite field read failed: {}", e)))?; + let created_column: i64 = row + .get(1) + .map_err(|e| GitAiError::Generic(format!("SQLite field read failed: {}", e)))?; + let data_text: String = row + .get(2) + .map_err(|e| GitAiError::Generic(format!("SQLite field read failed: {}", e)))?; + + match serde_json::from_str::(&data_text) { + Ok(data) => { + let KiloCodeDbMessageData { + role, + time, + model_id, + provider_id, + } = data; + messages.push(TranscriptSourceMessage { + id, + role, + created: time.map(|t| t.created).unwrap_or(created_column), + model_id, + provider_id, + }); + } + Err(e) => { + eprintln!( + "[Warning] Failed to parse Kilo Code sqlite message row {}: {}", + id, e + ); + } + } + } + + Ok(messages) + } + + fn read_message_parts_from_sqlite( + conn: &Connection, + session_id: &str, + message_id: &str, + ) -> Result, GitAiError> { + let mut stmt = conn + .prepare( + "SELECT id, time_created, data FROM part WHERE 
session_id = ? AND message_id = ? ORDER BY id ASC", + ) + .map_err(|e| GitAiError::Generic(format!("SQLite query prepare failed: {}", e)))?; + + let mut rows = stmt + .query([session_id, message_id]) + .map_err(|e| GitAiError::Generic(format!("SQLite query failed: {}", e)))?; + + let mut parts: Vec<(i64, KiloCodePart)> = Vec::new(); + + while let Some(row) = rows + .next() + .map_err(|e| GitAiError::Generic(format!("SQLite row read failed: {}", e)))? + { + let part_id: String = row + .get(0) + .map_err(|e| GitAiError::Generic(format!("SQLite field read failed: {}", e)))?; + let created_column: i64 = row + .get(1) + .map_err(|e| GitAiError::Generic(format!("SQLite field read failed: {}", e)))?; + let data_text: String = row + .get(2) + .map_err(|e| GitAiError::Generic(format!("SQLite field read failed: {}", e)))?; + + match serde_json::from_str::(&data_text) { + Ok(part) => { + let created = Self::part_created_for_sort(&part, created_column); + parts.push((created, part)); + } + Err(e) => { + eprintln!( + "[Warning] Failed to parse Kilo Code sqlite part row {}: {}", + part_id, e + ); + } + } + } + + parts.sort_by_key(|(created, _)| *created); + Ok(parts.into_iter().map(|(_, part)| part).collect()) + } +} diff --git a/src/commands/checkpoint_agent/mod.rs b/src/commands/checkpoint_agent/mod.rs index f6ae812b4..402e8f10f 100644 --- a/src/commands/checkpoint_agent/mod.rs +++ b/src/commands/checkpoint_agent/mod.rs @@ -1,4 +1,5 @@ pub mod agent_presets; pub mod agent_v1_preset; pub mod amp_preset; +pub mod kilo_code_preset; pub mod opencode_preset; diff --git a/src/commands/git_ai_handlers.rs b/src/commands/git_ai_handlers.rs index f0b07e3e7..72d64fcfb 100644 --- a/src/commands/git_ai_handlers.rs +++ b/src/commands/git_ai_handlers.rs @@ -12,6 +12,7 @@ use crate::commands::checkpoint_agent::agent_presets::{ }; use crate::commands::checkpoint_agent::agent_v1_preset::AgentV1Preset; use crate::commands::checkpoint_agent::amp_preset::AmpPreset; +use 
crate::commands::checkpoint_agent::kilo_code_preset::KiloCodePreset; use crate::commands::checkpoint_agent::opencode_preset::OpenCodePreset; use crate::config; use crate::git::find_repository; @@ -546,6 +547,22 @@ fn handle_checkpoint(args: &[String]) { } } } + "kilo-code" => { + match KiloCodePreset.run(AgentCheckpointFlags { + hook_input: hook_input.clone(), + }) { + Ok(agent_run) => { + if agent_run.repo_working_dir.is_some() { + repository_working_dir = agent_run.repo_working_dir.clone().unwrap(); + } + agent_run_result = Some(agent_run); + } + Err(e) => { + eprintln!("Kilo Code preset error: {}", e); + std::process::exit(0); + } + } + } "mock_ai" => { let mock_agent_id = format!( "ai-thread-{}", diff --git a/src/git/repo_storage.rs b/src/git/repo_storage.rs index 006251bbd..b7ada7e6c 100644 --- a/src/git/repo_storage.rs +++ b/src/git/repo_storage.rs @@ -359,8 +359,8 @@ impl PersistedWorkingLog { .and_then(|m| m.get("transcript_path")) .is_none() } - // opencode can always refetch from its session storage - "opencode" => false, + // opencode and kilo-code can always refetch from their session storage + "opencode" | "kilo-code" => false, // github-copilot needs chat_session_path "github-copilot" => metadata .as_ref() diff --git a/src/mdm/agents/kilo_code.rs b/src/mdm/agents/kilo_code.rs new file mode 100644 index 000000000..1e8f5904b --- /dev/null +++ b/src/mdm/agents/kilo_code.rs @@ -0,0 +1,298 @@ +use crate::error::GitAiError; +use crate::mdm::hook_installer::{HookCheckResult, HookInstaller, HookInstallerParams}; +use crate::mdm::utils::{binary_exists, generate_diff, home_dir, write_atomic}; +use std::fs; +use std::path::{Path, PathBuf}; + +// Kilo Code plugin content (TypeScript), embedded from the source file +const KILO_CODE_PLUGIN_CONTENT: &str = include_str!(concat!( + env!("CARGO_MANIFEST_DIR"), + "/agent-support/kilo-code/git-ai.ts" +)); + +pub struct KiloCodeInstaller; + +impl KiloCodeInstaller { + fn plugin_path() -> PathBuf { + home_dir() + 
.join(".config") + .join("kilo") + .join("plugins") + .join("git-ai.ts") + } + + /// Generate plugin content with the absolute binary path substituted in + fn generate_plugin_content(binary_path: &Path) -> String { + // Escape backslashes for the TypeScript string literal (needed for Windows paths) + let path_str = binary_path.display().to_string().replace('\\', "\\\\"); + KILO_CODE_PLUGIN_CONTENT.replace("__GIT_AI_BINARY_PATH__", &path_str) + } +} + +impl HookInstaller for KiloCodeInstaller { + fn name(&self) -> &str { + "Kilo Code" + } + + fn id(&self) -> &str { + "kilo-code" + } + + fn check_hooks(&self, params: &HookInstallerParams) -> Result { + let has_binary = binary_exists("kilo"); + let has_global_config = home_dir().join(".config").join("kilo").exists(); + let has_local_config = Path::new(".kilo").exists(); + + if !has_binary && !has_global_config && !has_local_config { + return Ok(HookCheckResult { + tool_installed: false, + hooks_installed: false, + hooks_up_to_date: false, + }); + } + + // Check if plugin is installed + let plugin_path = Self::plugin_path(); + if !plugin_path.exists() { + return Ok(HookCheckResult { + tool_installed: true, + hooks_installed: false, + hooks_up_to_date: false, + }); + } + + // Check if plugin is up to date (compare against content with binary path substituted) + let current_content = fs::read_to_string(&plugin_path).unwrap_or_default(); + let expected_content = Self::generate_plugin_content(¶ms.binary_path); + let is_up_to_date = current_content.trim() == expected_content.trim(); + + Ok(HookCheckResult { + tool_installed: true, + hooks_installed: true, + hooks_up_to_date: is_up_to_date, + }) + } + + fn install_hooks( + &self, + params: &HookInstallerParams, + dry_run: bool, + ) -> Result, GitAiError> { + let plugin_path = Self::plugin_path(); + + // Ensure directory exists + if let Some(dir) = plugin_path.parent() + && !dry_run + { + fs::create_dir_all(dir)?; + } + + // Read existing content if present + let 
existing_content = if plugin_path.exists() { + fs::read_to_string(&plugin_path)? + } else { + String::new() + }; + + let new_content = Self::generate_plugin_content(¶ms.binary_path); + + // Check if there are changes + if existing_content.trim() == new_content.trim() { + return Ok(None); + } + + // Generate diff + let diff_output = generate_diff(&plugin_path, &existing_content, &new_content); + + // Write if not dry-run + if !dry_run { + // Ensure directory exists (might not exist in dry run check above) + if let Some(dir) = plugin_path.parent() { + fs::create_dir_all(dir)?; + } + write_atomic(&plugin_path, new_content.as_bytes())?; + } + + Ok(Some(diff_output)) + } + + fn uninstall_hooks( + &self, + _params: &HookInstallerParams, + dry_run: bool, + ) -> Result, GitAiError> { + let plugin_path = Self::plugin_path(); + + if !plugin_path.exists() { + return Ok(None); + } + + let existing_content = fs::read_to_string(&plugin_path)?; + let diff_output = generate_diff(&plugin_path, &existing_content, ""); + + if !dry_run { + fs::remove_file(&plugin_path)?; + } + + Ok(Some(diff_output)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + fn setup_test_env() -> (TempDir, PathBuf) { + let temp_dir = TempDir::new().unwrap(); + let plugin_path = temp_dir + .path() + .join(".config") + .join("kilo") + .join("plugins") + .join("git-ai.ts"); + (temp_dir, plugin_path) + } + + fn create_test_binary_path() -> PathBuf { + PathBuf::from("/usr/local/bin/git-ai") + } + + #[test] + fn test_kilo_code_install_plugin_creates_file_from_scratch() { + let (_temp_dir, plugin_path) = setup_test_env(); + let binary_path = create_test_binary_path(); + + if let Some(parent) = plugin_path.parent() { + fs::create_dir_all(parent).unwrap(); + } + + let generated = KiloCodeInstaller::generate_plugin_content(&binary_path); + fs::write(&plugin_path, &generated).unwrap(); + + assert!(plugin_path.exists()); + + let content = 
fs::read_to_string(&plugin_path).unwrap(); + assert!(content.contains("GitAiPlugin")); + assert!(content.contains("tool.execute.before")); + assert!(content.contains("tool.execute.after")); + // Uses the kilo-code preset with session_id-based hook input and absolute path + assert!(content.contains("session_id")); + // Placeholder should be replaced with actual binary path in the const declaration + assert!(!content.contains("__GIT_AI_BINARY_PATH__")); + assert!(content.contains(r#"const GIT_AI_BIN = "/usr/local/bin/git-ai""#)); + } + + #[test] + fn test_kilo_code_plugin_content_is_valid_typescript() { + let content = KILO_CODE_PLUGIN_CONTENT; + + assert!(content.contains("import type { Plugin }")); + assert!(content.contains("@kilocode/plugin")); + assert!(content.contains("export const GitAiPlugin: Plugin")); + assert!(content.contains("\"tool.execute.before\"")); + assert!(content.contains("\"tool.execute.after\"")); + assert!(content.contains("FILE_EDIT_TOOLS")); + assert!(content.contains("edit")); + assert!(content.contains("write")); + // Template contains placeholder for binary path + assert!(content.contains("__GIT_AI_BINARY_PATH__")); + assert!(content.contains("hook_event_name")); + assert!(content.contains("session_id")); + assert!(content.contains("PreToolUse")); + assert!(content.contains("PostToolUse")); + } + + #[test] + fn test_kilo_code_plugin_placeholder_substitution() { + let binary_path = create_test_binary_path(); + let content = KiloCodeInstaller::generate_plugin_content(&binary_path); + + // Placeholder should be replaced with the actual binary path in the const + assert!(!content.contains("__GIT_AI_BINARY_PATH__")); + assert!(content.contains(r#"const GIT_AI_BIN = "/usr/local/bin/git-ai""#)); + // Commands reference the const which now holds the absolute path + assert!(content.contains("${GIT_AI_BIN} --version")); + assert!(content.contains("${GIT_AI_BIN} checkpoint kilo-code")); + } + + #[test] + fn 
test_kilo_code_plugin_skips_if_already_exists() { + let (_temp_dir, plugin_path) = setup_test_env(); + let binary_path = create_test_binary_path(); + + if let Some(parent) = plugin_path.parent() { + fs::create_dir_all(parent).unwrap(); + } + + let generated = KiloCodeInstaller::generate_plugin_content(&binary_path); + fs::write(&plugin_path, &generated).unwrap(); + let content1 = fs::read_to_string(&plugin_path).unwrap(); + + fs::write(&plugin_path, &generated).unwrap(); + let content2 = fs::read_to_string(&plugin_path).unwrap(); + + assert_eq!(content1, content2); + } + + #[test] + fn test_kilo_code_plugin_updates_outdated_content() { + let (_temp_dir, plugin_path) = setup_test_env(); + let binary_path = create_test_binary_path(); + + if let Some(parent) = plugin_path.parent() { + fs::create_dir_all(parent).unwrap(); + } + + let old_content = "// Old plugin version\nexport const OldPlugin = {}"; + fs::write(&plugin_path, old_content).unwrap(); + + let content_before = fs::read_to_string(&plugin_path).unwrap(); + assert!(content_before.contains("OldPlugin")); + + let generated = KiloCodeInstaller::generate_plugin_content(&binary_path); + fs::write(&plugin_path, &generated).unwrap(); + + let content_after = fs::read_to_string(&plugin_path).unwrap(); + assert!(content_after.contains("GitAiPlugin")); + assert!(!content_after.contains("OldPlugin")); + } + + #[test] + fn test_kilo_code_plugin_windows_path_escaping() { + let binary_path = PathBuf::from(r"C:\Users\foo\.git-ai\bin\git-ai.exe"); + let content = KiloCodeInstaller::generate_plugin_content(&binary_path); + + assert!(!content.contains("__GIT_AI_BINARY_PATH__")); + // Backslashes should be doubled for the TS string literal + assert!( + content.contains(r#"const GIT_AI_BIN = "C:\\Users\\foo\\.git-ai\\bin\\git-ai.exe""#) + ); + } + + #[test] + fn test_kilo_code_plugin_handles_empty_directory() { + let temp_dir = TempDir::new().unwrap(); + let binary_path = create_test_binary_path(); + let plugin_path = temp_dir + 
.path() + .join(".config") + .join("kilo") + .join("plugins") + .join("git-ai.ts"); + + assert!(!plugin_path.parent().unwrap().exists()); + + if let Some(parent) = plugin_path.parent() { + fs::create_dir_all(parent).unwrap(); + } + let generated = KiloCodeInstaller::generate_plugin_content(&binary_path); + fs::write(&plugin_path, &generated).unwrap(); + + assert!(plugin_path.exists()); + let content = fs::read_to_string(&plugin_path).unwrap(); + assert!(content.contains("GitAiPlugin")); + assert!(!content.contains("__GIT_AI_BINARY_PATH__")); + } +} diff --git a/src/mdm/agents/mod.rs b/src/mdm/agents/mod.rs index 57f637471..94d9ceeb9 100644 --- a/src/mdm/agents/mod.rs +++ b/src/mdm/agents/mod.rs @@ -6,6 +6,7 @@ mod droid; mod gemini; mod github_copilot; mod jetbrains; +mod kilo_code; mod opencode; mod vscode; mod windsurf; @@ -18,6 +19,7 @@ pub use droid::DroidInstaller; pub use gemini::GeminiInstaller; pub use github_copilot::GitHubCopilotInstaller; pub use jetbrains::JetBrainsInstaller; +pub use kilo_code::KiloCodeInstaller; pub use opencode::OpenCodeInstaller; pub use vscode::VSCodeInstaller; pub use windsurf::WindsurfInstaller; @@ -34,6 +36,7 @@ pub fn get_all_installers() -> Vec> { Box::new(GitHubCopilotInstaller), Box::new(AmpInstaller), Box::new(OpenCodeInstaller), + Box::new(KiloCodeInstaller), Box::new(GeminiInstaller), Box::new(DroidInstaller), Box::new(JetBrainsInstaller), diff --git a/tests/fixtures/kilo-code-sqlite-empty/kilo.db b/tests/fixtures/kilo-code-sqlite-empty/kilo.db new file mode 100644 index 000000000..bb0e025d3 Binary files /dev/null and b/tests/fixtures/kilo-code-sqlite-empty/kilo.db differ diff --git a/tests/fixtures/kilo-code-sqlite-empty/schema.sql b/tests/fixtures/kilo-code-sqlite-empty/schema.sql new file mode 100644 index 000000000..6f84162f7 --- /dev/null +++ b/tests/fixtures/kilo-code-sqlite-empty/schema.sql @@ -0,0 +1,16 @@ +CREATE TABLE message ( + id TEXT PRIMARY KEY, + session_id TEXT NOT NULL, + time_created INTEGER NOT NULL, + 
time_updated INTEGER NOT NULL, + data TEXT NOT NULL +); + +CREATE TABLE part ( + id TEXT PRIMARY KEY, + message_id TEXT NOT NULL, + session_id TEXT NOT NULL, + time_created INTEGER NOT NULL, + time_updated INTEGER NOT NULL, + data TEXT NOT NULL +); diff --git a/tests/fixtures/kilo-code-sqlite/kilo.db b/tests/fixtures/kilo-code-sqlite/kilo.db new file mode 100644 index 000000000..a3c447ab1 Binary files /dev/null and b/tests/fixtures/kilo-code-sqlite/kilo.db differ diff --git a/tests/fixtures/kilo-code-sqlite/schema.sql b/tests/fixtures/kilo-code-sqlite/schema.sql new file mode 100644 index 000000000..79ccebea1 --- /dev/null +++ b/tests/fixtures/kilo-code-sqlite/schema.sql @@ -0,0 +1,66 @@ +CREATE TABLE message ( + id TEXT PRIMARY KEY, + session_id TEXT NOT NULL, + time_created INTEGER NOT NULL, + time_updated INTEGER NOT NULL, + data TEXT NOT NULL +); + +CREATE TABLE part ( + id TEXT PRIMARY KEY, + message_id TEXT NOT NULL, + session_id TEXT NOT NULL, + time_created INTEGER NOT NULL, + time_updated INTEGER NOT NULL, + data TEXT NOT NULL +); + +INSERT INTO message (id, session_id, time_created, time_updated, data) VALUES + ( + 'msg-user-sql-001', + 'test-session-123', + 1706459830000, + 1706459830000, + '{"role":"user","time":{"created":1706459830000},"agent":"coder","model":{"providerID":"anthropic","modelID":"claude-sonnet-4-20250514"}}' + ), + ( + 'msg-assistant-sql-001', + 'test-session-123', + 1706459831000, + 1706459835000, + '{"role":"assistant","time":{"created":1706459831000,"completed":1706459835000},"parentID":"msg-user-sql-001","modelID":"claude-sonnet-4-20250514","providerID":"anthropic","mode":"build","path":{"cwd":"/Users/test/project","root":"/Users/test/project"},"cost":0,"tokens":{"input":10,"output":20,"reasoning":0,"cache":{"read":0,"write":0}}}' + ); + +INSERT INTO part (id, message_id, session_id, time_created, time_updated, data) VALUES + ( + 'prt-sql-001', + 'msg-user-sql-001', + 'test-session-123', + 1706459830001, + 1706459830001, + 
'{"type":"text","text":"Please update index.ts using sqlite transcript data"}' + ), + ( + 'prt-sql-002', + 'msg-assistant-sql-001', + 'test-session-123', + 1706459831001, + 1706459831001, + '{"type":"text","text":"I will make the edit from sqlite."}' + ), + ( + 'prt-sql-003', + 'msg-assistant-sql-001', + 'test-session-123', + 1706459832000, + 1706459833000, + '{"type":"tool","tool":"edit","callID":"call-sql-001","state":{"status":"completed","input":{"filePath":"/Users/test/project/index.ts","content":"// sqlite path\nconsole.log(\"sqlite\");"},"output":"Edit applied","title":"index.ts","metadata":{},"time":{"start":1706459832000,"end":1706459833000}}}' + ), + ( + 'prt-sql-004', + 'msg-assistant-sql-001', + 'test-session-123', + 1706459834000, + 1706459834500, + '{"type":"text","text":"Done from sqlite source."}' + ); diff --git a/tests/fixtures/kilo-code-storage/message/test-session-123/msg-assistant-001.json b/tests/fixtures/kilo-code-storage/message/test-session-123/msg-assistant-001.json new file mode 100644 index 000000000..01f478ba3 --- /dev/null +++ b/tests/fixtures/kilo-code-storage/message/test-session-123/msg-assistant-001.json @@ -0,0 +1,11 @@ +{ + "id": "msg-assistant-001", + "sessionID": "test-session-123", + "role": "assistant", + "time": { + "created": 1706459831000, + "completed": 1706459835000 + }, + "modelID": "claude-sonnet-4-20250514", + "providerID": "anthropic" +} diff --git a/tests/fixtures/kilo-code-storage/message/test-session-123/msg-user-001.json b/tests/fixtures/kilo-code-storage/message/test-session-123/msg-user-001.json new file mode 100644 index 000000000..31c4ed102 --- /dev/null +++ b/tests/fixtures/kilo-code-storage/message/test-session-123/msg-user-001.json @@ -0,0 +1,9 @@ +{ + "id": "msg-user-001", + "sessionID": "test-session-123", + "role": "user", + "time": { + "created": 1706459830000, + "completed": 1706459830100 + } +} diff --git a/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-001.json 
b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-001.json new file mode 100644 index 000000000..7010e125f --- /dev/null +++ b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-001.json @@ -0,0 +1,10 @@ +{ + "type": "text", + "id": "prt-001", + "messageID": "msg-assistant-001", + "text": "I'll update the comment in index.ts for you.", + "time": { + "start": 1706459831000, + "end": 1706459831500 + } +} diff --git a/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-002.json b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-002.json new file mode 100644 index 000000000..565ec36ac --- /dev/null +++ b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-002.json @@ -0,0 +1,25 @@ +{ + "type": "tool", + "id": "prt-002", + "messageID": "msg-assistant-001", + "tool": "edit", + "callID": "call-001", + "state": { + "status": "completed", + "input": { + "filePath": "/Users/test/project/index.ts", + "content": "// Hello World\nconsole.log('test');" + }, + "output": "Edit applied successfully.", + "title": "index.ts", + "metadata": {}, + "time": { + "start": 1706459832000, + "end": 1706459833000 + } + }, + "time": { + "start": 1706459832000, + "end": 1706459833000 + } +} diff --git a/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-003.json b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-003.json new file mode 100644 index 000000000..b8363bc18 --- /dev/null +++ b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-003.json @@ -0,0 +1,10 @@ +{ + "type": "text", + "id": "prt-003", + "messageID": "msg-assistant-001", + "text": "Done! 
I've updated the comment to 'Hello World'.", + "time": { + "start": 1706459834000, + "end": 1706459834500 + } +} diff --git a/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-004.json b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-004.json new file mode 100644 index 000000000..095913661 --- /dev/null +++ b/tests/fixtures/kilo-code-storage/part/msg-assistant-001/prt-004.json @@ -0,0 +1,7 @@ +{ + "type": "text", + "id": "prt-004", + "messageID": "msg-assistant-001", + "text": "", + "synthetic": true +} diff --git a/tests/fixtures/kilo-code-storage/part/msg-user-001/prt-001.json b/tests/fixtures/kilo-code-storage/part/msg-user-001/prt-001.json new file mode 100644 index 000000000..ea03cfbae --- /dev/null +++ b/tests/fixtures/kilo-code-storage/part/msg-user-001/prt-001.json @@ -0,0 +1,10 @@ +{ + "type": "text", + "id": "prt-001", + "messageID": "msg-user-001", + "text": "Update the comment in index.ts to say 'Hello World'", + "time": { + "start": 1706459830000, + "end": 1706459830050 + } +} diff --git a/tests/kilo_code.rs b/tests/kilo_code.rs new file mode 100644 index 000000000..1d7815f4f --- /dev/null +++ b/tests/kilo_code.rs @@ -0,0 +1,600 @@ +#[macro_use] +mod repos; +mod test_utils; + +use git_ai::authorship::transcript::Message; +use git_ai::authorship::working_log::CheckpointKind; +use git_ai::commands::checkpoint_agent::agent_presets::{ + AgentCheckpointFlags, AgentCheckpointPreset, +}; +use git_ai::commands::checkpoint_agent::kilo_code_preset::KiloCodePreset; +use serde_json::json; +use std::fs; +use test_utils::fixture_path; + +fn kilo_code_storage_fixture_path() -> std::path::PathBuf { + fixture_path("kilo-code-storage") +} + +fn kilo_code_sqlite_fixture_path() -> std::path::PathBuf { + fixture_path("kilo-code-sqlite") +} + +fn kilo_code_sqlite_empty_fixture_path() -> std::path::PathBuf { + fixture_path("kilo-code-sqlite-empty") +} + +#[test] +fn test_parse_kilo_code_storage_transcript() { + let storage_path = 
kilo_code_storage_fixture_path(); + let session_id = "test-session-123"; + + let (transcript, model) = + KiloCodePreset::transcript_and_model_from_storage(&storage_path, session_id) + .expect("Failed to parse Kilo Code storage"); + + // Verify we parsed messages + assert!( + !transcript.messages().is_empty(), + "Transcript should contain messages" + ); + + // Model should be extracted from first assistant message + assert!( + model.is_some(), + "Model should be extracted from assistant message" + ); + assert_eq!( + model.unwrap(), + "anthropic/claude-sonnet-4-20250514", + "Model should be provider/model format" + ); + + // Verify correct message types exist + let has_user = transcript + .messages() + .iter() + .any(|m| matches!(m, Message::User { .. })); + let has_assistant = transcript + .messages() + .iter() + .any(|m| matches!(m, Message::Assistant { .. })); + let has_tool_use = transcript + .messages() + .iter() + .any(|m| matches!(m, Message::ToolUse { .. })); + + assert!(has_user, "Should have user messages"); + assert!(has_assistant, "Should have assistant messages"); + assert!(has_tool_use, "Should have tool_use messages"); +} + +#[test] +fn test_parse_kilo_code_sqlite_transcript() { + let kilo_code_root = kilo_code_sqlite_fixture_path(); + let session_id = "test-session-123"; + + let (transcript, model) = + KiloCodePreset::transcript_and_model_from_storage(&kilo_code_root, session_id) + .expect("Failed to parse Kilo Code sqlite storage"); + + assert!( + !transcript.messages().is_empty(), + "Transcript should contain messages" + ); + assert_eq!( + model.as_deref(), + Some("anthropic/claude-sonnet-4-20250514"), + "Model should come from sqlite assistant message metadata" + ); + + assert!( + matches!(transcript.messages()[0], Message::User { .. }), + "First message should be from user" + ); + if let Message::User { text, .. 
} = &transcript.messages()[0] { + assert!( + text.contains("sqlite transcript data"), + "Expected sqlite fixture user text" + ); + } +} + +#[test] +fn test_kilo_code_sqlite_takes_precedence_over_legacy_storage() { + let temp_dir = tempfile::tempdir().unwrap(); + let kilo_code_root = temp_dir.path(); + + let sqlite_db = kilo_code_sqlite_fixture_path().join("kilo.db"); + fs::copy(&sqlite_db, kilo_code_root.join("kilo.db")).unwrap(); + + let legacy_storage = kilo_code_storage_fixture_path(); + copy_dir_all(&legacy_storage, &kilo_code_root.join("storage")).unwrap(); + + let (transcript, model) = + KiloCodePreset::transcript_and_model_from_storage(kilo_code_root, "test-session-123") + .expect("Should parse from sqlite first"); + + assert_eq!(model.as_deref(), Some("anthropic/claude-sonnet-4-20250514")); + if let Message::User { text, .. } = &transcript.messages()[0] { + assert!( + text.contains("sqlite transcript data"), + "sqlite transcript should win over legacy storage" + ); + assert!( + !text.contains("Update the comment"), + "legacy transcript should not be used when sqlite has data" + ); + } +} + +#[test] +fn test_kilo_code_sqlite_falls_back_to_legacy_storage_when_sqlite_empty() { + let temp_dir = tempfile::tempdir().unwrap(); + let kilo_code_root = temp_dir.path(); + + let sqlite_db = kilo_code_sqlite_empty_fixture_path().join("kilo.db"); + fs::copy(&sqlite_db, kilo_code_root.join("kilo.db")).unwrap(); + + let legacy_storage = kilo_code_storage_fixture_path(); + copy_dir_all(&legacy_storage, &kilo_code_root.join("storage")).unwrap(); + + let (transcript, model) = + KiloCodePreset::transcript_and_model_from_storage(kilo_code_root, "test-session-123") + .expect("Should fallback to legacy storage when sqlite has no session data"); + + assert_eq!(model.as_deref(), Some("anthropic/claude-sonnet-4-20250514")); + if let Message::User { text, .. 
} = &transcript.messages()[0] { + assert!( + text.contains("Update the comment"), + "Should fallback to legacy fixture transcript" + ); + } +} + +#[test] +fn test_kilo_code_transcript_message_order() { + let storage_path = kilo_code_storage_fixture_path(); + let session_id = "test-session-123"; + + let (transcript, _) = + KiloCodePreset::transcript_and_model_from_storage(&storage_path, session_id) + .expect("Failed to parse Kilo Code storage"); + + // Messages should be sorted by creation time + // User message comes first (earlier timestamp), then assistant message + assert!( + matches!(transcript.messages()[0], Message::User { .. }), + "First message should be from user" + ); + + // Verify user message text + if let Message::User { text, .. } = &transcript.messages()[0] { + assert!( + text.contains("Update the comment"), + "User message should contain expected text" + ); + } +} + +#[test] +fn test_kilo_code_transcript_timestamps_are_rfc3339() { + let storage_path = kilo_code_storage_fixture_path(); + let session_id = "test-session-123"; + + let (transcript, _) = + KiloCodePreset::transcript_and_model_from_storage(&storage_path, session_id) + .expect("Failed to parse Kilo Code storage"); + + for message in transcript.messages() { + match message { + Message::User { timestamp, .. } + | Message::Assistant { timestamp, .. } + | Message::ToolUse { timestamp, .. } + | Message::Thinking { timestamp, .. } + | Message::Plan { timestamp, .. 
} => { + if let Some(ts) = timestamp { + // RFC3339 format: 2024-01-28T15:30:30+00:00 or similar + assert!( + ts.contains("T") && (ts.contains("+") || ts.ends_with("Z")), + "Timestamp should be RFC3339 format, got: {}", + ts + ); + } + } + } + } +} + +#[test] +#[serial_test::serial] // Run serially to avoid env var conflicts with other tests +fn test_kilo_code_preset_pretooluse_returns_human_checkpoint() { + let storage_path = kilo_code_storage_fixture_path(); + + let hook_input = json!({ + "hook_event_name": "PreToolUse", + "session_id": "test-session-123", + "cwd": "/Users/test/project", + "tool_input": { + "filePath": "/Users/test/project/index.ts" + } + }) + .to_string(); + + // Set the test storage path via env var + unsafe { + std::env::set_var( + "GIT_AI_KILO_CODE_STORAGE_PATH", + storage_path.to_str().unwrap(), + ); + } + + let flags = AgentCheckpointFlags { + hook_input: Some(hook_input), + }; + + let result = KiloCodePreset + .run(flags) + .expect("Failed to run KiloCodePreset"); + + // Clean up env var + unsafe { + std::env::remove_var("GIT_AI_KILO_CODE_STORAGE_PATH"); + } + + assert_eq!( + result.checkpoint_kind, + CheckpointKind::Human, + "PreToolUse should produce a Human checkpoint" + ); + + assert!( + result.will_edit_filepaths.is_some(), + "will_edit_filepaths should be populated for PreToolUse" + ); + + let will_edit = result.will_edit_filepaths.unwrap(); + assert_eq!(will_edit[0], "/Users/test/project/index.ts"); + + assert!( + result.transcript.is_none(), + "Transcript should be None for Human checkpoint" + ); +} + +#[test] +#[serial_test::serial] // Run serially to avoid env var conflicts with other tests +fn test_kilo_code_preset_posttooluse_returns_ai_checkpoint() { + let storage_path = kilo_code_storage_fixture_path(); + + let hook_input = json!({ + "hook_event_name": "PostToolUse", + "session_id": "test-session-123", + "cwd": "/Users/test/project", + "tool_input": { + "filePath": "/Users/test/project/index.ts" + } + }) + .to_string(); + + 
// Set the test storage path via env var + unsafe { + std::env::set_var( + "GIT_AI_KILO_CODE_STORAGE_PATH", + storage_path.to_str().unwrap(), + ); + } + + let flags = AgentCheckpointFlags { + hook_input: Some(hook_input), + }; + + let result = KiloCodePreset + .run(flags) + .expect("Failed to run KiloCodePreset"); + + // Clean up env var + unsafe { + std::env::remove_var("GIT_AI_KILO_CODE_STORAGE_PATH"); + } + + assert_eq!( + result.checkpoint_kind, + CheckpointKind::AiAgent, + "PostToolUse should produce an AiAgent checkpoint" + ); + + assert!( + result.transcript.is_some(), + "Transcript should be present for AI checkpoint" + ); + + assert!( + result.edited_filepaths.is_some(), + "edited_filepaths should be populated for PostToolUse" + ); + + let edited = result.edited_filepaths.unwrap(); + assert_eq!(edited[0], "/Users/test/project/index.ts"); + + assert_eq!(result.agent_id.tool, "kilo-code"); + assert_eq!(result.agent_id.id, "test-session-123"); + assert_eq!(result.agent_id.model, "anthropic/claude-sonnet-4-20250514"); +} + +#[test] +#[serial_test::serial] // Run serially to avoid env var conflicts with other tests +fn test_kilo_code_preset_stores_session_id_in_metadata() { + let storage_path = kilo_code_storage_fixture_path(); + + let hook_input = json!({ + "hook_event_name": "PostToolUse", + "session_id": "test-session-123", + "cwd": "/Users/test/project", + "tool_input": { + "filePath": "/Users/test/project/index.ts" + } + }) + .to_string(); + + unsafe { + std::env::set_var( + "GIT_AI_KILO_CODE_STORAGE_PATH", + storage_path.to_str().unwrap(), + ); + } + + let flags = AgentCheckpointFlags { + hook_input: Some(hook_input), + }; + + let result = KiloCodePreset + .run(flags) + .expect("Failed to run KiloCodePreset"); + + unsafe { + std::env::remove_var("GIT_AI_KILO_CODE_STORAGE_PATH"); + } + + assert!(result.agent_metadata.is_some()); + let metadata = result.agent_metadata.unwrap(); + assert!( + metadata.contains_key("session_id"), + "Metadata should contain 
session_id" + ); + assert_eq!(metadata["session_id"], "test-session-123"); +} + +#[test] +#[serial_test::serial] // Run serially to avoid env var conflicts with other tests +fn test_kilo_code_preset_sets_repo_working_dir() { + let storage_path = kilo_code_storage_fixture_path(); + + let hook_input = json!({ + "hook_event_name": "PostToolUse", + "session_id": "test-session-123", + "cwd": "/Users/test/my-project", + "tool_input": { + "filePath": "/Users/test/my-project/src/main.ts" + } + }) + .to_string(); + + unsafe { + std::env::set_var( + "GIT_AI_KILO_CODE_STORAGE_PATH", + storage_path.to_str().unwrap(), + ); + } + + let flags = AgentCheckpointFlags { + hook_input: Some(hook_input), + }; + + let result = KiloCodePreset + .run(flags) + .expect("Failed to run KiloCodePreset"); + + unsafe { + std::env::remove_var("GIT_AI_KILO_CODE_STORAGE_PATH"); + } + + assert!(result.repo_working_dir.is_some()); + assert_eq!(result.repo_working_dir.unwrap(), "/Users/test/my-project"); +} + +#[test] +fn test_kilo_code_empty_session_returns_empty_transcript() { + // Create a temp directory with empty session structure + let temp_dir = tempfile::tempdir().unwrap(); + let storage_path = temp_dir.path(); + let session_id = "empty-session"; + + // Create message directory but no files + let message_dir = storage_path.join("message").join(session_id); + fs::create_dir_all(&message_dir).unwrap(); + + let (transcript, model) = + KiloCodePreset::transcript_and_model_from_storage(storage_path, session_id) + .expect("Should handle empty session"); + + assert!( + transcript.messages().is_empty(), + "Empty session should produce empty transcript" + ); + assert!(model.is_none(), "Empty session should have no model"); +} + +#[test] +fn test_kilo_code_nonexistent_session_returns_empty_transcript() { + let storage_path = kilo_code_storage_fixture_path(); + let session_id = "nonexistent-session"; + + let (transcript, model) = + KiloCodePreset::transcript_and_model_from_storage(&storage_path, 
session_id) + .expect("Should handle nonexistent session"); + + assert!( + transcript.messages().is_empty(), + "Nonexistent session should produce empty transcript" + ); + assert!(model.is_none(), "Nonexistent session should have no model"); +} + +#[test] +fn test_kilo_code_tool_use_only_from_assistant() { + let storage_path = kilo_code_storage_fixture_path(); + let session_id = "test-session-123"; + + let (transcript, _) = + KiloCodePreset::transcript_and_model_from_storage(&storage_path, session_id) + .expect("Failed to parse Kilo Code storage"); + + // Find tool use messages + let tool_uses: Vec<_> = transcript + .messages() + .iter() + .filter(|m| matches!(m, Message::ToolUse { .. })) + .collect(); + + assert!(!tool_uses.is_empty(), "Should have tool use messages"); + + // Verify tool use has expected content + if let Message::ToolUse { name, input, .. } = tool_uses[0] { + assert_eq!(name, "edit", "Tool name should be 'edit'"); + assert!( + input.get("filePath").is_some(), + "Tool input should contain filePath" + ); + } else { + panic!("Expected ToolUse message"); + } +} + +#[test] +#[serial_test::serial] // Run serially to avoid env var conflicts with other tests +fn test_kilo_code_e2e_checkpoint_and_commit() { + use repos::test_repo::TestRepo; + + let mut repo = TestRepo::new(); + + repo.patch_git_ai_config(|patch| { + patch.exclude_prompts_in_repositories = Some(vec![]); + }); + + let repo_root = repo.canonical_path(); + + // Create initial file and commit + let src_dir = repo_root.join("src"); + fs::create_dir_all(&src_dir).unwrap(); + let file_path = src_dir.join("main.ts"); + fs::write(&file_path, "// initial\n").unwrap(); + repo.stage_all_and_commit("Initial commit").unwrap(); + + // Copy fixture storage to temp location + let temp_storage = tempfile::tempdir().unwrap(); + let storage_path = temp_storage.path(); + + // Copy the fixture storage structure + let fixture_storage = kilo_code_storage_fixture_path(); + copy_dir_all(&fixture_storage, 
storage_path).unwrap(); + + // Set up environment for the test + unsafe { + std::env::set_var( + "GIT_AI_KILO_CODE_STORAGE_PATH", + storage_path.to_str().unwrap(), + ); + } + + // Create hook input for PreToolUse (human checkpoint) + let pre_hook_input = json!({ + "hook_event_name": "PreToolUse", + "session_id": "test-session-123", + "cwd": repo_root.to_string_lossy().to_string(), + "tool_input": { + "filePath": file_path.to_string_lossy().to_string() + } + }) + .to_string(); + + // Run human checkpoint + repo.git_ai(&["checkpoint", "kilo-code", "--hook-input", &pre_hook_input]) + .unwrap(); + + // Make AI edit + fs::write(&file_path, "// initial\n// Hello World\n").unwrap(); + + // Create hook input for PostToolUse (AI checkpoint) + let post_hook_input = json!({ + "hook_event_name": "PostToolUse", + "session_id": "test-session-123", + "cwd": repo_root.to_string_lossy().to_string(), + "tool_input": { + "filePath": file_path.to_string_lossy().to_string() + } + }) + .to_string(); + + // Run AI checkpoint + repo.git_ai(&["checkpoint", "kilo-code", "--hook-input", &post_hook_input]) + .unwrap(); + + // Clean up env var + unsafe { + std::env::remove_var("GIT_AI_KILO_CODE_STORAGE_PATH"); + } + + // Commit + let commit = repo.stage_all_and_commit("Add AI line").unwrap(); + + // Should have a prompt record + assert!( + !commit.authorship_log.metadata.prompts.is_empty(), + "Should have at least one prompt record" + ); + + let prompt_record = commit + .authorship_log + .metadata + .prompts + .values() + .next() + .expect("Prompt record should exist"); + + assert_eq!( + prompt_record.agent_id.tool, "kilo-code", + "Agent tool should be kilo-code" + ); + assert_eq!( + prompt_record.agent_id.model, "anthropic/claude-sonnet-4-20250514", + "Model should match fixture" + ); +} + +/// Helper function to recursively copy a directory +fn copy_dir_all(src: &std::path::Path, dst: &std::path::Path) -> std::io::Result<()> { + fs::create_dir_all(dst)?; + for entry in fs::read_dir(src)? 
{ + let entry = entry?; + let ty = entry.file_type()?; + if ty.is_dir() { + copy_dir_all(&entry.path(), &dst.join(entry.file_name()))?; + } else { + fs::copy(entry.path(), dst.join(entry.file_name()))?; + } + } + Ok(()) +} + +reuse_tests_in_worktree!( + test_parse_kilo_code_storage_transcript, + test_parse_kilo_code_sqlite_transcript, + test_kilo_code_sqlite_takes_precedence_over_legacy_storage, + test_kilo_code_sqlite_falls_back_to_legacy_storage_when_sqlite_empty, + test_kilo_code_transcript_message_order, + test_kilo_code_transcript_timestamps_are_rfc3339, + test_kilo_code_empty_session_returns_empty_transcript, + test_kilo_code_nonexistent_session_returns_empty_transcript, + test_kilo_code_tool_use_only_from_assistant, +);