Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
423 changes: 423 additions & 0 deletions cli/src/commands/__tests__/insights.test.ts

Large diffs are not rendered by default.

646 changes: 646 additions & 0 deletions cli/src/commands/insights.ts

Large diffs are not rendered by default.

18 changes: 18 additions & 0 deletions cli/src/commands/sync.ts
Original file line number Diff line number Diff line change
Expand Up @@ -366,6 +366,24 @@ export async function syncCommand(options: SyncOptions = {}): Promise<void> {
}
}


/**
 * Sync exactly one session file into SQLite.
 *
 * The insights --hook path calls this to guarantee the session is fresh
 * before analysis. It skips the work a full sync does (directory scanning,
 * iterating every provider), so it is much faster.
 *
 * @param options.filePath   Path to the session file to parse and persist.
 * @param options.sourceTool Provider identifier; defaults to 'claude-code'.
 * @param options.quiet      Accepted for interface symmetry with syncCommand;
 *                           currently unused here (no output is produced).
 */
export async function syncSingleFile(options: {
  filePath: string;
  sourceTool?: string;
  quiet?: boolean;
}): Promise<void> {
  const { filePath, sourceTool = 'claude-code' } = options;
  const provider = getProvider(sourceTool);
  const session = await provider.parse(filePath);
  // Unparseable or empty file — nothing to persist.
  if (!session) {
    return;
  }
  insertSessionWithProjectAndReturnIsNew(session, false);
  insertMessages(session);
}

/**
* Filter files to only those that need syncing
*/
Expand Down
2 changes: 1 addition & 1 deletion cli/src/db/__tests__/migrate.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ describe('runMigrations — idempotency', () => {
.all() as Array<{ version: number }>;

// One row per version, no duplicates
expect(rows.map(r => r.version)).toEqual([1, 2, 3, 4, 5, 6, 7]);
expect(rows.map(r => r.version)).toEqual([1, 2, 3, 4, 5, 6, 7, 8]);
db.close();
});
});
Expand Down
15 changes: 14 additions & 1 deletion cli/src/db/migrate.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import { SCHEMA_SQL, CURRENT_SCHEMA_VERSION } from './schema.js';
export interface MigrationResult {
v6Applied: boolean;
v7Applied: boolean;
v8Applied: boolean;
}

/**
Expand All @@ -17,6 +18,7 @@ export interface MigrationResult {
* Version 5: Add deleted_at column to sessions for soft-delete (user-initiated hide)
* Version 6: Add compact_count, auto_compact_count, slash_commands columns to sessions
* Version 7: Add analysis_usage table for tracking LLM analysis costs per session
* Version 8: Add session_message_count to analysis_usage for resume detection
*/
export function runMigrations(db: Database.Database): MigrationResult {
// Create schema_version table first if it doesn't exist.
Expand Down Expand Up @@ -62,7 +64,13 @@ export function runMigrations(db: Database.Database): MigrationResult {
v7Applied = true;
}

return { v6Applied, v7Applied };
let v8Applied = false;
if (currentVersion < 8) {
applyV8(db);
v8Applied = true;
}

return { v6Applied, v7Applied, v8Applied };
}

function getCurrentVersion(db: Database.Database): number {
Expand Down Expand Up @@ -133,6 +141,7 @@ function applyV6(db: Database.Database): void {
db.prepare('INSERT OR IGNORE INTO schema_version (version) VALUES (?)').run(6);
}


function applyV7(db: Database.Database): void {
db.exec(`
CREATE TABLE IF NOT EXISTS analysis_usage (
Expand All @@ -157,3 +166,7 @@ function applyV7(db: Database.Database): void {
`);
db.prepare('INSERT OR IGNORE INTO schema_version (version) VALUES (?)').run(7);
}
/**
 * Migration V8: add `session_message_count` to `analysis_usage`.
 *
 * Records how many messages a session had when it was last analyzed, so the
 * insights command can detect a resumed/grown session and decide whether
 * re-analysis is needed. Guarded by the schema_version check in
 * runMigrations, so the ALTER TABLE runs at most once per database.
 */
function applyV8(db: Database.Database): void {
  db.exec('ALTER TABLE analysis_usage ADD COLUMN session_message_count INTEGER');
  db.prepare('INSERT OR IGNORE INTO schema_version (version) VALUES (?)').run(8);
}
6 changes: 4 additions & 2 deletions cli/src/db/schema.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -298,12 +298,12 @@ describe('runMigrations', () => {
db.close();
});

it('V7 schema version is 7 after migration', () => {
it('V8 schema version is 8 after migration', () => {
const db = new Database(':memory:');
runMigrations(db);

const row = db.prepare('SELECT MAX(version) AS v FROM schema_version').get() as { v: number };
expect(row.v).toBe(7);
expect(row.v).toBe(8);

db.close();
});
Expand All @@ -313,6 +313,7 @@ describe('runMigrations', () => {
const result = runMigrations(db);
expect(result.v6Applied).toBe(true);
expect(result.v7Applied).toBe(true);
expect(result.v8Applied).toBe(true);
db.close();
});

Expand All @@ -322,6 +323,7 @@ describe('runMigrations', () => {
const result = runMigrations(db); // second run — nothing to apply
expect(result.v6Applied).toBe(false);
expect(result.v7Applied).toBe(false);
expect(result.v8Applied).toBe(false);
db.close();
});
});
2 changes: 1 addition & 1 deletion cli/src/db/schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,6 @@ CREATE TABLE IF NOT EXISTS usage_stats (
);
`;

export const CURRENT_SCHEMA_VERSION = 7;
export const CURRENT_SCHEMA_VERSION = 8;

export { runMigrations } from './migrate.js';
24 changes: 24 additions & 0 deletions cli/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import { statsCommand } from './commands/stats/index.js';
import { configCommand } from './commands/config.js';
import { telemetryCommand } from './commands/telemetry.js';
import { reflectCommand } from './commands/reflect.js';
import { insightsCommand, insightsCheckCommand } from './commands/insights.js';
import { showTelemetryNoticeIfNeeded } from './utils/telemetry.js';

const pkg = JSON.parse(readFileSync(new URL('../package.json', import.meta.url), 'utf-8'));
Expand Down Expand Up @@ -116,6 +117,29 @@ program.addCommand(configCommand);
program.addCommand(telemetryCommand);
program.addCommand(reflectCommand);


// insights command — analyze a session using native claude -p or configured LLM
const insightsCmd = program
.command('insights [session_id]')
.description('Analyze a session with AI — extracts insights and prompt quality score')
.option('--native', 'Use claude -p (your Claude subscription, no API key required)')
.option('--hook', 'Read session context from stdin (for Claude Code SessionEnd hook)')
.option('-s, --source <tool>', 'Source tool identifier (default: claude-code)')
.option('--force', 'Re-analyze even if already analyzed at this session length')
.option('-q, --quiet', 'Suppress output')
.action(async (sessionId: string | undefined, opts) => {
await insightsCommand(sessionId, opts);
});

insightsCmd
.command('check')
.description('Check for unanalyzed sessions in the last N days')
.option('--days <n>', 'Lookback window in days', '7')
.option('-q, --quiet', 'Machine-readable output (just count)')
.action((opts) => {
insightsCheckCommand({ days: opts.days ? parseInt(opts.days, 10) : 7, quiet: opts.quiet });
});

// Default action: running `code-insights` with no arguments opens the dashboard.
// Dashboard auto-syncs sessions first, giving "1 command to value" on first run.
program.action(async () => {
Expand Down
17 changes: 14 additions & 3 deletions server/src/llm/analysis-usage-db.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,17 +41,28 @@ export interface SaveAnalysisUsageData {

/**
* Persist analysis token usage to SQLite.
* Uses INSERT OR REPLACE — re-analysis overwrites the previous row (latest cost only).
* The composite PK (session_id, analysis_type) enforces one row per type per session.
* Uses INSERT ... ON CONFLICT DO UPDATE — preserves columns not in this write
* (e.g. session_message_count written by the CLI insights command).
* INSERT OR REPLACE would DELETE+INSERT, clobbering those columns.
*/
export function saveAnalysisUsage(data: SaveAnalysisUsageData): void {
const db = getDb();
db.prepare(`
INSERT OR REPLACE INTO analysis_usage
INSERT INTO analysis_usage
(session_id, analysis_type, provider, model,
input_tokens, output_tokens, cache_creation_tokens, cache_read_tokens,
estimated_cost_usd, duration_ms, chunk_count)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(session_id, analysis_type) DO UPDATE SET
provider = excluded.provider,
model = excluded.model,
input_tokens = excluded.input_tokens,
output_tokens = excluded.output_tokens,
cache_creation_tokens = excluded.cache_creation_tokens,
cache_read_tokens = excluded.cache_read_tokens,
estimated_cost_usd = excluded.estimated_cost_usd,
duration_ms = excluded.duration_ms,
chunk_count = excluded.chunk_count
`).run(
data.session_id,
data.analysis_type,
Expand Down
Loading