19 changes: 6 additions & 13 deletions src/embedder.ts
@@ -1,4 +1,5 @@
import type { EmbedderHealth, EmbedderRetryConfig, EmbeddingConfig } from "./types.js";
import { log } from "./logger.js";

export interface Embedder {
readonly model: string;
@@ -63,7 +64,7 @@ async function embedWithRetry(
globalEmbedderHealth.lastError = null;
if (globalEmbedderHealth.status === "degraded") {
globalEmbedderHealth.status = "healthy";
console.info(`[lancedb-opencode-pro] Embedder recovered, resuming normal mode`);
log("info", "Embedder recovered, resuming normal mode");
}
return result;
} catch (error) {
@@ -78,18 +79,14 @@ async function embedWithRetry(
const delay = Math.floor(
retry.initialDelayMs * Math.pow(retry.backoffMultiplier, attempt - 1),
);
console.warn(
`[lancedb-opencode-pro] Embedder failed (attempt ${attempt}/${retry.maxAttempts}), retrying in ${delay}ms: ${lastError.message}`,
);
log("warn", `Embedder failed (attempt ${attempt}/${retry.maxAttempts}), retrying in ${delay}ms: ${lastError.message}`);
await sleep(delay);
}
}

globalEmbedderHealth.status = "degraded";
globalEmbedderHealth.fallbackActive = true;
console.warn(
`[lancedb-opencode-pro] Embedder unavailable after ${retry.maxAttempts} attempts, falling back to BM25-only search`,
);
log("warn", `Embedder unavailable after ${retry.maxAttempts} attempts, falling back to BM25-only search`);
throw lastError;
}

@@ -218,9 +215,7 @@ export class OllamaEmbedder implements Embedder {
} catch {
const fb = fallbackDim(this.model);
if (fb !== null) {
console.warn(
`[lancedb-opencode-pro] Ollama unreachable, using fallback dim ${fb} for model "${this.model}"`,
);
log("warn", `Ollama unreachable, using fallback dim ${fb} for model "${this.model}"`);
return fb;
}
throw new Error(
@@ -296,9 +291,7 @@ export class OpenAIEmbedder implements Embedder {
} catch {
const fb = fallbackDim(this.model);
if (fb !== null) {
console.warn(
`[lancedb-opencode-pro] OpenAI embedding probe failed, using fallback dim ${fb} for model "${this.model}"`,
);
log("warn", `OpenAI embedding probe failed, using fallback dim ${fb} for model "${this.model}"`);
return fb;
}
throw new Error(
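The retry loop in `embedWithRetry` computes each delay as `initialDelayMs * backoffMultiplier^(attempt - 1)`. A minimal sketch of that schedule, using the field names from the diff but illustrative numbers (the actual defaults are not shown in this change):

```ts
// Illustrative retry settings; the numeric values are assumptions, not the plugin's defaults.
const retry = { maxAttempts: 3, initialDelayMs: 200, backoffMultiplier: 2 };

for (let attempt = 1; attempt <= retry.maxAttempts; attempt++) {
  // Same computation as embedWithRetry: 200ms, then 400ms, then 800ms.
  const delay = Math.floor(
    retry.initialDelayMs * Math.pow(retry.backoffMultiplier, attempt - 1),
  );
  console.log(`attempt ${attempt}/${retry.maxAttempts} -> wait ${delay}ms`);
}
```

Once the last attempt fails, the code above marks the embedder degraded and logs that search is falling back to BM25-only, as the warning in the diff shows.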
20 changes: 12 additions & 8 deletions src/index.ts
@@ -10,9 +10,12 @@ import { MemoryStore } from "./store.js";
import type { CaptureOutcome, CaptureSkipReason, EpisodicTaskRecord, FailureType, LastRecallSession, MemoryRuntimeConfig, PreferenceProfile, SearchResult, SuccessPattern, TaskState, TaskType, ValidationOutcome, ValidationType } from "./types.js";
import { validateEpisodicRecordArray } from "./types.js";
import { generateId } from "./utils.js";
import { initLogger, log } from "./logger.js";
import { calculateInjectionLimit, createSummarizationConfig, summarizeContent } from "./summarize.js";
import { createMemoryTools, createFeedbackTools, createEpisodicTools, type ToolRuntimeState } from "./tools/index.js";

const PLUGIN_VERSION = "0.7.0";

const SCHEMA_VERSION = 1;

// Task-type detection keywords
@@ -71,6 +74,9 @@ function getCategoryWeights(taskType: TaskType, profiles: Record<TaskType, { cat
}

const plugin: Plugin = async (input) => {
initLogger(input.client);
log("info", `Plugin v${PLUGIN_VERSION} initialized`);

const state = await createRuntimeState(input);

const hooks: Hooks = {
@@ -136,7 +142,7 @@ const plugin: Plugin = async (input) => {
queryVector = await state.embedder.embed(query);
} catch (error) {
embedderFailed = true;
console.warn(`[lancedb-opencode-pro] embedding unavailable during recall: ${toErrorMessage(error)}`);
log("warn", `embedding unavailable during recall: ${toErrorMessage(error)}`);
queryVector = [];
}

@@ -145,7 +151,7 @@
const effectiveBm25Weight = isFallback ? 1 : (state.config.retrieval.mode === "vector" ? 0 : state.config.retrieval.bm25Weight);

if (isFallback) {
console.info(`[lancedb-opencode-pro] Using BM25-only search (embedder unavailable)`);
log("info", "Using BM25-only search (embedder unavailable)");
}

const results = await state.store.search({
Expand Down Expand Up @@ -279,7 +285,7 @@ const plugin: Plugin = async (input) => {
);
}
} catch (error) {
console.warn(`[lancedb-opencode-pro] similar task recall failed: ${toErrorMessage(error)}`);
log("warn", `similar task recall failed: ${toErrorMessage(error)}`);
}

eventOutput.system.push(blocks.join("\n\n"));
@@ -320,9 +326,7 @@ async function createRuntimeState(input: Parameters<Plugin>[0]): Promise<Runtime
await state.store.init(dim);
state.initialized = true;
} catch (error) {
console.warn(
`[lancedb-opencode-pro] initialization deferred: ${toErrorMessage(error)}`,
);
log("warn", `initialization deferred: ${toErrorMessage(error)}`);
}
},
};
@@ -399,7 +403,7 @@ async function getLastUserText(
try {
vector = await state.embedder.embed(result.candidate.text);
} catch (error) {
console.warn(`[lancedb-opencode-pro] embedding unavailable during auto-capture: ${toErrorMessage(error)}`);
log("warn", `embedding unavailable during auto-capture: ${toErrorMessage(error)}`);
await recordCaptureEvent(state, {
sessionID,
scope: activeScope,
@@ -411,7 +415,7 @@
}

if (vector.length === 0) {
console.warn("[lancedb-opencode-pro] auto-capture skipped because embedding vector is empty");
log("warn", "auto-capture skipped because embedding vector is empty");
await recordCaptureEvent(state, {
sessionID,
scope: activeScope,
52 changes: 52 additions & 0 deletions src/logger.ts
@@ -0,0 +1,52 @@
import type { OpencodeClient } from "@opencode-ai/sdk";

type LogLevel = "debug" | "info" | "warn" | "error";

const SERVICE_NAME = "lancedb-opencode-pro";

let _client: OpencodeClient | null = null;

export function initLogger(client: OpencodeClient): void {
_client = client;
}

// Routes to client.app.log() when SDK client is bound, otherwise falls back to console.
export function log(
level: LogLevel,
message: string,
extra?: Record<string, unknown>,
): void {
if (_client?.app?.log) {
_client.app
.log({
body: {
service: SERVICE_NAME,
level,
message,
...(extra !== undefined ? { extra } : {}),
},
})
.catch(() => consoleFallback(level, message));
return;
}

consoleFallback(level, message);
}

function consoleFallback(level: LogLevel, message: string): void {
const formatted = `[${SERVICE_NAME}] ${message}`;
switch (level) {
case "error":
console.error(formatted);
break;
case "warn":
console.warn(formatted);
break;
case "info":
console.info(formatted);
break;
default:
console.log(formatted);
break;
}
}
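A small usage sketch for the helper above; the call sites and the `extra` payload here are illustrative, not taken from the diff:

```ts
import { initLogger, log } from "./logger.js";

// During plugin startup the SDK client is bound once (see src/index.ts):
//   initLogger(input.client);
// After that, every call site shares the same API. The optional `extra`
// object is forwarded to client.app.log(); the console fallback prints
// only the formatted message and ignores `extra`.
log("info", "Embedder recovered, resuming normal mode");
log("warn", "Embedder failed, retrying", { attempt: 2, delayMs: 400 });
```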
3 changes: 2 additions & 1 deletion src/tools/memory.ts
@@ -2,6 +2,7 @@ import { tool } from "@opencode-ai/plugin";
import { deriveProjectScope, buildScopeFilter } from "../scope.js";
import { generateId } from "../utils.js";
import { getEmbedderHealth, type Embedder } from "../embedder.js";
import { log } from "../logger.js";
import type { MemoryStore } from "../store.js";
import type { MemoryRuntimeConfig, MemoryCategory, CitationStatus, ValidationOutcome } from "../types.js";

@@ -68,7 +69,7 @@ export function createMemoryTools(state: ToolRuntimeState) {
const effectiveBm25Weight = isFallback ? 1 : (state.config.retrieval.mode === "vector" ? 0 : state.config.retrieval.bm25Weight);

if (isFallback) {
console.info(`[lancedb-opencode-pro] Using BM25-only search (embedder unavailable)`);
log("info", "Using BM25-only search (embedder unavailable)");
}

const results = await state.store.search({
8 changes: 6 additions & 2 deletions test/config.test.ts
@@ -2,11 +2,15 @@ import assert from "node:assert/strict";
import test from "node:test";
import { resolveMemoryConfig } from "../src/config.js";

async function withPatchedEnv<T>(values: Record<string, string>, run: () => T): Promise<T> {
async function withPatchedEnv<T>(values: Record<string, string | undefined>, run: () => T): Promise<T> {
const oldValues: Record<string, string | undefined> = {};
for (const key of Object.keys(values)) {
oldValues[key] = process.env[key];
process.env[key] = values[key];
if (values[key] === undefined) {
delete process.env[key];
} else {
process.env[key] = values[key] as string;
}
}
try {
return run();
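With the widened signature, callers can pass `undefined` to have a variable removed from `process.env` for the duration of the callback rather than set to a string. A minimal illustrative call (the variable name is borrowed from the regression test below; the assertion is a sketch):

```ts
await withPatchedEnv(
  { LANCEDB_OPENCODE_PRO_EMBEDDING_MODEL: undefined },
  () => {
    // Inside the callback the key is genuinely absent, not set to the string "undefined".
    assert.equal(process.env.LANCEDB_OPENCODE_PRO_EMBEDDING_MODEL, undefined);
  },
);
```

The same widening is applied to the `withPatchedEnv` helper in test/regression/plugin.test.ts, which the next file uses to unset the embedding-model variables.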
10 changes: 8 additions & 2 deletions test/regression/plugin.test.ts
@@ -57,11 +57,15 @@ async function withPatchedFetch<T>(run: () => Promise<T>): Promise<T> {
}
}

async function withPatchedEnv<T>(values: Record<string, string>, run: () => Promise<T>): Promise<T> {
async function withPatchedEnv<T>(values: Record<string, string | undefined>, run: () => Promise<T>): Promise<T> {
const previous = new Map<string, string | undefined>();
for (const [key, value] of Object.entries(values)) {
previous.set(key, process.env[key]);
process.env[key] = value;
if (value === undefined) {
delete process.env[key];
} else {
process.env[key] = value;
}
}

try {
@@ -350,6 +354,8 @@ test("resolveMemoryConfig fails fast for openai without model", async () => {
LANCEDB_OPENCODE_PRO_EMBEDDING_PROVIDER: "openai",
LANCEDB_OPENCODE_PRO_OPENAI_API_KEY: "test-openai-api-key",
LANCEDB_OPENCODE_PRO_SKIP_SIDECAR: "true",
LANCEDB_OPENCODE_PRO_EMBEDDING_MODEL: undefined,
LANCEDB_OPENCODE_PRO_OPENAI_MODEL: undefined,
},
async () => {
assert.throws(