From c26d226ae27c1223c6cf1abe991897ce3c433525 Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Tue, 10 Mar 2026 23:27:14 +0200 Subject: [PATCH 1/9] chore: audit deepseek, add provider deps, extend types and config --- deno.json | 3 +++ src/lib/configServiceTypes.d.ts | 18 ++++++++++++++++-- src/lib/constants.ts | 4 ++++ 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/deno.json b/deno.json index 8090d8d..39c605e 100644 --- a/deno.json +++ b/deno.json @@ -18,8 +18,10 @@ "@ai-sdk/deepseek": "npm:@ai-sdk/deepseek@^2.0.20", "@ai-sdk/google": "npm:@ai-sdk/google@^3.0.33", "@ai-sdk/mistral": "npm:@ai-sdk/mistral@^3.0.20", + "@ai-sdk/moonshotai": "npm:@ai-sdk/moonshotai@^2.0.10", "@ai-sdk/openai": "npm:@ai-sdk/openai@^3.0.36", "@ai-sdk/xai": "npm:@ai-sdk/xai@^3.0.59", + "@openrouter/ai-sdk-provider": "npm:@openrouter/ai-sdk-provider@^2.2.5", "@arrirpc/schema": "npm:@arrirpc/schema@^0.81.2", "@cliffy/prompt": "jsr:@cliffy/prompt@^1.0.0", "@std/fmt": "jsr:@std/fmt@1.0.9", @@ -28,6 +30,7 @@ "husky": "npm:husky@^9.1.7", "lib-result": "npm:lib-result@^3.2.2", "ollama-ai-provider-v2": "npm:ollama-ai-provider-v2@^3.3.1", + "vercel-minimax-ai-provider": "npm:vercel-minimax-ai-provider@^0.0.2", "@/": "./src/" }, "lint": { diff --git a/src/lib/configServiceTypes.d.ts b/src/lib/configServiceTypes.d.ts index 8ce5032..ab6bf6d 100644 --- a/src/lib/configServiceTypes.d.ts +++ b/src/lib/configServiceTypes.d.ts @@ -10,6 +10,12 @@ type OllamaConfig = { baseUrl: "http://localhost:11434" | (string & {}); }; +// Configuration for the OpenRouter meta-provider +type OpenRouterConfig = { + model: string; + baseUrl: "https://openrouter.ai/api/v1" | (string & {}); +}; + // Configuration for commit-related settings type CommitConfig = { autoCommit: boolean; @@ -28,7 +34,10 @@ export type ProviderType = | "deepseek" | "mistral" | "xai" - | "ollama"; + | "ollama" + | "moonshotai" + | "zai" + | "minimax"; // Configuration for the provider selection type ProviderConfig = { @@ -43,6 +52,7 
@@ export type Config = { readonly $schema: "https://raw.githubusercontent.com/AhmedOsman101/commit-sage-cli/refs/heads/main/config.schema.json"; general: GeneralConfig; ollama: OllamaConfig; + openrouter: OpenRouterConfig; commit: CommitConfig; provider: ProviderConfig; }; @@ -53,7 +63,11 @@ export type ApiService = | "Anthropic" | "DeepSeek" | "Mistral" - | "Xai"; + | "Xai" + | "MoonshotAI" + | "Zai" + | "MiniMax" + | "OpenRouter"; export type ConfigSection = keyof Config; export type ConfigKey = keyof Config[T]; diff --git a/src/lib/constants.ts b/src/lib/constants.ts index 26e8728..82d5b12 100644 --- a/src/lib/constants.ts +++ b/src/lib/constants.ts @@ -41,6 +41,10 @@ export const DEFAULT_CONFIG: Readonly = { model: "llama3.2", baseUrl: "http://localhost:11434/api", }, + openrouter: { + model: "openai/gpt-4.1-mini", + baseUrl: "https://openrouter.ai/api/v1", + }, commit: { autoCommit: false, autoPush: false, From c2f1300e991900e8e066ea053dfc51a904941318 Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Tue, 10 Mar 2026 23:38:14 +0200 Subject: [PATCH 2/9] feat: add Moonshot AI, Z.AI, MiniMax, and OpenRouter provider support - Add moonshotService.ts (Kimi models via @ai-sdk/moonshotai) - Add zaiService.ts (GLM models via @ai-sdk/openai with Z.AI base URL) - Add minimaxService.ts (MiniMax-M2.x via vercel-minimax-ai-provider) - Add openrouterService.ts (meta-provider via @openrouter/ai-sdk-provider) - Extend ProviderType union with moonshotai, zai, minimax, openrouter - Extend ApiService union with MoonshotAI, Zai, MiniMax, OpenRouter - Add OpenRouterConfig type and openrouter section to Config type - Update DEFAULT_CONFIG with openrouter section and new provider defaults - Update providerRegistry.ts with all four new providers - Add OpenRouter special-case dispatch in aiService.ts - Update configValidationService.ts SUPPORTED_PROVIDERS and schema - Update configService.ts migrateConfig (modelMap + prefix detection) - Add validateApiKey cases for new providers in 
configService.ts - Add validateOpenRouterApiKey (sk-or-v1- prefix check) to keyValidationService.ts - Update config.schema.json with new provider enums and openrouter section - Update CHANGELOG.md with all new provider details --- CHANGELOG.md | 45 ++++++++ config.schema.json | 23 +++- deno.lock | 136 +++++++++++++++++++----- src/lib/configServiceTypes.d.ts | 3 +- src/services/aiService.ts | 10 ++ src/services/configService.ts | 21 +++- src/services/configValidationService.ts | 22 +++- src/services/keyValidationService.ts | 11 ++ src/services/minimaxService.ts | 47 ++++++++ src/services/moonshotService.ts | 47 ++++++++ src/services/openrouterService.ts | 64 +++++++++++ src/services/providerRegistry.ts | 8 ++ src/services/zaiService.ts | 55 ++++++++++ 13 files changed, 463 insertions(+), 29 deletions(-) create mode 100644 src/services/minimaxService.ts create mode 100644 src/services/moonshotService.ts create mode 100644 src/services/openrouterService.ts create mode 100644 src/services/zaiService.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index e82d85f..eab6664 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,50 @@ # Changelog +## [Unreleased] + +### Features + +- **New AI Providers**: Add support for four additional AI providers + - **Moonshot AI (Kimi models)** via `@ai-sdk/moonshotai` (official SDK) + - Set `provider.type = "moonshotai"` and `MOONSHOTAI_API_KEY` + - Default model: `kimi-k2.5` (flagship multimodal + reasoning model) + - Reasoning mode supported via `` block middleware + - Register at + - **Z.AI (GLM models)** via `@ai-sdk/openai` adapter with custom base URL + - Set `provider.type = "zai"` and `ZAI_API_KEY` + - Default model: `glm-4.5-flash` (fast GLM model) + - Uses the international Z.AI platform (`api.z.ai`) — no extra dependency + - Register at + - **MiniMax** via `vercel-minimax-ai-provider` (provider-maintained SDK) + - Set `provider.type = "minimax"` and `MINIMAX_API_KEY` + - Default model: `MiniMax-M2.5` + - Uses OpenAI-compatible 
adapter + - Register at + - **OpenRouter** via `@openrouter/ai-sdk-provider` (OpenRouter-maintained SDK) + - Set `provider.type = "openrouter"` and `OPENROUTER_API_KEY` + - Requires a dedicated `openrouter` config section with `model` and optional `baseUrl` + - Default model: `openai/gpt-4.1-mini` (model IDs are provider-prefixed, e.g. + `anthropic/claude-opus-4-5`, `meta-llama/llama-3.1-405b-instruct`) + - Routes to hundreds of models from a single API key + - Register at + +- **DeepSeek integration confirmed**: The existing DeepSeek integration + (introduced in v1.6.0) has been audited and verified complete across all + integration touchpoints (types, registry, config migration, validation). + +### Configuration + +- New `openrouter` config section added to `config.json` (alongside the existing + `ollama` section) for OpenRouter-specific settings +- Default values added to `DEFAULT_CONFIG` for all new providers +- Config migration (`migrateConfig`) handles automatic model-to-provider + detection for Kimi (`kimi-*`), GLM (`glm-*`), and MiniMax (`MiniMax-*`) models + +### Validation + +- `OPENROUTER_API_KEY` is validated against the known `sk-or-v1-` prefix format +- `config.schema.json` updated with new provider types and `openrouter` section + ## [1.6.0](https://github.com/AhmedOsman101/commit-sage-cli/compare/v1.5.0...v1.6.0) (2026-02-26) ### Features diff --git a/config.schema.json b/config.schema.json index 9eb5bcc..a259c34 100644 --- a/config.schema.json +++ b/config.schema.json @@ -1,7 +1,7 @@ { "$schema": "http://json-schema.org/draft-07/schema#", "type": "object", - "required": ["ollama", "commit", "provider"], + "required": ["ollama", "openrouter", "commit", "provider"], "properties": { "general": { "type": "object", @@ -30,6 +30,21 @@ } } }, + "openrouter": { + "type": "object", + "required": ["model"], + "properties": { + "model": { + "type": "string", + "description": "OpenRouter model ID, e.g. 
'openai/gpt-4.1-mini' or 'anthropic/claude-opus-4-5'" + }, + "baseUrl": { + "type": "string", + "format": "uri", + "description": "OpenRouter API base URL (defaults to https://openrouter.ai/api/v1)" + } + } + }, "commit": { "type": "object", "required": ["commitFormat", "onlyStagedChanges", "commitLanguage"], @@ -69,7 +84,11 @@ "deepseek", "mistral", "xai", - "ollama" + "ollama", + "moonshotai", + "zai", + "minimax", + "openrouter" ], "description": "AI provider type" }, diff --git a/deno.lock b/deno.lock index 1f28420..9f4653d 100644 --- a/deno.lock +++ b/deno.lock @@ -17,15 +17,20 @@ "npm:@ai-sdk/deepseek@^2.0.20": "2.0.20_zod@4.1.12", "npm:@ai-sdk/google@^3.0.33": "3.0.33_zod@4.1.12", "npm:@ai-sdk/mistral@^3.0.20": "3.0.20_zod@4.1.12", + "npm:@ai-sdk/moonshotai@^2.0.10": "2.0.10_zod@4.1.12", "npm:@ai-sdk/openai@^3.0.36": "3.0.36_zod@4.1.12", "npm:@ai-sdk/xai@^3.0.59": "3.0.59_zod@4.1.12", "npm:@arrirpc/schema@~0.81.2": "0.81.2", + "npm:@openrouter/ai-sdk-provider@2.2.5": "2.2.5_ai@6.0.103__zod@4.1.12_zod@4.1.12", + "npm:@openrouter/ai-sdk-provider@^2.2.5": "2.2.5_ai@6.0.103__zod@4.1.12_zod@4.1.12", "npm:@types/node@*": "22.15.15", "npm:ai@^6.0.103": "6.0.103_zod@4.1.12", "npm:axios@1.13.5": "1.13.5", "npm:husky@^9.1.7": "9.1.7", "npm:lib-result@^3.2.2": "3.2.2", - "npm:ollama-ai-provider-v2@^3.3.1": "3.3.1_ai@6.0.103__zod@4.1.12_zod@4.1.12" + "npm:ollama-ai-provider-v2@^3.3.1": "3.3.1_ai@6.0.103__zod@4.1.12_zod@4.1.12", + "npm:vercel-minimax-ai-provider@0.0.2": "0.0.2_zod@4.1.12", + "npm:vercel-minimax-ai-provider@^0.0.2": "0.0.2_zod@4.1.12" }, "jsr": { "@cliffy/ansi@1.0.0": { @@ -84,24 +89,32 @@ "@ai-sdk/anthropic@3.0.47_zod@4.1.12": { "integrity": "sha512-E6Z3i/xvxGDxRskMMbuX9+xDK4l5LesrP2O7YQ0CcbAkYP25qTo/kYGf/AsJrLkNIY23HeO/kheUWtG1XZllDA==", "dependencies": [ - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", + "zod" + ] + }, + "@ai-sdk/anthropic@3.0.6_zod@4.1.12": { + "integrity": 
"sha512-Ns5OOPHXbODzitvqCySnAFZCAm9ldpx+fdbC0c/f9QwX5b4MQtQJIQ0xZyKm+tB/ynBoeV6zhtyWDXjYeVEWIw==", + "dependencies": [ + "@ai-sdk/provider@3.0.1", + "@ai-sdk/provider-utils@4.0.3_zod@4.1.12", "zod" ] }, "@ai-sdk/deepseek@2.0.20_zod@4.1.12": { "integrity": "sha512-MAL04sDTOWUiBjAGWaVgyeE4bYRb9QpKYRlIeCTZFga6I8yQs50XakhWEssrmvVihdpHGkqpDtCHsFqCydsWLA==", "dependencies": [ - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", "zod" ] }, "@ai-sdk/gateway@3.0.57_zod@4.1.12": { "integrity": "sha512-3MugqOlGfCOjlsBGGARJ5Zrioh78X3+rulHCayCMPySYKY+wc8GGFlFCCh4mleWQFShjMyqWT7eeLTuVSj/WSg==", "dependencies": [ - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", "@vercel/oidc", "zod" ] @@ -109,44 +122,100 @@ "@ai-sdk/google@3.0.33_zod@4.1.12": { "integrity": "sha512-ElHkhMGMJ1MY5AlwLljWWE1jj+Bs3cMyq0KbeWUu2H89OsMAORiE4cB3xhfLlSIEnVmVKx/YHjoW3bN+DFI24A==", "dependencies": [ - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", "zod" ] }, "@ai-sdk/mistral@3.0.20_zod@4.1.12": { "integrity": "sha512-oZcx2pE6nJ+Qj/U6HFV5mJ52jXJPBSpvki/NtIocZkI/rKxphKBaecOH1h0Y7yK3HIbBxsMqefB1pb72cAHGVg==", "dependencies": [ - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", + "zod" + ] + }, + "@ai-sdk/moonshotai@2.0.10_zod@4.1.12": { + "integrity": "sha512-XtBqVQHb6069XQQARtjOq1MxbrA56Ox2hTP3tmsnFVUlXMvS+SINCL6mU7Lq3oFQKADXjjEQibq49e7Gee9n1A==", + "dependencies": [ + "@ai-sdk/openai-compatible@2.0.35_zod@4.1.12", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.19_zod@4.1.12", "zod" ] }, "@ai-sdk/openai-compatible@2.0.30_zod@4.1.12": { "integrity": "sha512-iTjumHf1/u4NhjXYFn/aONM2GId3/o7J1Lp5ql8FCbgIMyRwrmanR5xy1S3aaVkfTscuDvLTzWiy1mAbGzK3nQ==", "dependencies": [ - "@ai-sdk/provider", - 
"@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", + "zod" + ] + }, + "@ai-sdk/openai-compatible@2.0.35_zod@4.1.12": { + "integrity": "sha512-g3wA57IAQFb+3j4YuFndgkUdXyRETZVvbfAWM+UX7bZSxA3xjes0v3XKgIdKdekPtDGsh4ZX2byHD0gJIMPfiA==", + "dependencies": [ + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.19_zod@4.1.12", "zod" ] }, "@ai-sdk/openai@3.0.36_zod@4.1.12": { "integrity": "sha512-foY3onGY8l3q9niMw0Cwe9xrYnm46keIWL57NRw6F3DKzSW9TYTfx0cQJs/j8lXJ8lPzqNxpMO/zXOkqCUt3IQ==", "dependencies": [ - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", "zod" ] }, "@ai-sdk/provider-utils@4.0.15_zod@4.1.12": { "integrity": "sha512-8XiKWbemmCbvNN0CLR9u3PQiet4gtEVIrX4zzLxnCj06AwsEDJwJVBbKrEI4t6qE8XRSIvU2irka0dcpziKW6w==", "dependencies": [ - "@ai-sdk/provider", + "@ai-sdk/provider@3.0.8", + "@standard-schema/spec@1.1.0", + "eventsource-parser", + "zod" + ] + }, + "@ai-sdk/provider-utils@4.0.19_zod@4.1.12": { + "integrity": "sha512-3eG55CrSWCu2SXlqq2QCsFjo3+E7+Gmg7i/oRVoSZzIodTuDSfLb3MRje67xE9RFea73Zao7Lm4mADIfUETKGg==", + "dependencies": [ + "@ai-sdk/provider@3.0.8", + "@standard-schema/spec@1.1.0", + "eventsource-parser", + "zod" + ] + }, + "@ai-sdk/provider-utils@4.0.3_zod@4.1.12": { + "integrity": "sha512-Vo2p61dDld8Dy/O66zKQpE4nqHojiEEYEjZcSbICjE7h8Z6QmHzBfd+ss/paIDdyXyS0yHmC1GoRYYKo89cqZQ==", + "dependencies": [ + "@ai-sdk/provider@3.0.1", "@standard-schema/spec@1.1.0", "eventsource-parser", "zod" ] }, + "@ai-sdk/provider-utils@4.0.4_zod@4.1.12": { + "integrity": "sha512-VxhX0B/dWGbpNHxrKCWUAJKXIXV015J4e7qYjdIU9lLWeptk0KMLGcqkB4wFxff5Njqur8dt8wRi1MN9lZtDqg==", + "dependencies": [ + "@ai-sdk/provider@3.0.2", + "@standard-schema/spec@1.1.0", + "eventsource-parser", + "zod" + ] + }, + "@ai-sdk/provider@3.0.1": { + "integrity": "sha512-2lR4w7mr9XrydzxBSjir4N6YMGdXD+Np1Sh0RXABh7tWdNFFwIeRI1Q+SaYZMbfL8Pg8RRLcrxQm51yxTLhokg==", + "dependencies": [ + 
"json-schema" + ] + }, + "@ai-sdk/provider@3.0.2": { + "integrity": "sha512-HrEmNt/BH/hkQ7zpi2o6N3k1ZR1QTb7z85WYhYygiTxOQuaml4CMtHCWRbric5WPU+RNsYI7r1EpyVQMKO1pYw==", + "dependencies": [ + "json-schema" + ] + }, "@ai-sdk/provider@3.0.8": { "integrity": "sha512-oGMAgGoQdBXbZqNG0Ze56CHjDZ1IDYOwGYxYjO5KLSlz5HiNQ9udIXsPZ61VWaHGZ5XW/jyjmr6t2xz2jGVwbQ==", "dependencies": [ @@ -156,9 +225,9 @@ "@ai-sdk/xai@3.0.59_zod@4.1.12": { "integrity": "sha512-lwY3yTZ43rmLsnffGLxEKF7ikuxKhqWLFnZdglwV4mgGiwbExLrIpHkMQUTXis5OmlJb66r+gfu+udoWdRKDrA==", "dependencies": [ - "@ai-sdk/openai-compatible", - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/openai-compatible@2.0.30_zod@4.1.12", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", "zod" ] }, @@ -177,6 +246,13 @@ "scule" ] }, + "@openrouter/ai-sdk-provider@2.2.5_ai@6.0.103__zod@4.1.12_zod@4.1.12": { + "integrity": "sha512-IgM96gPvpxMZYYJQSIuXqvHX0mUXHEvsa/AtIlfb1VK4ek584ydAzc/wf3IuKxNof15o38WZMpCwfsOFHv96Jg==", + "dependencies": [ + "ai", + "zod" + ] + }, "@opentelemetry/api@1.9.0": { "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==" }, @@ -199,8 +275,8 @@ "integrity": "sha512-4eY6Ut4u41zKH+P2S/oLlZrwxeWQh4kIV1FjE34Jhoiwg+v1AyfSYM8FslXk9rTAtIIaOBimrCUqXacC5RBqJw==", "dependencies": [ "@ai-sdk/gateway", - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.15_zod@4.1.12", "@opentelemetry/api", "zod" ] @@ -345,8 +421,8 @@ "ollama-ai-provider-v2@3.3.1_ai@6.0.103__zod@4.1.12_zod@4.1.12": { "integrity": "sha512-j4BBqqQnvf/uDz+aPYcgU4/MQZERw087Fn1DMGtViA/PgahBq36jHKHVoZfx8mxj+w8cxsKd3eYaDgyZPhE6YA==", "dependencies": [ - "@ai-sdk/provider", - "@ai-sdk/provider-utils", + "@ai-sdk/provider@3.0.8", + "@ai-sdk/provider-utils@4.0.19_zod@4.1.12", "ai", "zod" ] @@ -363,6 +439,15 @@ "undici-types@6.21.0": { "integrity": 
"sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==" }, + "vercel-minimax-ai-provider@0.0.2_zod@4.1.12": { + "integrity": "sha512-h9QzLL7RBmOreqWfr2fcoFVNTJgusENJVagVm8vAi+DBfd+1t+sVJZ/hAhKrtuCKCrm33BlOSWVdJehQFju5jQ==", + "dependencies": [ + "@ai-sdk/anthropic@3.0.6_zod@4.1.12", + "@ai-sdk/provider@3.0.2", + "@ai-sdk/provider-utils@4.0.4_zod@4.1.12", + "zod" + ] + }, "zod@4.1.12": { "integrity": "sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==" } @@ -375,14 +460,17 @@ "npm:@ai-sdk/deepseek@^2.0.20", "npm:@ai-sdk/google@^3.0.33", "npm:@ai-sdk/mistral@^3.0.20", + "npm:@ai-sdk/moonshotai@^2.0.10", "npm:@ai-sdk/openai@^3.0.36", "npm:@ai-sdk/xai@^3.0.59", "npm:@arrirpc/schema@~0.81.2", + "npm:@openrouter/ai-sdk-provider@^2.2.5", "npm:ai@^6.0.103", "npm:axios@1.13.5", "npm:husky@^9.1.7", "npm:lib-result@^3.2.2", - "npm:ollama-ai-provider-v2@^3.3.1" + "npm:ollama-ai-provider-v2@^3.3.1", + "npm:vercel-minimax-ai-provider@^0.0.2" ] } } diff --git a/src/lib/configServiceTypes.d.ts b/src/lib/configServiceTypes.d.ts index ab6bf6d..eb0e383 100644 --- a/src/lib/configServiceTypes.d.ts +++ b/src/lib/configServiceTypes.d.ts @@ -37,7 +37,8 @@ export type ProviderType = | "ollama" | "moonshotai" | "zai" - | "minimax"; + | "minimax" + | "openrouter"; // Configuration for the provider selection type ProviderConfig = { diff --git a/src/services/aiService.ts b/src/services/aiService.ts index 5b1ae5f..a7bf327 100644 --- a/src/services/aiService.ts +++ b/src/services/aiService.ts @@ -5,6 +5,7 @@ import type { CommitMessage } from "@/lib/index.d.ts"; import ConfigService from "./configService.ts"; import GitBlameAnalyzer from "./gitBlameAnalyzer.ts"; import GitService from "./gitService.ts"; +import OpenRouterService from "./openrouterService.ts"; import PromptService from "./promptService.ts"; import { getProviderService } from "./providerRegistry.ts"; @@ -35,6 +36,15 @@ const AiService = { const 
providerType = providerResult.ok as ProviderType; try { + // OpenRouter reads from its own config section (not provider.model) + if (providerType === "openrouter") { + const commitMessage = await OpenRouterService.generateCommitMessage( + prompt, + 1 + ); + return Ok(commitMessage); + } + const Service = getProviderService(providerType); const commitMessage = await Service.generateCommitMessage(prompt, 1); diff --git a/src/services/configService.ts b/src/services/configService.ts index 1621561..97ec24f 100644 --- a/src/services/configService.ts +++ b/src/services/configService.ts @@ -36,6 +36,9 @@ class ConfigService { mistral: "mistral-small-latest", xai: "grok-3-mini", ollama: "llama3.2", + moonshotai: "kimi-k2.5", + zai: "glm-4.5-flash", + minimax: "MiniMax-M2.5", }; // Case 1: Has type but no model - add default model @@ -78,6 +81,12 @@ class ConfigService { detectedType = "mistral"; } else if (model.startsWith("grok-")) { detectedType = "xai"; + } else if (model.startsWith("kimi-")) { + detectedType = "moonshotai"; + } else if (model.startsWith("glm-")) { + detectedType = "zai"; + } else if (model.startsWith("MiniMax-")) { + detectedType = "minimax"; } else if (model.includes("/")) { // Handle "google/gemini-2.5-flash-lite" format detectedType = model.split("/")[0]; @@ -291,7 +300,17 @@ After adding the line, restart your terminal or run 'source ${shellConfigFile}' case "Anthropic": case "DeepSeek": case "Mistral": - case "Xai": { + case "Xai": + case "MoonshotAI": + case "Zai": + case "MiniMax": { + break; + } + case "OpenRouter": { + const { error } = KeyValidationService.validateOpenRouterApiKey(key); + if (error !== undefined) { + throw new AiServiceError(error.message, { cause: error }); + } break; } } diff --git a/src/services/configValidationService.ts b/src/services/configValidationService.ts index 845c7b0..3d0b93e 100644 --- a/src/services/configValidationService.ts +++ b/src/services/configValidationService.ts @@ -15,6 +15,10 @@ const 
SUPPORTED_PROVIDERS: ProviderType[] = [ "mistral", "xai", "ollama", + "moonshotai", + "zai", + "minimax", + "openrouter", ]; const ConfigSchema = a.object( @@ -32,6 +36,10 @@ const ConfigSchema = a.object( model: a.string(), baseUrl: a.optional(a.string()), }), + openrouter: a.object({ + model: a.string(), + baseUrl: a.optional(a.string()), + }), commit: a.object({ autoCommit: a.optional(a.boolean()), autoPush: a.optional(a.boolean()), @@ -126,7 +134,10 @@ const ConfigValidationService = { } return Ok(true); }, - validateModelUrl(model: object, name: "ollama"): Result { + validateModelUrl( + model: object, + name: "ollama" | "openrouter" + ): Result { if ("baseUrl" in model) { const baseUrl = this.validateUrl(model.baseUrl); if (baseUrl.isError()) { @@ -198,6 +209,15 @@ const ConfigValidationService = { this.validateModelUrl(configContent.ollama, "ollama"); } } + + if ("openrouter" in configContent) { + if ( + typeof configContent.openrouter === "object" && + configContent.openrouter !== null + ) { + this.validateModelUrl(configContent.openrouter, "openrouter"); + } + } } return Ok(configContent as Config); diff --git a/src/services/keyValidationService.ts b/src/services/keyValidationService.ts index 6291191..45282d3 100644 --- a/src/services/keyValidationService.ts +++ b/src/services/keyValidationService.ts @@ -15,6 +15,8 @@ const apiValidation = { `Custom API validation failed: ${status}`, invalidOpenaiKey: 'Invalid OpenAI API key format. Key should start with "sk-"', + invalidOpenRouterKey: + 'Invalid OpenRouter API key format. 
Key should start with "sk-or-v1-"', }, } as const; @@ -49,6 +51,15 @@ const KeyValidationService = { } return Ok(key); }, + validateOpenRouterApiKey(key: string): Result { + if (!key) { + return ErrFromText(apiValidation.errorMessages.emptyKey); + } + if (!key.startsWith("sk-or-v1-")) { + return ErrFromText(apiValidation.errorMessages.invalidOpenRouterKey); + } + return Ok(key); + }, }; export default KeyValidationService; diff --git a/src/services/minimaxService.ts b/src/services/minimaxService.ts new file mode 100644 index 0000000..809ab4b --- /dev/null +++ b/src/services/minimaxService.ts @@ -0,0 +1,47 @@ +import { + extractReasoningMiddleware, + generateText, + wrapLanguageModel, +} from "ai"; +import { createMinimaxOpenAI } from "vercel-minimax-ai-provider"; +import type { CommitMessage } from "@/lib/index.d.ts"; +import ConfigService from "./configService.ts"; +import { ModelService } from "./modelService.ts"; + +class MinimaxService extends ModelService { + static override async generateCommitMessage( + prompt: string, + attempt = 1 + ): Promise { + try { + const apiKey = await ConfigService.getApiKey("MiniMax"); + const model = (await ConfigService.get("provider", "model")).unwrap(); + const maxRetries = await ModelService.getMaxRetries(); + + const client = createMinimaxOpenAI({ apiKey }); + + const wrappedModel = wrapLanguageModel({ + model: client(model), + middleware: extractReasoningMiddleware({ tagName: "think" }), + }); + + const { text } = await generateText({ + model: wrappedModel, + prompt, + temperature: 0.7, + maxRetries, + }); + + return { message: text, model }; + } catch (error) { + return await MinimaxService.handleGenerationError( + error, + prompt, + attempt, + MinimaxService.generateCommitMessage.bind(MinimaxService) + ); + } + } +} + +export default MinimaxService; diff --git a/src/services/moonshotService.ts b/src/services/moonshotService.ts new file mode 100644 index 0000000..cd086ce --- /dev/null +++ b/src/services/moonshotService.ts 
@@ -0,0 +1,47 @@ +import { createMoonshotAI } from "@ai-sdk/moonshotai"; +import { + extractReasoningMiddleware, + generateText, + wrapLanguageModel, +} from "ai"; +import type { CommitMessage } from "@/lib/index.d.ts"; +import ConfigService from "./configService.ts"; +import { ModelService } from "./modelService.ts"; + +class MoonshotService extends ModelService { + static override async generateCommitMessage( + prompt: string, + attempt = 1 + ): Promise { + try { + const apiKey = await ConfigService.getApiKey("MoonshotAI"); + const model = (await ConfigService.get("provider", "model")).unwrap(); + const maxRetries = await ModelService.getMaxRetries(); + + const client = createMoonshotAI({ apiKey }); + + const wrappedModel = wrapLanguageModel({ + model: client(model), + middleware: extractReasoningMiddleware({ tagName: "think" }), + }); + + const { text } = await generateText({ + model: wrappedModel, + prompt, + temperature: 0.7, + maxRetries, + }); + + return { message: text, model }; + } catch (error) { + return await MoonshotService.handleGenerationError( + error, + prompt, + attempt, + MoonshotService.generateCommitMessage.bind(MoonshotService) + ); + } + } +} + +export default MoonshotService; diff --git a/src/services/openrouterService.ts b/src/services/openrouterService.ts new file mode 100644 index 0000000..d742f82 --- /dev/null +++ b/src/services/openrouterService.ts @@ -0,0 +1,64 @@ +/** + * OpenRouter Service — meta-provider routing to hundreds of AI models. + * Model IDs are provider-prefixed: e.g. "anthropic/claude-opus-4-5" or + * "openai/gpt-4.1-mini". Config lives in the dedicated 'openrouter' section. + * Required headers (HTTP-Referer, X-Title) satisfy OpenRouter usage policy. + * Register at https://openrouter.ai and set OPENROUTER_API_KEY. 
+ */ +import { createOpenRouter } from "@openrouter/ai-sdk-provider"; +import { + extractReasoningMiddleware, + generateText, + wrapLanguageModel, +} from "ai"; +import type { CommitMessage } from "@/lib/index.d.ts"; +import ConfigService from "./configService.ts"; +import { ModelService } from "./modelService.ts"; + +class OpenRouterService extends ModelService { + static override async generateCommitMessage( + prompt: string, + attempt = 1 + ): Promise { + try { + const apiKey = await ConfigService.getApiKey("OpenRouter"); + const model = (await ConfigService.get("openrouter", "model")).unwrap(); + const baseURL = ( + await ConfigService.get("openrouter", "baseUrl") + ).unwrap(); + const maxRetries = await ModelService.getMaxRetries(); + + const client = createOpenRouter({ + apiKey, + baseURL, + headers: { + "HTTP-Referer": "https://github.com/AhmedOsman101/commit-sage-cli", + "X-Title": "Commit Sage", + }, + }); + + const wrappedModel = wrapLanguageModel({ + model: client(model), + middleware: extractReasoningMiddleware({ tagName: "think" }), + }); + + const { text } = await generateText({ + model: wrappedModel, + prompt, + temperature: 0.7, + maxRetries, + }); + + return { message: text, model }; + } catch (error) { + return await OpenRouterService.handleGenerationError( + error, + prompt, + attempt, + OpenRouterService.generateCommitMessage.bind(OpenRouterService) + ); + } + } +} + +export default OpenRouterService; diff --git a/src/services/providerRegistry.ts b/src/services/providerRegistry.ts index 40be109..7357ab2 100644 --- a/src/services/providerRegistry.ts +++ b/src/services/providerRegistry.ts @@ -2,11 +2,15 @@ import type { ProviderType } from "@/lib/configServiceTypes.d.ts"; import AnthropicService from "./anthropicService.ts"; import DeepseekService from "./deepseekService.ts"; import GeminiService from "./geminiService.ts"; +import MinimaxService from "./minimaxService.ts"; import MistralService from "./mistralService.ts"; import type { 
ModelService } from "./modelService.ts"; +import MoonshotService from "./moonshotService.ts"; import OllamaService from "./ollamaService.ts"; import OpenAiService from "./openaiService.ts"; +import OpenRouterService from "./openrouterService.ts"; import XaiService from "./xaiService.ts"; +import ZaiService from "./zaiService.ts"; const providers: Record = { gemini: GeminiService, @@ -16,6 +20,10 @@ const providers: Record = { mistral: MistralService, xai: XaiService, ollama: OllamaService, + moonshotai: MoonshotService, + zai: ZaiService, + minimax: MinimaxService, + openrouter: OpenRouterService, }; export function getProviderService(type: ProviderType): typeof ModelService { diff --git a/src/services/zaiService.ts b/src/services/zaiService.ts new file mode 100644 index 0000000..5a27515 --- /dev/null +++ b/src/services/zaiService.ts @@ -0,0 +1,55 @@ +import { createOpenAI } from "@ai-sdk/openai"; +import { + extractReasoningMiddleware, + generateText, + wrapLanguageModel, +} from "ai"; +import type { CommitMessage } from "@/lib/index.d.ts"; +import ConfigService from "./configService.ts"; +import { ModelService } from "./modelService.ts"; + +const ZAI_BASE_URL = "https://api.z.ai/api/paas/v4/"; + +/** + * Z.AI Service — GLM models via the international Z.AI platform (Zhipu AI). + * Uses @ai-sdk/openai with a custom baseURL (no new package dependency). + * The trailing slash on ZAI_BASE_URL is required per Z.AI documentation. + * Register at https://z.ai and set ZAI_API_KEY. 
+ */ +class ZaiService extends ModelService { + static override async generateCommitMessage( + prompt: string, + attempt = 1 + ): Promise { + try { + const apiKey = await ConfigService.getApiKey("Zai"); + const model = (await ConfigService.get("provider", "model")).unwrap(); + const maxRetries = await ModelService.getMaxRetries(); + + const client = createOpenAI({ baseURL: ZAI_BASE_URL, apiKey }); + + const wrappedModel = wrapLanguageModel({ + model: client(model), + middleware: extractReasoningMiddleware({ tagName: "think" }), + }); + + const { text } = await generateText({ + model: wrappedModel, + prompt, + temperature: 0.7, + maxRetries, + }); + + return { message: text, model }; + } catch (error) { + return await ZaiService.handleGenerationError( + error, + prompt, + attempt, + ZaiService.generateCommitMessage.bind(ZaiService) + ); + } + } +} + +export default ZaiService; From 0adddd9f21f6d9a7e75d43d6ca9b8b264347849c Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Wed, 11 Mar 2026 01:11:04 +0200 Subject: [PATCH 3/9] fix: remove openrouter from schema required array The openrouter config section is optional for users - it should only be required when using the openrouter provider, not for all users. Kept openrouter as required in TypeScript Config type since DEFAULT_CONFIG always includes it and all service code expects it. 
--- config.schema.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.schema.json b/config.schema.json index a259c34..c19a970 100644 --- a/config.schema.json +++ b/config.schema.json @@ -1,7 +1,7 @@ { "$schema": "http://json-schema.org/draft-07/schema#", "type": "object", - "required": ["ollama", "openrouter", "commit", "provider"], + "required": ["ollama", "commit", "provider"], "properties": { "general": { "type": "object", From efd8d903bc1cef76673b9cfd7037742a8a01511c Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Wed, 11 Mar 2026 01:44:50 +0200 Subject: [PATCH 4/9] fix: resolve infinite migration loop in config loading Root cause: load() was calling migrateConfig() then recursively calling load() again, creating an infinite loop. The fix converts the parsed config directly to Config type instead of re-loading from disk. --- src/services/configService.ts | 73 +++++++++++++++++++++++------------ 1 file changed, 48 insertions(+), 25 deletions(-) diff --git a/src/services/configService.ts b/src/services/configService.ts index 97ec24f..f22f4cf 100644 --- a/src/services/configService.ts +++ b/src/services/configService.ts @@ -7,6 +7,7 @@ import type { ConfigKey, ConfigSection, ConfigValue, + ProviderType, } from "@/lib/configServiceTypes.d.ts"; import { CONFIG_PATH, DEFAULT_CONFIG, OS } from "@/lib/constants.ts"; import { AiServiceError, ConfigurationError } from "@/lib/errors.ts"; @@ -18,17 +19,17 @@ import KeyValidationService from "./keyValidationService.ts"; class ConfigService { protected static shell = ""; - static migrateConfig(config: Record): Config { + static async migrateConfig( + config: Record + ): Promise> { const provider = config.provider as Record | undefined; - if (!provider) { - return config as Config; - } + if (!provider) return Ok(true); const hasType = "type" in provider; const hasModel = "model" in provider; - const modelMap: Record = { + const modelMap: Record = { gemini: "gemini-2.5-flash-lite", openai: 
"gpt-5-nano", anthropic: "claude-sonnet-4-5", @@ -39,23 +40,30 @@ class ConfigService { moonshotai: "kimi-k2.5", zai: "glm-4.5-flash", minimax: "MiniMax-M2.5", + openrouter: "openai/gpt-4.1-mini", }; // Case 1: Has type but no model - add default model if (hasType && !hasModel) { - const oldType = provider.type as string; + const oldType = provider.type as ProviderType; const newModel = modelMap[oldType] || "gemini-2.5-flash-lite"; logInfo("Migrating config: adding provider.model..."); logInfo(` type="${oldType}", model="${newModel}"`); + const updateTypeResult = await ConfigService.set( + "provider", + "type", + oldType as ProviderType + ); + if (updateTypeResult.isError()) return Err(updateTypeResult.error); + const updateModelResult = await ConfigService.set( + "provider", + "model", + newModel + ); + if (updateModelResult.isError()) return Err(updateModelResult.error); - return { - ...config, - provider: { - type: oldType, - model: newModel, - }, - } as Config; + return Ok(true); } // Case 2: Has model but no type - try to detect type from model string @@ -63,7 +71,7 @@ class ConfigService { const model = provider.model as string; // Detect provider from model string (e.g., "google/gemini-2.5-flash-lite" -> "gemini") - let detectedType = "gemini"; + let detectedType: ProviderType = "gemini"; if ( model.startsWith("gpt-") || model.startsWith("o1") || @@ -89,11 +97,11 @@ class ConfigService { detectedType = "minimax"; } else if (model.includes("/")) { // Handle "google/gemini-2.5-flash-lite" format - detectedType = model.split("/")[0]; + detectedType = model.split("/")[0] as ProviderType; } // Fix common provider names - if (detectedType === "google") detectedType = "gemini"; + if ((detectedType as string) === "google") detectedType = "gemini"; const newModel = modelMap[detectedType] || model; @@ -102,16 +110,23 @@ class ConfigService { ` model="${model}", detected type="${detectedType}", using model="${newModel}"` ); - return { - ...config, - provider: { - type: 
detectedType, - model: newModel, - }, - } as Config; + const updateTypeResult = await ConfigService.set( + "provider", + "type", + detectedType as ProviderType + ); + if (updateTypeResult.isError()) return Err(updateTypeResult.error); + const updateModelResult = await ConfigService.set( + "provider", + "model", + newModel + ); + if (updateModelResult.isError()) return Err(updateModelResult.error); + + return Ok(true); } - return config as Config; + return Ok(true); } static async createConfigFile(): Promise> { @@ -151,7 +166,15 @@ } const parsedConfig = JSON.parse(configContents); - const migratedConfig = await ConfigService.migrateConfig(parsedConfig); + const migrationResult = await ConfigService.migrateConfig(parsedConfig); + if (migrationResult.isError()) { + return Err(migrationResult.error); + } + + // Convert parsed config to Config type for validation + // Note: we use the parsed config directly rather than re-loading, + // to avoid infinite migration loop + const migratedConfig = parsedConfig as unknown as Config; const validation = ConfigValidationService.validate(migratedConfig); if (validation.isError()) logError(validation.error.message); From bd5fa5adbc1d82327e38050abed751a1c7f24333 Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Wed, 11 Mar 2026 02:20:33 +0200 Subject: [PATCH 5/9] feat: add verbose debug logging with DEBUG=1 env var - logDebug checks DEBUG_ENABLED (cached at module load, no env lookup per call) - Added logDebug calls to all major async operations: - aiService: generateCommitMessage, generateAndApplyMessage - All provider services: moonshot, zai, minimax, openrouter - gitService: initialize, getDiff, getChangedFiles - promptService: generatePrompt - gitBlameAnalyzer: analyzeChanges - Format: [timestamp] [functionName] EVENT details - Timestamp format: 2026-03-11@02:20:33.12 (ISO-8601 with "T" replaced by "@", truncated to 22 chars) --- src/lib/logger.ts | 5 +++ src/services/aiService.ts | 68 ++++++++++++++++++++++++++++++- src/services/gitBlameAnalyzer.ts | 7 ++++
src/services/gitService.ts | 15 ++++++- src/services/minimaxService.ts | 17 ++++++++ src/services/moonshotService.ts | 17 ++++++++ src/services/openrouterService.ts | 17 ++++++++ src/services/promptService.ts | 7 ++++ src/services/zaiService.ts | 17 ++++++++ 9 files changed, 168 insertions(+), 2 deletions(-) diff --git a/src/lib/logger.ts b/src/lib/logger.ts index b18895a..4126533 100644 --- a/src/lib/logger.ts +++ b/src/lib/logger.ts @@ -14,6 +14,9 @@ import FileLogger from "@/services/fileLogger.ts"; const encoder = new TextEncoder(); +// Cached debug flag - checked once at module load +const DEBUG_ENABLED = Deno.env.get("DEBUG") === "1"; + function toCustomString(value: unknown, indentLevel = 0): string { const indent = " ".repeat(indentLevel); // 2 spaces for indentation @@ -104,6 +107,8 @@ export function logSuccess(...data: unknown[]): void { } export function logDebug(...data: unknown[]): void { + if (!DEBUG_ENABLED) return; + const message = makeOutput(...data); console.log(`${magenta("[DEBUG]")} ${message}`); FileLogger.debug(message); diff --git a/src/services/aiService.ts b/src/services/aiService.ts index a7bf327..2c4465e 100644 --- a/src/services/aiService.ts +++ b/src/services/aiService.ts @@ -2,6 +2,7 @@ import { Err, ErrFromText, ErrFromUnknown, Ok, type Result } from "lib-result"; import type { ProviderType } from "@/lib/configServiceTypes.d.ts"; import { ERROR_MESSAGES } from "@/lib/constants.ts"; import type { CommitMessage } from "@/lib/index.d.ts"; +import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import GitBlameAnalyzer from "./gitBlameAnalyzer.ts"; import GitService from "./gitService.ts"; @@ -11,6 +12,9 @@ import { getProviderService } from "./providerRegistry.ts"; const MAX_DIFF_LENGTH = 100_000; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + const AiService = { truncateDiff(diff: string): string { return diff.length > MAX_DIFF_LENGTH @@ -22,39 +26,73 @@ 
const AiService = { diff: string, blameAnalysis: string ): Promise> { + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] ENTRY diff.length=${diff.length}, hasBlame=${!!blameAnalysis}` + ); + if (!diff) return ErrFromText(ERROR_MESSAGES.noChanges); const truncatedDiff = this.truncateDiff(diff); + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] STEP truncated diff, length=${truncatedDiff.length}` + ); + const prompt = await PromptService.generatePrompt( truncatedDiff, blameAnalysis ); + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] STEP prompt generated, length=${prompt.length}` + ); const providerResult = await ConfigService.get("provider", "type"); if (providerResult.isError()) return Err(providerResult.error); const providerType = providerResult.ok as ProviderType; + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] STEP provider=${providerType}` + ); try { // OpenRouter reads from its own config section (not provider.model) if (providerType === "openrouter") { + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] CALL OpenRouterService` + ); const commitMessage = await OpenRouterService.generateCommitMessage( prompt, 1 ); + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] EXIT message="${commitMessage.message.substring(0, 50)}..."` + ); return Ok(commitMessage); } const Service = getProviderService(providerType); + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] CALL ${Service.name}` + ); const commitMessage = await Service.generateCommitMessage(prompt, 1); + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] EXIT message="${commitMessage.message.substring(0, 50)}..."` + ); return Ok(commitMessage); } catch (error) { + logDebug( + `[${timestamp()}] [aiService.generateCommitMessage] ERROR ${error}` + ); return ErrFromUnknown(error); } }, async generateAndApplyMessage(): Promise> { + logDebug(`[${timestamp()}] [aiService.generateAndApplyMessage] 
ENTRY`); + GitService.initialize(); + logDebug( + `[${timestamp()}] [aiService.generateAndApplyMessage] STEP git initialized` + ); const onlyStagedResult = await ConfigService.get( "commit", @@ -66,16 +104,25 @@ const AiService = { const hasStagedChanges = GitService.hasChanges("staged"); const useStagedChanges = onlyStagedSetting || hasStagedChanges; + logDebug( + `[${timestamp()}] [aiService.generateAndApplyMessage] STEP useStagedChanges=${useStagedChanges}` + ); const diffResult = await GitService.getDiff(useStagedChanges); if (diffResult.isError()) return Err(diffResult.error); const diff = diffResult.ok; + logDebug( + `[${timestamp()}] [aiService.generateAndApplyMessage] STEP diff length=${diff.length}` + ); const changedFilesResult = GitService.getChangedFiles(useStagedChanges); if (changedFilesResult.isError()) return Err(changedFilesResult.error); const changedFiles = changedFilesResult.ok; + logDebug( + `[${timestamp()}] [aiService.generateAndApplyMessage] STEP changed files=${changedFiles.length}` + ); const analysesPromises = changedFiles.map(file => GitBlameAnalyzer.analyzeChanges(file) @@ -92,7 +139,26 @@ const AiService = { } } - return await this.generateCommitMessage(diff, blameAnalysis.join("\n\n")); + logDebug( + `[${timestamp()}] [aiService.generateAndApplyMessage] STEP blame analyses=${blameAnalysis.length}` + ); + + const result = await this.generateCommitMessage( + diff, + blameAnalysis.join("\n\n") + ); + + if (result.isOk()) { + logDebug( + `[${timestamp()}] [aiService.generateAndApplyMessage] EXIT success message="${result.ok.message.substring(0, 50)}..."` + ); + } else { + logDebug( + `[${timestamp()}] [aiService.generateAndApplyMessage] EXIT error=${result.error.message}` + ); + } + + return result; }, }; diff --git a/src/services/gitBlameAnalyzer.ts b/src/services/gitBlameAnalyzer.ts index 3f3e55d..e47fc77 100644 --- a/src/services/gitBlameAnalyzer.ts +++ b/src/services/gitBlameAnalyzer.ts @@ -1,10 +1,14 @@ import * as path from 
"node:path"; import { Err, ErrFromText, Ok, type Result } from "lib-result"; import { ERROR_MESSAGES, REPO_PATH } from "@/lib/constants.ts"; +import { logDebug } from "@/lib/logger.ts"; import CommandService from "./commandService.ts"; import FileSystemService from "./fileSystemService.ts"; import GitService from "./gitService.ts"; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + type BlameInfo = { commit: string; author: string; @@ -93,6 +97,9 @@ class GitBlameAnalyzer { static async analyzeChanges( filePath: string ): Promise> { + logDebug( + `[${timestamp()}] [gitBlameAnalyzer.analyzeChanges] ENTRY filePath=${filePath}` + ); const normalizedPath = path.normalize(filePath.replace(/^\/+/, "")); // First check if file is deleted or new, as these don't need blame analysis diff --git a/src/services/gitService.ts b/src/services/gitService.ts index d2537d6..cc44f08 100644 --- a/src/services/gitService.ts +++ b/src/services/gitService.ts @@ -7,10 +7,13 @@ import { NoRepositoriesFoundError, } from "@/lib/errors.ts"; import type { CommandOutput } from "@/lib/index.d.ts"; -import { logError } from "@/lib/logger.ts"; +import { logDebug, logError } from "@/lib/logger.ts"; import CommandService from "./commandService.ts"; import FileSystemService from "./fileSystemService.ts"; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + const GIT_STATUS_CODES = { modified: "M", added: "A", @@ -33,10 +36,14 @@ class GitService { static repoPath = ""; static initialize(): string { + logDebug(`[${timestamp()}] [gitService.initialize] ENTRY`); const repoPath = GitService.getRepoPath(); if (repoPath.isError()) logError(repoPath.error.message); GitService.setRepoPath(repoPath.ok); + logDebug( + `[${timestamp()}] [gitService.initialize] EXIT repoPath=${repoPath.ok}` + ); return repoPath.ok; } static execGit(args: string[]): Result { @@ -99,6 +106,9 @@ class GitService { static async getDiff( onlyStagedChanges: 
boolean ): Promise> { + logDebug( + `[${timestamp()}] [gitService.getDiff] ENTRY onlyStagedChanges=${onlyStagedChanges}` + ); try { const hasHead = GitService.hasHead(); @@ -300,6 +310,9 @@ class GitService { } } static getChangedFiles(onlyStaged = false): Result { + logDebug( + `[${timestamp()}] [gitService.getChangedFiles] ENTRY onlyStaged=${onlyStaged}` + ); try { const outputResult = GitService.execGit(["status", "--porcelain"]); if (outputResult.isError()) return Err(outputResult.error); diff --git a/src/services/minimaxService.ts b/src/services/minimaxService.ts index 809ab4b..989ece8 100644 --- a/src/services/minimaxService.ts +++ b/src/services/minimaxService.ts @@ -5,19 +5,30 @@ import { } from "ai"; import { createMinimaxOpenAI } from "vercel-minimax-ai-provider"; import type { CommitMessage } from "@/lib/index.d.ts"; +import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + class MinimaxService extends ModelService { static override async generateCommitMessage( prompt: string, attempt = 1 ): Promise { + logDebug( + `[${timestamp()}] [minimaxService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + ); try { const apiKey = await ConfigService.getApiKey("MiniMax"); const model = (await ConfigService.get("provider", "model")).unwrap(); const maxRetries = await ModelService.getMaxRetries(); + logDebug( + `[${timestamp()}] [minimaxService.generateCommitMessage] CALL API model=${model}` + ); + const client = createMinimaxOpenAI({ apiKey }); const wrappedModel = wrapLanguageModel({ @@ -32,8 +43,14 @@ class MinimaxService extends ModelService { maxRetries, }); + logDebug( + `[${timestamp()}] [minimaxService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + ); return { message: text, model }; } catch (error) { + logDebug( + `[${timestamp()}] 
[minimaxService.generateCommitMessage] ERROR ${error}` + ); return await MinimaxService.handleGenerationError( error, prompt, diff --git a/src/services/moonshotService.ts b/src/services/moonshotService.ts index cd086ce..c0a35d9 100644 --- a/src/services/moonshotService.ts +++ b/src/services/moonshotService.ts @@ -5,19 +5,30 @@ import { wrapLanguageModel, } from "ai"; import type { CommitMessage } from "@/lib/index.d.ts"; +import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + class MoonshotService extends ModelService { static override async generateCommitMessage( prompt: string, attempt = 1 ): Promise { + logDebug( + `[${timestamp()}] [moonshotService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + ); try { const apiKey = await ConfigService.getApiKey("MoonshotAI"); const model = (await ConfigService.get("provider", "model")).unwrap(); const maxRetries = await ModelService.getMaxRetries(); + logDebug( + `[${timestamp()}] [moonshotService.generateCommitMessage] CALL API model=${model}, maxRetries=${maxRetries}` + ); + const client = createMoonshotAI({ apiKey }); const wrappedModel = wrapLanguageModel({ @@ -32,8 +43,14 @@ class MoonshotService extends ModelService { maxRetries, }); + logDebug( + `[${timestamp()}] [moonshotService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + ); return { message: text, model }; } catch (error) { + logDebug( + `[${timestamp()}] [moonshotService.generateCommitMessage] ERROR ${error}` + ); return await MoonshotService.handleGenerationError( error, prompt, diff --git a/src/services/openrouterService.ts b/src/services/openrouterService.ts index d742f82..1d35b82 100644 --- a/src/services/openrouterService.ts +++ b/src/services/openrouterService.ts @@ -12,14 +12,21 @@ import { wrapLanguageModel, } from "ai"; 
import type { CommitMessage } from "@/lib/index.d.ts"; +import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + class OpenRouterService extends ModelService { static override async generateCommitMessage( prompt: string, attempt = 1 ): Promise { + logDebug( + `[${timestamp()}] [openrouterService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + ); try { const apiKey = await ConfigService.getApiKey("OpenRouter"); const model = (await ConfigService.get("openrouter", "model")).unwrap(); @@ -28,6 +35,10 @@ class OpenRouterService extends ModelService { ).unwrap(); const maxRetries = await ModelService.getMaxRetries(); + logDebug( + `[${timestamp()}] [openrouterService.generateCommitMessage] CALL API model=${model}, baseURL=${baseURL}` + ); + const client = createOpenRouter({ apiKey, baseURL, @@ -49,8 +60,14 @@ class OpenRouterService extends ModelService { maxRetries, }); + logDebug( + `[${timestamp()}] [openrouterService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + ); return { message: text, model }; } catch (error) { + logDebug( + `[${timestamp()}] [openrouterService.generateCommitMessage] ERROR ${error}` + ); return await OpenRouterService.handleGenerationError( error, prompt, diff --git a/src/services/promptService.ts b/src/services/promptService.ts index 593af38..2bd1357 100644 --- a/src/services/promptService.ts +++ b/src/services/promptService.ts @@ -1,9 +1,16 @@ import type { CommitLanguage } from "@/lib/configServiceTypes.d.ts"; +import { logDebug } from "@/lib/logger.ts"; import { getTemplate } from "@/templates/index.ts"; import ConfigService from "./configService.ts"; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + const PromptService = { async generatePrompt(diff: string, blameAnalysis: 
string): Promise { + logDebug( + `[${timestamp()}] [promptService.generatePrompt] ENTRY diff.length=${diff.length}, blame.length=${blameAnalysis.length}` + ); const format = await ConfigService.get("commit", "commitFormat").then( result => result.unwrap() ); diff --git a/src/services/zaiService.ts b/src/services/zaiService.ts index 5a27515..b97e7d2 100644 --- a/src/services/zaiService.ts +++ b/src/services/zaiService.ts @@ -5,11 +5,15 @@ import { wrapLanguageModel, } from "ai"; import type { CommitMessage } from "@/lib/index.d.ts"; +import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; const ZAI_BASE_URL = "https://api.z.ai/api/paas/v4/"; +const timestamp = () => + new Date().toISOString().replace("T", "@").substring(0, 22); + /** * Z.AI Service — GLM models via the international Z.AI platform (Zhipu AI). * Uses @ai-sdk/openai with a custom baseURL (no new package dependency). @@ -21,11 +25,18 @@ class ZaiService extends ModelService { prompt: string, attempt = 1 ): Promise { + logDebug( + `[${timestamp()}] [zaiService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + ); try { const apiKey = await ConfigService.getApiKey("Zai"); const model = (await ConfigService.get("provider", "model")).unwrap(); const maxRetries = await ModelService.getMaxRetries(); + logDebug( + `[${timestamp()}] [zaiService.generateCommitMessage] CALL API model=${model}, baseURL=${ZAI_BASE_URL}` + ); + const client = createOpenAI({ baseURL: ZAI_BASE_URL, apiKey }); const wrappedModel = wrapLanguageModel({ @@ -40,8 +51,14 @@ class ZaiService extends ModelService { maxRetries, }); + logDebug( + `[${timestamp()}] [zaiService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + ); return { message: text, model }; } catch (error) { + logDebug( + `[${timestamp()}] [zaiService.generateCommitMessage] ERROR ${error}` + ); return await ZaiService.handleGenerationError( 
error, prompt, From 6b1ef272d2fe6cf5e2267494c7a403451152e015 Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Wed, 11 Mar 2026 02:29:19 +0200 Subject: [PATCH 6/9] refactor: move timestamp into logDebug, remove duplicate arrow funcs --- src/lib/logger.ts | 3 +- src/services/aiService.ts | 47 +++++++++++-------------------- src/services/gitBlameAnalyzer.ts | 7 +---- src/services/gitService.ts | 15 +++------- src/services/minimaxService.ts | 13 +++------ src/services/moonshotService.ts | 13 +++------ src/services/openrouterService.ts | 13 +++------ src/services/promptService.ts | 5 +--- src/services/zaiService.ts | 13 +++------ 9 files changed, 41 insertions(+), 88 deletions(-) diff --git a/src/lib/logger.ts b/src/lib/logger.ts index 4126533..95943be 100644 --- a/src/lib/logger.ts +++ b/src/lib/logger.ts @@ -109,7 +109,8 @@ export function logSuccess(...data: unknown[]): void { export function logDebug(...data: unknown[]): void { if (!DEBUG_ENABLED) return; + const timestamp = new Date().toISOString().replace("T", "@").substring(0, 22); const message = makeOutput(...data); - console.log(`${magenta("[DEBUG]")} ${message}`); + console.log(`${magenta("[DEBUG]")} [${timestamp}] ${message}`); FileLogger.debug(message); } diff --git a/src/services/aiService.ts b/src/services/aiService.ts index 2c4465e..5e48030 100644 --- a/src/services/aiService.ts +++ b/src/services/aiService.ts @@ -12,9 +12,6 @@ import { getProviderService } from "./providerRegistry.ts"; const MAX_DIFF_LENGTH = 100_000; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - const AiService = { truncateDiff(diff: string): string { return diff.length > MAX_DIFF_LENGTH @@ -27,14 +24,14 @@ const AiService = { blameAnalysis: string ): Promise> { logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] ENTRY diff.length=${diff.length}, hasBlame=${!!blameAnalysis}` + `[aiService.generateCommitMessage] ENTRY diff.length=${diff.length}, hasBlame=${!!blameAnalysis}` ); if 
(!diff) return ErrFromText(ERROR_MESSAGES.noChanges); const truncatedDiff = this.truncateDiff(diff); logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] STEP truncated diff, length=${truncatedDiff.length}` + `[aiService.generateCommitMessage] STEP truncated diff, length=${truncatedDiff.length}` ); const prompt = await PromptService.generatePrompt( @@ -42,57 +39,47 @@ const AiService = { blameAnalysis ); logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] STEP prompt generated, length=${prompt.length}` + `[aiService.generateCommitMessage] STEP prompt generated, length=${prompt.length}` ); const providerResult = await ConfigService.get("provider", "type"); if (providerResult.isError()) return Err(providerResult.error); const providerType = providerResult.ok as ProviderType; - logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] STEP provider=${providerType}` - ); + logDebug(`[aiService.generateCommitMessage] STEP provider=${providerType}`); try { // OpenRouter reads from its own config section (not provider.model) if (providerType === "openrouter") { - logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] CALL OpenRouterService` - ); + logDebug("[aiService.generateCommitMessage] CALL OpenRouterService"); const commitMessage = await OpenRouterService.generateCommitMessage( prompt, 1 ); logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] EXIT message="${commitMessage.message.substring(0, 50)}..."` + `[aiService.generateCommitMessage] EXIT message="${commitMessage.message.substring(0, 50)}..."` ); return Ok(commitMessage); } const Service = getProviderService(providerType); - logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] CALL ${Service.name}` - ); + logDebug(`[aiService.generateCommitMessage] CALL ${Service.name}`); const commitMessage = await Service.generateCommitMessage(prompt, 1); logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] EXIT 
message="${commitMessage.message.substring(0, 50)}..."` + `[aiService.generateCommitMessage] EXIT message="${commitMessage.message.substring(0, 50)}..."` ); return Ok(commitMessage); } catch (error) { - logDebug( - `[${timestamp()}] [aiService.generateCommitMessage] ERROR ${error}` - ); + logDebug(`[aiService.generateCommitMessage] ERROR ${error}`); return ErrFromUnknown(error); } }, async generateAndApplyMessage(): Promise> { - logDebug(`[${timestamp()}] [aiService.generateAndApplyMessage] ENTRY`); + logDebug("[aiService.generateAndApplyMessage] ENTRY"); GitService.initialize(); - logDebug( - `[${timestamp()}] [aiService.generateAndApplyMessage] STEP git initialized` - ); + logDebug("[aiService.generateAndApplyMessage] STEP git initialized"); const onlyStagedResult = await ConfigService.get( "commit", @@ -105,7 +92,7 @@ const AiService = { const useStagedChanges = onlyStagedSetting || hasStagedChanges; logDebug( - `[${timestamp()}] [aiService.generateAndApplyMessage] STEP useStagedChanges=${useStagedChanges}` + `[aiService.generateAndApplyMessage] STEP useStagedChanges=${useStagedChanges}` ); const diffResult = await GitService.getDiff(useStagedChanges); @@ -113,7 +100,7 @@ const AiService = { const diff = diffResult.ok; logDebug( - `[${timestamp()}] [aiService.generateAndApplyMessage] STEP diff length=${diff.length}` + `[aiService.generateAndApplyMessage] STEP diff length=${diff.length}` ); const changedFilesResult = GitService.getChangedFiles(useStagedChanges); @@ -121,7 +108,7 @@ const AiService = { const changedFiles = changedFilesResult.ok; logDebug( - `[${timestamp()}] [aiService.generateAndApplyMessage] STEP changed files=${changedFiles.length}` + `[aiService.generateAndApplyMessage] STEP changed files=${changedFiles.length}` ); const analysesPromises = changedFiles.map(file => @@ -140,7 +127,7 @@ const AiService = { } logDebug( - `[${timestamp()}] [aiService.generateAndApplyMessage] STEP blame analyses=${blameAnalysis.length}` + 
`[aiService.generateAndApplyMessage] STEP blame analyses=${blameAnalysis.length}` ); const result = await this.generateCommitMessage( @@ -150,11 +137,11 @@ const AiService = { if (result.isOk()) { logDebug( - `[${timestamp()}] [aiService.generateAndApplyMessage] EXIT success message="${result.ok.message.substring(0, 50)}..."` + `[aiService.generateAndApplyMessage] EXIT success message="${result.ok.message.substring(0, 50)}..."` ); } else { logDebug( - `[${timestamp()}] [aiService.generateAndApplyMessage] EXIT error=${result.error.message}` + `[aiService.generateAndApplyMessage] EXIT error=${result.error.message}` ); } diff --git a/src/services/gitBlameAnalyzer.ts b/src/services/gitBlameAnalyzer.ts index e47fc77..fc555fc 100644 --- a/src/services/gitBlameAnalyzer.ts +++ b/src/services/gitBlameAnalyzer.ts @@ -6,9 +6,6 @@ import CommandService from "./commandService.ts"; import FileSystemService from "./fileSystemService.ts"; import GitService from "./gitService.ts"; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - type BlameInfo = { commit: string; author: string; @@ -97,9 +94,7 @@ class GitBlameAnalyzer { static async analyzeChanges( filePath: string ): Promise> { - logDebug( - `[${timestamp()}] [gitBlameAnalyzer.analyzeChanges] ENTRY filePath=${filePath}` - ); + logDebug(`[gitBlameAnalyzer.analyzeChanges] ENTRY filePath=${filePath}`); const normalizedPath = path.normalize(filePath.replace(/^\/+/, "")); // First check if file is deleted or new, as these don't need blame analysis diff --git a/src/services/gitService.ts b/src/services/gitService.ts index cc44f08..7c79716 100644 --- a/src/services/gitService.ts +++ b/src/services/gitService.ts @@ -11,9 +11,6 @@ import { logDebug, logError } from "@/lib/logger.ts"; import CommandService from "./commandService.ts"; import FileSystemService from "./fileSystemService.ts"; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - const GIT_STATUS_CODES 
= { modified: "M", added: "A", @@ -36,14 +33,12 @@ class GitService { static repoPath = ""; static initialize(): string { - logDebug(`[${timestamp()}] [gitService.initialize] ENTRY`); + logDebug("[gitService.initialize] ENTRY"); const repoPath = GitService.getRepoPath(); if (repoPath.isError()) logError(repoPath.error.message); GitService.setRepoPath(repoPath.ok); - logDebug( - `[${timestamp()}] [gitService.initialize] EXIT repoPath=${repoPath.ok}` - ); + logDebug(`[gitService.initialize] EXIT repoPath=${repoPath.ok}`); return repoPath.ok; } static execGit(args: string[]): Result { @@ -107,7 +102,7 @@ class GitService { onlyStagedChanges: boolean ): Promise> { logDebug( - `[${timestamp()}] [gitService.getDiff] ENTRY onlyStagedChanges=${onlyStagedChanges}` + `[gitService.getDiff] ENTRY onlyStagedChanges=${onlyStagedChanges}` ); try { const hasHead = GitService.hasHead(); @@ -310,9 +305,7 @@ class GitService { } } static getChangedFiles(onlyStaged = false): Result { - logDebug( - `[${timestamp()}] [gitService.getChangedFiles] ENTRY onlyStaged=${onlyStaged}` - ); + logDebug(`[gitService.getChangedFiles] ENTRY onlyStaged=${onlyStaged}`); try { const outputResult = GitService.execGit(["status", "--porcelain"]); if (outputResult.isError()) return Err(outputResult.error); diff --git a/src/services/minimaxService.ts b/src/services/minimaxService.ts index 989ece8..6d2e4b7 100644 --- a/src/services/minimaxService.ts +++ b/src/services/minimaxService.ts @@ -9,16 +9,13 @@ import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - class MinimaxService extends ModelService { static override async generateCommitMessage( prompt: string, attempt = 1 ): Promise { logDebug( - `[${timestamp()}] [minimaxService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + 
`[minimaxService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` ); try { const apiKey = await ConfigService.getApiKey("MiniMax"); @@ -26,7 +23,7 @@ class MinimaxService extends ModelService { const maxRetries = await ModelService.getMaxRetries(); logDebug( - `[${timestamp()}] [minimaxService.generateCommitMessage] CALL API model=${model}` + `[minimaxService.generateCommitMessage] CALL API model=${model}` ); const client = createMinimaxOpenAI({ apiKey }); @@ -44,13 +41,11 @@ class MinimaxService extends ModelService { }); logDebug( - `[${timestamp()}] [minimaxService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + `[minimaxService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` ); return { message: text, model }; } catch (error) { - logDebug( - `[${timestamp()}] [minimaxService.generateCommitMessage] ERROR ${error}` - ); + logDebug(`[minimaxService.generateCommitMessage] ERROR ${error}`); return await MinimaxService.handleGenerationError( error, prompt, diff --git a/src/services/moonshotService.ts b/src/services/moonshotService.ts index c0a35d9..ab1bad9 100644 --- a/src/services/moonshotService.ts +++ b/src/services/moonshotService.ts @@ -9,16 +9,13 @@ import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - class MoonshotService extends ModelService { static override async generateCommitMessage( prompt: string, attempt = 1 ): Promise { logDebug( - `[${timestamp()}] [moonshotService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + `[moonshotService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` ); try { const apiKey = await ConfigService.getApiKey("MoonshotAI"); @@ -26,7 +23,7 @@ class MoonshotService extends ModelService { const maxRetries = await 
ModelService.getMaxRetries(); logDebug( - `[${timestamp()}] [moonshotService.generateCommitMessage] CALL API model=${model}, maxRetries=${maxRetries}` + `[moonshotService.generateCommitMessage] CALL API model=${model}, maxRetries=${maxRetries}` ); const client = createMoonshotAI({ apiKey }); @@ -44,13 +41,11 @@ class MoonshotService extends ModelService { }); logDebug( - `[${timestamp()}] [moonshotService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + `[moonshotService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` ); return { message: text, model }; } catch (error) { - logDebug( - `[${timestamp()}] [moonshotService.generateCommitMessage] ERROR ${error}` - ); + logDebug(`[moonshotService.generateCommitMessage] ERROR ${error}`); return await MoonshotService.handleGenerationError( error, prompt, diff --git a/src/services/openrouterService.ts b/src/services/openrouterService.ts index 1d35b82..1a65482 100644 --- a/src/services/openrouterService.ts +++ b/src/services/openrouterService.ts @@ -16,16 +16,13 @@ import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - class OpenRouterService extends ModelService { static override async generateCommitMessage( prompt: string, attempt = 1 ): Promise { logDebug( - `[${timestamp()}] [openrouterService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + `[openrouterService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` ); try { const apiKey = await ConfigService.getApiKey("OpenRouter"); @@ -36,7 +33,7 @@ class OpenRouterService extends ModelService { const maxRetries = await ModelService.getMaxRetries(); logDebug( - `[${timestamp()}] [openrouterService.generateCommitMessage] CALL API model=${model}, baseURL=${baseURL}` + 
`[openrouterService.generateCommitMessage] CALL API model=${model}, baseURL=${baseURL}` ); const client = createOpenRouter({ @@ -61,13 +58,11 @@ class OpenRouterService extends ModelService { }); logDebug( - `[${timestamp()}] [openrouterService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + `[openrouterService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` ); return { message: text, model }; } catch (error) { - logDebug( - `[${timestamp()}] [openrouterService.generateCommitMessage] ERROR ${error}` - ); + logDebug(`[openrouterService.generateCommitMessage] ERROR ${error}`); return await OpenRouterService.handleGenerationError( error, prompt, diff --git a/src/services/promptService.ts b/src/services/promptService.ts index 2bd1357..ef38271 100644 --- a/src/services/promptService.ts +++ b/src/services/promptService.ts @@ -3,13 +3,10 @@ import { logDebug } from "@/lib/logger.ts"; import { getTemplate } from "@/templates/index.ts"; import ConfigService from "./configService.ts"; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - const PromptService = { async generatePrompt(diff: string, blameAnalysis: string): Promise { logDebug( - `[${timestamp()}] [promptService.generatePrompt] ENTRY diff.length=${diff.length}, blame.length=${blameAnalysis.length}` + `[promptService.generatePrompt] ENTRY diff.length=${diff.length}, blame.length=${blameAnalysis.length}` ); const format = await ConfigService.get("commit", "commitFormat").then( result => result.unwrap() diff --git a/src/services/zaiService.ts b/src/services/zaiService.ts index b97e7d2..93d7c77 100644 --- a/src/services/zaiService.ts +++ b/src/services/zaiService.ts @@ -11,9 +11,6 @@ import { ModelService } from "./modelService.ts"; const ZAI_BASE_URL = "https://api.z.ai/api/paas/v4/"; -const timestamp = () => - new Date().toISOString().replace("T", "@").substring(0, 22); - /** * Z.AI Service — GLM models via the international Z.AI platform 
(Zhipu AI). * Uses @ai-sdk/openai with a custom baseURL (no new package dependency). @@ -26,7 +23,7 @@ class ZaiService extends ModelService { attempt = 1 ): Promise { logDebug( - `[${timestamp()}] [zaiService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + `[zaiService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` ); try { const apiKey = await ConfigService.getApiKey("Zai"); @@ -34,7 +31,7 @@ class ZaiService extends ModelService { const maxRetries = await ModelService.getMaxRetries(); logDebug( - `[${timestamp()}] [zaiService.generateCommitMessage] CALL API model=${model}, baseURL=${ZAI_BASE_URL}` + `[zaiService.generateCommitMessage] CALL API model=${model}, baseURL=${ZAI_BASE_URL}` ); const client = createOpenAI({ baseURL: ZAI_BASE_URL, apiKey }); @@ -52,13 +49,11 @@ class ZaiService extends ModelService { }); logDebug( - `[${timestamp()}] [zaiService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` + `[zaiService.generateCommitMessage] EXIT message="${text.substring(0, 50)}..."` ); return { message: text, model }; } catch (error) { - logDebug( - `[${timestamp()}] [zaiService.generateCommitMessage] ERROR ${error}` - ); + logDebug(`[zaiService.generateCommitMessage] ERROR ${error}`); return await ZaiService.handleGenerationError( error, prompt, From 5c4b47789ebafb16fa1cea2bd4bb965d79ff3741 Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Wed, 11 Mar 2026 02:42:47 +0200 Subject: [PATCH 7/9] refactor: use provider.model for ollama and openrouter Ollama and OpenRouter now read model from provider.model instead of ollama.model / openrouter.model. Only baseUrl stays in its own section. 
--- src/services/ollamaService.ts | 7 ++++++- src/services/openrouterService.ts | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/services/ollamaService.ts b/src/services/ollamaService.ts index dd757bd..ac12108 100644 --- a/src/services/ollamaService.ts +++ b/src/services/ollamaService.ts @@ -5,6 +5,7 @@ import { } from "ai"; import { createOllama } from "ollama-ai-provider-v2"; import type { CommitMessage } from "@/lib/index.d.ts"; +import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; import { ModelService } from "./modelService.ts"; @@ -14,9 +15,13 @@ class OllamaService extends ModelService { attempt = 1 ): Promise { const baseURL = (await ConfigService.get("ollama", "baseUrl")).unwrap(); - const model = (await ConfigService.get("ollama", "model")).unwrap(); + const model = (await ConfigService.get("provider", "model")).unwrap(); const maxRetries = await ModelService.getMaxRetries(); + logDebug( + `[ollamaService.generateCommitMessage] CALL API model=${model}, baseURL=${baseURL}` + ); + const ollama = createOllama({ baseURL }); try { diff --git a/src/services/openrouterService.ts b/src/services/openrouterService.ts index 1a65482..21bdd26 100644 --- a/src/services/openrouterService.ts +++ b/src/services/openrouterService.ts @@ -26,7 +26,7 @@ class OpenRouterService extends ModelService { ); try { const apiKey = await ConfigService.getApiKey("OpenRouter"); - const model = (await ConfigService.get("openrouter", "model")).unwrap(); + const model = (await ConfigService.get("provider", "model")).unwrap(); const baseURL = ( await ConfigService.get("openrouter", "baseUrl") ).unwrap(); From c057453e6ba040e8cb4be9d82ce949fce879fd5a Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Wed, 11 Mar 2026 02:50:01 +0200 Subject: [PATCH 8/9] fix: use DEFAULT_CONFIG for optional baseUrl, require provider.model - OpenRouter and Ollama now throw clear error if provider.model is missing - Optional baseUrl falls back to 
DEFAULT_CONFIG values instead of hardcoded - Added debug logging to OllamaService --- src/services/ollamaService.ts | 20 ++++++++++++++++++-- src/services/openrouterService.ts | 18 ++++++++++++++---- 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/src/services/ollamaService.ts b/src/services/ollamaService.ts index ac12108..4f4918a 100644 --- a/src/services/ollamaService.ts +++ b/src/services/ollamaService.ts @@ -4,6 +4,7 @@ import { wrapLanguageModel, } from "ai"; import { createOllama } from "ollama-ai-provider-v2"; +import { DEFAULT_CONFIG } from "@/lib/constants.ts"; import type { CommitMessage } from "@/lib/index.d.ts"; import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; @@ -14,8 +15,23 @@ class OllamaService extends ModelService { prompt: string, attempt = 1 ): Promise { - const baseURL = (await ConfigService.get("ollama", "baseUrl")).unwrap(); - const model = (await ConfigService.get("provider", "model")).unwrap(); + logDebug( + `[ollamaService.generateCommitMessage] ENTRY attempt=${attempt}, prompt.length=${prompt.length}` + ); + + const baseURLResult = await ConfigService.get("ollama", "baseUrl"); + const baseURL = baseURLResult.isOk() + ? baseURLResult.ok + : DEFAULT_CONFIG.ollama.baseUrl; + + const modelResult = await ConfigService.get("provider", "model"); + if (modelResult.isError()) { + throw new Error( + "provider.model is required for Ollama. Please set it in your config." 
+ ); + } + const model = modelResult.ok; + const maxRetries = await ModelService.getMaxRetries(); logDebug( diff --git a/src/services/openrouterService.ts b/src/services/openrouterService.ts index 21bdd26..cdfd31e 100644 --- a/src/services/openrouterService.ts +++ b/src/services/openrouterService.ts @@ -11,6 +11,7 @@ import { generateText, wrapLanguageModel, } from "ai"; +import { DEFAULT_CONFIG } from "@/lib/constants.ts"; import type { CommitMessage } from "@/lib/index.d.ts"; import { logDebug } from "@/lib/logger.ts"; import ConfigService from "./configService.ts"; @@ -26,10 +27,19 @@ class OpenRouterService extends ModelService { ); try { const apiKey = await ConfigService.getApiKey("OpenRouter"); - const model = (await ConfigService.get("provider", "model")).unwrap(); - const baseURL = ( - await ConfigService.get("openrouter", "baseUrl") - ).unwrap(); + + const modelResult = await ConfigService.get("provider", "model"); + if (modelResult.isError()) { + throw new Error( + "provider.model is required for OpenRouter. Please set it in your config." + ); + } + const model = modelResult.ok; + + const baseURLResult = await ConfigService.get("openrouter", "baseUrl"); + const baseURL = baseURLResult.isOk() + ? 
baseURLResult.ok + : DEFAULT_CONFIG.openrouter.baseUrl; const maxRetries = await ModelService.getMaxRetries(); logDebug( From 57dab6b04bdaecfd3467883b4f9946664f4addc3 Mon Sep 17 00:00:00 2001 From: Ahmad Othman Date: Wed, 11 Mar 2026 03:45:31 +0200 Subject: [PATCH 9/9] refactor: Make Ollama/OpenRouter baseUrl optional - Updated config types to allow optional baseUrl - Modified config service to handle optional values - Adjusted Ollama/OpenRouter services to use default baseUrl if not provided - Updated AGENTS.md header --- AGENTS.md | 2 +- src/lib/configServiceTypes.d.ts | 4 ++-- src/services/configService.ts | 6 +++++- src/services/configValidationService.ts | 20 ++++++++++++-------- src/services/ollamaService.ts | 7 ++++--- src/services/openrouterService.ts | 7 ++++--- 6 files changed, 28 insertions(+), 18 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 4b1ec02..41296b6 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,4 +1,4 @@ -# AGENTS.md - Commit Sage Developer Guide +# Commit Sage Developer Guide This file provides guidelines for agents working on the Commit Sage codebase. 
diff --git a/src/lib/configServiceTypes.d.ts b/src/lib/configServiceTypes.d.ts index eb0e383..f9fdbed 100644 --- a/src/lib/configServiceTypes.d.ts +++ b/src/lib/configServiceTypes.d.ts @@ -7,13 +7,13 @@ type GeneralConfig = { // Configuration for the Ollama provider (self-hosted, requires baseUrl) type OllamaConfig = { model: string; - baseUrl: "http://localhost:11434" | (string & {}); + baseUrl?: "http://localhost:11434" | (string & {}); }; // Configuration for the OpenRouter meta-provider type OpenRouterConfig = { model: string; - baseUrl: "https://openrouter.ai/api/v1" | (string & {}); + baseUrl?: "https://openrouter.ai/api/v1" | (string & {}); }; // Configuration for commit-related settings diff --git a/src/services/configService.ts b/src/services/configService.ts index f22f4cf..a4e38d5 100644 --- a/src/services/configService.ts +++ b/src/services/configService.ts @@ -192,7 +192,11 @@ class ConfigService { const configResult = await ConfigService.load(); if (configResult.isError()) return Err(configResult.error); - const value = configResult.ok[section][key] ?? DEFAULT_CONFIG[section][key]; + const sectionValue = configResult.ok[section]; + const value = + sectionValue && typeof sectionValue === "object" && key in sectionValue + ? 
sectionValue[key] + : DEFAULT_CONFIG[section]?.[key]; return Ok(value); } diff --git a/src/services/configValidationService.ts b/src/services/configValidationService.ts index 3d0b93e..aaecc4e 100644 --- a/src/services/configValidationService.ts +++ b/src/services/configValidationService.ts @@ -32,14 +32,18 @@ const ConfigSchema = a.object( initialRetryDelayMs: a.uint16(), }) ), - ollama: a.object({ - model: a.string(), - baseUrl: a.optional(a.string()), - }), - openrouter: a.object({ - model: a.string(), - baseUrl: a.optional(a.string()), - }), + ollama: a.optional( + a.object({ + model: a.string(), + baseUrl: a.optional(a.string()), + }) + ), + openrouter: a.optional( + a.object({ + model: a.string(), + baseUrl: a.optional(a.string()), + }) + ), commit: a.object({ autoCommit: a.optional(a.boolean()), autoPush: a.optional(a.boolean()), diff --git a/src/services/ollamaService.ts b/src/services/ollamaService.ts index 4f4918a..dc51111 100644 --- a/src/services/ollamaService.ts +++ b/src/services/ollamaService.ts @@ -20,9 +20,10 @@ class OllamaService extends ModelService { ); const baseURLResult = await ConfigService.get("ollama", "baseUrl"); - const baseURL = baseURLResult.isOk() - ? baseURLResult.ok - : DEFAULT_CONFIG.ollama.baseUrl; + const baseURL = + baseURLResult.isOk() && baseURLResult.ok + ? baseURLResult.ok + : (DEFAULT_CONFIG.ollama.baseUrl as string); const modelResult = await ConfigService.get("provider", "model"); if (modelResult.isError()) { diff --git a/src/services/openrouterService.ts b/src/services/openrouterService.ts index cdfd31e..40efe63 100644 --- a/src/services/openrouterService.ts +++ b/src/services/openrouterService.ts @@ -37,9 +37,10 @@ class OpenRouterService extends ModelService { const model = modelResult.ok; const baseURLResult = await ConfigService.get("openrouter", "baseUrl"); - const baseURL = baseURLResult.isOk() - ? 
baseURLResult.ok - : DEFAULT_CONFIG.openrouter.baseUrl; + const baseURL = + baseURLResult.isOk() && baseURLResult.ok + ? baseURLResult.ok + : (DEFAULT_CONFIG.openrouter.baseUrl as string); const maxRetries = await ModelService.getMaxRetries(); logDebug(