From 27895e98adf37d723652be8d3957d5ccb2d88f6b Mon Sep 17 00:00:00 2001 From: cruiseli Date: Mon, 28 Jul 2025 15:17:10 +0800 Subject: [PATCH 1/5] feat: add support for Azure OpenAI in CustomLLMContentGenerator\n\n- Introduced conditional initialization for OpenAI and Azure OpenAI based on the provider.\n- Added apiVersion for Azure OpenAI configuration. --- packages/core/src/custom_llm/index.ts | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/packages/core/src/custom_llm/index.ts b/packages/core/src/custom_llm/index.ts index f36a5186..edde99e3 100644 --- a/packages/core/src/custom_llm/index.ts +++ b/packages/core/src/custom_llm/index.ts @@ -12,7 +12,7 @@ import { EmbedContentResponse, EmbedContentParameters, } from '@google/genai'; -import OpenAI from 'openai'; +import {OpenAI, AzureOpenAI} from 'openai'; import { ContentGenerator } from '../core/contentGenerator.js'; import { CustomLLMContentGeneratorConfig, ToolCallMap } from './types.js'; import { extractToolFunctions } from './util.js'; @@ -23,6 +23,7 @@ export class CustomLLMContentGenerator implements ContentGenerator { private apiKey: string = process.env.CUSTOM_LLM_API_KEY || ''; private baseURL: string = process.env.CUSTOM_LLM_ENDPOINT || ''; private modelName: string = process.env.CUSTOM_LLM_MODEL_NAME || ''; + private apiVersion: string = '2023-03-15-preview'; private temperature: number = Number(process.env.CUSTOM_LLM_TEMPERATURE || 0); private maxTokens: number = Number(process.env.CUSTOM_LLM_MAX_TOKENS || 8192); private topP: number = Number(process.env.CUSTOM_LLM_TOP_P || 1); @@ -34,11 +35,20 @@ export class CustomLLMContentGenerator implements ContentGenerator { }; constructor() { - this.model = new OpenAI({ - apiKey: this.apiKey, - baseURL: this.baseURL, - }); - } + const provider = process.env.CUSTOM_LLM_PROVIDER || 'openai'; + if (provider === 'azure') { + this.model = new AzureOpenAI({ + apiKey: this.apiKey, + endpoint: this.baseURL, + apiVersion: this.apiVersion + 
}); } else { this.model = new OpenAI({ apiKey: this.apiKey, baseURL: this.baseURL }); } } /** * Asynchronously generates content responses in a streaming fashion. From 1f4541781bb3c5171e8f0bf5c4079ff9a72bf098 Mon Sep 17 00:00:00 2001 From: cruiseli Date: Mon, 28 Jul 2025 15:21:38 +0800 Subject: [PATCH 2/5] docs: clarify CUSTOM_LLM_PROVIDER usage for Azure OpenAI\n\n- Updated README.md and README.zh-CN.md to specify how to set CUSTOM_LLM_PROVIDER for Azure OpenAI. --- README.md | 2 +- README.zh-CN.md | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 184db529..48a18a38 100644 --- a/README.md +++ b/README.md @@ -68,7 +68,7 @@ Easy LLM CLI supports connecting to any OpenAI-compatible LLM API. You can confi # Enable custom LLM support export USE_CUSTOM_LLM=true -export CUSTOM_LLM_PROVIDER="openai" # LLM provider +export CUSTOM_LLM_PROVIDER="openai" # LLM provider; if you use Azure OpenAI, set to "azure" export CUSTOM_LLM_API_KEY="your-api-key" # Your LLM provider API key export CUSTOM_LLM_ENDPOINT="https://api.your-llm-provider.com/v1" # API endpoint export CUSTOM_LLM_MODEL_NAME="your-model-name" # Model name diff --git a/README.zh-CN.md b/README.zh-CN.md index 7d974af9..6964035f 100644 --- a/README.zh-CN.md +++ b/README.zh-CN.md @@ -66,6 +66,8 @@ Easy LLM 命令行工具支持连接到任何兼容 OpenAI 的 LLM API。你可 ```bash # 启用自定义 LLM 支持 export USE_CUSTOM_LLM=true + +export CUSTOM_LLM_PROVIDER="openai" # LLM 提供商, 如果你使用 Azure OpenAI, 请设置为 "azure" export CUSTOM_LLM_API_KEY="your-api-key" # 你的 LLM 提供商 API 密钥 export CUSTOM_LLM_ENDPOINT="https://api.your-llm-provider.com/v1" # API 端点 export CUSTOM_LLM_MODEL_NAME="your-model-name" # 模型名称 From 05ce1672494a278c24c78c1ef21c38df43334b8e Mon Sep 17 00:00:00 2001 From: cruiseli Date: Wed, 30 Jul 2025 12:30:23 +0800 Subject: [PATCH 3/5] format --- packages/core/src/custom_llm/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/src/custom_llm/index.ts 
b/packages/core/src/custom_llm/index.ts index edde99e3..41e3348c 100644 --- a/packages/core/src/custom_llm/index.ts +++ b/packages/core/src/custom_llm/index.ts @@ -12,7 +12,7 @@ import { EmbedContentResponse, EmbedContentParameters, } from '@google/genai'; -import {OpenAI, AzureOpenAI} from 'openai'; +import { OpenAI, AzureOpenAI } from 'openai'; import { ContentGenerator } from '../core/contentGenerator.js'; import { CustomLLMContentGeneratorConfig, ToolCallMap } from './types.js'; import { extractToolFunctions } from './util.js'; From 1c68171828e248359910263030dc2d301b041d56 Mon Sep 17 00:00:00 2001 From: cruiseli Date: Wed, 30 Jul 2025 12:39:25 +0800 Subject: [PATCH 4/5] add comment and api version env variable --- packages/core/src/custom_llm/index.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/core/src/custom_llm/index.ts b/packages/core/src/custom_llm/index.ts index 41e3348c..75f16f1b 100644 --- a/packages/core/src/custom_llm/index.ts +++ b/packages/core/src/custom_llm/index.ts @@ -23,7 +23,7 @@ export class CustomLLMContentGenerator implements ContentGenerator { private apiKey: string = process.env.CUSTOM_LLM_API_KEY || ''; private baseURL: string = process.env.CUSTOM_LLM_ENDPOINT || ''; private modelName: string = process.env.CUSTOM_LLM_MODEL_NAME || ''; - private apiVersion: string = '2023-03-15-preview'; + private apiVersion: string = process.env.OPENAI_API_VERSION || '2023-03-15-preview'; private temperature: number = Number(process.env.CUSTOM_LLM_TEMPERATURE || 0); private maxTokens: number = Number(process.env.CUSTOM_LLM_MAX_TOKENS || 8192); private topP: number = Number(process.env.CUSTOM_LLM_TOP_P || 1); @@ -40,7 +40,8 @@ export class CustomLLMContentGenerator implements ContentGenerator { this.model = new AzureOpenAI({ apiKey: this.apiKey, endpoint: this.baseURL, - apiVersion: this.apiVersion + apiVersion: this.apiVersion + // Note it must use a version earlier than '2023-03-15-preview'. 
}); } else { this.model = new OpenAI({ From 363c5a0bfc2cef895263f44ad3613c68104d8f8b Mon Sep 17 00:00:00 2001 From: cruiseli Date: Wed, 30 Jul 2025 17:19:52 +0800 Subject: [PATCH 5/5] fix crash on new azure openai version --- packages/core/src/custom_llm/converter.ts | 2 +- packages/core/src/custom_llm/index.ts | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/core/src/custom_llm/converter.ts b/packages/core/src/custom_llm/converter.ts index a26ed104..fa33c2ce 100644 --- a/packages/core/src/custom_llm/converter.ts +++ b/packages/core/src/custom_llm/converter.ts @@ -341,7 +341,7 @@ export class ModelConverter { } } - if (choice.finish_reason === 'tool_calls' && toolCallMap.size > 0) { + if (choice?.finish_reason === 'tool_calls' && toolCallMap.size > 0) { const response = this.toGeminiStreamToolCallsResponse(toolCallMap); toolCallMap.clear(); return { diff --git a/packages/core/src/custom_llm/index.ts b/packages/core/src/custom_llm/index.ts index 75f16f1b..d214a8d7 100644 --- a/packages/core/src/custom_llm/index.ts +++ b/packages/core/src/custom_llm/index.ts @@ -23,7 +23,7 @@ export class CustomLLMContentGenerator implements ContentGenerator { private apiKey: string = process.env.CUSTOM_LLM_API_KEY || ''; private baseURL: string = process.env.CUSTOM_LLM_ENDPOINT || ''; private modelName: string = process.env.CUSTOM_LLM_MODEL_NAME || ''; - private apiVersion: string = process.env.OPENAI_API_VERSION || '2023-03-15-preview'; + private apiVersion: string = process.env.OPENAI_API_VERSION || '2024-12-01-preview'; private temperature: number = Number(process.env.CUSTOM_LLM_TEMPERATURE || 0); private maxTokens: number = Number(process.env.CUSTOM_LLM_MAX_TOKENS || 8192); private topP: number = Number(process.env.CUSTOM_LLM_TOP_P || 1); @@ -40,8 +40,7 @@ export class CustomLLMContentGenerator implements ContentGenerator { this.model = new AzureOpenAI({ apiKey: this.apiKey, endpoint: this.baseURL, - apiVersion: this.apiVersion - // Note it must 
use a version earlier than '2023-03-15-preview'. + apiVersion: this.apiVersion }); } else { this.model = new OpenAI({