159 changes: 24 additions & 135 deletions package-lock.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion plugins/azure-openai/package.json
@@ -30,7 +30,7 @@
"openai": "^4.95.0"
},
"peerDependencies": {
"genkit": "^0.9.0 || ^1.0.0"
"genkit": "^1.19.3"
},
"devDependencies": {
"@types/node": "^20.11.16",
8 changes: 4 additions & 4 deletions plugins/azure-openai/src/dalle.ts
@@ -14,10 +14,11 @@
* limitations under the License.
*/

import type { GenerateRequest, GenerateResponseData, Genkit } from 'genkit';
import type { GenerateRequest, GenerateResponseData } from 'genkit';
import { GenerationCommonConfigSchema, Message, z } from 'genkit';
import type { ModelAction } from 'genkit/model';
import { modelRef } from 'genkit/model';
import { model } from 'genkit/plugin';
import AzureOpenAI from 'openai';
import {
ImageGenerateParams,
@@ -33,7 +34,7 @@ export const DallE3ConfigSchema = GenerationCommonConfigSchema.extend({
});

export const dallE3 = modelRef({
name: 'azure-openai/dall-e-3',
name: 'dall-e-3',
info: {
label: 'OpenAI - DALL-E 3',
supports: {
@@ -93,10 +94,9 @@ function toGenerateResponse(result: ImagesResponse): GenerateResponseData {
}

export function dallE3Model(
ai: Genkit,
client: AzureOpenAI
): ModelAction<typeof DallE3ConfigSchema> {
return ai.defineModel<typeof DallE3ConfigSchema>(
return model<typeof DallE3ConfigSchema>(
{
name: dallE3.name,
...dallE3.info,
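Reviewer note on dalle.ts: the factory no longer takes a Genkit instance; it returns a standalone action built with the `model()` helper from `genkit/plugin`, and the ref name drops the `azure-openai/` prefix (presumably re-added by the v2 plugin's namespace). A minimal sketch of that shape, assuming `model()` keeps the same `(definition, handler)` arguments as `ai.defineModel()` did:

```ts
// Sketch only, not the plugin source. The config schema, the un-prefixed name
// handling, and the placeholder handler body are illustrative assumptions.
import { z } from 'genkit';
import type { ModelAction } from 'genkit/model';
import { model } from 'genkit/plugin';
import AzureOpenAI from 'openai';

const ExampleConfigSchema = z.object({
  size: z.enum(['1024x1024', '1792x1024']).optional(),
});

export function exampleImageModel(
  client: AzureOpenAI
): ModelAction<typeof ExampleConfigSchema> {
  // v1: ai.defineModel({ name: 'azure-openai/dall-e-3', ... }, handler)
  // v2: a standalone action; the plugin wrapper presumably adds the namespace.
  return model<typeof ExampleConfigSchema>(
    {
      name: 'dall-e-3',
      configSchema: ExampleConfigSchema,
    },
    async (request) => {
      // Placeholder handler: the real implementation builds ImageGenerateParams
      // from `request`, calls client.images.generate(...), and maps the
      // ImagesResponse via toGenerateResponse().
      void client;
      void request;
      return { candidates: [] };
    }
  );
}
```

Dropping the `Genkit` parameter lets the action be constructed without a live registry, which is presumably what allows `genkitPluginV2` to collect and register actions lazily.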
23 changes: 13 additions & 10 deletions plugins/azure-openai/src/embedder.ts
@@ -16,6 +16,7 @@

import type { Genkit } from 'genkit';
import { embedderRef, z } from 'genkit';
import { embedder } from 'genkit/plugin';
import { AzureOpenAI } from 'openai';

export const TextEmbeddingConfigSchema = z.object({
@@ -30,7 +31,7 @@ export type TextEmbeddingGeckoConfig = z.infer<
export const TextEmbeddingInputSchema = z.string();

export const textEmbedding3Small = embedderRef({
name: 'azure-openai/text-embedding-3-small',
name: 'text-embedding-3-small',
configSchema: TextEmbeddingConfigSchema,
info: {
dimensions: 1536,
@@ -42,7 +43,7 @@ export const textEmbedding3Small = embedderRef({
});

export const textEmbedding3Large = embedderRef({
name: 'azure-openai/text-embedding-3-large',
name: 'text-embedding-3-large',
configSchema: TextEmbeddingConfigSchema,
info: {
dimensions: 3072,
@@ -54,7 +55,7 @@ export const textEmbedding3Large = embedderRef({
});

export const textEmbeddingAda002 = embedderRef({
name: 'azure-openai/text-embedding-ada-002',
name: 'text-embedding-ada-002',
configSchema: TextEmbeddingConfigSchema,
info: {
dimensions: 1536,
@@ -71,17 +72,19 @@ export const SUPPORTED_EMBEDDING_MODELS = {
'text-embedding-ada-002': textEmbeddingAda002,
};

export function openaiEmbedder(ai: Genkit, name: string, client: AzureOpenAI) {
const model = SUPPORTED_EMBEDDING_MODELS[name];
if (!model) throw new Error(`Unsupported model: ${name}`);
export function openaiEmbedder(name: string, client: AzureOpenAI) {
const modelRef = SUPPORTED_EMBEDDING_MODELS[name];
if (!modelRef) throw new Error(`Unsupported model: ${name}`);

return ai.defineEmbedder(
return embedder(
{
info: model.info!,
info: modelRef.info!,
configSchema: TextEmbeddingConfigSchema,
name: model.name,
name: modelRef.name,
},
async (input, options) => {
async (request) => {
const { input, options } = request;

const embeddings = await client.embeddings.create({
model: name,
input: input.map((d) => d.text),
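Reviewer note on embedder.ts: besides dropping the Genkit instance, the handler signature changes from `(input, options)` to a single request object that is destructured. A hedged sketch of the new shape, with an illustrative config schema and an assumed embed-response mapping:

```ts
// Sketch only: the v2 embedder shape this file moves to. The config schema,
// dimensions handling, and response mapping are assumptions based on the
// destructuring shown in the diff, not the exact plugin source.
import { z } from 'genkit';
import { embedder } from 'genkit/plugin';
import { AzureOpenAI } from 'openai';

const ExampleEmbedConfigSchema = z.object({
  dimensions: z.number().optional(),
});

export function exampleEmbedder(client: AzureOpenAI) {
  return embedder(
    {
      name: 'text-embedding-3-small',
      configSchema: ExampleEmbedConfigSchema,
    },
    // v1 handlers received (input, options); the v2 handler receives one
    // request object carrying both.
    async (request) => {
      const { input, options } = request;
      const response = await client.embeddings.create({
        model: 'text-embedding-3-small',
        input: input.map((d) => d.text),
        dimensions: options?.dimensions,
      });
      // Assumed Genkit embed-response shape: one embedding per input document.
      return {
        embeddings: response.data.map((d) => ({ embedding: d.embedding })),
      };
    }
  );
}
```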
56 changes: 27 additions & 29 deletions plugins/azure-openai/src/gpt.ts
@@ -17,19 +17,17 @@
import { GenerationCommonConfigSchema, Message, z } from 'genkit';
import type {
GenerateRequest,
Genkit,
MessageData,
Part,
Role,
StreamingCallback,
ToolRequestPart,
} from 'genkit';
import {
CandidateData,
GenerateResponseChunkData,
modelRef,
ToolDefinition,
} from 'genkit/model';
import { model } from 'genkit/plugin';
import { AzureOpenAI } from 'openai';
import {
type ChatCompletion,
@@ -79,7 +77,7 @@ export const OpenAiConfigSchema = GenerationCommonConfigSchema.extend({
});

export const gpt4o = modelRef({
name: 'azure-openai/gpt-4o',
name: 'gpt-4o',
info: {
versions: ['gpt-4o'],
label: 'OpenAI - GPT-4o',
@@ -95,7 +93,7 @@ export const gpt4o = modelRef({
});

export const gpt4oMini = modelRef({
name: 'azure-openai/gpt-4o-mini',
name: 'gpt-4o-mini',
info: {
versions: ['gpt-4o-mini'],
label: 'OpenAI - GPT-4o Mini',
@@ -111,7 +109,7 @@ export const gpt4oMini = modelRef({
});

export const gpt4 = modelRef({
name: 'azure-openai/gpt-4',
name: 'gpt-4',
info: {
versions: ['gpt-4', 'gpt-4-32k'],
label: 'OpenAI - GPT-4',
@@ -127,7 +125,7 @@ export const gpt4 = modelRef({
});

export const gpt45 = modelRef({
name: 'azure-openai/gpt-4.5',
name: 'gpt-4.5',
info: {
versions: ['gpt-4.5-preview'],
label: 'OpenAI - GPT-4.5',
@@ -143,7 +141,7 @@ export const gpt45 = modelRef({
});

export const gpt41 = modelRef({
name: 'azure-openai/gpt-4.1',
name: 'gpt-4.1',
info: {
versions: ['gpt-4.1'],
label: 'OpenAI - GPT-4.1',
@@ -159,7 +157,7 @@ export const gpt41 = modelRef({
});

export const gpt41Mini = modelRef({
name: 'azure-openai/gpt-4.1-mini',
name: 'gpt-4.1-mini',
info: {
versions: ['gpt-4.1-mini'],
label: 'OpenAI - GPT-4.1 Mini',
@@ -175,7 +173,7 @@ export const gpt41Mini = modelRef({
});

export const gpt41Nano = modelRef({
name: 'azure-openai/gpt-4.1-nano',
name: 'gpt-4.1-nano',
info: {
versions: ['gpt-4.1-nano'],
label: 'OpenAI - GPT-4.1 Nano',
@@ -191,7 +189,7 @@ export const gpt41Nano = modelRef({
});

export const o1 = modelRef({
name: 'azure-openai/o1',
name: 'o1',
info: {
versions: ['o1'],
label: 'OpenAI - o1',
@@ -207,7 +205,7 @@ export const o1 = modelRef({
});

export const o1Mini = modelRef({
name: 'azure-openai/o1-mini',
name: 'o1-mini',
info: {
versions: ['o1-mini'],
label: 'OpenAI - o1 Mini',
@@ -223,7 +221,7 @@ export const o1Mini = modelRef({
});

export const o1Preview = modelRef({
name: 'azure-openai/o1-preview',
name: 'o1-preview',
info: {
versions: ['o1-preview'],
label: 'OpenAI - o1 Preview',
@@ -239,7 +237,7 @@ export const o1Preview = modelRef({
});

export const o3 = modelRef({
name: 'azure-openai/o3',
name: 'o3',
info: {
versions: ['o3'],
label: 'OpenAI - o3',
@@ -255,7 +253,7 @@ export const o3 = modelRef({
});

export const o4Mini = modelRef({
name: 'azure-openai/o4-mini',
name: 'o4-mini',
info: {
versions: ['o4-mini'],
label: 'OpenAI - o4 Mini',
@@ -271,7 +269,7 @@ export const o4Mini = modelRef({
});

export const o3Mini = modelRef({
name: 'azure-openai/o3-mini',
name: 'o3-mini',
info: {
versions: ['o3-mini'],
label: 'OpenAI - o3 Mini',
@@ -287,7 +285,7 @@ export const o3Mini = modelRef({
});

export const gpt35Turbo = modelRef({
name: 'azure-openai/gpt-3.5-turbo',
name: 'gpt-3.5-turbo',
info: {
versions: ['gpt-3.5-turbo', 'gpt-35-turbo-instruct', 'gpt-35-turbo-16k'],
label: 'OpenAI - GPT-3.5 Turbo',
@@ -628,24 +626,24 @@ export function toOpenAiRequestBody(
/**
 * Creates the Genkit model action for a supported Azure OpenAI chat (GPT) model.
 */
export function gptModel(ai: Genkit, name: string, client: AzureOpenAI) {
const modelId = `azure-openai/${name}`;
const model = SUPPORTED_GPT_MODELS[name];
if (!model) throw new Error(`Unsupported model: ${name}`);
export function gptModel(name: string, client: AzureOpenAI) {
const modelId = `${name}`;
const modelRef = SUPPORTED_GPT_MODELS[name];
if (!modelRef) throw new Error(`Unsupported model: ${name}`);

return ai.defineModel(
return model(
{
name: modelId,
...model.info,
...modelRef.info,
configSchema: SUPPORTED_GPT_MODELS[name].configSchema,
},
async (
request,
streamingCallback?: StreamingCallback<GenerateResponseChunkData>
options,
request
) => {
let response: ChatCompletion;
const body = toOpenAiRequestBody(name, request);
if (streamingCallback) {
const body = toOpenAiRequestBody(name, options);
if (request.sendChunk) {
const stream = client.beta.chat.completions.stream({
...body,
stream: true,
@@ -656,7 +654,7 @@ export function gptModel(ai: Genkit, name: string, client: AzureOpenAI) {
for await (const chunk of stream) {
chunk.choices?.forEach((chunk) => {
const c = fromOpenAiChunkChoice(chunk);
streamingCallback({
request.sendChunk({
index: c.index,
content: c.message.content,
});
@@ -668,7 +666,7 @@ export function gptModel(ai: Genkit, name: string, client: AzureOpenAI) {
}
return {
candidates: response.choices.map((c) =>
fromOpenAiChoice(c, request.output?.format === 'json')
fromOpenAiChoice(c, options.output?.format === 'json')
),
usage: {
inputTokens: response.usage?.prompt_tokens,
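Reviewer note on gpt.ts: streaming is no longer signalled by an optional `streamingCallback` parameter; the second handler argument now carries `sendChunk` (the diff names the arguments `options` and `request`, which is easy to misread). A rough sketch of the streaming/non-streaming split under those assumptions, with a hypothetical `buildBody` standing in for `toOpenAiRequestBody`:

```ts
// Sketch only. `buildBody` is a hypothetical stand-in for toOpenAiRequestBody,
// and the chunk/callback shapes mirror what the diff passes to sendChunk; they
// are assumptions, not the exact Genkit types.
import { AzureOpenAI } from 'openai';
import type { ChatCompletion } from 'openai/resources/chat/completions';

declare function buildBody(modelName: string, generateRequest: unknown): any;

async function runChatCompletion(
  client: AzureOpenAI,
  modelName: string,
  generateRequest: unknown,
  sendChunk?: (chunk: { index: number; content: { text: string }[] }) => void
): Promise<ChatCompletion> {
  const body = buildBody(modelName, generateRequest);
  if (sendChunk) {
    // Streaming branch: forward each converted chunk, then await the final completion.
    const stream = client.beta.chat.completions.stream({ ...body, stream: true });
    for await (const chunk of stream) {
      for (const choice of chunk.choices ?? []) {
        sendChunk({
          index: choice.index,
          content: [{ text: choice.delta?.content ?? '' }],
        });
      }
    }
    return stream.finalChatCompletion();
  }
  // Non-streaming branch.
  return client.chat.completions.create(body);
}
```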
34 changes: 20 additions & 14 deletions plugins/azure-openai/src/index.ts
@@ -15,7 +15,7 @@
*/

import type { Genkit } from 'genkit';
import { genkitPlugin } from 'genkit/plugin';
import { genkitPluginV2, ResolvableAction } from 'genkit/plugin';
import { AzureClientOptions, AzureOpenAI } from 'openai';

import { dallE3, dallE3Model } from './dalle.js';
@@ -74,19 +74,25 @@ export {
export interface PluginOptions extends AzureClientOptions {}

export const azureOpenAI = (options?: PluginOptions) =>
genkitPlugin('azure-openai', async (ai: Genkit) => {
const client = new AzureOpenAI(options);
for (const name of Object.keys(SUPPORTED_GPT_MODELS)) {
gptModel(ai, name, client);
}
dallE3Model(ai, client);
whisper1Model(ai, client);
for (const name of Object.keys(SUPPORTED_TTS_MODELS)) {
ttsModel(ai, name, client);
}
for (const name of Object.keys(SUPPORTED_EMBEDDING_MODELS)) {
openaiEmbedder(ai, name, client);
}
genkitPluginV2({
name: 'azure-openai',
init: async () => {
const client = new AzureOpenAI(options);
const actions: ResolvableAction[] = [];

for (const name of Object.keys(SUPPORTED_GPT_MODELS)) {
actions.push(gptModel(name, client));
}
actions.push(dallE3Model(client));
actions.push(whisper1Model(client));
for (const name of Object.keys(SUPPORTED_TTS_MODELS)) {
actions.push(ttsModel(name, client));
}
for (const name of Object.keys(SUPPORTED_EMBEDDING_MODELS)) {
actions.push(openaiEmbedder(name, client));
}
return actions;
},
});

export default azureOpenAI;
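Reviewer note on index.ts: `genkitPluginV2` takes a named `init` callback that returns the collected actions instead of registering them against a Genkit instance. From the caller's side, usage should look roughly like the hedged sketch below; the package name, the action name, and the credential setup are assumptions, not taken from this diff.

```ts
// Sketch only: consuming the migrated plugin. Assumes the plugin is published
// as 'genkitx-azure-openai', that actions are namespaced as 'azure-openai/<name>',
// and that the AzureOpenAI client reads its endpoint and API key (or Entra ID
// credentials) plus a matching deployment from the environment.
import { genkit } from 'genkit';
import { azureOpenAI } from 'genkitx-azure-openai';

const ai = genkit({
  // PluginOptions extends AzureClientOptions, so client settings pass straight through.
  plugins: [azureOpenAI({ apiVersion: '2024-10-21' })],
});

async function main() {
  const { text } = await ai.generate({
    model: 'azure-openai/gpt-4o',
    prompt: 'Say hello from the v2 Azure OpenAI plugin.',
  });
  console.log(text);
}

main().catch(console.error);
```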