From 87b01c379136ac533030839bb06945238ef6e5db Mon Sep 17 00:00:00 2001
From: Berk Durmus
Date: Sun, 25 Jan 2026 23:13:01 +0300
Subject: [PATCH] feat(prompt-migrator): add prompt conversion across providers
 with optional BYOK AI rewrite

---
 components/utils/prompt-migrator.ts | 229 ++++
 components/utils/tools-list.ts      |   6 +
 pages/utilities/prompt-migrator.tsx | 624 ++++++++++++++++++++++++++++
 3 files changed, 859 insertions(+)
 create mode 100644 components/utils/prompt-migrator.ts
 create mode 100644 pages/utilities/prompt-migrator.tsx

diff --git a/components/utils/prompt-migrator.ts b/components/utils/prompt-migrator.ts
new file mode 100644
index 0000000..9ee33db
--- /dev/null
+++ b/components/utils/prompt-migrator.ts
@@ -0,0 +1,229 @@
+export type ProviderKey =
+  | "openai"
+  | "anthropic"
+  | "gemini"
+  | "mistral"
+  | "azure-openai";
+
+export type PromptFormat = "messages" | "template";
+
+export type NormalizedRole =
+  | "system"
+  | "user"
+  | "assistant"
+  | "tool"
+  | "function";
+
+export type NormalizedMessage = {
+  role: NormalizedRole;
+  content: string;
+};
+
+export const PROVIDER_OPTIONS = [
+  {
+    value: "openai" as const,
+    label: "OpenAI",
+    models: ["gpt-4o-mini", "gpt-4o", "gpt-4.1-mini"],
+  },
+  {
+    value: "anthropic" as const,
+    label: "Anthropic",
+    models: ["claude-3-5-sonnet-latest", "claude-3-5-haiku-latest"],
+  },
+  {
+    value: "gemini" as const,
+    label: "Google Gemini",
+    models: ["gemini-1.5-flash", "gemini-1.5-pro"],
+  },
+  {
+    value: "mistral" as const,
+    label: "Mistral",
+    models: ["mistral-large-latest", "mistral-small-latest"],
+  },
+  {
+    value: "azure-openai" as const,
+    label: "Azure OpenAI",
+    models: ["gpt-4o-mini", "gpt-4o", "gpt-4.1-mini"],
+  },
+];
+
+const KNOWN_ROLES: NormalizedRole[] = [
+  "system",
+  "user",
+  "assistant",
+  "tool",
+  "function",
+];
+
+const toRole = (role: string | undefined): NormalizedRole => {
+  if (role && KNOWN_ROLES.includes(role as NormalizedRole)) {
+    return role as NormalizedRole;
+  }
+  return "user";
+};
+
+const contentToString = (content: unknown): string => {
+  if (typeof content === "string") {
+    return content;
+  }
+  if (Array.isArray(content)) {
+    const parts = content
+      .map((part) => {
+        if (typeof part === "string") return part;
+        if (part && typeof part === "object" && "text" in part) {
+          return String((part as { text?: string }).text ?? "");
+        }
+        return "";
+      })
+      .filter(Boolean);
+    return parts.join("\n");
+  }
+  if (content && typeof content === "object" && "text" in content) {
+    return String((content as { text?: string }).text ?? "");
+  }
+  if (content === null || content === undefined) {
+    return "";
+  }
+  return String(content);
+};
+
+export const normalizeMessagesFromInput = (
+  input: string
+): { messages: NormalizedMessage[]; error?: string } => {
+  const trimmed = input.trim();
+  if (!trimmed) {
+    return { messages: [], error: "Please paste prompt input." };
+  }
+
+  try {
+    const parsed = JSON.parse(trimmed);
+
+    const messages: NormalizedMessage[] = [];
+
+    if (parsed && typeof parsed === "object" && "system" in parsed) {
+      const systemValue = (parsed as { system?: unknown }).system;
+      if (systemValue) {
+        messages.push({
+          role: "system",
+          content: contentToString(systemValue),
+        });
+      }
+    }
+
+    if (Array.isArray(parsed)) {
+      parsed.forEach((message) => {
+        const role = toRole(message?.role);
+        const content = contentToString(message?.content);
+        messages.push({ role, content });
+      });
+      return { messages };
+    }
+
+    if (parsed && typeof parsed === "object") {
+      const objectParsed = parsed as {
+        messages?: unknown;
+        contents?: unknown;
+      };
+
+      if (Array.isArray(objectParsed.messages)) {
+        objectParsed.messages.forEach((message) => {
+          const role = toRole((message as { role?: string })?.role);
+          const content = contentToString((message as { content?: unknown })
+            ?.content);
+          messages.push({ role, content });
+        });
+        return { messages };
+      }
+
+      if (Array.isArray(objectParsed.contents)) {
+        objectParsed.contents.forEach((item) => {
+          const role = toRole((item as { role?: string })?.role);
+          const content = contentToString((item as { parts?: unknown })?.parts);
+          messages.push({ role, content });
+        });
+        return { messages };
+      }
+    }
+
+    return { messages: [], error: "Unsupported prompt shape." };
+  } catch (error) {
+    return {
+      messages: [],
+      error: error instanceof Error ? error.message : "Invalid JSON input.",
+    };
+  }
+};
+
+export const createMessagesFromTemplate = (
+  template: string,
+  isSystem: boolean
+): NormalizedMessage[] => {
+  const trimmed = template.trim();
+  if (!trimmed) return [];
+  return [
+    {
+      role: isSystem ? "system" : "user",
+      content: trimmed,
+    },
+  ];
+};
+
+const normalizeRoleForOpenAI = (role: NormalizedRole) => {
+  if (role === "function") return "tool";
+  return role;
+};
+
+export const convertToProvider = (
+  messages: NormalizedMessage[],
+  provider: ProviderKey
+) => {
+  if (provider === "anthropic") {
+    const systemMessages = messages
+      .filter((message) => message.role === "system")
+      .map((message) => message.content)
+      .filter(Boolean);
+
+    const nonSystemMessages = messages.filter(
+      (message) => message.role !== "system"
+    );
+
+    return {
+      system: systemMessages.join("\n\n"),
+      messages: nonSystemMessages.map((message) => ({
+        role: message.role === "assistant" ? "assistant" : "user",
+        content: message.content,
+      })),
+    };
+  }
+
+  if (provider === "gemini") {
+    const systemMessages = messages
+      .filter((message) => message.role === "system")
+      .map((message) => message.content)
+      .filter(Boolean);
+
+    const contents = messages
+      .filter((message) => message.role !== "system")
+      .map((message) => ({
+        role: message.role === "assistant" ? "model" : "user",
+        parts: [{ text: message.content }],
+      }));
+
+    return {
+      system_instruction: systemMessages.length
+        ? { parts: [{ text: systemMessages.join("\n\n") }] }
+        : undefined,
+      contents,
+    };
+  }
+
+  return {
+    messages: messages.map((message) => ({
+      role: normalizeRoleForOpenAI(message.role),
+      content: message.content,
+    })),
+  };
+};
+
+export const formatJson = (value: unknown) =>
+  JSON.stringify(value, null, 2);
diff --git a/components/utils/tools-list.ts b/components/utils/tools-list.ts
index aa8ef64..ce7569a 100644
--- a/components/utils/tools-list.ts
+++ b/components/utils/tools-list.ts
@@ -101,6 +101,12 @@ export const tools = [
       "Test and debug your regular expressions in real-time. Provides quick feedback on pattern matching for strings.",
     link: "/utilities/regex-tester",
   },
+  {
+    title: "Prompt Migrator",
+    description:
+      "Convert prompts across OpenAI, Anthropic, Gemini, Mistral, and Azure OpenAI formats with optional BYOK AI rewrite.",
+    link: "/utilities/prompt-migrator",
+  },
   {
     title: "CSS Units Converter",
     description:
diff --git a/pages/utilities/prompt-migrator.tsx b/pages/utilities/prompt-migrator.tsx
new file mode 100644
index 0000000..2e7cc0c
--- /dev/null
+++ b/pages/utilities/prompt-migrator.tsx
@@ -0,0 +1,624 @@
+import { useCallback, useEffect, useMemo, useState } from "react";
+import Header from "@/components/Header";
+import { CMDK } from "@/components/CMDK";
+import Meta from "@/components/Meta";
+import PageHeader from "@/components/PageHeader";
+import { Card } from "@/components/ds/CardComponent";
+import { Textarea } from "@/components/ds/TextareaComponent";
+import { Input } from "@/components/ds/InputComponent";
+import { Label } from "@/components/ds/LabelComponent";
+import { Button } from "@/components/ds/ButtonComponent";
+import { Checkbox } from "@/components/ds/CheckboxComponent";
+import {
+  Tabs,
+  TabsContent,
+  TabsList,
+  TabsTrigger,
+} from "@/components/ds/TabsComponent";
+import { Combobox } from "@/components/ds/ComboboxComponent";
+import CallToActionGrid from "@/components/CallToActionGrid";
+import GitHubContribution from "@/components/GitHubContribution";
+import {
+  NormalizedMessage,
+  ProviderKey,
+  PROVIDER_OPTIONS,
+  convertToProvider,
+  createMessagesFromTemplate,
+  formatJson,
+  normalizeMessagesFromInput,
+  PromptFormat,
+} from "@/components/utils/prompt-migrator";
+
+type MigrationOutput = Partial<Record<ProviderKey, string>>;
+
+const AI_PROVIDER_OPTIONS = PROVIDER_OPTIONS.filter(
+  (provider) => provider.value !== "azure-openai"
+);
+
+const DEFAULT_TARGETS: ProviderKey[] = [
+  "openai",
+  "anthropic",
+  "gemini",
+];
+
+const buildApiKeyStorageKey = (provider: ProviderKey) =>
+  `prompt-migrator-key-${provider}`;
+
+const extractJsonFromText = (text: string) => {
+  try {
+    return JSON.parse(text);
+  } catch {
+    const firstIndex = text.indexOf("{");
+    const lastIndex = text.lastIndexOf("}");
+    if (firstIndex !== -1 && lastIndex !== -1 && lastIndex > firstIndex) {
+      return JSON.parse(text.slice(firstIndex, lastIndex + 1));
+    }
+    throw new Error("Failed to parse JSON from AI response.");
+  }
+};
+
+const callOpenAI = async (
+  model: string,
+  apiKey: string,
+  systemPrompt: string,
+  userPrompt: string
+) => {
+  const response = await fetch("https://api.openai.com/v1/chat/completions", {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: `Bearer ${apiKey}`,
+    },
+    body: JSON.stringify({
+      model,
+      temperature: 0.2,
+      messages: [
+        { role: "system", content: systemPrompt },
+        { role: "user", content: userPrompt },
+      ],
+    }),
+  });
+
+  if (!response.ok) {
+    throw new Error(`OpenAI error: ${response.status}`);
+  }
+
+  const data = await response.json();
+  return data?.choices?.[0]?.message?.content ?? "";
+};
+
+const callMistral = async (
+  model: string,
+  apiKey: string,
+  systemPrompt: string,
+  userPrompt: string
+) => {
+  const response = await fetch("https://api.mistral.ai/v1/chat/completions", {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: `Bearer ${apiKey}`,
+    },
+    body: JSON.stringify({
+      model,
+      temperature: 0.2,
+      messages: [
+        { role: "system", content: systemPrompt },
+        { role: "user", content: userPrompt },
+      ],
+    }),
+  });
+
+  if (!response.ok) {
+    throw new Error(`Mistral error: ${response.status}`);
+  }
+
+  const data = await response.json();
+  return data?.choices?.[0]?.message?.content ?? "";
+};
+
+const callAnthropic = async (
+  model: string,
+  apiKey: string,
+  systemPrompt: string,
+  userPrompt: string
+) => {
+  const response = await fetch("https://api.anthropic.com/v1/messages", {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json",
+      "anthropic-version": "2023-06-01",
+      "x-api-key": apiKey,
+    },
+    body: JSON.stringify({
+      model,
+      max_tokens: 1024,
+      system: systemPrompt,
+      messages: [{ role: "user", content: userPrompt }],
+    }),
+  });
+
+  if (!response.ok) {
+    throw new Error(`Anthropic error: ${response.status}`);
+  }
+
+  const data = await response.json();
+  const content = data?.content?.[0]?.text;
+  return content ?? "";
+};
+
+const callGemini = async (
+  model: string,
+  apiKey: string,
+  systemPrompt: string,
+  userPrompt: string
+) => {
+  const response = await fetch(
+    `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${apiKey}`,
+    {
+      method: "POST",
+      headers: { "Content-Type": "application/json" },
+      body: JSON.stringify({
+        system_instruction: { parts: [{ text: systemPrompt }] },
+        contents: [{ role: "user", parts: [{ text: userPrompt }] }],
+      }),
+    }
+  );
+
+  if (!response.ok) {
+    throw new Error(`Gemini error: ${response.status}`);
+  }
+
+  const data = await response.json();
+  return data?.candidates?.[0]?.content?.parts?.[0]?.text ?? "";
+};
+
+const callAiProvider = async (
+  provider: ProviderKey,
+  model: string,
+  apiKey: string,
+  systemPrompt: string,
+  userPrompt: string
+) => {
+  switch (provider) {
+    case "anthropic":
+      return callAnthropic(model, apiKey, systemPrompt, userPrompt);
+    case "gemini":
+      return callGemini(model, apiKey, systemPrompt, userPrompt);
+    case "mistral":
+      return callMistral(model, apiKey, systemPrompt, userPrompt);
+    case "openai":
+    default:
+      return callOpenAI(model, apiKey, systemPrompt, userPrompt);
+  }
+};
+
+export default function PromptMigrator() {
+  const [sourceProvider, setSourceProvider] =
+    useState<ProviderKey>("openai");
+  const [sourceFormat, setSourceFormat] =
+    useState<PromptFormat>("messages");
+  const [promptInput, setPromptInput] = useState("");
+  const [templateIsSystem, setTemplateIsSystem] = useState(false);
+  const [targetProviders, setTargetProviders] =
+    useState<ProviderKey[]>(DEFAULT_TARGETS);
+  const [useAiRewrite, setUseAiRewrite] = useState(false);
+  const [aiProvider, setAiProvider] = useState<ProviderKey>("openai");
+  const [aiModel, setAiModel] = useState(
+    AI_PROVIDER_OPTIONS[0]?.models[0] ?? "gpt-4o-mini"
+  );
+  const [apiKey, setApiKey] = useState("");
+  const [rememberKey, setRememberKey] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+  const [outputs, setOutputs] = useState<MigrationOutput>({});
+  const [isLoading, setIsLoading] = useState(false);
+  const [copiedTarget, setCopiedTarget] = useState<ProviderKey | null>(null);
+  const [activeOutput, setActiveOutput] = useState<ProviderKey>(
+    DEFAULT_TARGETS[0]
+  );
+
+  const aiModelOptions = useMemo(() => {
+    const provider = AI_PROVIDER_OPTIONS.find(
+      (option) => option.value === aiProvider
+    );
+    return provider?.models ?? [];
+  }, [aiProvider]);
+
+  useEffect(() => {
+    if (aiModelOptions.length > 0) {
+      setAiModel((current) =>
+        aiModelOptions.includes(current) ? current : aiModelOptions[0]
+      );
+    }
+  }, [aiModelOptions]);
+
+  useEffect(() => {
+    try {
+      const storedKey = localStorage.getItem(
+        buildApiKeyStorageKey(aiProvider)
+      );
+      if (storedKey) {
+        setApiKey(storedKey);
+        setRememberKey(true);
+      } else {
+        setApiKey("");
+        setRememberKey(false);
+      }
+    } catch {
+      setApiKey("");
+      setRememberKey(false);
+    }
+  }, [aiProvider]);
+
+  useEffect(() => {
+    try {
+      if (!rememberKey) {
+        localStorage.removeItem(buildApiKeyStorageKey(aiProvider));
+        return;
+      }
+      if (apiKey) {
+        localStorage.setItem(buildApiKeyStorageKey(aiProvider), apiKey);
+      }
+    } catch {
+      // Ignore localStorage failures.
+    }
+  }, [aiProvider, apiKey, rememberKey]);
+
+  const toggleTargetProvider = (provider: ProviderKey) => {
+    setTargetProviders((prev) => {
+      if (prev.includes(provider)) {
+        return prev.filter((item) => item !== provider);
+      }
+      return [...prev, provider];
+    });
+  };
+
+  const deterministicOutputs = useCallback(() => {
+    let messages: NormalizedMessage[] = [];
+    let parseError: string | undefined;
+
+    if (sourceFormat === "messages") {
+      const result = normalizeMessagesFromInput(promptInput);
+      messages = result.messages;
+      parseError = result.error;
+    } else {
+      messages = createMessagesFromTemplate(promptInput, templateIsSystem);
+      if (messages.length === 0) {
+        parseError = "Please paste a template to migrate.";
+      }
+    }
+
+    if (parseError) {
+      setError(parseError);
+      setOutputs({});
+      return { messages: [], outputs: {} as MigrationOutput, error: parseError };
+    }
+
+    const nextOutputs: MigrationOutput = {};
+    targetProviders.forEach((provider) => {
+      const converted = convertToProvider(messages, provider);
+      nextOutputs[provider] = formatJson(converted);
+    });
+
+    setOutputs(nextOutputs);
+    return { messages, outputs: nextOutputs };
+  }, [promptInput, sourceFormat, targetProviders, templateIsSystem]);
+
+  const handleCopy = (provider: ProviderKey) => {
+    const value = outputs[provider];
+    if (!value) return;
+    navigator.clipboard.writeText(value).then(() => {
+      setCopiedTarget(provider);
+      setTimeout(() => setCopiedTarget(null), 1200);
+    });
+  };
+
+  const handleClear = () => {
+    setPromptInput("");
+    setOutputs({});
+    setError(null);
+  };
+
+  const handleMigrate = async () => {
+    setError(null);
+    setIsLoading(true);
+    setCopiedTarget(null);
+
+    const deterministic = deterministicOutputs();
+    if (deterministic.error || !useAiRewrite) {
+      setIsLoading(false);
+      return;
+    }
+
+    if (!apiKey) {
+      setError("Add an API key or disable AI rewrite.");
+      setIsLoading(false);
+      return;
+    }
+
+    const systemPrompt =
+      "You are a prompt migrator. Return only valid JSON matching the target provider schema.";
+
+    const results: MigrationOutput = { ...deterministic.outputs };
+
+    await Promise.all(
+      targetProviders.map(async (targetProvider) => {
+        const userPrompt = [
+          `Source provider: ${sourceProvider}`,
+          `Source format: ${sourceFormat}`,
+          `Target provider: ${targetProvider}`,
+          "Input:",
+          promptInput,
+        ].join("\n");
+
+        try {
+          const responseText = await callAiProvider(
+            aiProvider,
+            aiModel,
+            apiKey,
+            systemPrompt,
+            userPrompt
+          );
+          const parsed = extractJsonFromText(responseText);
+          results[targetProvider] = formatJson(parsed);
+        } catch (aiError) {
+          setError(
+            aiError instanceof Error
+              ? aiError.message
+              : "AI rewrite failed."
+          );
+        }
+      })
+    );
+
+    setOutputs(results);
+    setIsLoading(false);
+  };
+
+  const selectedTargetTabs = useMemo(() => {
+    const activeTargets = targetProviders.length
+      ? targetProviders
+      : DEFAULT_TARGETS;
+    return PROVIDER_OPTIONS.map((provider) => provider.value).filter((value) =>
+      activeTargets.includes(value)
+    );
+  }, [targetProviders]);
+
+  useEffect(() => {
+    if (!activeOutput || !selectedTargetTabs.includes(activeOutput)) {
+      setActiveOutput(selectedTargetTabs[0]);
+    }
+  }, [activeOutput, selectedTargetTabs]);
+
[The component's JSX return is omitted here. Recoverable structure: page chrome
(Meta, Header, CMDK, PageHeader, Card, GitHubContribution, CallToActionGrid),
source-format tabs ("Chat messages" / "Template") wired to setSourceFormat, a
source-provider Combobox and target-provider checkboxes built from
PROVIDER_OPTIONS, the prompt textarea, BYOK API-key controls, and per-provider
output tabs with copy buttons.]
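For reviewers, a minimal usage sketch of how the new helpers compose; the sample payload and variable names are illustrative and not part of the patch:

```ts
import {
  normalizeMessagesFromInput,
  convertToProvider,
  formatJson,
} from "@/components/utils/prompt-migrator";

// An OpenAI-style chat payload, as a user would paste it into the tool.
const input = JSON.stringify({
  messages: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Summarize this release." },
  ],
});

const { messages, error } = normalizeMessagesFromInput(input);
if (!error) {
  // Anthropic target: system text is lifted into `system`, and the remaining
  // turns become user/assistant messages.
  console.log(formatJson(convertToProvider(messages, "anthropic")));
}
```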