diff --git a/api.ts b/api.ts index f3b3ca26d..618672208 100644 --- a/api.ts +++ b/api.ts @@ -4801,6 +4801,10 @@ export interface CustomLLMModel { metadataSendMode?: "off" | "variable" | "destructured"; /** These is the URL we'll use for the OpenAI client's `baseURL`. Ex. https://openrouter.ai/api/v1 */ url: string; + /** + * Custom headers to send with requests to the custom LLM endpoint. + */ + headers?: Record<string, string>; /** * This sets the timeout for the connection to the custom provider without needing to stream any tokens back. Default is 20 seconds. * @min 20