@@ -1,12 +1,12 @@
 lockVersion: 2.0.0
 id: 8b6cd71c-ea04-44da-af45-e43968b5928d
 management:
-  docChecksum: 591fccdc6b495b1230644174853f9113
+  docChecksum: 2e56a000b1486d774bf299b99ef6d4ef
   docVersion: 1.0.0
   speakeasyVersion: 1.666.0
   generationVersion: 2.768.0
-  releaseVersion: 0.2.1
-  configChecksum: 20b0e93ec59772256ddbc8cfd514e30e
+  releaseVersion: 0.2.2
+  configChecksum: eab7e61a24e830c367d7deebb355351e
   repoURL: https://github.com/OpenRouterTeam/typescript-sdk.git
   installationURL: https://github.com/OpenRouterTeam/typescript-sdk
   published: true
@@ -108,6 +108,7 @@ generatedFiles:
   - docs/models/costdetails.md
   - docs/models/createchargerequest.md
   - docs/models/datacollection.md
+  - docs/models/debug.md
   - docs/models/defaultparameters.md
   - docs/models/edgenetworktimeoutresponseerrordata.md
   - docs/models/effort.md
@@ -1504,7 +1505,7 @@ examples:
           use_rss_chat_links: "<value>"
       responses:
         "200":
-          application/json: {"data": [{"id": "openai/gpt-4", "canonical_slug": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "pricing": {"prompt": "0.00003", "completion": "0.00006"}, "context_length": 8192, "architecture": {"modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "top_provider": {"is_moderated": true}, "per_request_limits": null, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "default_parameters": null}]}
+          application/json: {"data": [{"id": "openai/gpt-4", "canonical_slug": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "pricing": {"prompt": 0.00003, "completion": 0.00006}, "context_length": 8192, "architecture": {"modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "top_provider": {"is_moderated": true}, "per_request_limits": null, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "default_parameters": null}]}
           application/rss+xml: "<value>"
         default:
           application/json: {"error": {"code": 400, "message": "Invalid request parameters", "metadata": {"field": "temperature", "reason": "Must be between 0 and 2"}}, "user_id": "user-abc123"}
@@ -1516,7 +1517,7 @@ examples:
     speakeasy-default-list-models-user:
       responses:
         "200":
-          application/json: {"data": [{"id": "openai/gpt-4", "canonical_slug": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "pricing": {"prompt": "0.00003", "completion": "0.00006"}, "context_length": 8192, "architecture": {"modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "top_provider": {"is_moderated": true}, "per_request_limits": null, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "default_parameters": null}]}
+          application/json: {"data": [{"id": "openai/gpt-4", "canonical_slug": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "pricing": {"prompt": 0.00003, "completion": 0.00006}, "context_length": 8192, "architecture": {"modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "top_provider": {"is_moderated": true}, "per_request_limits": null, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "default_parameters": null}]}
         default:
           application/json: {"error": {"code": 400, "message": "Invalid request parameters", "metadata": {"field": "temperature", "reason": "Must be between 0 and 2"}}, "user_id": "user-abc123"}
         "401":
@@ -1531,7 +1532,7 @@ examples:
           slug: "<value>"
       responses:
         "200":
-          application/json: {"data": {"id": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "description": "GPT-4 is a large multimodal model that can solve difficult problems with greater accuracy.", "architecture": {"tokenizer": "GPT", "instruct_type": "chatml", "modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "endpoints": [{"name": "OpenAI: GPT-4", "model_name": "GPT-4", "context_length": 8192, "pricing": {"prompt": "0.00003", "completion": "0.00006"}, "provider_name": "OpenAI", "tag": "openai", "quantization": "fp16", "max_completion_tokens": 4096, "max_prompt_tokens": 8192, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "uptime_last_30m": 99.5, "supports_implicit_caching": true}]}}
+          application/json: {"data": {"id": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "description": "GPT-4 is a large multimodal model that can solve difficult problems with greater accuracy.", "architecture": {"tokenizer": "GPT", "instruct_type": "chatml", "modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "endpoints": [{"name": "OpenAI: GPT-4", "model_name": "GPT-4", "context_length": 8192, "pricing": {"prompt": 0.00003, "completion": 0.00006}, "provider_name": "OpenAI", "tag": "openai", "quantization": "fp16", "max_completion_tokens": 4096, "max_prompt_tokens": 8192, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "uptime_last_30m": 99.5, "supports_implicit_caching": true}]}}
         default:
           application/json: {"error": {"code": 400, "message": "Invalid request parameters", "metadata": {"field": "temperature", "reason": "Must be between 0 and 2"}}, "user_id": "user-abc123"}
         4XX:
@@ -1546,7 +1547,7 @@ examples:
     speakeasy-default-list-endpoints-zdr:
       responses:
         "200":
-          application/json: {"data": [{"name": "<value>", "model_name": "<value>", "context_length": 8891.09, "pricing": {"prompt": "1000", "completion": 1000}, "provider_name": "OpenAI", "tag": "<value>", "quantization": "fp16", "max_completion_tokens": 4685.25, "max_prompt_tokens": 22.7, "supported_parameters": ["temperature"], "uptime_last_30m": 6060.66, "supports_implicit_caching": true}]}
+          application/json: {"data": [{"name": "<value>", "model_name": "<value>", "context_length": 8891.09, "pricing": {"prompt": 1000, "completion": 1000}, "provider_name": "OpenAI", "tag": "<value>", "quantization": "fp16", "max_completion_tokens": 4685.25, "max_prompt_tokens": 22.7, "supported_parameters": ["temperature"], "uptime_last_30m": 6060.66, "supports_implicit_caching": true}]}
         default:
           application/json: {"error": {"code": 400, "message": "Invalid request parameters", "metadata": {"field": "temperature", "reason": "Must be between 0 and 2"}}, "user_id": "user-abc123"}
         "500":
@@ -1742,7 +1743,7 @@ examples:
     speakeasy-default-list-embeddings-models:
       responses:
         "200":
-          application/json: {"data": [{"id": "openai/gpt-4", "canonical_slug": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "pricing": {"prompt": "0.00003", "completion": "0.00006"}, "context_length": 8192, "architecture": {"modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "top_provider": {"is_moderated": true}, "per_request_limits": null, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "default_parameters": null}]}
+          application/json: {"data": [{"id": "openai/gpt-4", "canonical_slug": "openai/gpt-4", "name": "GPT-4", "created": 1692901234, "pricing": {"prompt": 0.00003, "completion": 0.00006}, "context_length": 8192, "architecture": {"modality": "text->text", "input_modalities": ["text"], "output_modalities": ["text"]}, "top_provider": {"is_moderated": true}, "per_request_limits": null, "supported_parameters": ["temperature", "top_p", "max_tokens", "frequency_penalty", "presence_penalty"], "default_parameters": null}]}
         "400":
           application/json: {"error": {"code": 400, "message": "Invalid request parameters"}}
         "500":