From 147de24ab111caabdbbb568d9fae54e1cb5fb54f Mon Sep 17 00:00:00 2001
From: vrushankportkey
Date: Thu, 30 May 2024 00:06:35 +0530
Subject: [PATCH] Add Portkey example

---
 docs/capabilities/code-generation.mdx | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/docs/capabilities/code-generation.mdx b/docs/capabilities/code-generation.mdx
index 6b944176..408edfd0 100644
--- a/docs/capabilities/code-generation.mdx
+++ b/docs/capabilities/code-generation.mdx
@@ -350,3 +350,28 @@ messages = [
 MistralAI(api_key=api_key, model=mistral_model).chat(messages)
 ```
 Check out more details in this [notebook](https://github.com/run-llama/llama_index/blob/main/docs/docs/examples/cookbooks/codestral.ipynb).
+
+## Integration with Portkey
+Portkey provides an observability, reliability, and caching layer over Codestral Instruct. Here is how you can use it with Portkey:
+
+```py
+# make sure to install `portkey-ai` in your Python environment
+
+import os
+from portkey_ai import Portkey
+
+portkey = Portkey(
+    api_key=os.environ["PORTKEY_API_KEY"],
+    provider="mistral-ai",
+    authorization="Bearer MISTRAL_API_KEY"
+)
+
+mistral_model = "codestral-latest"
+messages=[{"role": "user", "content": "Write a function for fibonacci"}]
+
+code_completion = portkey.chat.completions.create(
+    model=mistral_model,
+    messages=messages
+)
+```
+Check out more details in this [doc](https://portkey.ai/docs/welcome/integration-guides/mistral-ai).