Skip to content

Commit 8227c31

Browse files
viniciusdsmellogustavocidornelas
authored and committed
feat(closes OPEN-8244): Adds Gemini API client tracing
1 parent fa6236b commit 8227c31

File tree

4 files changed

+958
-0
lines changed

4 files changed

+958
-0
lines changed
Lines changed: 241 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,241 @@
1+
{
2+
"cells": [
3+
{
4+
"cell_type": "markdown",
5+
"id": "2722b419",
6+
"metadata": {},
7+
"source": [
8+
"[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/openlayer-python/blob/main/examples/tracing/google-gemini/gemini_tracing.ipynb)\n",
9+
"\n",
10+
"\n",
11+
"# <a id=\"top\">Google Gemini API tracing</a>\n",
12+
"\n",
13+
"This notebook illustrates how to get started tracing Google Gemini API calls with Openlayer."
14+
]
15+
},
16+
{
17+
"cell_type": "code",
18+
"execution_count": null,
19+
"id": "020c8f6a",
20+
"metadata": {},
21+
"outputs": [],
22+
"source": [
23+
"!pip install google-generativeai openlayer"
24+
]
25+
},
26+
{
27+
"cell_type": "markdown",
28+
"id": "75c2a473",
29+
"metadata": {},
30+
"source": [
31+
"## 1. Set the environment variables"
32+
]
33+
},
34+
{
35+
"cell_type": "code",
36+
"execution_count": null,
37+
"id": "f3f4fa13",
38+
"metadata": {},
39+
"outputs": [],
40+
"source": [
41+
"import os\n",
42+
"\n",
43+
"import google.generativeai as genai\n",
44+
"\n",
45+
"# Gemini API key\n",
46+
"os.environ[\"GOOGLE_AI_API_KEY\"] = \"YOUR_GOOGLE_AI_API_KEY\"\n",
47+
"\n",
48+
"# Openlayer env variables\n",
49+
"os.environ[\"OPENLAYER_API_KEY\"] = \"YOUR_OPENLAYER_API_KEY\"\n",
50+
"os.environ[\"OPENLAYER_INFERENCE_PIPELINE_ID\"] = \"YOUR_OPENLAYER_INFERENCE_PIPELINE_ID\""
51+
]
52+
},
53+
{
54+
"cell_type": "markdown",
55+
"id": "9758533f",
56+
"metadata": {},
57+
"source": [
58+
"## 2. Configure Gemini and create a traced model"
59+
]
60+
},
61+
{
62+
"cell_type": "code",
63+
"execution_count": null,
64+
"id": "c35d9860-dc41-4f7c-8d69-cc2ac7e5e485",
65+
"metadata": {},
66+
"outputs": [],
67+
"source": [
68+
"from openlayer.lib import trace_gemini\n",
69+
"\n",
70+
"genai.configure(api_key=os.environ[\"GOOGLE_AI_API_KEY\"])\n",
71+
"\n",
72+
"model = genai.GenerativeModel(\"gemini-2.5-flash\")\n",
73+
"traced_model = trace_gemini(model)"
74+
]
75+
},
76+
{
77+
"cell_type": "markdown",
78+
"id": "72a6b954",
79+
"metadata": {},
80+
"source": [
81+
"## 3. Use the traced Gemini model normally"
82+
]
83+
},
84+
{
85+
"cell_type": "markdown",
86+
"id": "76a350b4",
87+
"metadata": {},
88+
"source": [
89+
"That's it! Now you can continue using the traced Gemini model normally. The data is automatically published to Openlayer and you can start creating tests around it!"
90+
]
91+
},
92+
{
93+
"cell_type": "markdown",
94+
"id": "fb5ebdad",
95+
"metadata": {},
96+
"source": [
97+
"### 3.1 Non-streaming generation"
98+
]
99+
},
100+
{
101+
"cell_type": "code",
102+
"execution_count": null,
103+
"id": "e00c1c79",
104+
"metadata": {},
105+
"outputs": [],
106+
"source": [
107+
"response = traced_model.generate_content(\"What is the meaning of life?\")"
108+
]
109+
},
110+
{
111+
"cell_type": "code",
112+
"execution_count": null,
113+
"id": "41b39bff",
114+
"metadata": {},
115+
"outputs": [],
116+
"source": [
117+
"model.generate_content(\"What is the meaning of life?\")"
118+
]
119+
},
120+
{
121+
"cell_type": "code",
122+
"execution_count": null,
123+
"id": "b5e8c9f0",
124+
"metadata": {},
125+
"outputs": [],
126+
"source": [
127+
"response.text"
128+
]
129+
},
130+
{
131+
"cell_type": "markdown",
132+
"id": "09d39983",
133+
"metadata": {},
134+
"source": [
135+
"### 3.2 Streaming generation"
136+
]
137+
},
138+
{
139+
"cell_type": "code",
140+
"execution_count": null,
141+
"id": "9a86642c",
142+
"metadata": {},
143+
"outputs": [],
144+
"source": [
145+
"response = traced_model.generate_content(\"Tell me a short story.\", stream=True)\n",
146+
"\n",
147+
"for chunk in response:\n",
148+
" if hasattr(chunk, 'text'):\n",
149+
"        print(chunk.text, end=\"\")  # process each streamed chunk"
150+
]
151+
},
152+
{
153+
"cell_type": "markdown",
154+
"id": "4e6fb396",
155+
"metadata": {},
156+
"source": [
157+
"### 3.3 Multi-turn conversation"
158+
]
159+
},
160+
{
161+
"cell_type": "code",
162+
"execution_count": null,
163+
"id": "21369c42",
164+
"metadata": {},
165+
"outputs": [],
166+
"source": [
167+
"chat = traced_model.start_chat(history=[])\n",
168+
"\n",
169+
"response1 = chat.send_message(\"Hello, I'm learning about AI.\")\n",
170+
"response2 = chat.send_message(\"Can you explain neural networks?\")"
171+
]
172+
},
173+
{
174+
"cell_type": "code",
175+
"execution_count": null,
176+
"id": "22369c43",
177+
"metadata": {},
178+
"outputs": [],
179+
"source": [
180+
"response2.text"
181+
]
182+
},
183+
{
184+
"cell_type": "markdown",
185+
"id": "5e6fb397",
186+
"metadata": {},
187+
"source": [
188+
"### 3.4 With generation configuration"
189+
]
190+
},
191+
{
192+
"cell_type": "code",
193+
"execution_count": null,
194+
"id": "31369c44",
195+
"metadata": {},
196+
"outputs": [],
197+
"source": [
198+
"response = traced_model.generate_content(\n",
199+
" \"Write a haiku about technology.\",\n",
200+
" generation_config=genai.types.GenerationConfig(\n",
201+
" temperature=0.7,\n",
202+
" top_p=0.9,\n",
203+
" top_k=40,\n",
204+
" max_output_tokens=100,\n",
205+
" ),\n",
206+
")"
207+
]
208+
},
209+
{
210+
"cell_type": "code",
211+
"execution_count": null,
212+
"id": "41369c45",
213+
"metadata": {},
214+
"outputs": [],
215+
"source": [
216+
"response.text"
217+
]
218+
}
219+
],
220+
"metadata": {
221+
"kernelspec": {
222+
"display_name": ".venv",
223+
"language": "python",
224+
"name": "python3"
225+
},
226+
"language_info": {
227+
"codemirror_mode": {
228+
"name": "ipython",
229+
"version": 3
230+
},
231+
"file_extension": ".py",
232+
"mimetype": "text/x-python",
233+
"name": "python",
234+
"nbconvert_exporter": "python",
235+
"pygments_lexer": "ipython3",
236+
"version": "3.12.8"
237+
}
238+
},
239+
"nbformat": 4,
240+
"nbformat_minor": 5
241+
}

src/openlayer/lib/__init__.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
"trace_litellm",
1717
"trace_google_adk",
1818
"unpatch_google_adk",
19+
"trace_gemini",
1920
"update_current_trace",
2021
"update_current_step",
2122
# Offline buffer management functions
@@ -249,3 +250,21 @@ def unpatch_google_adk():
249250
from .integrations import google_adk_tracer
250251

251252
return google_adk_tracer.unpatch_google_adk()
253+
254+
255+
# -------------------------------- Google Gemini --------------------------------- #
def trace_gemini(client):
    """Trace Google Gemini chat completions.

    Wraps a ``google.generativeai.GenerativeModel`` instance so that its
    generation calls are traced and published to Openlayer.

    Args:
        client: A ``google.generativeai.GenerativeModel`` instance to trace.

    Returns:
        The traced model object produced by ``gemini_tracer.trace_gemini``.

    Raises:
        ImportError: If the ``google-generativeai`` package is not installed.
        ValueError: If ``client`` is not a ``GenerativeModel`` instance.
    """
    # pylint: disable=import-outside-toplevel
    try:
        import google.generativeai as genai
    except ImportError as exc:
        # Chain the original error (``from exc``) so the real import failure
        # stays visible in the traceback instead of being reported as a
        # secondary exception.
        raise ImportError(
            "google-generativeai is required for Gemini tracing. "
            "Install with: pip install google-generativeai"
        ) from exc

    # Validate the client before importing the tracer so invalid input
    # fails fast without triggering the integration module import.
    if not isinstance(client, genai.GenerativeModel):
        raise ValueError("Invalid client. Please provide a google.generativeai.GenerativeModel instance.")

    from .integrations import gemini_tracer

    return gemini_tracer.trace_gemini(client)

src/openlayer/lib/integrations/__init__.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,3 +31,10 @@
3131
__all__.extend(["trace_google_adk", "unpatch_google_adk"])
3232
except ImportError:
3333
pass
34+
35+
# Gemini tracing is optional: export ``trace_gemini`` only when the
# google-generativeai integration can actually be imported.
try:
    from .gemini_tracer import trace_gemini
except ImportError:
    # Optional dependency missing — silently skip the Gemini export.
    pass
else:
    __all__.append("trace_gemini")

0 commit comments

Comments
 (0)