Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 10 additions & 10 deletions examples/openai_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,19 +12,19 @@
javelin_api_key = os.getenv('JAVELIN_API_KEY')
gemini_api_key = os.getenv("GEMINI_API_KEY")

# Initialize Javelin Client
# Global JavelinClient, used for everything
config = JavelinConfig(
base_url="https://api-dev.javelin.live",
# base_url="http://localhost:8000",
javelin_api_key=javelin_api_key,
)
client = JavelinClient(config)
client = JavelinClient(config) # Global JavelinClient

# Initialize Javelin Client
def initialize_javelin_client(base_url="https://api-dev.javelin.live"):
    """Create and return a fresh ``JavelinClient``.

    Reads the Javelin API key from the ``JAVELIN_API_KEY`` environment
    variable at call time, so a key set after module import is still
    picked up (unlike the module-level global client).

    Args:
        base_url: Javelin gateway endpoint. Defaults to the dev
            deployment; pass e.g. ``"http://localhost:8000"`` to target
            a locally running gateway instead of editing this function.

    Returns:
        JavelinClient: a newly constructed client bound to *base_url*.
    """
    javelin_api_key = os.getenv('JAVELIN_API_KEY')
    config = JavelinConfig(
        base_url=base_url,
        javelin_api_key=javelin_api_key,
    )
    return JavelinClient(config)
Expand Down Expand Up @@ -288,8 +288,13 @@ def main_sync():
openai_completions()
openai_embeddings()
openai_streaming_chat()

openai_client = create_azureopenai_client() # same global client
register_azureopenai(client, openai_client)
azure_openai_chat_completions(openai_client)

client = initialize_javelin_client()
# Pending: model specs, uncomment after model is available
'''
openai_client = create_gemini_client()
register_gemini(client, openai_client)
gemini_chat_completions(openai_client)
Expand All @@ -298,20 +303,15 @@ def main_sync():
gemini_image_understanding(openai_client)
gemini_structured_output(openai_client)
gemini_embeddings(openai_client)

client = initialize_javelin_client()
openai_client = create_azureopenai_client()
register_azureopenai(client, openai_client)
azure_openai_chat_completions(openai_client)

client = initialize_javelin_client()
openai_client = create_deepseek_client()
register_deepseek(client, openai_client)
deepseek_chat_completions(openai_client)

# deepseek_reasoning_model()

mistral_chat_completions()
'''

async def main_async():
await async_openai_chat_completions()
Expand Down
19 changes: 14 additions & 5 deletions javelin_sdk/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@
from javelin_sdk.tracing_setup import configure_span_exporter
import inspect
from opentelemetry.trace import SpanKind
from opentelemetry.trace import Status, StatusCode
from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
import openai

API_BASEURL = "https://api-dev.javelin.live"
API_BASE_PATH = "/v1"
Expand Down Expand Up @@ -158,13 +158,13 @@ def register_provider(self,

client_id = id(openai_client)
if client_id in self.patched_clients:
print (f"Client {client_id} already patched")
return openai_client # Skip if already patched

self.patched_clients.add(client_id) # Mark as patched

# Store the OpenAI base URL
if self.openai_base_url is None:
self.openai_base_url = openai_client.base_url
self.openai_base_url = openai_client.base_url

# Point the OpenAI client to Javelin's base URL
openai_client.base_url = f"{self.base_url}/{provider_name}"
Expand Down Expand Up @@ -244,8 +244,8 @@ def _sync_execution(span):
else:
return _sync_execution(span)
except Exception as e:
span.set_attribute("error", True)
span.set_attribute("error.message", str(e))
span.set_status(Status(StatusCode.ERROR, str(e)))
span.set_attribute("is_exception", True)
raise
else:
# Tracing is disabled
Expand All @@ -259,6 +259,15 @@ def _capture_response_details(span, response, kwargs, system_name):
if hasattr(response, "to_json"):
response_data = response.to_dict()

# Set status code based on response
status_code = response_data.get("status_code", 200)
status_message = response_data.get("status_message", "OK")

if status_code >= 400:
span.set_status(Status(StatusCode.ERROR, status_message))
else:
span.set_status(Status(StatusCode.OK, status_message))

# Set basic response attributes
JavelinClient.set_span_attribute_if_not_none(span, gen_ai_attributes.GEN_AI_RESPONSE_MODEL, response_data.get('model'))
JavelinClient.set_span_attribute_if_not_none(span, gen_ai_attributes.GEN_AI_RESPONSE_ID, response_data.get('id'))
Expand Down
Loading