Skip to content

Commit b6b725c

Browse files
committed
more tests
1 parent b077327 commit b6b725c

File tree

4 files changed

+130
-2
lines changed

4 files changed

+130
-2
lines changed
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
import { ChatAnthropic } from '@langchain/anthropic';
2+
import * as Sentry from '@sentry/node';
3+
import express from 'express';
4+
5+
/**
 * Starts an Express server that mimics the Anthropic Messages API
 * (`POST /v1/messages`) and always replies with a canned response that
 * contains a tool call. Listens on an ephemeral port (0).
 *
 * @returns {Promise<import('http').Server>} resolves once the server is listening.
 */
function startMockAnthropicServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1/messages', (req, res) => {
    // Echo the requested model back, as the real API does.
    const requestedModel = req.body.model;

    // Canned reply: text, a tool_use block, more text, and
    // stop_reason 'tool_use' so the instrumentation records tool calls.
    const payload = {
      id: 'msg_tool_test_123',
      type: 'message',
      role: 'assistant',
      model: requestedModel,
      content: [
        { type: 'text', text: 'Let me check the weather for you.' },
        {
          type: 'tool_use',
          id: 'toolu_01A09q90qw90lq917835lq9',
          name: 'get_weather',
          input: { location: 'San Francisco, CA' },
        },
        { type: 'text', text: 'The weather looks great!' },
      ],
      stop_reason: 'tool_use',
      stop_sequence: null,
      usage: {
        input_tokens: 20,
        output_tokens: 30,
      },
    };

    res.json(payload);
  });

  return new Promise(resolve => {
    const server = app.listen(0, () => resolve(server));
  });
}
49+
50+
/**
 * Exercises a LangChain ChatAnthropic chat call that triggers a tool call,
 * against the local mock Anthropic server, inside a Sentry span.
 *
 * Fix: the original never closed the server (or flushed Sentry) when
 * `model.invoke` rejected, which could leave the process hanging on an
 * open listener. The close now runs in a `finally` block.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const baseUrl = `http://localhost:${server.address().port}`;

  try {
    await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
      const model = new ChatAnthropic({
        model: 'claude-3-5-sonnet-20241022',
        temperature: 0.7,
        maxTokens: 150,
        apiKey: 'mock-api-key',
        clientOptions: {
          // Point the Anthropic client at the local mock server.
          baseURL: baseUrl,
        },
      });

      await model.invoke('What is the weather in San Francisco?', {
        tools: [
          {
            name: 'get_weather',
            description: 'Get the current weather in a given location',
            input_schema: {
              type: 'object',
              properties: {
                location: {
                  type: 'string',
                  description: 'The city and state, e.g. San Francisco, CA',
                },
              },
              required: ['location'],
            },
          },
        ],
      });
    });

    // Give the Sentry transport up to 2s to deliver the transaction.
    await Sentry.flush(2000);
  } finally {
    // Always release the ephemeral port, even when the invoke above rejects.
    server.close();
  }
}
89+
90+
// Entry point: execute the scenario.
run();
91+

dev-packages/node-integration-tests/suites/tracing/langchain/scenario.mjs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,8 @@ async function run() {
102102
}
103103
});
104104

105+
await Sentry.flush(2000);
106+
105107
server.close();
106108
}
107109

dev-packages/node-integration-tests/suites/tracing/langchain/test.ts

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -160,4 +160,38 @@ describe('LangChain integration', () => {
160160
.completed();
161161
});
162162
});
163+
164+
const EXPECTED_TRANSACTION_TOOL_CALLS = {
165+
transaction: 'main',
166+
spans: expect.arrayContaining([
167+
expect.objectContaining({
168+
data: expect.objectContaining({
169+
'gen_ai.operation.name': 'chat',
170+
'sentry.op': 'gen_ai.chat',
171+
'sentry.origin': 'auto.ai.langchain',
172+
'gen_ai.system': 'anthropic',
173+
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
174+
'gen_ai.request.temperature': 0.7,
175+
'gen_ai.request.max_tokens': 150,
176+
'gen_ai.usage.input_tokens': 20,
177+
'gen_ai.usage.output_tokens': 30,
178+
'gen_ai.usage.total_tokens': 50,
179+
'gen_ai.response.id': expect.any(String),
180+
'gen_ai.response.model': expect.any(String),
181+
'gen_ai.response.stop_reason': 'tool_use',
182+
'gen_ai.response.tool_calls': expect.any(String),
183+
}),
184+
description: 'chat claude-3-5-sonnet-20241022',
185+
op: 'gen_ai.chat',
186+
origin: 'auto.ai.langchain',
187+
status: 'ok',
188+
}),
189+
]),
190+
};
191+
192+
createEsmAndCjsTests(__dirname, 'scenario-tools.mjs', 'instrument.mjs', (createRunner, test) => {
193+
test('creates langchain spans with tool calls', async () => {
194+
await createRunner().ignore('event').expect({ transaction: EXPECTED_TRANSACTION_TOOL_CALLS }).start().completed();
195+
});
196+
});
163197
});

packages/core/src/utils/langchain/utils.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -388,6 +388,9 @@ export function extractLlmResponseAttributes(
388388
setIfDefined(attrs, GEN_AI_RESPONSE_FINISH_REASONS_ATTRIBUTE, asString(finishReasons));
389389
}
390390

391+
// Tool calls metadata (names, IDs) are not PII, so capture them regardless of recordOutputs
392+
addToolCallsAttributes(response.generations as LangChainMessage[][], attrs);
393+
391394
if (recordOutputs) {
392395
const texts = response.generations
393396
.flat()
@@ -397,8 +400,6 @@ export function extractLlmResponseAttributes(
397400
if (texts.length > 0) {
398401
setIfDefined(attrs, GEN_AI_RESPONSE_TEXT_ATTRIBUTE, asString(texts));
399402
}
400-
401-
addToolCallsAttributes(response.generations as LangChainMessage[][], attrs);
402403
}
403404
}
404405

0 commit comments

Comments (0)