Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import * as Sentry from '@sentry/cloudflare';
import { MockChain, MockChatModel, MockTool } from './mocks';

// Worker environment bindings supplied by the Cloudflare runtime
// (configured via wrangler / secrets).
interface Env {
  // Sentry DSN used to initialize the SDK for this worker.
  SENTRY_DSN: string;
}

export default Sentry.withSentry(
(env: Env) => ({
dsn: env.SENTRY_DSN,
tracesSampleRate: 1.0,
}),
{
async fetch(_request, _env, _ctx) {
// Create LangChain callback handler
const callbackHandler = Sentry.createLangChainCallbackHandler({
recordInputs: false,
recordOutputs: false,
});

// Test 1: Chat model invocation
const chatModel = new MockChatModel({
model: 'claude-3-5-sonnet-20241022',
temperature: 0.7,
maxTokens: 100,
});

await chatModel.invoke('Tell me a joke', {
callbacks: [callbackHandler],
});

// Test 2: Chain invocation
const chain = new MockChain('my_test_chain');
await chain.invoke(
{ input: 'test input' },
{
callbacks: [callbackHandler],
},
);

// Test 3: Tool invocation
const tool = new MockTool('search_tool');
await tool.call('search query', {
callbacks: [callbackHandler],
});

return new Response(JSON.stringify({ success: true }));
},
},
);
Original file line number Diff line number Diff line change
@@ -0,0 +1,197 @@
// Mock LangChain types and classes for testing the callback handler

// Minimal callback handler interface to match LangChain's callback handler signature
/**
 * Minimal callback handler interface matching the subset of LangChain's
 * callback handler signature that the mocks below invoke. All methods are
 * optional; the mocks check for presence before calling.
 */
export interface CallbackHandler {
  // Invoked before a chat model call; mirrors LangChain's parameter order.
  handleChatModelStart?: (
    llm: unknown,
    messages: unknown,
    runId: string,
    parentRunId?: string,
    extraParams?: Record<string, unknown>,
    // LangChain accepts either a tag list or a record here (see mock usage,
    // which passes { invocation_params } through this slot).
    tags?: string[] | Record<string, unknown>,
    metadata?: Record<string, unknown>,
    runName?: string,
  ) => unknown;
  // Invoked with the LLM result after a model call completes.
  handleLLMEnd?: (output: unknown, runId: string) => unknown;
  // Invoked around chain execution.
  handleChainStart?: (chain: { name?: string }, inputs: Record<string, unknown>, runId: string) => unknown;
  handleChainEnd?: (outputs: unknown, runId: string) => unknown;
  // Invoked around tool execution.
  handleToolStart?: (tool: { name?: string }, input: string, runId: string) => unknown;
  handleToolEnd?: (output: unknown, runId: string) => unknown;
}

// A single chat message in LangChain's role/content shape.
export interface LangChainMessage {
  role: string;
  content: string;
}

// Result shape returned by an LLM call, mirroring LangChain's LLMResult:
// a grid of generations (per prompt, per candidate) plus optional
// provider output with token usage counts.
export interface LangChainLLMResult {
  generations: Array<
    Array<{
      text: string;
      generationInfo?: Record<string, unknown>;
    }>
  >;
  llmOutput?: {
    tokenUsage?: {
      promptTokens?: number;
      completionTokens?: number;
      totalTokens?: number;
    };
  };
}

// Constructor parameters for MockChatModel; model is required,
// sampling controls are optional.
export interface InvocationParams {
  model: string;
  temperature?: number;
  maxTokens?: number;
}

// Mock LangChain Chat Model
export class MockChatModel {
private _model: string;
private _temperature?: number;
private _maxTokens?: number;

public constructor(params: InvocationParams) {
this._model = params.model;
this._temperature = params.temperature;
this._maxTokens = params.maxTokens;
}

public async invoke(
messages: LangChainMessage[] | string,
options?: { callbacks?: CallbackHandler[] },
): Promise<LangChainLLMResult> {
const callbacks = options?.callbacks || [];
const runId = crypto.randomUUID();

// Get invocation params to match LangChain's signature
const invocationParams = {
model: this._model,
temperature: this._temperature,
max_tokens: this._maxTokens,
};

// Create serialized representation similar to LangChain
const serialized = {
lc: 1,
type: 'constructor',
id: ['langchain', 'anthropic', 'anthropic'], // Third element is used as system provider
kwargs: invocationParams,
};

// Call handleChatModelStart
// Pass tags as a record with invocation_params for proper extraction
// The callback handler's getInvocationParams utility accepts both string[] and Record<string, unknown>
for (const callback of callbacks) {
if (callback.handleChatModelStart) {
await callback.handleChatModelStart(
serialized,
messages,
runId,
undefined,
undefined,
{ invocation_params: invocationParams },
{ ls_model_name: this._model, ls_provider: 'anthropic' },
);
}
}

// Create mock result
const result: LangChainLLMResult = {
generations: [
[
{
text: 'Mock response from LangChain!',
generationInfo: {
finish_reason: 'stop',
},
},
],
],
llmOutput: {
tokenUsage: {
promptTokens: 10,
completionTokens: 15,
totalTokens: 25,
},
},
};

// Call handleLLMEnd
for (const callback of callbacks) {
if (callback.handleLLMEnd) {
await callback.handleLLMEnd(result, runId);
}
}

return result;
}
}

// Mock LangChain Chain
export class MockChain {
private _name: string;

public constructor(name: string) {
this._name = name;
}

public async invoke(
inputs: Record<string, unknown>,
options?: { callbacks?: CallbackHandler[] },
): Promise<Record<string, unknown>> {
const callbacks = options?.callbacks || [];
const runId = crypto.randomUUID();

// Call handleChainStart
for (const callback of callbacks) {
if (callback.handleChainStart) {
await callback.handleChainStart({ name: this._name }, inputs, runId);
}
}

const outputs = { result: 'Chain execution completed!' };

// Call handleChainEnd
for (const callback of callbacks) {
if (callback.handleChainEnd) {
await callback.handleChainEnd(outputs, runId);
}
}

return outputs;
}
}

// Mock LangChain Tool
export class MockTool {
private _name: string;

public constructor(name: string) {
this._name = name;
}

public async call(input: string, options?: { callbacks?: CallbackHandler[] }): Promise<string> {
const callbacks = options?.callbacks || [];
const runId = crypto.randomUUID();

// Call handleToolStart
for (const callback of callbacks) {
if (callback.handleToolStart) {
await callback.handleToolStart({ name: this._name }, input, runId);
}
}

const output = `Tool ${this._name} executed with input: ${input}`;

// Call handleToolEnd
for (const callback of callbacks) {
if (callback.handleToolEnd) {
await callback.handleToolEnd(output, runId);
}
}

return output;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import { expect, it } from 'vitest';
import { createRunner } from '../../../runner';

// These tests are not exhaustive because the instrumentation is
// already tested in the node integration tests and we merely
// want to test that the instrumentation does not break in our
// cloudflare SDK.

it('traces langchain chat model, chain, and tool invocations', async ({ signal }) => {
const runner = createRunner(__dirname)
.ignore('event')
.expect(envelope => {
const transactionEvent = envelope[1]?.[0]?.[1] as any;

expect(transactionEvent.transaction).toBe('GET /');
expect(transactionEvent.spans).toEqual(
expect.arrayContaining([
// Chat model span
expect.objectContaining({
data: expect.objectContaining({
'gen_ai.operation.name': 'chat',
'sentry.op': 'gen_ai.chat',
'sentry.origin': 'auto.ai.langchain',
'gen_ai.system': 'anthropic',
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
'gen_ai.request.temperature': 0.7,
'gen_ai.request.max_tokens': 100,
'gen_ai.usage.input_tokens': 10,
'gen_ai.usage.output_tokens': 15,
'gen_ai.usage.total_tokens': 25,
}),
description: 'chat claude-3-5-sonnet-20241022',
op: 'gen_ai.chat',
origin: 'auto.ai.langchain',
}),
// Chain span
expect.objectContaining({
data: expect.objectContaining({
'sentry.origin': 'auto.ai.langchain',
'sentry.op': 'gen_ai.invoke_agent',
'langchain.chain.name': 'my_test_chain',
}),
description: 'chain my_test_chain',
op: 'gen_ai.invoke_agent',
origin: 'auto.ai.langchain',
}),
// Tool span
expect.objectContaining({
data: expect.objectContaining({
'sentry.origin': 'auto.ai.langchain',
'sentry.op': 'gen_ai.execute_tool',
'gen_ai.tool.name': 'search_tool',
}),
description: 'execute_tool search_tool',
op: 'gen_ai.execute_tool',
origin: 'auto.ai.langchain',
}),
]),
);
})
.start(signal);
await runner.makeRequest('get', '/');
await runner.completed();
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{
"name": "worker-name",
"compatibility_date": "2025-06-17",
"main": "index.ts",
"compatibility_flags": ["nodejs_compat"],
}
1 change: 1 addition & 0 deletions packages/cloudflare/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@ export {
wrapMcpServerWithSentry,
consoleLoggingIntegration,
createConsolaReporter,
createLangChainCallbackHandler,
featureFlagsIntegration,
growthbookIntegration,
logger,
Expand Down
1 change: 1 addition & 0 deletions packages/vercel-edge/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ export {
wrapMcpServerWithSentry,
consoleLoggingIntegration,
createConsolaReporter,
createLangChainCallbackHandler,
featureFlagsIntegration,
logger,
} from '@sentry/core';
Expand Down
Loading