diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/index.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/index.ts
new file mode 100644
index 000000000000..0d59fd91c2b7
--- /dev/null
+++ b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/index.ts
@@ -0,0 +1,50 @@
+import * as Sentry from '@sentry/cloudflare';
+import { MockChain, MockChatModel, MockTool } from './mocks';
+
+interface Env {
+  SENTRY_DSN: string;
+}
+
+export default Sentry.withSentry(
+  (env: Env) => ({
+    dsn: env.SENTRY_DSN,
+    tracesSampleRate: 1.0,
+  }),
+  {
+    async fetch(_request, _env, _ctx) {
+      // Create LangChain callback handler
+      const callbackHandler = Sentry.createLangChainCallbackHandler({
+        recordInputs: false,
+        recordOutputs: false,
+      });
+
+      // Test 1: Chat model invocation
+      const chatModel = new MockChatModel({
+        model: 'claude-3-5-sonnet-20241022',
+        temperature: 0.7,
+        maxTokens: 100,
+      });
+
+      await chatModel.invoke('Tell me a joke', {
+        callbacks: [callbackHandler],
+      });
+
+      // Test 2: Chain invocation
+      const chain = new MockChain('my_test_chain');
+      await chain.invoke(
+        { input: 'test input' },
+        {
+          callbacks: [callbackHandler],
+        },
+      );
+
+      // Test 3: Tool invocation
+      const tool = new MockTool('search_tool');
+      await tool.call('search query', {
+        callbacks: [callbackHandler],
+      });
+
+      return new Response(JSON.stringify({ success: true }));
+    },
+  },
+);
diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/mocks.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/mocks.ts
new file mode 100644
index 000000000000..946ae8252dbe
--- /dev/null
+++ b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/mocks.ts
@@ -0,0 +1,197 @@
+// Mock LangChain types and classes for testing the callback handler
+
+// Minimal callback handler interface to match LangChain's callback handler signature
+export interface CallbackHandler {
+  handleChatModelStart?: (
+    llm: unknown,
+    messages: unknown,
+    runId: string,
+    parentRunId?: string,
+    extraParams?: Record<string, unknown>,
+    tags?: string[] | Record<string, unknown>,
+    metadata?: Record<string, unknown>,
+    runName?: string,
+  ) => unknown;
+  handleLLMEnd?: (output: unknown, runId: string) => unknown;
+  handleChainStart?: (chain: { name?: string }, inputs: Record<string, unknown>, runId: string) => unknown;
+  handleChainEnd?: (outputs: unknown, runId: string) => unknown;
+  handleToolStart?: (tool: { name?: string }, input: string, runId: string) => unknown;
+  handleToolEnd?: (output: unknown, runId: string) => unknown;
+}
+
+export interface LangChainMessage {
+  role: string;
+  content: string;
+}
+
+export interface LangChainLLMResult {
+  generations: Array<
+    Array<{
+      text: string;
+      generationInfo?: Record<string, unknown>;
+    }>
+  >;
+  llmOutput?: {
+    tokenUsage?: {
+      promptTokens?: number;
+      completionTokens?: number;
+      totalTokens?: number;
+    };
+  };
+}
+
+export interface InvocationParams {
+  model: string;
+  temperature?: number;
+  maxTokens?: number;
+}
+
+// Mock LangChain Chat Model
+export class MockChatModel {
+  private _model: string;
+  private _temperature?: number;
+  private _maxTokens?: number;
+
+  public constructor(params: InvocationParams) {
+    this._model = params.model;
+    this._temperature = params.temperature;
+    this._maxTokens = params.maxTokens;
+  }
+
+  public async invoke(
+    messages: LangChainMessage[] | string,
+    options?: { callbacks?: CallbackHandler[] },
+  ): Promise<LangChainLLMResult> {
+    const callbacks = options?.callbacks || [];
+    const runId = crypto.randomUUID();
+
+    // Get invocation params to match LangChain's signature
+    const invocationParams = {
+      model: this._model,
+      temperature: this._temperature,
+      max_tokens: this._maxTokens,
+    };
+
+    // Create serialized representation similar to LangChain
+    const serialized = {
+      lc: 1,
+      type: 'constructor',
+      id: ['langchain', 'anthropic', 'anthropic'], // Third element is used as system provider
+      kwargs: invocationParams,
+    };
+
+    // Call handleChatModelStart
+    // Pass tags as a record with invocation_params for proper extraction
+    // The callback handler's getInvocationParams utility accepts both string[] and Record<string, unknown>
+    for (const callback of callbacks) {
+      if (callback.handleChatModelStart) {
+        await callback.handleChatModelStart(
+          serialized,
+          messages,
+          runId,
+          undefined,
+          undefined,
+          { invocation_params: invocationParams },
+          { ls_model_name: this._model, ls_provider: 'anthropic' },
+        );
+      }
+    }
+
+    // Create mock result
+    const result: LangChainLLMResult = {
+      generations: [
+        [
+          {
+            text: 'Mock response from LangChain!',
+            generationInfo: {
+              finish_reason: 'stop',
+            },
+          },
+        ],
+      ],
+      llmOutput: {
+        tokenUsage: {
+          promptTokens: 10,
+          completionTokens: 15,
+          totalTokens: 25,
+        },
+      },
+    };
+
+    // Call handleLLMEnd
+    for (const callback of callbacks) {
+      if (callback.handleLLMEnd) {
+        await callback.handleLLMEnd(result, runId);
+      }
+    }
+
+    return result;
+  }
+}
+
+// Mock LangChain Chain
+export class MockChain {
+  private _name: string;
+
+  public constructor(name: string) {
+    this._name = name;
+  }
+
+  public async invoke(
+    inputs: Record<string, unknown>,
+    options?: { callbacks?: CallbackHandler[] },
+  ): Promise<Record<string, unknown>> {
+    const callbacks = options?.callbacks || [];
+    const runId = crypto.randomUUID();
+
+    // Call handleChainStart
+    for (const callback of callbacks) {
+      if (callback.handleChainStart) {
+        await callback.handleChainStart({ name: this._name }, inputs, runId);
+      }
+    }
+
+    const outputs = { result: 'Chain execution completed!' };
+
+    // Call handleChainEnd
+    for (const callback of callbacks) {
+      if (callback.handleChainEnd) {
+        await callback.handleChainEnd(outputs, runId);
+      }
+    }
+
+    return outputs;
+  }
+}
+
+// Mock LangChain Tool
+export class MockTool {
+  private _name: string;
+
+  public constructor(name: string) {
+    this._name = name;
+  }
+
+  public async call(input: string, options?: { callbacks?: CallbackHandler[] }): Promise<string> {
+    const callbacks = options?.callbacks || [];
+    const runId = crypto.randomUUID();
+
+    // Call handleToolStart
+    for (const callback of callbacks) {
+      if (callback.handleToolStart) {
+        await callback.handleToolStart({ name: this._name }, input, runId);
+      }
+    }
+
+    const output = `Tool ${this._name} executed with input: ${input}`;
+
+    // Call handleToolEnd
+    for (const callback of callbacks) {
+      if (callback.handleToolEnd) {
+        await callback.handleToolEnd(output, runId);
+      }
+    }
+
+    return output;
+  }
+}
diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/test.ts b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/test.ts
new file mode 100644
index 000000000000..875b4191b84b
--- /dev/null
+++ b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/test.ts
@@ -0,0 +1,64 @@
+import { expect, it } from 'vitest';
+import { createRunner } from '../../../runner';
+
+// These tests are not exhaustive because the instrumentation is
+// already tested in the node integration tests and we merely
+// want to test that the instrumentation does not break in our
+// cloudflare SDK.
+
+it('traces langchain chat model, chain, and tool invocations', async ({ signal }) => {
+  const runner = createRunner(__dirname)
+    .ignore('event')
+    .expect(envelope => {
+      const transactionEvent = envelope[1]?.[0]?.[1] as any;
+
+      expect(transactionEvent.transaction).toBe('GET /');
+      expect(transactionEvent.spans).toEqual(
+        expect.arrayContaining([
+          // Chat model span
+          expect.objectContaining({
+            data: expect.objectContaining({
+              'gen_ai.operation.name': 'chat',
+              'sentry.op': 'gen_ai.chat',
+              'sentry.origin': 'auto.ai.langchain',
+              'gen_ai.system': 'anthropic',
+              'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
+              'gen_ai.request.temperature': 0.7,
+              'gen_ai.request.max_tokens': 100,
+              'gen_ai.usage.input_tokens': 10,
+              'gen_ai.usage.output_tokens': 15,
+              'gen_ai.usage.total_tokens': 25,
+            }),
+            description: 'chat claude-3-5-sonnet-20241022',
+            op: 'gen_ai.chat',
+            origin: 'auto.ai.langchain',
+          }),
+          // Chain span
+          expect.objectContaining({
+            data: expect.objectContaining({
+              'sentry.origin': 'auto.ai.langchain',
+              'sentry.op': 'gen_ai.invoke_agent',
+              'langchain.chain.name': 'my_test_chain',
+            }),
+            description: 'chain my_test_chain',
+            op: 'gen_ai.invoke_agent',
+            origin: 'auto.ai.langchain',
+          }),
+          // Tool span
+          expect.objectContaining({
+            data: expect.objectContaining({
+              'sentry.origin': 'auto.ai.langchain',
+              'sentry.op': 'gen_ai.execute_tool',
+              'gen_ai.tool.name': 'search_tool',
+            }),
+            description: 'execute_tool search_tool',
+            op: 'gen_ai.execute_tool',
+            origin: 'auto.ai.langchain',
+          }),
+        ]),
+      );
+    })
+    .start(signal);
+  await runner.makeRequest('get', '/');
+  await runner.completed();
+});
diff --git a/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/wrangler.jsonc b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/wrangler.jsonc
new file mode 100644
index 000000000000..d6be01281f0c
--- /dev/null
+++ b/dev-packages/cloudflare-integration-tests/suites/tracing/langchain/wrangler.jsonc
@@ -0,0 +1,6 @@
+{
+  "name": "worker-name",
+  "compatibility_date": "2025-06-17",
+  "main": "index.ts",
+  "compatibility_flags": ["nodejs_compat"],
+}
diff --git a/packages/cloudflare/src/index.ts b/packages/cloudflare/src/index.ts
index 6f731cb8d980..a6aa7ffc8d9a 100644
--- a/packages/cloudflare/src/index.ts
+++ b/packages/cloudflare/src/index.ts
@@ -96,6 +96,7 @@ export {
   wrapMcpServerWithSentry,
   consoleLoggingIntegration,
   createConsolaReporter,
+  createLangChainCallbackHandler,
   featureFlagsIntegration,
   growthbookIntegration,
   logger,
diff --git a/packages/vercel-edge/src/index.ts b/packages/vercel-edge/src/index.ts
index d8362ff31c98..7a73234f535e 100644
--- a/packages/vercel-edge/src/index.ts
+++ b/packages/vercel-edge/src/index.ts
@@ -95,6 +95,7 @@ export {
   wrapMcpServerWithSentry,
   consoleLoggingIntegration,
   createConsolaReporter,
+  createLangChainCallbackHandler,
   featureFlagsIntegration,
   logger,
 } from '@sentry/core';