diff --git a/packages/typescript/ai/src/middlewares/otel.ts b/packages/typescript/ai/src/middlewares/otel.ts index c51040daf..f4e39cd2f 100644 --- a/packages/typescript/ai/src/middlewares/otel.ts +++ b/packages/typescript/ai/src/middlewares/otel.ts @@ -406,10 +406,28 @@ export function otelMiddleware(options: OtelMiddlewareOptions): ChatMiddleware { }) } if (inputMessages.length > 0) { - iterSpan.setAttribute( - 'gen_ai.input.messages', - JSON.stringify(inputMessages), - ) + const inputJson = JSON.stringify(inputMessages) + // Current OTel GenAI semconv — Sentry / PostHog / Datadog read + // prompt content from this attribute. + iterSpan.setAttribute('gen_ai.input.messages', inputJson) + // Langfuse-native attribute. Highest priority in Langfuse's OTLP + // ingestion (checked before events and gen_ai.input.messages) so + // the Input panel populates reliably. Harmless to other backends — + // the attribute is namespaced and unrecognised keys are ignored. + iterSpan.setAttribute('langfuse.observation.input', inputJson) + + // Mirror the first iteration's input onto the root span and at + // trace level so Langfuse fills Input on the trace card and the + // chat-level observation. Later iterations append tool-call / + // assistant messages that are useful per-iteration but noise at + // the chat / trace level. + if (state.iterationCount === 0) { + state.rootSpan.setAttribute( + 'langfuse.observation.input', + inputJson, + ) + state.rootSpan.setAttribute('langfuse.trace.input', inputJson) + } } } @@ -452,14 +470,21 @@ export function otelMiddleware(options: OtelMiddlewareOptions): ChatMiddleware { if (captureContent && state.assistantTextBuffer.length > 0) { const completion = redactContent(state.assistantTextBuffer) + const outputJson = JSON.stringify([ + { role: 'assistant', content: completion }, + ]) // Event form (older semconv) — kept for backends that consume it. 
span.addEvent('gen_ai.choice', { content: completion }) // Attribute form (current semconv) — required by backends like // PostHog that read completion content from `gen_ai.output.messages`. - span.setAttribute( - 'gen_ai.output.messages', - JSON.stringify([{ role: 'assistant', content: completion }]), - ) + span.setAttribute('gen_ai.output.messages', outputJson) + // Langfuse-native attribute (highest priority in Langfuse mapping). + span.setAttribute('langfuse.observation.output', outputJson) + // Mirror to the root span and trace card. Each iteration overwrites, + // so the final iteration's completion lands on the root — which is + // the final answer the user saw, not an intermediate tool-call turn. + state.rootSpan.setAttribute('langfuse.observation.output', outputJson) + state.rootSpan.setAttribute('langfuse.trace.output', outputJson) state.assistantTextBuffer = '' state.assistantTextBufferTruncated = false } @@ -555,12 +580,13 @@ export function otelMiddleware(options: OtelMiddlewareOptions): ChatMiddleware { : (safeCall('otel.serializeToolArgs', () => JSON.stringify(hookCtx.args ?? null), ) ?? '[unserializable_tool_args]') - toolSpan.setAttribute( - 'gen_ai.input.messages', - JSON.stringify([ - { role: 'tool', content: redactContent(argsBody) }, - ]), - ) + const redactedArgs = redactContent(argsBody) + const toolInputJson = JSON.stringify([ + { role: 'tool', content: redactedArgs }, + ]) + toolSpan.setAttribute('gen_ai.input.messages', toolInputJson) + // Langfuse-native (highest priority in Langfuse mapping). + toolSpan.setAttribute('langfuse.observation.input', toolInputJson) } state.toolSpans.set(hookCtx.toolCallId, { @@ -610,10 +636,12 @@ export function otelMiddleware(options: OtelMiddlewareOptions): ChatMiddleware { } // Output panel of the tool span itself — `gen_ai.output.messages` is // what current GenAI semconv consumers (e.g. PostHog) read. 
- toolSpan.setAttribute( - 'gen_ai.output.messages', - JSON.stringify([{ role: 'tool', content: redactedBody }]), - ) + const toolOutputJson = JSON.stringify([ + { role: 'tool', content: redactedBody }, + ]) + toolSpan.setAttribute('gen_ai.output.messages', toolOutputJson) + // Langfuse-native (highest priority in Langfuse mapping). + toolSpan.setAttribute('langfuse.observation.output', toolOutputJson) } safeCall('otel.onSpanEnd', () => diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 15eea0236..8f46aa506 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -589,7 +589,7 @@ importers: version: 0.561.0(react@19.2.3) nitro: specifier: latest - version: 3.0.260415-beta(chokidar@5.0.0)(dotenv@17.2.3)(giget@2.0.0)(jiti@2.6.1)(rollup@4.60.1)(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) + version: 3.0.260429-beta(chokidar@5.0.0)(dotenv@17.2.3)(giget@2.0.0)(jiti@2.6.1)(rollup@4.60.1)(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) radix-ui: specifier: ^1.4.3 version: 1.4.3(@types/react-dom@19.2.3(@types/react@19.2.7))(@types/react@19.2.7)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) @@ -1671,7 +1671,7 @@ importers: dependencies: '@copilotkit/aimock': specifier: latest - version: 1.14.0 + version: 1.16.4(vitest@4.1.4(@opentelemetry/api@1.9.1)(@types/node@24.10.3)(happy-dom@20.0.11)(jsdom@27.3.0(postcss@8.5.9))(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))) '@opentelemetry/api': specifier: ^1.9.0 version: 1.9.1 @@ -1773,6 +1773,49 @@ importers: specifier: ^7.2.7 version: 7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2) + testing/otel-verify: + dependencies: + '@copilotkit/aimock': + specifier: latest + version: 
1.16.4(vitest@4.1.4(@opentelemetry/api@1.9.1)(@types/node@24.10.3)(happy-dom@20.0.11)(jsdom@27.3.0(postcss@8.5.9))(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2))) + '@opentelemetry/api': + specifier: ^1.9.0 + version: 1.9.1 + '@opentelemetry/exporter-trace-otlp-proto': + specifier: ^0.57.0 + version: 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': + specifier: ^1.30.0 + version: 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-node': + specifier: ^0.57.0 + version: 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': + specifier: ^1.30.0 + version: 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': + specifier: ^1.30.0 + version: 1.40.0 + '@tanstack/ai': + specifier: workspace:* + version: link:../../packages/typescript/ai + '@tanstack/ai-openai': + specifier: workspace:* + version: link:../../packages/typescript/ai-openai + zod: + specifier: ^4.2.0 + version: 4.3.6 + devDependencies: + '@types/node': + specifier: ^24.10.1 + version: 24.10.3 + tsx: + specifier: ^4.21.0 + version: 4.21.0 + typescript: + specifier: 5.9.3 + version: 5.9.3 + testing/panel: dependencies: '@tailwindcss/vite': @@ -2192,10 +2235,18 @@ packages: '@cloudflare/workers-types@4.20260317.1': resolution: {integrity: sha512-+G4eVwyCpm8Au1ex8vQBCuA9wnwqetz4tPNRoB/53qvktERWBRMQnrtvC1k584yRE3emMThtuY0gWshvSJ++PQ==} - '@copilotkit/aimock@1.14.0': - resolution: {integrity: sha512-1NqwWEameArC7HWT7UHBlkq3pNlCA0eHBocaeL6mS5CULolT9XFL27tC9jJ+OSmREzLwkKbFYaAl2SssaXexVA==} - engines: {node: '>=20.15.0'} + '@copilotkit/aimock@1.16.4': + resolution: {integrity: sha512-DA9WjJWpi2Yh36ltsnfMycj+BbifSS9G0pyHw0JjQZQPm41+FziGIdl2gusBtwYebStypQ4v9Jj2rjqjJqqtvQ==} + engines: {node: '>=24.0.0'} hasBin: true + peerDependencies: + jest: '>=29' + vitest: '>=3' + peerDependenciesMeta: + jest: + optional: true + vitest: + optional: true '@crazydos/vue-markdown@1.1.4': resolution: {integrity: 
sha512-0I1QMP59LJ3aEjE7bolgvPU4JAFt+pykdDo5674CbsCwFo7OVFos50+MPhGdWflCz1mac5t152lB1qvV/tR/rw==} @@ -3121,6 +3172,15 @@ packages: '@modelcontextprotocol/sdk': optional: true + '@grpc/grpc-js@1.14.3': + resolution: {integrity: sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.8.0': + resolution: {integrity: sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==} + engines: {node: '>=6'} + hasBin: true + '@humanfs/core@0.19.1': resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} engines: {node: '>=18.18.0'} @@ -3354,6 +3414,9 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + '@livekit/mutex@1.1.1': resolution: {integrity: sha512-EsshAucklmpuUAfkABPxJNhzj9v2sG7JuzFDL4ML1oJQSV14sqrpTYnsaOudMAw9yOaW53NU3QQTlUQoRs4czw==} @@ -3509,10 +3572,172 @@ packages: '@openrouter/sdk@0.12.14': resolution: {integrity: sha512-G32CZ1IkmtsGfQF7/mzcvt7W0Lmd6HUHFGjDWv5knBvL6sJcMmX6i3VPSIpHQYSgEqRQSxFuDROP6iErTu7XcA==} + '@opentelemetry/api-logs@0.57.2': + resolution: {integrity: sha512-uIX52NnTM0iBh84MShlpouI7UKqkZ7MrUszTmaypHBu4r7NofznSnQRfJ+uUeDtQDj6w8eFGg5KBLDAwAPz1+A==} + engines: {node: '>=14'} + '@opentelemetry/api@1.9.1': resolution: {integrity: sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==} engines: {node: '>=8.0.0'} + '@opentelemetry/context-async-hooks@1.30.1': + resolution: {integrity: sha512-s5vvxXPVdjqS3kTLKMeBMvop9hbWkwzBpu+mUO2M7sZtlkyDJGwFe33wRKnbaYDo8ExRVBIIdwIGrqpxHuKttA==} + engines: {node: '>=14'} + peerDependencies: + 
'@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/core@1.30.1': + resolution: {integrity: sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/exporter-logs-otlp-grpc@0.57.2': + resolution: {integrity: sha512-eovEy10n3umjKJl2Ey6TLzikPE+W4cUQ4gCwgGP1RqzTGtgDra0WjIqdy29ohiUKfvmbiL3MndZww58xfIvyFw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-logs-otlp-http@0.57.2': + resolution: {integrity: sha512-0rygmvLcehBRp56NQVLSleJ5ITTduq/QfU7obOkyWgPpFHulwpw2LYTqNIz5TczKZuy5YY+5D3SDnXZL1tXImg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-logs-otlp-proto@0.57.2': + resolution: {integrity: sha512-ta0ithCin0F8lu9eOf4lEz9YAScecezCHkMMyDkvd9S7AnZNX5ikUmC5EQOQADU+oCcgo/qkQIaKcZvQ0TYKDw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-grpc@0.57.2': + resolution: {integrity: sha512-r70B8yKR41F0EC443b5CGB4rUaOMm99I5N75QQt6sHKxYDzSEc6gm48Diz1CI1biwa5tDPznpylTrywO/pT7qw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-http@0.57.2': + resolution: {integrity: sha512-ttb9+4iKw04IMubjm3t0EZsYRNWr3kg44uUuzfo9CaccYlOh8cDooe4QObDUkvx9d5qQUrbEckhrWKfJnKhemA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-metrics-otlp-proto@0.57.2': + resolution: {integrity: sha512-HX068Q2eNs38uf7RIkNN9Hl4Ynl+3lP0++KELkXMCpsCbFO03+0XNNZ1SkwxPlP9jrhQahsMPMkzNXpq3fKsnw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-prometheus@0.57.2': + resolution: {integrity: sha512-VqIqXnuxWMWE/1NatAGtB1PvsQipwxDcdG4RwA/umdBcW3/iOHp0uejvFHTRN2O78ZPged87ErJajyUBPUhlDQ==} + engines: 
{node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-grpc@0.57.2': + resolution: {integrity: sha512-gHU1vA3JnHbNxEXg5iysqCWxN9j83d7/epTYBZflqQnTyCC4N7yZXn/dMM+bEmyhQPGjhCkNZLx4vZuChH1PYw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-http@0.57.2': + resolution: {integrity: sha512-sB/gkSYFu+0w2dVQ0PWY9fAMl172PKMZ/JrHkkW8dmjCL0CYkmXeE+ssqIL/yBUTPOvpLIpenX5T9RwXRBW/3g==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-proto@0.57.2': + resolution: {integrity: sha512-awDdNRMIwDvUtoRYxRhja5QYH6+McBLtoz1q9BeEsskhZcrGmH/V1fWpGx8n+Rc+542e8pJA6y+aullbIzQmlw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-zipkin@1.30.1': + resolution: {integrity: sha512-6S2QIMJahIquvFaaxmcwpvQQRD/YFaMTNoIxrfPIPOeITN+a8lfEcPDxNxn8JDAaxkg+4EnXhz8upVDYenoQjA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + + '@opentelemetry/instrumentation@0.57.2': + resolution: {integrity: sha512-BdBGhQBh8IjZ2oIIX6F2/Q3LKm/FDDKi6ccYKcBTeilh6SNdNKveDOLk73BkSJjQLJk6qe4Yh+hHw1UPhCDdrg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-exporter-base@0.57.2': + resolution: {integrity: sha512-XdxEzL23Urhidyebg5E6jZoaiW5ygP/mRjxLHixogbqwDy2Faduzb5N0o/Oi+XTIJu+iyxXdVORjXax+Qgfxag==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-grpc-exporter-base@0.57.2': + resolution: {integrity: sha512-USn173KTWy0saqqRB5yU9xUZ2xdgb1Rdu5IosJnm9aV4hMTuFFRTUsQxbgc24QxpCHeoKzzCSnS/JzdV0oM2iQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-transformer@0.57.2': + resolution: {integrity: sha512-48IIRj49gbQVK52jYsw70+Jv+JbahT8BqT2Th7C4H7RCM9d0gZ5sgNPoMpWldmfjvIsSgiGJtjfk9MeZvjhoig==} + engines: 
{node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/propagator-b3@1.30.1': + resolution: {integrity: sha512-oATwWWDIJzybAZ4pO76ATN5N6FFbOA1otibAVlS8v90B4S1wClnhRUk7K+2CHAwN1JKYuj4jh/lpCEG5BAqFuQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/propagator-jaeger@1.30.1': + resolution: {integrity: sha512-Pj/BfnYEKIOImirH76M4hDaBSx6HyZ2CXUqk+Kj02m6BB80c/yo4BdWkn/1gDFfU+YPY+bPR2U0DKBfdxCKwmg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/resources@1.30.1': + resolution: {integrity: sha512-5UxZqiAgLYGFjS4s9qm5mBVo433u+dSPUFWVWXmLAD4wB65oMCoXaJP1KJa9DIYYMeHu3z4BZcStG3LC593cWA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/sdk-logs@0.57.2': + resolution: {integrity: sha512-TXFHJ5c+BKggWbdEQ/inpgIzEmS2BGQowLE9UhsMd7YYlUfBQJ4uax0VF/B5NYigdM/75OoJGhAV3upEhK+3gg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.10.0' + + '@opentelemetry/sdk-metrics@1.30.1': + resolution: {integrity: sha512-q9zcZ0Okl8jRgmy7eNW3Ku1XSgg3sDLa5evHZpCwjspw7E8Is4K/haRPDJrBcX3YSn/Y7gUvFnByNYEKQNbNog==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-node@0.57.2': + resolution: {integrity: sha512-8BaeqZyN5sTuPBtAoY+UtKwXBdqyuRKmekN5bFzAO40CgbGzAxfTpiL3PBerT7rhZ7p2nBdq7FaMv/tBQgHE4A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@1.30.1': + resolution: {integrity: sha512-jVPgBbH1gCy2Lb7X0AVQ8XAfgg0pJ4nvl8/IiQA6nxOsPvS+0zMJaFSs2ltXe0J6C8dqjcnpyqINDJmU30+uOg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/sdk-trace-node@1.30.1': + resolution: {integrity: sha512-cBjYOINt1JxXdpw1e5MlHmFRc5fgj4GW/86vsKFxJCJ8AL4PdVtYH41gWwl4qd4uQjqEL1oJVrXkSy5cnduAnQ==} 
+ engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/semantic-conventions@1.28.0': + resolution: {integrity: sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==} + engines: {node: '>=14'} + + '@opentelemetry/semantic-conventions@1.40.0': + resolution: {integrity: sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw==} + engines: {node: '>=14'} + '@oxc-minify/binding-android-arm-eabi@0.110.0': resolution: {integrity: sha512-43fMTO8/5bMlqfOiNSZNKUzIqeLIYuB9Hr1Ohyf58B1wU11S2dPGibTXOGNaWsfgHy99eeZ1bSgeIHy/fEYqbw==} engines: {node: ^20.19.0 || >=22.12.0} @@ -6366,6 +6591,9 @@ packages: '@types/retry@0.12.0': resolution: {integrity: sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==} + '@types/shimmer@1.2.0': + resolution: {integrity: sha512-UE7oxhQLLd9gub6JKIAhDq06T0F6FnztwMNRvYgjeQSBeMc1ZG/tA47EwfduvkuQS8apbkM/lpLpWsaCeYsXVg==} + '@types/unist@2.0.11': resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} @@ -7192,6 +7420,9 @@ packages: citty@0.1.6: resolution: {integrity: sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==} + cjs-module-lexer@1.4.3: + resolution: {integrity: sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==} + class-variance-authority@0.7.1: resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==} @@ -8520,6 +8751,9 @@ packages: resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} engines: {node: '>=6'} + import-in-the-middle@1.15.0: + resolution: {integrity: sha512-bpQy+CrsRmYmoPMAE/0G33iwRqwW4ouqdRg8jgbH3aKuCtOc8lxgmYXg2dMM92CRiGP660EtBcymH/eVUpCSaA==} + import-lazy@4.0.0: resolution: 
{integrity: sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==} engines: {node: '>=8'} @@ -9058,6 +9292,9 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + lodash.defaults@4.2.0: resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} @@ -9421,6 +9658,9 @@ packages: mlly@1.8.0: resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} + module-details-from-path@1.0.4: + resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==} + motion-dom@11.18.1: resolution: {integrity: sha512-g76KvA001z+atjfxczdRtw/RXOM3OMSdd1f4DL77qCTF/+avrRJiawSG4yDibEQ215sr9kpinSlX2pCTJ9zbhw==} @@ -9493,16 +9733,16 @@ packages: xml2js: optional: true - nitro@3.0.260415-beta: - resolution: {integrity: sha512-J0ntJERWtIdvweZdmkCiF8eOFvP9fIAJR2gpeIDrHbAlYavK41WQfADo/YoZ/LF7RMTZBiPaH/pt2s/nPru9Iw==} + nitro@3.0.260429-beta: + resolution: {integrity: sha512-KweLVCUN5X9v9g+4yxAyRcz3FcOlnjmt9FyrAIWDxJETJmNT7I0JV0clgsONjo2nI0U5gwedXYA3RaNtF5XWzg==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: - '@vercel/queue': ^0.1.4 + '@vercel/queue': ^0.1.6 dotenv: '*' giget: '*' jiti: ^2.6.1 - rollup: ^4.60.1 + rollup: ^4.60.2 vite: ^7 || ^8 xml2js: ^0.6.2 zephyr-agent: ^0.2.0 @@ -10249,6 +10489,10 @@ packages: resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} engines: {node: '>=0.10.0'} + require-in-the-middle@7.5.2: + resolution: {integrity: 
sha512-gAZ+kLqBdHarXB64XpAe2VCjB7rIRv+mU8tfRWziHRJ5umKsIHN2tLLv6EtMw7WCdP19S0ERVMldNvxYCHnhSQ==} + engines: {node: '>=8.6.0'} + requires-port@1.0.0: resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} @@ -10552,6 +10796,9 @@ packages: resolution: {integrity: sha512-5n7zqPAjL+RzR7n09NPKpWBXmDCtuRpQzIL+ycj8pe6MayV7cDuFmceoyPQJ0c95oFj6feY7SZvhX/+S0i1ukg==} hasBin: true + shimmer@1.2.1: + resolution: {integrity: sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==} + side-channel-list@1.0.0: resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} engines: {node: '>= 0.4'} @@ -12444,7 +12691,9 @@ snapshots: '@cloudflare/workers-types@4.20260317.1': {} - '@copilotkit/aimock@1.14.0': {} + '@copilotkit/aimock@1.16.4(vitest@4.1.4(@opentelemetry/api@1.9.1)(@types/node@24.10.3)(happy-dom@20.0.11)(jsdom@27.3.0(postcss@8.5.9))(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))': + optionalDependencies: + vitest: 4.1.4(@opentelemetry/api@1.9.1)(@types/node@24.10.3)(happy-dom@20.0.11)(jsdom@27.3.0(postcss@8.5.9))(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)) '@crazydos/vue-markdown@1.1.4(vue@3.5.25(typescript@5.9.3))': dependencies: @@ -13029,6 +13278,18 @@ snapshots: - supports-color - utf-8-validate + '@grpc/grpc-js@1.14.3': + dependencies: + '@grpc/proto-loader': 0.8.0 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.8.0': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.4 + yargs: 17.7.2 + '@humanfs/core@0.19.1': {} '@humanfs/node@0.16.7': @@ -13221,6 +13482,8 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@js-sdsl/ordered-map@4.4.2': {} + '@livekit/mutex@1.1.1': {} '@livekit/protocol@1.44.0': @@ -13439,8 +13702,239 
@@ snapshots: dependencies: zod: 4.3.6 + '@opentelemetry/api-logs@0.57.2': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api@1.9.1': {} + '@opentelemetry/context-async-hooks@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + + '@opentelemetry/core@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/semantic-conventions': 1.28.0 + + '@opentelemetry/exporter-logs-otlp-grpc@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-grpc-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.57.2(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-logs-otlp-http@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.57.2 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.57.2(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-logs-otlp-proto@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.57.2 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-metrics-otlp-grpc@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + 
'@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-http': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-grpc-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-metrics-otlp-http@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-metrics-otlp-proto@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-http': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-prometheus@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-trace-otlp-grpc@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + 
'@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-grpc-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-trace-otlp-http@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-trace-otlp-proto@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/exporter-zipkin@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.28.0 + + '@opentelemetry/instrumentation@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.57.2 + '@types/shimmer': 1.2.0 + import-in-the-middle: 1.15.0 + require-in-the-middle: 7.5.2 + semver: 7.7.4 + shimmer: 1.2.1 + transitivePeerDependencies: + - supports-color + 
+ '@opentelemetry/otlp-exporter-base@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + + '@opentelemetry/otlp-grpc-exporter-base@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.57.2(@opentelemetry/api@1.9.1) + + '@opentelemetry/otlp-transformer@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.57.2 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + protobufjs: 7.5.4 + + '@opentelemetry/propagator-b3@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/propagator-jaeger@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/resources@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.28.0 + + '@opentelemetry/sdk-logs@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.57.2 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/sdk-metrics@1.30.1(@opentelemetry/api@1.9.1)': + 
dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + + '@opentelemetry/sdk-node@0.57.2(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.57.2 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-logs-otlp-grpc': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-logs-otlp-http': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-logs-otlp-proto': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-grpc': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-http': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-metrics-otlp-proto': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-prometheus': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-trace-otlp-grpc': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-trace-otlp-http': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-trace-otlp-proto': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-zipkin': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/instrumentation': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.57.2(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-node': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.28.0 + transitivePeerDependencies: + - supports-color + + '@opentelemetry/sdk-trace-base@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.28.0 + + 
'@opentelemetry/sdk-trace-node@1.30.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/context-async-hooks': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/core': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/propagator-b3': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/propagator-jaeger': 1.30.1(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 1.30.1(@opentelemetry/api@1.9.1) + semver: 7.7.4 + + '@opentelemetry/semantic-conventions@1.28.0': {} + + '@opentelemetry/semantic-conventions@1.40.0': {} + '@oxc-minify/binding-android-arm-eabi@0.110.0': optional: true @@ -16911,6 +17405,8 @@ snapshots: '@types/retry@0.12.0': {} + '@types/shimmer@1.2.0': {} + '@types/unist@2.0.11': {} '@types/unist@3.0.3': {} @@ -17973,6 +18469,8 @@ snapshots: dependencies: consola: 3.4.2 + cjs-module-lexer@1.4.3: {} + class-variance-authority@0.7.1: dependencies: clsx: 2.1.1 @@ -19534,6 +20032,13 @@ snapshots: parent-module: 1.0.1 resolve-from: 4.0.0 + import-in-the-middle@1.15.0: + dependencies: + acorn: 8.15.0 + acorn-import-attributes: 1.9.5(acorn@8.15.0) + cjs-module-lexer: 1.4.3 + module-details-from-path: 1.0.4 + import-lazy@4.0.0: {} import-meta-resolve@4.2.0: {} @@ -20066,6 +20571,8 @@ snapshots: dependencies: p-locate: 5.0.0 + lodash.camelcase@4.3.0: {} + lodash.defaults@4.2.0: {} lodash.isarguments@3.1.0: {} @@ -20615,6 +21122,8 @@ snapshots: pkg-types: 1.3.1 ufo: 1.6.1 + module-details-from-path@1.0.4: {} + motion-dom@11.18.1: dependencies: motion-utils: 11.18.1 @@ -20700,7 +21209,7 @@ snapshots: - sqlite3 - uploadthing - nitro@3.0.260415-beta(chokidar@5.0.0)(dotenv@17.2.3)(giget@2.0.0)(jiti@2.6.1)(rollup@4.60.1)(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)): + 
nitro@3.0.260429-beta(chokidar@5.0.0)(dotenv@17.2.3)(giget@2.0.0)(jiti@2.6.1)(rollup@4.60.1)(vite@7.3.1(@types/node@24.10.3)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)): dependencies: consola: 3.4.2 crossws: 0.4.5(srvx@0.11.15) @@ -21882,6 +22391,14 @@ snapshots: require-from-string@2.0.2: {} + require-in-the-middle@7.5.2: + dependencies: + debug: 4.4.3 + module-details-from-path: 1.0.4 + resolve: 1.22.11 + transitivePeerDependencies: + - supports-color + requires-port@1.0.0: {} resolve-from@4.0.0: {} @@ -22317,6 +22834,8 @@ snapshots: sherif-windows-arm64: 1.9.0 sherif-windows-x64: 1.9.0 + shimmer@1.2.1: {} + side-channel-list@1.0.0: dependencies: es-errors: 1.3.0 diff --git a/testing/otel-verify/README.md b/testing/otel-verify/README.md new file mode 100644 index 000000000..57e19cb8e --- /dev/null +++ b/testing/otel-verify/README.md @@ -0,0 +1,84 @@ +# `@tanstack/ai-otel-verify` + +Manual smoke harness for verifying the [`otelMiddleware`](../../packages/typescript/ai/src/middlewares/otel.ts) against real OTLP backends. Boots an in-process [aimock](https://github.com/CopilotKit/aimock), runs three deterministic chat scenarios with the middleware attached, and exports spans + metrics over OTLP/HTTP to whichever backend you point it at. + +This package is **`private: true`** — it ships in the repo for ergonomics but is never published to npm. It is **not** part of the automated test suite or CI. Use it when adding a new backend, after material changes to `otelMiddleware`, or to reproduce a user-reported rendering problem. + +## Prerequisite: build the workspace + +The harness imports `@tanstack/ai/middlewares/otel`, which resolves through the package's `dist/` directory. 
Build the workspace first so that subpath export exists:
+
+```bash
+pnpm build:all
+```
+
+Use `pnpm build:all` (Nx topo order) rather than `pnpm --filter @tanstack/ai build` — the latter skips workspace dependencies like `@tanstack/ai-event-client` and surfaces stale-dist type errors in unrelated packages.
+
+## Quick start
+
+```bash
+# 1. Start a self-hosted backend (any single service from docker-compose.yml)
+docker compose -f testing/otel-verify/docker-compose.yml up jaeger
+
+# 2. Run the harness against it
+OTEL_BACKEND=jaeger pnpm --filter @tanstack/ai-otel-verify verify
+
+# 3. Open Jaeger at http://localhost:16686 and look for service
+#    "tanstack-ai-otel-verify"
+```
+
+## Backends
+
+| `OTEL_BACKEND`   | Mode      | Required env                                                           |
+| ---------------- | --------- | ---------------------------------------------------------------------- |
+| `jaeger`         | self-host | none                                                                    |
+| `phoenix`        | self-host | none                                                                    |
+| `langfuse-self`  | self-host | `LANGFUSE_PUBLIC_KEY`, `LANGFUSE_SECRET_KEY`                            |
+| `helicone`       | self-host | `HELICONE_API_KEY`                                                      |
+| `langfuse-cloud` | SaaS      | `LANGFUSE_PUBLIC_KEY`, `LANGFUSE_SECRET_KEY`, optional `LANGFUSE_HOST`  |
+| `posthog`        | SaaS      | `POSTHOG_API_KEY`, optional `POSTHOG_HOST`                              |
+| `sentry`         | SaaS      | `SENTRY_DSN`                                                            |
+| `logfire`        | SaaS      | `LOGFIRE_TOKEN`                                                         |
+| `traceloop`      | SaaS      | `TRACELOOP_API_KEY`                                                     |
+| `datadog`        | SaaS      | `DD_API_KEY`, optional `DD_SITE`                                        |
+
+Any required env var that's missing surfaces a clear error before the SDK starts.
+See [`src/backends.ts`](src/backends.ts) for the exact endpoint each preset hits.
+ +## Scenarios + +Each run sends three traces unless filtered with `SCENARIO=…`: + +| ID | What it exercises | Expected span tree | +| ------------ | -------------------------------------------------------------- | -------------------------------------------------------- | +| `basic-text` | Single-iteration chat with prompt + completion content capture | `chat → iter#0` | +| `with-tool` | Two-iteration chat with one tool call | `chat → iter#0 → execute_tool get_weather` then `iter#1` | +| `error` | Forced mid-stream throw via a sibling middleware | `chat → iter#0` with `status=ERROR` and exception event | + +Filter examples: + +```bash +SCENARIO=basic-text OTEL_BACKEND=jaeger pnpm verify +SCENARIO=with-tool,error OTEL_BACKEND=langfuse-self pnpm verify +``` + +## What to look for in each backend's UI + +For each backend, screenshot all three scenarios and check: + +1. **Span hierarchy** — root `chat` span has child iteration spans; tool spans nest under the iteration that triggered them. +2. **GenAI rendering** — the backend recognises `gen_ai.system`, `gen_ai.request.model`, `gen_ai.response.model`, `gen_ai.usage.input_tokens`, `gen_ai.usage.output_tokens` and shows them somewhere in the UI (chips, sidebar, generation card). +3. **Prompt + completion display** — with `captureContent: true` the harness emits both `gen_ai.input.messages` / `gen_ai.output.messages` attributes (current semconv) and `gen_ai.user.message` / `gen_ai.choice` events (legacy). At least one form should render. +4. **Tool call panel** — `gen_ai.tool.name`, `gen_ai.tool.call.id`, args + result. +5. **Token cost** — most backends derive cost from input/output token counts. Phoenix is the known holdout (uses OpenInference token attrs). +6. **Error rendering** — the `error` scenario should appear as a failed span with the exception message visible. + +## Adding a new backend + +1. Add a preset to [`src/backends.ts`](src/backends.ts). +2. 
If self-hostable, add a service to [`docker-compose.yml`](docker-compose.yml). +3. Add a row to the table above. +4. Run all three scenarios and capture screenshots locally. + +## Why this isn't an automated test + +Most of what we're verifying — "does Langfuse's UI display the tool call card?" — is a render question that needs human eyes. The wire format is already locked down by `packages/typescript/ai/tests/middlewares/otel.test.ts` and the in-process E2E specs in `testing/e2e/tests/middleware.spec.ts`. This harness exists to verify that our wire format is _understood_ by real backends — a one-shot verification, not something CI should run. diff --git a/testing/otel-verify/docker-compose.yml b/testing/otel-verify/docker-compose.yml new file mode 100644 index 000000000..706bcecfd --- /dev/null +++ b/testing/otel-verify/docker-compose.yml @@ -0,0 +1,84 @@ +# Self-hostable OTel backends used by the verification harness. +# +# Each service is independent — start one at a time: +# +# docker compose -f testing/otel-verify/docker-compose.yml up jaeger +# docker compose -f testing/otel-verify/docker-compose.yml up phoenix +# docker compose -f testing/otel-verify/docker-compose.yml up langfuse +# docker compose -f testing/otel-verify/docker-compose.yml up helicone +# +# Versions are pinned to specific tags; bump intentionally when you re-run +# the verification matrix so the screenshots in the docs match the version +# users will actually deploy. + +services: + # ---------------------------------------------------------------- Jaeger + # Generic OTel sink. 
UI: http://localhost:16686 + # OTLP/HTTP traces in: http://localhost:4318/v1/traces + jaeger: + image: jaegertracing/all-in-one:1.76.0 + container_name: tanstack-ai-otel-jaeger + environment: + COLLECTOR_OTLP_ENABLED: 'true' + ports: + - '16686:16686' # UI + - '4318:4318' # OTLP HTTP + - '4317:4317' # OTLP gRPC (unused by harness, exposed for parity) + + # ---------------------------------------------------------------- Phoenix + # Arize Phoenix. UI: http://localhost:6006 + # OTLP/HTTP traces in: http://localhost:6006/v1/traces + phoenix: + image: arizephoenix/phoenix:version-15.2.0 + container_name: tanstack-ai-otel-phoenix + ports: + - '6006:6006' + environment: + PHOENIX_WORKING_DIR: /tmp/phoenix + + # --------------------------------------------------------------- Langfuse + # Self-hosted Langfuse + Postgres. UI: http://localhost:3000 + # OTLP/HTTP traces in: http://localhost:3000/api/public/otel/v1/traces + # + # First-run setup: open the UI, sign up locally, create a project, copy + # the Public + Secret keys into LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY. + langfuse-db: + image: postgres:16-alpine + container_name: tanstack-ai-otel-langfuse-db + environment: + POSTGRES_USER: langfuse + POSTGRES_PASSWORD: langfuse + POSTGRES_DB: langfuse + volumes: + - langfuse-db:/var/lib/postgresql/data + profiles: [langfuse] + + langfuse: + image: langfuse/langfuse:3 + container_name: tanstack-ai-otel-langfuse + depends_on: + - langfuse-db + ports: + - '3000:3000' + environment: + DATABASE_URL: postgresql://langfuse:langfuse@langfuse-db:5432/langfuse + NEXTAUTH_SECRET: dev-only-not-secure + SALT: dev-only-not-secure + NEXTAUTH_URL: http://localhost:3000 + TELEMETRY_ENABLED: 'false' + LANGFUSE_INIT_ORG_ID: verify-org + LANGFUSE_INIT_PROJECT_ID: verify-project + profiles: [langfuse] + + # ---------------------------------------------------------------- Helicone + # Helicone OSS. Lower priority — only spin up if specifically testing. 
+ # OTLP/HTTP traces in: http://localhost:8585/v1/traces (varies by version) + helicone: + image: helicone/helicone:latest + container_name: tanstack-ai-otel-helicone + ports: + - '8585:8585' + profiles: [helicone] + +volumes: + langfuse-db: diff --git a/testing/otel-verify/package.json b/testing/otel-verify/package.json new file mode 100644 index 000000000..572258850 --- /dev/null +++ b/testing/otel-verify/package.json @@ -0,0 +1,26 @@ +{ + "name": "@tanstack/ai-otel-verify", + "private": true, + "type": "module", + "description": "Manual smoke harness for verifying TanStack AI's otelMiddleware against real OTLP backends.", + "scripts": { + "verify": "tsx src/index.ts" + }, + "dependencies": { + "@copilotkit/aimock": "latest", + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/exporter-trace-otlp-proto": "^0.57.0", + "@opentelemetry/resources": "^1.30.0", + "@opentelemetry/sdk-node": "^0.57.0", + "@opentelemetry/sdk-trace-base": "^1.30.0", + "@opentelemetry/semantic-conventions": "^1.30.0", + "@tanstack/ai": "workspace:*", + "@tanstack/ai-openai": "workspace:*", + "zod": "^4.2.0" + }, + "devDependencies": { + "@types/node": "^24.10.1", + "tsx": "^4.21.0", + "typescript": "5.9.3" + } +} diff --git a/testing/otel-verify/src/backends.ts b/testing/otel-verify/src/backends.ts new file mode 100644 index 000000000..5e75476a1 --- /dev/null +++ b/testing/otel-verify/src/backends.ts @@ -0,0 +1,197 @@ +/** + * Backend presets for OTLP/HTTP export. + * + * Each preset describes how to talk to one OTel backend: + * - `endpoint()`: the OTLP/HTTP traces URL (a function so presets can derive + * it from env vars like SENTRY_DSN or DD_SITE). + * - `headers()`: required headers, reading env vars. Throws if a required + * env var is missing so the harness fails fast with a clear message. + * - `notes`: short prose surfaced in the harness banner. Lists per-backend + * setup gotchas (resource attrs, semconv flags, mapping rules). 
+ * + * Adding a backend: add a new entry, then run `OTEL_BACKEND= pnpm verify`. + */ + +export interface BackendPreset { + name: string + endpoint: () => string + headers: () => Record + notes: string +} + +function envOrThrow(name: string, hint?: string): string { + const value = process.env[name] + if (!value) { + const hintSuffix = hint ? ` — ${hint}` : '' + throw new Error(`Missing required env var ${name}${hintSuffix}`) + } + return value +} + +function basicAuth(user: string, pass: string): string { + return `Basic ${Buffer.from(`${user}:${pass}`).toString('base64')}` +} + +export const BACKENDS: Record = { + // ---------------------------------------------------------------- self-hosted + jaeger: { + name: 'Jaeger (local)', + endpoint: () => 'http://localhost:4318/v1/traces', + headers: () => ({}), + notes: + 'Generic OTel sink. Verifies span hierarchy + raw attribute round-trip. ' + + 'No GenAI-aware UI — useful for proving wire format only.', + }, + + 'langfuse-self': { + name: 'Langfuse (self-hosted)', + endpoint: () => 'http://localhost:3000/api/public/otel/v1/traces', + headers: () => ({ + Authorization: basicAuth( + envOrThrow('LANGFUSE_PUBLIC_KEY'), + envOrThrow('LANGFUSE_SECRET_KEY'), + ), + }), + notes: + 'OSS LLM-observability backend. Basic auth using the public+secret key ' + + 'pair from /api/public/projects.', + }, + + phoenix: { + name: 'Arize Phoenix (local)', + endpoint: () => 'http://localhost:6006/v1/traces', + headers: () => ({}), + notes: + 'Uses OpenInference semconv, NOT gen_ai.*. Spans round-trip but token ' + + 'cost calc and most rich panels will be empty until an OpenInference ' + + 'shim lands. See https://github.com/Arize-ai/openinference/issues/2205', + }, + + helicone: { + name: 'Helicone (self-hosted)', + endpoint: () => 'http://localhost:8585/v1/traces', + headers: () => ({ + Authorization: `Bearer ${envOrThrow('HELICONE_API_KEY')}`, + }), + notes: + 'Proxy-based; OTel ingestion is best-effort. 
Lower-priority backend.', + }, + + // ---------------------------------------------------------------------- SaaS + posthog: { + name: 'PostHog (Cloud)', + endpoint: () => { + const host = process.env.POSTHOG_HOST ?? 'https://us.i.posthog.com' + return `${host.replace(/\/$/, '')}/i/v0/otel/v1/traces` + }, + headers: () => ({ + Authorization: `Bearer ${envOrThrow('POSTHOG_API_KEY', 'use a project Personal API key')}`, + }), + notes: + 'PostHogSpanProcessor maps gen_ai.* → $ai_* events. Only forwards spans ' + + 'whose name/attrs start with gen_ai., llm., ai., or traceloop.', + }, + + 'langfuse-cloud': { + name: 'Langfuse (Cloud EU)', + endpoint: () => { + // Accept either env var: LANGFUSE_HOST (this harness's original name) or + // LANGFUSE_BASE_URL (used by the Langfuse JS SDK). If neither is set, + // default to EU. Mismatched region is the most common cause of 401 + // "Invalid credentials. Confirm that you've configured the correct host." + const host = + process.env.LANGFUSE_HOST ?? + process.env.LANGFUSE_BASE_URL ?? + 'https://cloud.langfuse.com' + return `${host.replace(/\/$/, '')}/api/public/otel/v1/traces` + }, + headers: () => ({ + Authorization: basicAuth( + envOrThrow('LANGFUSE_PUBLIC_KEY'), + envOrThrow('LANGFUSE_SECRET_KEY'), + ), + }), + notes: + 'For US region set LANGFUSE_HOST or LANGFUSE_BASE_URL to ' + + 'https://us.cloud.langfuse.com. A 401 "Invalid credentials. 
Confirm ' + + 'that you\'ve configured the correct host" usually means region ' + + 'mismatch between your keys and the host.', + }, + + sentry: { + name: 'Sentry', + endpoint: () => { + const dsn = envOrThrow('SENTRY_DSN', 'project DSN from Sentry settings') + const match = dsn.match(/^https?:\/\/[^@]+@([^/]+)\/(\d+)$/) + if (!match) throw new Error('SENTRY_DSN format unrecognized') + const [, host, projectId] = match + return `https://${host}/api/${projectId}/otel/v1/traces` + }, + headers: () => { + const dsn = envOrThrow('SENTRY_DSN') + const match = dsn.match(/^https?:\/\/([^@]+)@/) + if (!match) throw new Error('SENTRY_DSN format unrecognized') + return { + 'X-Sentry-Auth': + `Sentry sentry_version=7, sentry_key=${match[1]}, ` + + `sentry_client=tanstack-ai-otel-verify/0.1`, + } + }, + notes: + 'Sentry maps gen_ai.* per OTel semconv v1.36.0. AI Agents UI surfaces ' + + 'the trace tree natively.', + }, + + logfire: { + name: 'Logfire (Pydantic)', + endpoint: () => 'https://logfire-api.pydantic.dev/v1/traces', + headers: () => ({ + Authorization: `Bearer ${envOrThrow('LOGFIRE_TOKEN', 'write token from project settings')}`, + }), + notes: + 'Strictest semconv validator. If anything fails to render here, fix ' + + 'before testing wider. UI: https://logfire.pydantic.dev', + }, + + traceloop: { + name: 'Traceloop / OpenLLMetry Hub', + endpoint: () => 'https://api.traceloop.com/v1/traces', + headers: () => ({ + Authorization: `Bearer ${envOrThrow('TRACELOOP_API_KEY')}`, + }), + notes: + 'Authors of OpenLLMetry semconv. Canonical conformance reference for ' + + 'gen_ai.* attribute shape.', + }, + + datadog: { + name: 'Datadog', + endpoint: () => + `https://trace.agent.${process.env.DD_SITE ?? 'datadoghq.com'}/api/v0.2/traces`, + headers: () => ({ + 'DD-API-KEY': envOrThrow('DD_API_KEY'), + }), + notes: + 'Native OTel v1.37+ GenAI support. 
May require ' + + 'OTEL_SEMCONV_STABILITY_OPT_IN=gen_ai_latest_experimental.', + }, +} + +export function resolveBackend(name: string): { + name: string + endpoint: string + headers: Record + notes: string +} { + const preset = BACKENDS[name] + if (!preset) { + const known = Object.keys(BACKENDS).join(', ') + throw new Error(`Unknown OTEL_BACKEND=${name}. Known backends: ${known}`) + } + return { + name: preset.name, + endpoint: preset.endpoint(), + headers: preset.headers(), + notes: preset.notes, + } +} diff --git a/testing/otel-verify/src/index.ts b/testing/otel-verify/src/index.ts new file mode 100644 index 000000000..1feaacdd2 --- /dev/null +++ b/testing/otel-verify/src/index.ts @@ -0,0 +1,127 @@ +/** + * Manual OTel verification harness. + * + * OTEL_BACKEND= [SCENARIO=] pnpm verify + * + * Wires `@opentelemetry/sdk-node` with an OTLP/HTTP trace exporter pointed at + * one backend preset (see `backends.ts`), boots an in-process aimock for + * deterministic LLM responses, and runs the scenarios in `scenarios.ts`. Each + * scenario triggers `otelMiddleware` end-to-end so the resulting spans/events/ + * histograms land on the configured backend. + * + * Filter scenarios with `SCENARIO=` (comma-separated) — e.g. + * `SCENARIO=basic-text,error` to skip the tool round-trip while smoke-testing. + */ + +import { trace, metrics } from '@opentelemetry/api' +// Proto-encoded OTLP over HTTP. Some backends (e.g. Phoenix) reject +// application/json with HTTP 415; protobuf is the canonical OTLP encoding and +// is accepted by every backend preset in backends.ts. 
+import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto' +import { Resource } from '@opentelemetry/resources' +import { NodeSDK } from '@opentelemetry/sdk-node' +import { + ATTR_SERVICE_NAME, + ATTR_SERVICE_VERSION, +} from '@opentelemetry/semantic-conventions' + +import { resolveBackend } from './backends.js' +import { SCENARIOS, startAimock } from './scenarios.js' + +const TRACER_NAME = 'tanstack-ai-otel-verify' +const SERVICE_NAME = 'tanstack-ai-otel-verify' +const SERVICE_VERSION = '0.1.0' + +function pickScenarios() { + const filter = process.env.SCENARIO?.split(',').map((s) => s.trim()) + if (!filter || filter.length === 0) return SCENARIOS + const filtered = SCENARIOS.filter((s) => filter.includes(s.id)) + if (filtered.length === 0) { + const known = SCENARIOS.map((s) => s.id).join(', ') + throw new Error( + `SCENARIO=${process.env.SCENARIO} matched nothing. Known: ${known}`, + ) + } + return filtered +} + +async function main(): Promise { + const backendName = process.env.OTEL_BACKEND + if (!backendName) { + const known = Object.keys((await import('./backends.js')).BACKENDS).join( + ', ', + ) + throw new Error( + `OTEL_BACKEND env var is required. Known backends: ${known}`, + ) + } + + const backend = resolveBackend(backendName) + const scenarios = pickScenarios() + + console.log(`\n=== TanStack AI · OTel verification ===`) + console.log(`backend: ${backend.name}`) + console.log(`endpoint: ${backend.endpoint}`) + console.log(`scenarios: ${scenarios.map((s) => s.id).join(', ')}`) + console.log(`notes: ${backend.notes}\n`) + + const sdk = new NodeSDK({ + resource: new Resource({ + [ATTR_SERVICE_NAME]: SERVICE_NAME, + [ATTR_SERVICE_VERSION]: SERVICE_VERSION, + // Resource-level marker so backends with multi-service UIs (Datadog, + // Sentry) bucket these traces under a recognisable name. 
+ 'deployment.environment': 'verify', + }), + traceExporter: new OTLPTraceExporter({ + url: backend.endpoint, + headers: backend.headers, + }), + }) + + sdk.start() + + const stopAimock = await startAimock() + const tracer = trace.getTracer(TRACER_NAME) + // Backends like Phoenix/Jaeger don't ingest metrics over OTLP/HTTP traces + // endpoint — pass the no-op meter so the middleware stays happy without + // doubling the network surface area. The middleware's own histograms remain + // exercised, just not exported to the backend. + const meter = metrics.getMeter(TRACER_NAME) + + let exitCode = 0 + try { + for (const scenario of scenarios) { + process.stdout.write(`▶ ${scenario.id}: ${scenario.label}... `) + const t0 = Date.now() + try { + await scenario.run(tracer, meter) + console.log(`ok (${Date.now() - t0}ms)`) + } catch (err) { + // Scenario `error` is expected to throw; others shouldn't. Surface + // the failure but don't abort the run — partial trace data is still + // useful for diagnosing other backends. + console.log(`failed: ${(err as Error).message}`) + if (scenario.id !== 'error') exitCode = 1 + } + } + } finally { + // Order matters: stop aimock first so any in-flight requests fail fast, + // then flush + shutdown the SDK so spans actually leave the process before + // exit. Without the explicit shutdown, BatchSpanProcessor may drop the + // last batch on Node exit. + await stopAimock() + await sdk.shutdown() + } + + console.log( + `\n→ traces sent. 
Open ${backend.name} and look for service "${SERVICE_NAME}".`, + ) + process.exit(exitCode) +} + +main().catch((err) => { + console.error('\n✗ verify-otel failed:') + console.error(err) + process.exit(1) +}) diff --git a/testing/otel-verify/src/scenarios.ts b/testing/otel-verify/src/scenarios.ts new file mode 100644 index 000000000..552b6ca6e --- /dev/null +++ b/testing/otel-verify/src/scenarios.ts @@ -0,0 +1,153 @@ +import { LLMock } from '@copilotkit/aimock' +import { + chat, + maxIterations, + toolDefinition, + type ChatMiddleware, +} from '@tanstack/ai' +import { otelMiddleware } from '@tanstack/ai/middlewares/otel' +import { createOpenaiChat } from '@tanstack/ai-openai' +import type { Tracer, Meter } from '@opentelemetry/api' +import { z } from 'zod' + +const AIMOCK_PORT = 4099 // distinct from e2e (4010) so both can run side by side +const AIMOCK_BASE = `http://127.0.0.1:${AIMOCK_PORT}` + +/** + * Spin up a per-process aimock instance and load the same fixture shapes the + * e2e suite uses. Returns a stop() handle that the caller MUST await before + * the harness exits, so the port releases cleanly. + */ +export async function startAimock(): Promise<() => Promise> { + const mock = new LLMock({ + port: AIMOCK_PORT, + host: '127.0.0.1', + logLevel: 'silent', + }) + + // Fixtures are inlined rather than loaded from testing/e2e/fixtures so this + // package stays self-contained — no cross-workspace path dependencies. + mock.addFixturesFromJSON([ + { + match: { userMessage: '[basic-text] run test', sequenceIndex: 0 }, + response: { content: 'Hello from the assistant.' }, + }, + { + match: { userMessage: '[with-tool] run test', sequenceIndex: 0 }, + response: { + toolCalls: [{ name: 'get_weather', arguments: '{"city":"NYC"}' }], + }, + }, + { + match: { userMessage: '[with-tool] run test', sequenceIndex: 1 }, + response: { content: 'The weather is sunny.' 
},
+    },
+    {
+      match: { userMessage: '[error] run test', sequenceIndex: 0 },
+      response: { content: 'About to throw...' },
+    },
+  ])
+
+  await mock.start()
+  return async () => {
+    await mock.stop()
+  }
+}
+
+// Adapter pointed at the local aimock; the API key is a dummy because aimock
+// does not authenticate.
+function makeAdapter() {
+  return createOpenaiChat('gpt-4o', 'sk-otel-verify-dummy', {
+    baseURL: `${AIMOCK_BASE}/v1`,
+  })
+}
+
+function makeOtelMiddleware(tracer: Tracer, meter: Meter): ChatMiddleware {
+  return otelMiddleware({
+    tracer,
+    meter,
+    captureContent: true,
+    // Trivial demonstration redactor — strip an obviously-fake SSN. Real
+    // users plug in something stronger; we leave content mostly intact so
+    // backends have something to display.
+    redact: (text: string) => text.replace(/\b\d{3}-\d{2}-\d{4}\b/g, '[SSN]'),
+  })
+}
+
+const weatherTool = toolDefinition({
+  name: 'get_weather',
+  description: 'Get current weather for a city.',
+  inputSchema: z.object({ city: z.string() }),
+}).server(async (args) =>
+  JSON.stringify({ city: args.city, temperature: 72, condition: 'sunny' }),
+)
+
+/**
+ * Drain a chat stream synchronously. The harness doesn't render or stream
+ * to the user — it just needs the middleware lifecycle to fire end-to-end so
+ * spans land on the exporter.
+ */
+async function drain(stream: AsyncIterable<unknown>): Promise<void> {
+  for await (const _chunk of stream) {
+    // intentionally empty
+  }
+}
+
+export async function runBasicText(
+  tracer: Tracer,
+  meter: Meter,
+): Promise<void> {
+  const stream = chat({
+    adapter: makeAdapter(),
+    messages: [{ role: 'user', content: '[basic-text] run test' }],
+    middleware: [makeOtelMiddleware(tracer, meter)],
+    agentLoopStrategy: maxIterations(1),
+  })
+  await drain(stream)
+}
+
+export async function runWithTool(tracer: Tracer, meter: Meter): Promise<void> {
+  const stream = chat({
+    adapter: makeAdapter(),
+    messages: [{ role: 'user', content: '[with-tool] run test' }],
+    tools: [weatherTool],
+    middleware: [makeOtelMiddleware(tracer, meter)],
+    agentLoopStrategy: maxIterations(5),
+  })
+  await drain(stream)
+}
+
+/**
+ * Error scenario: drives a normal chat to completion, then synthesizes an
+ * error via a middleware that throws on the first chunk. This guarantees
+ * `onError` fires inside the otel middleware regardless of provider quirks,
+ * which is exactly what backends should render as a failed trace.
+ */
+export async function runError(tracer: Tracer, meter: Meter): Promise<void> {
+  const explode: ChatMiddleware = {
+    name: 'explode',
+    onChunk(_ctx, _chunk) {
+      throw new Error('synthetic verify-otel error')
+    },
+  }
+  try {
+    const stream = chat({
+      adapter: makeAdapter(),
+      messages: [{ role: 'user', content: '[error] run test' }],
+      middleware: [makeOtelMiddleware(tracer, meter), explode],
+      agentLoopStrategy: maxIterations(1),
+    })
+    await drain(stream)
+  } catch {
+    // The whole point of this scenario is to land an error span — swallow
+    // the rethrow so the harness continues to the next scenario.
+  }
+}
+
+export const SCENARIOS: Array<{
+  id: string
+  label: string
+  run: (tracer: Tracer, meter: Meter) => Promise<void>
+}> = [
+  { id: 'basic-text', label: 'Basic text', run: runBasicText },
+  { id: 'with-tool', label: 'Tool call (1 round trip)', run: runWithTool },
+  { id: 'error', label: 'Error path', run: runError },
+]
diff --git a/testing/otel-verify/tsconfig.json b/testing/otel-verify/tsconfig.json
new file mode 100644
index 000000000..7daa3fad3
--- /dev/null
+++ b/testing/otel-verify/tsconfig.json
@@ -0,0 +1,15 @@
+{
+  "compilerOptions": {
+    "target": "ES2022",
+    "module": "ESNext",
+    "moduleResolution": "Bundler",
+    "lib": ["ES2022"],
+    "strict": true,
+    "esModuleInterop": true,
+    "skipLibCheck": true,
+    "resolveJsonModule": true,
+    "noEmit": true,
+    "types": ["node"]
+  },
+  "include": ["src/**/*.ts"]
+}