Skip to content

Commit 93b2388

Browse files
committed
fix(vercel): remove duplicate token attributes (prompt/input and completion/output)
1 parent d6ebc23 commit 93b2388

File tree

1 file changed

+16
-9
lines changed

1 file changed

+16
-9
lines changed

packages/traceloop-sdk/src/lib/tracing/ai-sdk-transformations.ts

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,10 @@ const AI_PROMPT_MESSAGES = "ai.prompt.messages";
2121
const AI_PROMPT = "ai.prompt";
2222
const AI_USAGE_PROMPT_TOKENS = "ai.usage.promptTokens";
2323
const AI_USAGE_COMPLETION_TOKENS = "ai.usage.completionTokens";
24+
const GEN_AI_USAGE_PROMPT_TOKENS = "gen_ai.usage.prompt_tokens";
25+
const GEN_AI_USAGE_COMPLETION_TOKENS = "gen_ai.usage.completion_tokens";
26+
const GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens";
27+
const GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens";
2428
const AI_MODEL_PROVIDER = "ai.model.provider";
2529
const AI_PROMPT_TOOLS = "ai.prompt.tools";
2630
const AI_TELEMETRY_METADATA_PREFIX = "ai.telemetry.metadata.";
@@ -319,28 +323,31 @@ const transformPrompts = (attributes: Record<string, any>): void => {
319323

320324
const transformPromptTokens = (attributes: Record<string, any>): void => {
321325
if (AI_USAGE_PROMPT_TOKENS in attributes) {
322-
attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`] =
323-
attributes[AI_USAGE_PROMPT_TOKENS];
324326
delete attributes[AI_USAGE_PROMPT_TOKENS];
325327
}
328+
329+
if (GEN_AI_USAGE_PROMPT_TOKENS in attributes) {
330+
delete attributes[GEN_AI_USAGE_PROMPT_TOKENS];
331+
}
326332
};
327333

328334
const transformCompletionTokens = (attributes: Record<string, any>): void => {
329335
if (AI_USAGE_COMPLETION_TOKENS in attributes) {
330-
attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`] =
331-
attributes[AI_USAGE_COMPLETION_TOKENS];
332336
delete attributes[AI_USAGE_COMPLETION_TOKENS];
333337
}
338+
339+
if (GEN_AI_USAGE_COMPLETION_TOKENS in attributes) {
340+
delete attributes[GEN_AI_USAGE_COMPLETION_TOKENS];
341+
}
334342
};
335343

336344
const calculateTotalTokens = (attributes: Record<string, any>): void => {
337-
const promptTokens = attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`];
338-
const completionTokens =
339-
attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`];
345+
const inputTokens = attributes[GEN_AI_USAGE_INPUT_TOKENS];
346+
const outputTokens = attributes[GEN_AI_USAGE_OUTPUT_TOKENS];
340347

341-
if (promptTokens && completionTokens) {
348+
if (inputTokens && outputTokens) {
342349
attributes[`${SpanAttributes.LLM_USAGE_TOTAL_TOKENS}`] =
343-
Number(promptTokens) + Number(completionTokens);
350+
Number(inputTokens) + Number(outputTokens);
344351
}
345352
};
346353

0 commit comments

Comments (0)