@@ -21,6 +21,10 @@ const AI_PROMPT_MESSAGES = "ai.prompt.messages";
 const AI_PROMPT = "ai.prompt";
 const AI_USAGE_PROMPT_TOKENS = "ai.usage.promptTokens";
 const AI_USAGE_COMPLETION_TOKENS = "ai.usage.completionTokens";
+const GEN_AI_USAGE_PROMPT_TOKENS = "gen_ai.usage.prompt_tokens";
+const GEN_AI_USAGE_COMPLETION_TOKENS = "gen_ai.usage.completion_tokens";
+const GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens";
+const GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens";
 const AI_MODEL_PROVIDER = "ai.model.provider";
 const AI_PROMPT_TOOLS = "ai.prompt.tools";
 const AI_TELEMETRY_METADATA_PREFIX = "ai.telemetry.metadata.";
@@ -319,28 +323,31 @@ const transformPrompts = (attributes: Record<string, any>): void => {

 const transformPromptTokens = (attributes: Record<string, any>): void => {
   if (AI_USAGE_PROMPT_TOKENS in attributes) {
-    attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`] =
-      attributes[AI_USAGE_PROMPT_TOKENS];
     delete attributes[AI_USAGE_PROMPT_TOKENS];
   }
+
+  if (GEN_AI_USAGE_PROMPT_TOKENS in attributes) {
+    delete attributes[GEN_AI_USAGE_PROMPT_TOKENS];
+  }
 };

 const transformCompletionTokens = (attributes: Record<string, any>): void => {
   if (AI_USAGE_COMPLETION_TOKENS in attributes) {
-    attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`] =
-      attributes[AI_USAGE_COMPLETION_TOKENS];
     delete attributes[AI_USAGE_COMPLETION_TOKENS];
   }
+
+  if (GEN_AI_USAGE_COMPLETION_TOKENS in attributes) {
+    delete attributes[GEN_AI_USAGE_COMPLETION_TOKENS];
+  }
 };

 const calculateTotalTokens = (attributes: Record<string, any>): void => {
-  const promptTokens = attributes[`${SpanAttributes.LLM_USAGE_PROMPT_TOKENS}`];
-  const completionTokens =
-    attributes[`${SpanAttributes.LLM_USAGE_COMPLETION_TOKENS}`];
+  const inputTokens = attributes[GEN_AI_USAGE_INPUT_TOKENS];
+  const outputTokens = attributes[GEN_AI_USAGE_OUTPUT_TOKENS];

-  if (promptTokens && completionTokens) {
+  if (inputTokens && outputTokens) {
     attributes[`${SpanAttributes.LLM_USAGE_TOTAL_TOKENS}`] =
-      Number(promptTokens) + Number(completionTokens);
+      Number(inputTokens) + Number(outputTokens);
   }
 };
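The net effect of the hunk above: the legacy ai.usage.* and duplicate gen_ai.usage.prompt/completion token keys are dropped, and the total is now derived from gen_ai.usage.input_tokens / gen_ai.usage.output_tokens. The sketch below is a minimal, self-contained illustration of that behavior, not the library code: transformUsage is a hypothetical helper that folds the three transforms into one, and the total-tokens key is a placeholder standing in for SpanAttributes.LLM_USAGE_TOTAL_TOKENS, whose exact string value is not shown in this diff. It also assumes the span carries both the legacy ai.usage.* keys and the standard gen_ai.* keys, which may not hold for every SDK version.

// Keys taken from the diff above.
const GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens";
const GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens";
// Placeholder for SpanAttributes.LLM_USAGE_TOTAL_TOKENS (assumed value).
const LLM_USAGE_TOTAL_TOKENS = "llm.usage.total_tokens";

// Hypothetical helper mirroring the post-patch behavior of
// transformPromptTokens, transformCompletionTokens, and calculateTotalTokens.
const transformUsage = (attributes: Record<string, any>): void => {
  // Remove the legacy and duplicate per-direction token counts.
  for (const key of [
    "ai.usage.promptTokens",
    "ai.usage.completionTokens",
    "gen_ai.usage.prompt_tokens",
    "gen_ai.usage.completion_tokens",
  ]) {
    delete attributes[key];
  }

  // Derive the total from the remaining input/output token counts.
  const inputTokens = attributes[GEN_AI_USAGE_INPUT_TOKENS];
  const outputTokens = attributes[GEN_AI_USAGE_OUTPUT_TOKENS];
  if (inputTokens && outputTokens) {
    attributes[LLM_USAGE_TOTAL_TOKENS] =
      Number(inputTokens) + Number(outputTokens);
  }
};

// Example span attributes as an AI SDK call might emit them (values made up).
const attributes: Record<string, any> = {
  "ai.usage.promptTokens": 12,
  "ai.usage.completionTokens": 34,
  [GEN_AI_USAGE_INPUT_TOKENS]: 12,
  [GEN_AI_USAGE_OUTPUT_TOKENS]: 34,
};

transformUsage(attributes);
// Left with the gen_ai input/output keys plus the derived total of 46.
console.log(attributes);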