Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/ai-semantic-conventions/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
},
"dependencies": {
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.36.0"
"@opentelemetry/semantic-conventions": "^1.38.0"
},
"homepage": "https://github.com/traceloop/openllmetry-js/tree/main/packages/ai-semantic-conventions",
"gitHead": "ef1e70d6037f7b5c061056ef2be16e3f55f02ed5"
Expand Down
64 changes: 48 additions & 16 deletions packages/ai-semantic-conventions/src/SemanticAttributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,33 +15,65 @@
*/

import {
ATTR_GEN_AI_AGENT_NAME,
ATTR_GEN_AI_COMPLETION,
ATTR_GEN_AI_CONVERSATION_ID,
ATTR_GEN_AI_INPUT_MESSAGES,
ATTR_GEN_AI_OPERATION_NAME,
ATTR_GEN_AI_OUTPUT_MESSAGES,
ATTR_GEN_AI_PROMPT,
ATTR_GEN_AI_PROVIDER_NAME,
ATTR_GEN_AI_REQUEST_MAX_TOKENS,
ATTR_GEN_AI_REQUEST_MODEL,
ATTR_GEN_AI_REQUEST_TEMPERATURE,
ATTR_GEN_AI_REQUEST_TOP_P,
ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
ATTR_GEN_AI_RESPONSE_ID,
ATTR_GEN_AI_RESPONSE_MODEL,
ATTR_GEN_AI_SYSTEM,
ATTR_GEN_AI_TOOL_CALL_ARGUMENTS,
ATTR_GEN_AI_TOOL_CALL_ID,
ATTR_GEN_AI_TOOL_CALL_RESULT,
ATTR_GEN_AI_TOOL_NAME,
ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
ATTR_GEN_AI_USAGE_INPUT_TOKENS,
ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
// @ts-expect-error - Using exports path that TypeScript doesn't recognize but works at runtime
} from "@opentelemetry/semantic-conventions/incubating";

export const SpanAttributes = {
LLM_SYSTEM: "gen_ai.system",
LLM_REQUEST_MODEL: "gen_ai.request.model",
LLM_REQUEST_MAX_TOKENS: "gen_ai.request.max_tokens",
LLM_REQUEST_TEMPERATURE: "gen_ai.request.temperature",
LLM_REQUEST_TOP_P: "gen_ai.request.top_p",
LLM_PROMPTS: "gen_ai.prompt",
LLM_COMPLETIONS: "gen_ai.completion",
LLM_INPUT_MESSAGES: "gen_ai.input.messages",
LLM_OUTPUT_MESSAGES: "gen_ai.output.messages",
LLM_RESPONSE_MODEL: "gen_ai.response.model",
LLM_USAGE_PROMPT_TOKENS: "gen_ai.usage.prompt_tokens",
LLM_USAGE_COMPLETION_TOKENS: "gen_ai.usage.completion_tokens",
LLM_USAGE_INPUT_TOKENS: ATTR_GEN_AI_USAGE_INPUT_TOKENS,
LLM_USAGE_OUTPUT_TOKENS: ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
ATTR_GEN_AI_SYSTEM,
ATTR_GEN_AI_REQUEST_MODEL,
ATTR_GEN_AI_REQUEST_MAX_TOKENS,
ATTR_GEN_AI_REQUEST_TEMPERATURE,
ATTR_GEN_AI_REQUEST_TOP_P,
ATTR_GEN_AI_PROMPT,
ATTR_GEN_AI_COMPLETION,
ATTR_GEN_AI_INPUT_MESSAGES,
ATTR_GEN_AI_OUTPUT_MESSAGES,
ATTR_GEN_AI_RESPONSE_MODEL,
ATTR_GEN_AI_USAGE_PROMPT_TOKENS,
ATTR_GEN_AI_USAGE_COMPLETION_TOKENS,
ATTR_GEN_AI_USAGE_INPUT_TOKENS,
ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
ATTR_GEN_AI_OPERATION_NAME,
ATTR_GEN_AI_PROVIDER_NAME,
ATTR_GEN_AI_TOOL_NAME,
ATTR_GEN_AI_TOOL_CALL_ID,
ATTR_GEN_AI_TOOL_CALL_ARGUMENTS,
ATTR_GEN_AI_TOOL_CALL_RESULT,
ATTR_GEN_AI_RESPONSE_ID,
ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
ATTR_GEN_AI_CONVERSATION_ID,
ATTR_GEN_AI_AGENT_NAME,

// Attributes not yet in @opentelemetry/semantic-conventions
Comment thread
nina-kollman marked this conversation as resolved.
GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS:
"gen_ai.usage.cache_creation_input_tokens",
GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS: "gen_ai.usage.cache_read_input_tokens",
GEN_AI_USAGE_REASONING_TOKENS: "gen_ai.usage.reasoning_tokens",

GEN_AI_AGENT_NAME: "gen_ai.agent.name",

// LLM
LLM_REQUEST_TYPE: "llm.request.type",
LLM_USAGE_TOTAL_TOKENS: "llm.usage.total_tokens",
Expand Down
Loading
Loading