Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions packages/ai-semantic-conventions/src/SemanticAttributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,15 @@ export const SpanAttributes = {
"gen_ai.usage.cache_creation_input_tokens",
GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS: "gen_ai.usage.cache_read_input_tokens",
GEN_AI_USAGE_REASONING_TOKENS: "gen_ai.usage.reasoning_tokens",
GEN_AI_OPERATION_NAME: "gen_ai.operation.name",
GEN_AI_PROVIDER_NAME: "gen_ai.provider.name",
GEN_AI_TOOL_NAME: "gen_ai.tool.name",
GEN_AI_TOOL_CALL_ID: "gen_ai.tool.call.id",
GEN_AI_TOOL_CALL_ARGUMENTS: "gen_ai.tool.call.arguments",
GEN_AI_TOOL_CALL_RESULT: "gen_ai.tool.call.result",
GEN_AI_RESPONSE_ID: "gen_ai.response.id",
GEN_AI_RESPONSE_FINISH_REASONS: "gen_ai.response.finish_reasons",
GEN_AI_CONVERSATION_ID: "gen_ai.conversation.id",

GEN_AI_AGENT_NAME: "gen_ai.agent.name",

Expand Down
154 changes: 152 additions & 2 deletions packages/traceloop-sdk/src/lib/tracing/ai-sdk-transformations.ts
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,43 @@ const transformPrompts = (attributes: Record<string, any>): void => {
if (AI_PROMPT in attributes) {
try {
const promptData = JSON.parse(attributes[AI_PROMPT] as string);
if (promptData.prompt && typeof promptData.prompt === "string") {

// Handle case where promptData has a "messages" array
if (promptData.messages && Array.isArray(promptData.messages)) {
const messages = promptData.messages;
const inputMessages: any[] = [];

messages.forEach(
(msg: { role: string; content: any }, index: number) => {
const processedContent = processMessageContent(msg.content);
const contentKey = `${SpanAttributes.LLM_PROMPTS}.${index}.content`;
attributes[contentKey] = processedContent;
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
msg.role;

// Add to OpenTelemetry standard gen_ai.input.messages format
inputMessages.push({
role: msg.role,
parts: [
{
type: TYPE_TEXT,
content: processedContent,
},
],
});
},
);

// Set the OpenTelemetry standard input messages attribute
if (inputMessages.length > 0) {
attributes[SpanAttributes.LLM_INPUT_MESSAGES] =
JSON.stringify(inputMessages);
}

delete attributes[AI_PROMPT];
}
// Handle case where promptData has a "prompt" string
else if (promptData.prompt && typeof promptData.prompt === "string") {
attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
promptData.prompt;
attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = ROLE_USER;
Expand Down Expand Up @@ -432,6 +468,10 @@ const transformVendor = (attributes: Record<string, any>): void => {
// Find matching vendor prefix in mapping
let mappedVendor = null;
if (typeof vendor === "string" && vendor.length > 0) {
// Extract base provider name for OpenTelemetry standard (e.g., "openai" from "openai.chat")
const providerName = vendor.split(".")[0];
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why?

attributes[SpanAttributes.GEN_AI_PROVIDER_NAME] = providerName;

for (const prefix of Object.keys(VENDOR_MAPPING)) {
if (vendor.startsWith(prefix)) {
mappedVendor = VENDOR_MAPPING[prefix];
Expand All @@ -445,6 +485,109 @@ const transformVendor = (attributes: Record<string, any>): void => {
}
};

/**
 * Derives the OpenTelemetry standard `gen_ai.operation.name` attribute from
 * the AI SDK span name.
 *
 * Text/object generation spans (generateText, streamText, generateObject,
 * streamObject) map to "chat"; tool-call spans ("ai.toolCall" or any name
 * ending in ".tool") map to "execute_tool". Other span names are left
 * untouched, as is a missing span name.
 */
const transformOperationName = (
  attributes: Record<string, any>,
  spanName?: string,
): void => {
  if (!spanName) return;

  // Substrings that identify a chat-style generation span.
  const chatMarkers = [
    "generateText",
    "streamText",
    "generateObject",
    "streamObject",
  ];

  let operationName: string | undefined;
  if (chatMarkers.some((marker) => spanName.includes(marker))) {
    operationName = "chat";
  } else if (spanName === "ai.toolCall" || spanName.endsWith(".tool")) {
    operationName = "execute_tool";
  }

  if (operationName !== undefined) {
    attributes[SpanAttributes.GEN_AI_OPERATION_NAME] = operationName;
  }
};

/**
 * Renames the AI SDK `ai.model.id` attribute to the standard request-model
 * attribute, removing the original key. No-op when the key is absent.
 */
const transformModelId = (attributes: Record<string, any>): void => {
  const sourceKey = "ai.model.id";
  if (!(sourceKey in attributes)) return;
  attributes[SpanAttributes.LLM_REQUEST_MODEL] = attributes[sourceKey];
  delete attributes[sourceKey];
};

/**
 * Maps `ai.response.finishReason` onto the OTel standard
 * `gen_ai.response.finish_reasons` attribute, which is array-valued: a
 * scalar reason is wrapped in a single-element array, while an array is
 * passed through as-is. The source key is removed afterwards.
 */
const transformFinishReason = (attributes: Record<string, any>): void => {
  const sourceKey = "ai.response.finishReason";
  if (!(sourceKey in attributes)) return;

  const reason = attributes[sourceKey];
  // OTel semantic conventions define finish reasons as an array.
  attributes[SpanAttributes.GEN_AI_RESPONSE_FINISH_REASONS] = Array.isArray(
    reason,
  )
    ? reason
    : [reason];
  delete attributes[sourceKey];
};

/**
 * Copies AI SDK tool-call attributes onto the OTel standard `gen_ai.tool.*`
 * attributes.
 *
 * Only `ai.toolCall.id` is deleted here; `ai.toolCall.name`,
 * `ai.toolCall.args`, and `ai.toolCall.result` are intentionally kept so
 * that transformToolCalls can later derive the Traceloop entity
 * name/input/output from them.
 */
const transformToolCallAttributes = (attributes: Record<string, any>): void => {
  // Copy sourceKey -> targetKey; optionally remove the source afterwards.
  const copyAttr = (
    sourceKey: string,
    targetKey: string,
    removeSource: boolean,
  ): void => {
    if (!(sourceKey in attributes)) return;
    attributes[targetKey] = attributes[sourceKey];
    if (removeSource) {
      delete attributes[sourceKey];
    }
  };

  // Name is kept for transformToolCalls, which deletes it after use.
  copyAttr("ai.toolCall.name", SpanAttributes.GEN_AI_TOOL_NAME, false);
  copyAttr("ai.toolCall.id", SpanAttributes.GEN_AI_TOOL_CALL_ID, true);
  // Args/result are kept so entity input/output can still be derived.
  copyAttr("ai.toolCall.args", SpanAttributes.GEN_AI_TOOL_CALL_ARGUMENTS, false);
  copyAttr("ai.toolCall.result", SpanAttributes.GEN_AI_TOOL_CALL_RESULT, false);
};
Comment thread
nina-kollman marked this conversation as resolved.

/**
 * Sets the OTel standard `gen_ai.conversation.id` attribute from telemetry
 * metadata. `conversationId` takes precedence over `sessionId`; when both
 * are absent (or falsy) the attribute is not written.
 */
const transformConversationId = (attributes: Record<string, any>): void => {
  const resolvedId =
    attributes["ai.telemetry.metadata.conversationId"] ||
    attributes["ai.telemetry.metadata.sessionId"];

  if (resolvedId) {
    attributes[SpanAttributes.GEN_AI_CONVERSATION_ID] = resolvedId;
  }
};

/**
 * Renames AI SDK response metadata attributes to their standard
 * counterparts: `ai.response.model` -> response model attribute and
 * `ai.response.id` -> `gen_ai.response.id`. Source keys are removed;
 * missing keys are ignored.
 */
const transformResponseMetadata = (attributes: Record<string, any>): void => {
  const renames: Array<[string, string]> = [
    ["ai.response.model", SpanAttributes.LLM_RESPONSE_MODEL],
    ["ai.response.id", SpanAttributes.GEN_AI_RESPONSE_ID],
  ];

  for (const [sourceKey, targetKey] of renames) {
    if (sourceKey in attributes) {
      attributes[targetKey] = attributes[sourceKey];
      delete attributes[sourceKey];
    }
  }
};

const transformTelemetryMetadata = (
attributes: Record<string, any>,
spanName?: string,
Expand Down Expand Up @@ -525,6 +668,8 @@ export const transformLLMSpans = (
attributes: Record<string, any>,
spanName?: string,
): void => {
transformOperationName(attributes, spanName);
transformModelId(attributes);
transformResponseText(attributes);
transformResponseObject(attributes);
transformResponseToolCalls(attributes);
Expand All @@ -533,8 +678,12 @@ export const transformLLMSpans = (
transformPromptTokens(attributes);
transformCompletionTokens(attributes);
transformProviderMetadata(attributes);
transformFinishReason(attributes);
transformResponseMetadata(attributes);
calculateTotalTokens(attributes);
transformVendor(attributes);
transformVendor(attributes); // Also sets GEN_AI_PROVIDER_NAME
transformConversationId(attributes);
transformToolCallAttributes(attributes);
transformTelemetryMetadata(attributes, spanName);
};

Expand All @@ -556,6 +705,7 @@ const transformToolCalls = (span: ReadableSpan): void => {
const toolName = span.attributes["ai.toolCall.name"];
if (toolName) {
span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME] = toolName;
delete span.attributes["ai.toolCall.name"];
}
}
};
Expand Down
Loading