Skip to content

Commit 9ce7687

Browse files
fix
1 parent 43e8951 commit 9ce7687

File tree

2 files changed

+45
-39
lines changed

2 files changed

+45
-39
lines changed
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
/**
 * Span attribute key for the `reasoning_effort` request parameter, following
 * the `gen_ai.request.*` attribute naming scheme used by the other request
 * attributes in this instrumentation (e.g. temperature, top_p).
 */
export const ATTR_GEN_AI_REQUEST_REASONING_EFFORT = "gen_ai.request.reasoning_effort";

packages/instrumentation-openai/src/instrumentation.ts

Lines changed: 44 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,7 @@ import { encodingForModel, TiktokenModel, Tiktoken } from "js-tiktoken";
5656
type APIPromiseType<T> = Promise<T> & {
5757
_thenUnwrap: <U>(onFulfilled: (value: T) => U) => APIPromiseType<U>;
5858
};
59+
import { ATTR_GEN_AI_REQUEST_REASONING_EFFORT } from "./attrs";
5960
import {
6061
wrapImageGeneration,
6162
wrapImageEdit,
@@ -227,19 +228,19 @@ export class OpenAIInstrumentation extends InstrumentationBase {
227228
const span =
228229
type === "chat"
229230
? plugin.startSpan({
230-
type,
231-
params: args[0] as ChatCompletionCreateParamsNonStreaming & {
232-
extraAttributes?: Record<string, any>;
233-
},
234-
client: this,
235-
})
231+
type,
232+
params: args[0] as ChatCompletionCreateParamsNonStreaming & {
233+
extraAttributes?: Record<string, any>;
234+
},
235+
client: this,
236+
})
236237
: plugin.startSpan({
237-
type,
238-
params: args[0] as CompletionCreateParamsNonStreaming & {
239-
extraAttributes?: Record<string, any>;
240-
},
241-
client: this,
242-
});
238+
type,
239+
params: args[0] as CompletionCreateParamsNonStreaming & {
240+
extraAttributes?: Record<string, any>;
241+
},
242+
client: this,
243+
});
243244

244245
const execContext = trace.setSpan(context.active(), span);
245246
const execPromise = safeExecuteInTheMiddle(
@@ -261,8 +262,8 @@ export class OpenAIInstrumentation extends InstrumentationBase {
261262
if (
262263
(
263264
args[0] as
264-
| ChatCompletionCreateParamsStreaming
265-
| CompletionCreateParamsStreaming
265+
| ChatCompletionCreateParamsStreaming
266+
| CompletionCreateParamsStreaming
266267
).stream
267268
) {
268269
return context.bind(
@@ -294,19 +295,19 @@ export class OpenAIInstrumentation extends InstrumentationBase {
294295
client,
295296
}:
296297
| {
297-
type: "chat";
298-
params: ChatCompletionCreateParamsNonStreaming & {
299-
extraAttributes?: Record<string, any>;
300-
};
301-
client: any;
302-
}
298+
type: "chat";
299+
params: ChatCompletionCreateParamsNonStreaming & {
300+
extraAttributes?: Record<string, any>;
301+
};
302+
client: any;
303+
}
303304
| {
304-
type: "completion";
305-
params: CompletionCreateParamsNonStreaming & {
306-
extraAttributes?: Record<string, any>;
307-
};
308-
client: any;
309-
}): Span {
305+
type: "completion";
306+
params: CompletionCreateParamsNonStreaming & {
307+
extraAttributes?: Record<string, any>;
308+
};
309+
client: any;
310+
}): Span {
310311
const { provider } = this._detectVendorFromURL(client);
311312

312313
const attributes: Attributes = {
@@ -322,9 +323,6 @@ export class OpenAIInstrumentation extends InstrumentationBase {
322323
if (params.temperature) {
323324
attributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] = params.temperature;
324325
}
325-
if ('reasoning_effort' in params){
326-
attributes["gen_ai.request.reasoning_effort"] = params.reasoning_effort as string;
327-
}
328326
if (params.top_p) {
329327
attributes[ATTR_GEN_AI_REQUEST_TOP_P] = params.top_p;
330328
}
@@ -336,6 +334,13 @@ export class OpenAIInstrumentation extends InstrumentationBase {
336334
attributes[SpanAttributes.LLM_PRESENCE_PENALTY] =
337335
params.presence_penalty;
338336
}
337+
if ("reasoning_effort" in params) {
338+
if (typeof params.reasoning_effort === "string") {
339+
attributes[ATTR_GEN_AI_REQUEST_REASONING_EFFORT] = params.reasoning_effort;
340+
} else {
341+
attributes[ATTR_GEN_AI_REQUEST_REASONING_EFFORT] = JSON.stringify(params.reasoning_effort);
342+
}
343+
}
339344

340345
if (
341346
params.extraAttributes !== undefined &&
@@ -418,17 +423,17 @@ export class OpenAIInstrumentation extends InstrumentationBase {
418423
promise,
419424
}:
420425
| {
421-
span: Span;
422-
type: "chat";
423-
params: ChatCompletionCreateParamsStreaming;
424-
promise: APIPromiseType<Stream<ChatCompletionChunk>>;
425-
}
426+
span: Span;
427+
type: "chat";
428+
params: ChatCompletionCreateParamsStreaming;
429+
promise: APIPromiseType<Stream<ChatCompletionChunk>>;
430+
}
426431
| {
427-
span: Span;
428-
params: CompletionCreateParamsStreaming;
429-
type: "completion";
430-
promise: APIPromiseType<Stream<Completion>>;
431-
}) {
432+
span: Span;
433+
params: CompletionCreateParamsStreaming;
434+
type: "completion";
435+
promise: APIPromiseType<Stream<Completion>>;
436+
}) {
432437
if (type === "chat") {
433438
const result: ChatCompletion = {
434439
id: "0",

0 commit comments

Comments
 (0)