Skip to content

Commit b7991e4

Browse files
committed
revert and lock
1 parent 1a854f5 commit b7991e4

File tree

1 file changed

+36
-49
lines changed

1 file changed

+36
-49
lines changed

packages/traceloop-sdk/test/ai-sdk-integration.test.ts

Lines changed: 36 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ Polly.register(NodeHttpAdapter);
3535
Polly.register(FetchAdapter);
3636
Polly.register(FSPersister);
3737

38-
describe("Test AI SDK Integration with Recording", function() {
38+
describe("Test AI SDK Integration with Recording", function () {
3939
this.timeout(10000); // Increase timeout to 10 seconds for API calls
4040
setupPolly({
4141
adapters: ["node-http", "fetch"],
@@ -77,8 +77,10 @@ describe("Test AI SDK Integration with Recording", function() {
7777
const { server } = this.polly as Polly;
7878
server.any().on("beforePersist", (_req, recording) => {
7979
recording.request.headers = recording.request.headers.filter(
80-
({ name }: { name: string }) =>
81-
!["authorization", "x-api-key", "x-goog-api-key"].includes(name.toLowerCase())
80+
({ name }: { name: string }) =>
81+
!["authorization", "x-api-key", "x-goog-api-key"].includes(
82+
name.toLowerCase(),
83+
),
8284
);
8385
});
8486
});
@@ -93,20 +95,24 @@ describe("Test AI SDK Integration with Recording", function() {
9395
{ name: "test_openai_workflow" },
9496
async () => {
9597
return await generateText({
96-
messages: [{ role: "user", content: "What is 2+2? Give a brief answer." }],
98+
messages: [
99+
{ role: "user", content: "What is 2+2? Give a brief answer." },
100+
],
97101
model: vercel_openai("gpt-3.5-turbo"),
98102
experimental_telemetry: { isEnabled: true },
99103
});
100-
}
104+
},
101105
);
102106

103107
// Force flush to ensure all spans are exported
104108
await traceloop.forceFlush();
105-
109+
106110
const spans = memoryExporter.getFinishedSpans();
107-
111+
108112
const generateTextSpan = spans.find(
109-
(span) => span.name === "ai.generateText.generate" || span.name === "ai.generateText.doGenerate",
113+
(span) =>
114+
span.name === "ai.generateText.generate" ||
115+
span.name === "ai.generateText.doGenerate",
110116
);
111117

112118
assert.ok(result);
@@ -117,10 +123,7 @@ describe("Test AI SDK Integration with Recording", function() {
117123
assert.strictEqual(generateTextSpan.name, "ai.generateText.generate");
118124

119125
// Verify vendor
120-
assert.strictEqual(
121-
generateTextSpan.attributes["gen_ai.system"],
122-
"OpenAI",
123-
);
126+
assert.strictEqual(generateTextSpan.attributes["gen_ai.system"], "OpenAI");
124127

125128
// Verify model information
126129
assert.strictEqual(
@@ -133,9 +136,7 @@ describe("Test AI SDK Integration with Recording", function() {
133136
generateTextSpan.attributes["gen_ai.prompt.0.role"],
134137
"user",
135138
);
136-
assert.ok(
137-
generateTextSpan.attributes["gen_ai.prompt.0.content"],
138-
);
139+
assert.ok(generateTextSpan.attributes["gen_ai.prompt.0.content"]);
139140

140141
// Verify response
141142
assert.strictEqual(
@@ -148,41 +149,39 @@ describe("Test AI SDK Integration with Recording", function() {
148149
);
149150

150151
// Verify token usage
151-
assert.ok(
152-
generateTextSpan.attributes["gen_ai.usage.prompt_tokens"],
153-
);
154-
assert.ok(
155-
generateTextSpan.attributes["gen_ai.usage.completion_tokens"],
156-
);
157-
assert.ok(
158-
generateTextSpan.attributes["llm.usage.total_tokens"],
159-
);
152+
assert.ok(generateTextSpan.attributes["gen_ai.usage.prompt_tokens"]);
153+
assert.ok(generateTextSpan.attributes["gen_ai.usage.completion_tokens"]);
154+
assert.ok(generateTextSpan.attributes["llm.usage.total_tokens"]);
160155
});
161156

162157
it("should capture Google Gemini provider spans correctly with recording", async () => {
163158
// Clear any leftover spans from previous tests
164159
memoryExporter.reset();
165-
160+
166161
const result = await traceloop.withWorkflow(
167162
{ name: "test_google_workflow" },
168163
async () => {
169164
return await generateText({
170-
messages: [{ role: "user", content: "What is 2+2? Give a brief answer." }],
165+
messages: [
166+
{ role: "user", content: "What is 2+2? Give a brief answer." },
167+
],
171168
model: vercel_google("gemini-1.5-flash"),
172169
experimental_telemetry: { isEnabled: true },
173170
});
174-
}
171+
},
175172
);
176173

177174
// Force flush to ensure all spans are exported
178175
await traceloop.forceFlush();
179-
176+
180177
const spans = memoryExporter.getFinishedSpans();
181-
178+
182179
// Find the Google span specifically (should have workflow name test_google_workflow)
183180
const generateTextSpan = spans.find(
184-
(span) => (span.name === "ai.generateText.generate" || span.name === "ai.generateText.doGenerate")
185-
&& span.attributes["traceloop.workflow.name"] === "test_google_workflow",
181+
(span) =>
182+
(span.name === "ai.generateText.generate" ||
183+
span.name === "ai.generateText.doGenerate") &&
184+
span.attributes["traceloop.workflow.name"] === "test_google_workflow",
186185
);
187186

188187
assert.ok(result);
@@ -193,10 +192,7 @@ describe("Test AI SDK Integration with Recording", function() {
193192
assert.strictEqual(generateTextSpan.name, "ai.generateText.generate");
194193

195194
// Verify vendor
196-
assert.strictEqual(
197-
generateTextSpan.attributes["gen_ai.system"],
198-
"Google",
199-
);
195+
assert.strictEqual(generateTextSpan.attributes["gen_ai.system"], "Google");
200196

201197
// Verify model information
202198
assert.strictEqual(
@@ -209,9 +205,7 @@ describe("Test AI SDK Integration with Recording", function() {
209205
generateTextSpan.attributes["gen_ai.prompt.0.role"],
210206
"user",
211207
);
212-
assert.ok(
213-
generateTextSpan.attributes["gen_ai.prompt.0.content"],
214-
);
208+
assert.ok(generateTextSpan.attributes["gen_ai.prompt.0.content"]);
215209

216210
// Verify response
217211
assert.strictEqual(
@@ -224,15 +218,8 @@ describe("Test AI SDK Integration with Recording", function() {
224218
);
225219

226220
// Verify token usage
227-
assert.ok(
228-
generateTextSpan.attributes["gen_ai.usage.prompt_tokens"],
229-
);
230-
assert.ok(
231-
generateTextSpan.attributes["gen_ai.usage.completion_tokens"],
232-
);
233-
assert.ok(
234-
generateTextSpan.attributes["llm.usage.total_tokens"],
235-
);
221+
assert.ok(generateTextSpan.attributes["gen_ai.usage.prompt_tokens"]);
222+
assert.ok(generateTextSpan.attributes["gen_ai.usage.completion_tokens"]);
223+
assert.ok(generateTextSpan.attributes["llm.usage.total_tokens"]);
236224
});
237-
238-
});
225+
});

0 commit comments

Comments (0)