llm_client.ts
/**
* LLM client for calling OpenRouter API
* Phase 3: Added support for tool calling
*/
import OpenAI from 'openai';
// eslint-disable-next-line import/extensions
import type { ChatCompletionMessageParam, ChatCompletionTool } from 'openai/resources/chat/completions';
// eslint-disable-next-line import/extensions
import type { ResponseFormatJSONSchema } from 'openai/resources/shared';
import { OPENROUTER_CONFIG } from './config.js';
/**
* Response from LLM - either text or tool calls
*/
export type LlmResponse = {
  /** Text content from LLM (if no tool calls) */
  content: string | null;
  /** Tool calls requested by LLM (if any) */
  toolCalls?: {
    id: string;
    name: string;
    arguments: string;
  }[];
};
/**
* LLM client for chat completions with optional tool support
*/
export class LlmClient {
  private openai: OpenAI;

  constructor() {
    if (!OPENROUTER_CONFIG.apiKey) {
      throw new Error('OPENROUTER_API_KEY environment variable is required');
    }
    this.openai = new OpenAI({
      baseURL: OPENROUTER_CONFIG.baseURL,
      apiKey: OPENROUTER_CONFIG.apiKey,
    });
  }

  /**
   * Call LLM with messages and optional tools
   * Phase 3: Added tools parameter
   * Phase 4: Added responseFormat for structured outputs
   */
  async callLlm(
    messages: ChatCompletionMessageParam[],
    model: string,
    tools?: ChatCompletionTool[],
    responseFormat?: ResponseFormatJSONSchema,
  ): Promise<LlmResponse> {
    const response = await this.openai.chat.completions.create({
      model,
      messages,
      temperature: 0.15, // Low temperature for deterministic evaluation results
      ...(tools && tools.length > 0 ? { tools } : {}),
      ...(responseFormat ? { response_format: responseFormat } : {}),
    });

    const message = response.choices[0]?.message;
    if (!message) {
      throw new Error('LLM returned no message');
    }

    // Check if LLM wants to call tools
    if (message.tool_calls && message.tool_calls.length > 0) {
      return {
        content: message.content,
        toolCalls: message.tool_calls.map((tc) => {
          // Handle both function and custom tool calls
          if (tc.type === 'function') {
            return {
              id: tc.id,
              name: tc.function.name,
              arguments: tc.function.arguments,
            };
          }
          throw new Error(`Unsupported tool call type: ${tc.type}`);
        }),
      };
    }

    // Regular text response
    return {
      content: message.content || null,
    };
  }
}
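A minimal usage sketch (not part of the file) showing how LlmClient and its tool-call branch might be exercised: it constructs the client, passes an illustrative function tool, and checks whether the model returned tool calls or plain text. The model slug, the get_weather tool, and the './llm_client.js' import path are assumptions, not taken from this repository.

// usage_sketch.ts (illustrative only; assumes OPENROUTER_API_KEY is set)
import type { ChatCompletionMessageParam, ChatCompletionTool } from 'openai/resources/chat/completions';
import { LlmClient } from './llm_client.js'; // assumed module layout

// Hypothetical function tool; the name and schema are placeholders.
const weatherTool: ChatCompletionTool = {
  type: 'function',
  function: {
    name: 'get_weather',
    description: 'Look up the current weather for a city',
    parameters: {
      type: 'object',
      properties: { city: { type: 'string' } },
      required: ['city'],
    },
  },
};

const messages: ChatCompletionMessageParam[] = [
  { role: 'user', content: 'What is the weather in Paris?' },
];

const client = new LlmClient();
// Model slug is an assumption; any model reachable via OpenRouter would do.
const result = await client.callLlm(messages, 'openai/gpt-4o-mini', [weatherTool]);

if (result.toolCalls) {
  // The model asked for a tool; arguments arrive as a JSON string.
  for (const call of result.toolCalls) {
    console.log(call.name, JSON.parse(call.arguments));
  }
} else {
  console.log(result.content);
}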