Skip to content

Commit ff89544

Browse files
committed
Message Content is always just a string
* use structuredContent when richer payloads are needed; this simplifies the code considerably and fixes invalid casts
1 parent f5d78ca commit ff89544

11 files changed

Lines changed: 60 additions & 142 deletions

File tree

src/repl/llm/ai.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,8 @@ type OpenAIToolFunction struct {
2828

2929
// Message represents a chat message with a role and content.
3030
type Message struct {
31-
Role string `json:"role"`
32-
Content interface{} `json:"content"`
31+
Role string `json:"role"`
32+
Content string `json:"content"`
3333
// Optional images attached to this message (base64 or URLs depending on provider)
3434
Images []string `json:"images,omitempty"`
3535
// Tool calls for native tool calling protocol

src/repl/llm/gemini.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,9 +113,9 @@ func (p *GeminiProvider) SendMessage(messages []Message, stream bool, images []s
113113
content := ""
114114
for _, msg := range messages {
115115
if msg.Role == "system" {
116-
content += "System: " + msg.Content.(string) + "\n\n"
116+
content += "System: " + msg.Content + "\n\n"
117117
} else {
118-
content += msg.Content.(string)
118+
content += msg.Content
119119
}
120120
}
121121

src/repl/llm/llamacpp.go

Lines changed: 4 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -82,17 +82,13 @@ func (p *LlamaCppProvider) ListModels(ctx context.Context) ([]Model, error) {
8282
}
8383

8484
func (p *LlamaCppProvider) SendMessage(messages []Message, stream bool, images []string, tools []OpenAITool) (string, error) {
85-
if len(images) > 0 {
86-
messages = appendImagesToLastUserMessage(messages, images)
87-
}
88-
8985
effectiveModel := strings.TrimSpace(p.config.Model)
9086
if effectiveModel == "" {
9187
effectiveModel = strings.TrimSpace(p.DefaultModel())
9288
}
9389

9490
request := map[string]interface{}{
95-
"messages": messages,
91+
"messages": buildRequestMessages(messages, images),
9692
}
9793
if effectiveModel != "" {
9894
request["model"] = effectiveModel
@@ -153,8 +149,8 @@ func (p *LlamaCppProvider) SendMessage(messages []Message, stream bool, images [
153149
var response struct {
154150
Choices []struct {
155151
Message struct {
156-
Content interface{} `json:"content"`
157-
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
152+
Content string `json:"content"`
153+
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
158154
} `json:"message"`
159155
FinishReason string `json:"finish_reason"`
160156
} `json:"choices"`
@@ -175,7 +171,7 @@ func (p *LlamaCppProvider) SendMessage(messages []Message, stream bool, images [
175171
if len(response.Choices[0].Message.ToolCalls) > 0 {
176172
return string(respBody), nil
177173
}
178-
return extractMessageText(response.Choices[0].Message.Content), nil
174+
return response.Choices[0].Message.Content, nil
179175
}
180176

181177
func (p *LlamaCppProvider) parseStream(reader io.Reader) (string, error) {
@@ -359,50 +355,3 @@ func parseLlamaCppModels(body []byte) []Model {
359355
}}
360356
}
361357

362-
func appendImagesToLastUserMessage(messages []Message, images []string) []Message {
363-
blocks := make([]ContentBlock, 0, len(images)+1)
364-
for _, uri := range images {
365-
blocks = append(blocks, ContentBlock{
366-
Type: "image_url",
367-
ImageURL: &struct {
368-
URL string `json:"url"`
369-
}{URL: uri},
370-
})
371-
}
372-
373-
for i := len(messages) - 1; i >= 0; i-- {
374-
if messages[i].Role != "user" {
375-
continue
376-
}
377-
if text, ok := messages[i].Content.(string); ok && strings.TrimSpace(text) != "" {
378-
blocks = append([]ContentBlock{{Type: "text", Text: text}}, blocks...)
379-
}
380-
messages[i].Content = blocks
381-
return messages
382-
}
383-
384-
return append(messages, Message{Role: "user", Content: blocks})
385-
}
386-
387-
func extractMessageText(content interface{}) string {
388-
switch v := content.(type) {
389-
case nil:
390-
return ""
391-
case string:
392-
return v
393-
case []interface{}:
394-
var out strings.Builder
395-
for _, item := range v {
396-
obj, ok := item.(map[string]interface{})
397-
if !ok {
398-
continue
399-
}
400-
if text, ok := obj["text"].(string); ok {
401-
out.WriteString(text)
402-
}
403-
}
404-
return out.String()
405-
default:
406-
return fmt.Sprintf("%v", v)
407-
}
408-
}

src/repl/llm/llm.go

Lines changed: 1 addition & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -547,18 +547,7 @@ func (c *LLMClient) SendMessage(messages []Message, stream bool, images []string
547547
if c.Config != nil && c.Config.Debug {
548548
var buf bytes.Buffer
549549
for _, m := range messagesToSend {
550-
// Attempt to pretty-print the content
551-
var contentStr string
552-
switch v := m.Content.(type) {
553-
case string:
554-
contentStr = v
555-
default:
556-
if b, err := MarshalNoEscape(v); err == nil {
557-
contentStr = string(b)
558-
} else {
559-
contentStr = fmt.Sprintf("<unprintable content: %T>", v)
560-
}
561-
}
550+
contentStr := m.Content
562551
// fmt.Fprintf(&buf, " - [%d] role=%s\n", i, m.Role)
563552
// When not using rawdog, show a user/content split if present
564553
if !c.Config.Rawdog && m.Role == "user" {

src/repl/llm/messages.go

Lines changed: 12 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package llm
22

33
import (
4-
"fmt"
54
"strings"
65
)
76

@@ -27,25 +26,18 @@ func BuildConversationString(messages []Message, includeLLM bool, includeSystem
2726
if includeSystem {
2827
for _, m := range messages {
2928
if strings.ToLower(m.Role) == "system" {
30-
var content string
31-
switch c := m.Content.(type) {
32-
case string:
33-
content = c
34-
default:
35-
content = fmt.Sprintf("%v", c)
36-
}
3729
switch format {
3830
case "tokens":
3931
b.WriteString("<|start_of_turn>system\n")
40-
b.WriteString(content)
32+
b.WriteString(m.Content)
4133
b.WriteString("<|end_of_turn>\n")
4234
case "labeled":
4335
b.WriteString("System: ")
44-
b.WriteString(content)
36+
b.WriteString(m.Content)
4537
b.WriteString("\n")
4638
default:
47-
b.WriteString(content)
48-
if !strings.HasSuffix(content, "\n") {
39+
b.WriteString(m.Content)
40+
if !strings.HasSuffix(m.Content, "\n") {
4941
b.WriteString("\n")
5042
}
5143
}
@@ -66,27 +58,19 @@ func BuildConversationString(messages []Message, includeLLM bool, includeSystem
6658
return BuildConversationString(messages, includeLLM, includeSystem, format, false)
6759
}
6860

69-
var content string
70-
switch c := lastUser.Content.(type) {
71-
case string:
72-
content = c
73-
default:
74-
content = fmt.Sprintf("%v", c)
75-
}
76-
7761
// Append the last user message according to format
7862
switch format {
7963
case "tokens":
8064
b.WriteString("<|start_of_turn>user\n")
81-
b.WriteString(content)
65+
b.WriteString(lastUser.Content)
8266
b.WriteString("<|end_of_turn>\n")
8367
case "labeled":
8468
b.WriteString("User: ")
85-
b.WriteString(content)
69+
b.WriteString(lastUser.Content)
8670
b.WriteString("\n")
8771
default:
88-
b.WriteString(content)
89-
if !strings.HasSuffix(content, "\n") {
72+
b.WriteString(lastUser.Content)
73+
if !strings.HasSuffix(lastUser.Content, "\n") {
9074
b.WriteString("\n")
9175
}
9276
}
@@ -110,33 +94,24 @@ func BuildConversationString(messages []Message, includeLLM bool, includeSystem
11094
}
11195
}
11296

113-
// Extract content as string
114-
var content string
115-
switch c := m.Content.(type) {
116-
case string:
117-
content = c
118-
default:
119-
content = fmt.Sprintf("%v", c)
120-
}
121-
12297
switch format {
12398
case "tokens":
12499
// Use explicit start/end of turn tokens which some models expect
125100
b.WriteString("<|start_of_turn>")
126101
b.WriteString(role)
127102
b.WriteString("\n")
128-
b.WriteString(content)
103+
b.WriteString(m.Content)
129104
b.WriteString("<|end_of_turn>\n")
130105
case "labeled":
131106
// Human-friendly labeled format
132107
b.WriteString(strings.ToUpper(role[:1]) + role[1:])
133108
b.WriteString(": ")
134-
b.WriteString(content)
109+
b.WriteString(m.Content)
135110
b.WriteString("\n")
136111
default:
137112
// plain: just concatenate contents separated by newlines
138-
b.WriteString(content)
139-
if !strings.HasSuffix(content, "\n") {
113+
b.WriteString(m.Content)
114+
if !strings.HasSuffix(m.Content, "\n") {
140115
b.WriteString("\n")
141116
}
142117
}

src/repl/llm/ollama.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,7 @@ func (p *OllamaProvider) SendMessage(messages []Message, stream bool, images []s
271271
if p.config.Rawdog {
272272
messageline := "" // <start_of_turn>user\nhello world<end_of_turn>\n<start_of_turn>model\n"
273273
for _, msg := range messages {
274-
messageline += msg.Content.(string)
274+
messageline += msg.Content
275275
}
276276
effectiveModel := p.config.Model
277277
if effectiveModel == "" {

src/repl/llm/openai.go

Lines changed: 27 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -333,16 +333,15 @@ func (p *OpenAIProvider) ListModels(ctx context.Context) ([]Model, error) {
333333
// nothing more to do; parsing helper already returned results or an error
334334
}
335335

336-
func (p *OpenAIProvider) SendMessage(messages []Message, stream bool, images []string, tools []OpenAITool) (string, error) {
337-
provider := strings.ToLower(p.config.PROVIDER)
338-
339-
// If images are provided, prepend a user message with OpenAI vision content blocks
336+
// buildRequestMessages returns a JSON-ready slice of chat messages. Regular
337+
// messages are forwarded as-is (their Content is a plain string). When image
338+
// URIs are supplied, a user message carrying the OpenAI-style content blocks
339+
// is prepended — that payload is assembled locally as a map so the public
340+
// Message type can keep a flat string content.
341+
func buildRequestMessages(messages []Message, images []string) []interface{} {
342+
out := make([]interface{}, 0, len(messages)+1)
340343
if len(images) > 0 {
341-
if provider == "deepseek" || provider == "mistral" {
342-
return "", fmt.Errorf("images not supported by provider: %s", p.GetName())
343-
}
344-
fmt.Println("sending images")
345-
var blocks []ContentBlock
344+
blocks := make([]ContentBlock, 0, len(images))
346345
for _, uri := range images {
347346
blocks = append(blocks, ContentBlock{
348347
Type: "image_url",
@@ -351,18 +350,30 @@ func (p *OpenAIProvider) SendMessage(messages []Message, stream bool, images []s
351350
}{URL: uri},
352351
})
353352
}
354-
imageMessage := Message{Role: "user", Content: blocks}
355-
messages = append([]Message{imageMessage}, messages...)
353+
out = append(out, map[string]interface{}{
354+
"role": "user",
355+
"content": blocks,
356+
})
357+
}
358+
for _, m := range messages {
359+
out = append(out, m)
360+
}
361+
return out
362+
}
363+
364+
func (p *OpenAIProvider) SendMessage(messages []Message, stream bool, images []string, tools []OpenAITool) (string, error) {
365+
provider := strings.ToLower(p.config.PROVIDER)
366+
367+
if len(images) > 0 && (provider == "deepseek" || provider == "mistral") {
368+
return "", fmt.Errorf("images not supported by provider: %s", p.GetName())
356369
}
357370

358371
// Mistral rejects empty assistant messages
359372
if provider == "mistral" {
360373
filtered := make([]Message, 0, len(messages))
361374
for _, msg := range messages {
362-
if msg.Role == "assistant" {
363-
if s, ok := msg.Content.(string); ok && s == "" && len(msg.ToolCalls) == 0 {
364-
continue
365-
}
375+
if msg.Role == "assistant" && msg.Content == "" && len(msg.ToolCalls) == 0 {
376+
continue
366377
}
367378
filtered = append(filtered, msg)
368379
}
@@ -375,7 +386,7 @@ func (p *OpenAIProvider) SendMessage(messages []Message, stream bool, images []s
375386
}
376387
request := map[string]interface{}{
377388
"model": effectiveModel,
378-
"messages": messages,
389+
"messages": buildRequestMessages(messages, images),
379390
}
380391

381392
// Add tools if provided

src/repl/llm/openapi.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -147,9 +147,9 @@ func (p *OpenAPIProvider) SendMessage(messages []Message, stream bool, images []
147147
content := ""
148148
for _, msg := range messages {
149149
if msg.Role == "system" {
150-
content += "System: " + msg.Content.(string) + "\n\n"
150+
content += "System: " + msg.Content + "\n\n"
151151
} else {
152-
content += msg.Content.(string)
152+
content += msg.Content
153153
}
154154
}
155155

src/repl/react_json.go

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -365,13 +365,7 @@ func buildChatHistory(input string, messages []llm.Message) string {
365365
for _, m := range messages {
366366
role := strings.ToLower(m.Role)
367367
if role == "assistant" || role == "model" || role == "ai" {
368-
var content string
369-
switch c := m.Content.(type) {
370-
case string:
371-
content = c
372-
default:
373-
content = fmt.Sprintf("%v", c)
374-
}
368+
content := m.Content
375369
if !strings.HasSuffix(content, "\n") {
376370
content += "\n"
377371
}

0 commit comments

Comments (0)