Skip to content

Commit a462864

Browse files
committed
More work towards rawdogging ollama
1 parent 9164e78 commit a462864

4 files changed

Lines changed: 65 additions & 33 deletions

File tree

src/repl/conf.go

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -290,25 +290,27 @@ func (r *REPL) resolvePromptPath(promptName string) (string, error) {
290290
func (r *REPL) handleSetCommand(args []string) error {
291291
// Special handling for specific configOptions
292292
if len(args) >= 3 {
293+
val := strings.Join(args[2:], " ")
293294
switch args[1] {
295+
case "deterministic":
296+
b, _ := strconv.ParseBool(val)
297+
r.config.Deterministic = b
298+
return nil
294299
case "rawdog":
295-
fmt.Println("RAWDOG")
296-
r.config.Rawdog = true
300+
b, _ := strconv.ParseBool(val)
301+
r.config.Rawdog = b
297302
return nil
298303
case "promptfile":
299-
filePath := args[2]
300-
return r.loadSystemPrompt(filePath)
304+
return r.loadSystemPrompt(val)
301305
case "systemprompt":
302-
promptText := strings.Join(args[2:], " ")
303-
r.systemPrompt = promptText
304-
r.configOptions.Set("systemprompt", promptText)
305-
fmt.Printf("System prompt set (%d chars)\r\n", len(promptText))
306+
r.systemPrompt = val
307+
r.configOptions.Set("systemprompt", val)
308+
fmt.Printf("System prompt set (%d chars)\r\n", len(val))
306309
return nil
307310
case "model":
308-
model := strings.Join(args[2:], " ")
309-
return r.setModel(model)
311+
return r.setModel(val)
310312
case "provider":
311-
provider := strings.ToLower(args[2])
313+
provider := strings.ToLower(val)
312314
return r.setProvider(provider)
313315
}
314316
}

src/repl/llm/llm.go

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ package llm
33
import (
44
"bytes"
55
"context"
6+
"encoding/json"
67
"fmt"
78
"io"
89
"net/http"
@@ -362,4 +363,14 @@ func buildURL(defaultURL, baseURL, host, port, suffix string) string {
362363
return fmt.Sprintf("http://%s:%s%s", host, port, suffix)
363364
}
364365

365-
// ==================== PROVIDER IMPLEMENTATIONS ====================
366+
func MarshalNoEscape(v any) ([]byte, error) {
367+
var buf bytes.Buffer
368+
enc := json.NewEncoder(&buf)
369+
enc.SetEscapeHTML(false)
370+
err := enc.Encode(v)
371+
if err != nil {
372+
return nil, err
373+
}
374+
// Remove trailing newline added by Encoder
375+
return bytes.TrimRight(buf.Bytes(), "\n"), nil
376+
}

src/repl/llm/ollama.go

Lines changed: 39 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ func (c *LLMClient) sendOllamaWithImages(ctx context.Context, messages []Message
4949
}
5050
}
5151

52-
jsonData, err := json.Marshal(request)
52+
jsonData, err := MarshalNoEscape(request)
5353
if err != nil {
5454
return "", err
5555
}
@@ -168,7 +168,6 @@ func (p *OllamaProvider) ListModels(ctx context.Context) ([]Model, error) {
168168

169169
func (p *OllamaProvider) SendMessage(ctx context.Context, messages []Message, stream bool) (string, error) {
170170
if p.config.Rawdog {
171-
stream = false
172171
messageline := "" // <start_of_turn>user\nhello world<end_of_turn>\n<start_of_turn>model\n"
173172
for _, msg := range messages {
174173
messageline += msg.Content.(string)
@@ -184,17 +183,17 @@ func (p *OllamaProvider) SendMessage(ctx context.Context, messages []Message, st
184183
Prompt: messageline,
185184
}
186185
// Apply deterministic settings if enabled
187-
if p.config.Deterministic {
186+
if p.config.Deterministic || true {
188187
request.Options = map[string]float64{
189188
"repeat_last_n": 0,
190189
"top_p": 0.0,
191190
"top_k": 1.0,
192191
"temperature": 0.0,
193192
"repeat_penalty": 1.0,
194-
"seed": 123,
193+
"seed": 0,
195194
}
196195
}
197-
jsonData, err := json.Marshal(request)
196+
jsonData, err := MarshalNoEscape(request)
198197
if err != nil {
199198
return "", err
200199
}
@@ -211,7 +210,9 @@ func (p *OllamaProvider) SendMessage(ctx context.Context, messages []Message, st
211210
}
212211

213212
// stream-mode
214-
// if stream { return llmMakeStreamingRequest(ctx, "POST", url, headers, jsonData, p.parseStream) }
213+
if stream {
214+
return llmMakeStreamingRequest(ctx, "POST", url, headers, jsonData, p.parseStream)
215+
}
215216

216217
// non-stream
217218
respBody, err := llmMakeRequest(ctx, "POST", url, headers, jsonData)
@@ -258,7 +259,7 @@ func (p *OllamaProvider) SendMessage(ctx context.Context, messages []Message, st
258259
}
259260
}
260261

261-
jsonData, err := json.Marshal(request)
262+
jsonData, err := MarshalNoEscape(request)
262263
if err != nil {
263264
return "", err
264265
}
@@ -319,19 +320,37 @@ func (p *OllamaProvider) parseStream(reader io.Reader) (string, error) {
319320
continue
320321
}
321322

322-
var response struct {
323-
Message struct {
324-
Content string `json:"content""`
325-
} `json:"message""`
326-
Done bool `json:"done""`
327-
}
323+
isDone := false
324+
content := ""
325+
if p.config.Rawdog {
326+
var response struct {
327+
Response string `json:"response"`
328+
Done bool `json:"done"`
329+
}
328330

329-
if err := json.Unmarshal([]byte(line), &response); err != nil {
330-
continue
331-
}
331+
if err := json.Unmarshal([]byte(line), &response); err != nil {
332+
continue
333+
}
332334

333-
// Format content based on markdown setting
334-
content := response.Message.Content
335+
// Format content based on markdown setting
336+
content = response.Response
337+
isDone = response.Done
338+
} else {
339+
var response struct {
340+
Message struct {
341+
Content string `json:"content"`
342+
} `json:"message"`
343+
Done bool `json:"done"`
344+
}
345+
346+
if err := json.Unmarshal([]byte(line), &response); err != nil {
347+
continue
348+
}
349+
350+
// Format content based on markdown setting
351+
content = response.Message.Content
352+
isDone = response.Done
353+
}
335354
if !markdownEnabled {
336355
// Standard formatting - just replace newlines for terminal display
337356
content = strings.ReplaceAll(content, "\n", "\n\r")
@@ -340,9 +359,9 @@ func (p *OllamaProvider) parseStream(reader io.Reader) (string, error) {
340359
content = FormatStreamingChunk(content, markdownEnabled)
341360
}
342361
fmt.Print(content)
343-
fullResponse.WriteString(response.Message.Content)
362+
fullResponse.WriteString(content) // response.Message.Content)
344363

345-
if response.Done {
364+
if isDone {
346365
break
347366
}
348367
}

src/repl/repl.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1741,7 +1741,7 @@ func (r *REPL) sendToAI(input string) error {
17411741
}
17421742

17431743
// If reasoning is disabled (and, after this change, only when rawdog mode is enabled), append /no_think to the last message sent to the LLM
1744-
if !r.reasoningEnabled {
1744+
if !r.reasoningEnabled && r.configOptions.GetBool("rawdog") {
17451745
// Create a copy of the messages for the API call with /no_think appended
17461746
messagesCopy := make([]llm.Message, len(messages))
17471747
copy(messagesCopy, messages)

0 commit comments

Comments
 (0)