@@ -49,7 +49,7 @@ func (c *LLMClient) sendOllamaWithImages(ctx context.Context, messages []Message
4949 }
5050 }
5151
52- jsonData , err := json . Marshal (request )
52+ jsonData , err := MarshalNoEscape (request )
5353 if err != nil {
5454 return "" , err
5555 }
@@ -168,7 +168,6 @@ func (p *OllamaProvider) ListModels(ctx context.Context) ([]Model, error) {
168168
169169func (p * OllamaProvider ) SendMessage (ctx context.Context , messages []Message , stream bool ) (string , error ) {
170170 if p .config .Rawdog {
171- stream = false
172171 messageline := "" // <start_of_turn>user\nhello world<end_of_turn>\n<start_of_turn>model\n"
173172 for _ , msg := range messages {
174173 messageline += msg .Content .(string )
@@ -184,17 +183,17 @@ func (p *OllamaProvider) SendMessage(ctx context.Context, messages []Message, st
184183 Prompt : messageline ,
185184 }
186185 // Apply deterministic settings if enabled
187- if p .config .Deterministic {
186+ if p .config .Deterministic || true {
188187 request .Options = map [string ]float64 {
189188 "repeat_last_n" : 0 ,
190189 "top_p" : 0.0 ,
191190 "top_k" : 1.0 ,
192191 "temperature" : 0.0 ,
193192 "repeat_penalty" : 1.0 ,
194- "seed" : 123 ,
193+ "seed" : 0 ,
195194 }
196195 }
197- jsonData , err := json . Marshal (request )
196+ jsonData , err := MarshalNoEscape (request )
198197 if err != nil {
199198 return "" , err
200199 }
@@ -211,7 +210,9 @@ func (p *OllamaProvider) SendMessage(ctx context.Context, messages []Message, st
211210 }
212211
213212 // stream-mode
214- // if stream { return llmMakeStreamingRequest(ctx, "POST", url, headers, jsonData, p.parseStream) }
213+ if stream {
214+ return llmMakeStreamingRequest (ctx , "POST" , url , headers , jsonData , p .parseStream )
215+ }
215216
216217 // non-stream
217218 respBody , err := llmMakeRequest (ctx , "POST" , url , headers , jsonData )
@@ -258,7 +259,7 @@ func (p *OllamaProvider) SendMessage(ctx context.Context, messages []Message, st
258259 }
259260 }
260261
261- jsonData , err := json . Marshal (request )
262+ jsonData , err := MarshalNoEscape (request )
262263 if err != nil {
263264 return "" , err
264265 }
@@ -319,19 +320,37 @@ func (p *OllamaProvider) parseStream(reader io.Reader) (string, error) {
319320 continue
320321 }
321322
322- var response struct {
323- Message struct {
324- Content string `json:"content"`
325- } `json:"message"`
326- Done bool `json:"done"`
327- }
323+ isDone := false
324+ content := ""
325+ if p .config .Rawdog {
326+ var response struct {
327+ Response string `json:"response"`
328+ Done bool `json:"done"`
329+ }
328330
329- if err := json .Unmarshal ([]byte (line ), & response ); err != nil {
330- continue
331- }
331+ if err := json .Unmarshal ([]byte (line ), & response ); err != nil {
332+ continue
333+ }
332334
333- // Format content based on markdown setting
334- content := response .Message .Content
335+ // Format content based on markdown setting
336+ content = response .Response
337+ isDone = response .Done
338+ } else {
339+ var response struct {
340+ Message struct {
341+ Content string `json:"content"`
342+ } `json:"message"`
343+ Done bool `json:"done"`
344+ }
345+
346+ if err := json .Unmarshal ([]byte (line ), & response ); err != nil {
347+ continue
348+ }
349+
350+ // Format content based on markdown setting
351+ content = response .Message .Content
352+ isDone = response .Done
353+ }
335354 if ! markdownEnabled {
336355 // Standard formatting - just replace newlines for terminal display
337356 content = strings .ReplaceAll (content , "\n" , "\n\r" )
@@ -340,9 +359,9 @@ func (p *OllamaProvider) parseStream(reader io.Reader) (string, error) {
340359 content = FormatStreamingChunk (content , markdownEnabled )
341360 }
342361 fmt .Print (content )
343- fullResponse .WriteString (response .Message .Content )
362+ fullResponse .WriteString (content ) // response.Message.Content)
344363
345- if response . Done {
364+ if isDone {
346365 break
347366 }
348367 }
0 commit comments