-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathprompt.go
More file actions
178 lines (146 loc) · 4.24 KB
/
prompt.go
File metadata and controls
178 lines (146 loc) · 4.24 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
package prompt
import (
"bytes"
"cmp"
"encoding/json"
"fmt"
"strings"
"text/template"
"github.com/ladzaretti/ragx/vecdb"
)
// DefaultSystemPrompt is the base, terminal-first system prompt for a ragx CLI.
// It constrains the model to answer strictly from the supplied CONTEXT chunks,
// mandates per-chunk numeric citations with a trailing "Sources" footer, and
// documents the exact "USER QUERY:" / "CONTEXT:" input layout that
// [BuildUserPrompt] produces via [DefaultUserPromptTmpl].
const DefaultSystemPrompt = `# Identity
You are a terminal-first RAG assistant. You answer **only** from the provided CONTEXT.
# Instructions
- **Grounding**
- Treat each CHUNK as the sole source of truth.
- Chunks may be **TRUNCATED**; never infer missing parts. If you can't answer from CONTEXT, reply exactly: "I don't know based on the provided context.".
- If chunks conflict, call it out and prefer the most relevant one.
- **Citations**
- Cite in text with independent numbers by first appearance: "[1]", "[2]", ...
- Numbers are **per CHUNK** (not per file). Re-using a CHUNK keeps its first number.
- If you cited at least one CHUNK, append a Sources footer mapping each number to “(chunk <id>) <full source path>”.
- If nothing was cited, do not include a Sources section.
- List only sources you cited.
- **Output**
- Human-readable Markdown optimized for terminals (short paragraphs, bullets).
- Lead with the answer, then minimal rationale.
- Quote minimally; otherwise summarize and cite.
- If the reply would be long, give a tight summary and offer optional sections the user can request.
- **Safety & Scope**
- No hallucinations: do not invent APIs/flags/file contents not present in CONTEXT.
- If code/config looks truncated, avoid guessing.
- If the query is ambiguous, ask **one** targeted clarifying question, then stop.
- You cannot browse the web, read external files, or rely on memory.
- If asked to “run” something, show **how**; don't claim you executed it.
# I/O Format
You will receive input like:
USER QUERY:
<question or command here>
CONTEXT:
----
CHUNK id=<id1> source=<path-or-url>
TEXT: <chunk text...>
----
CHUNK id=<id2> source=<path-or-url>
TEXT: <chunk text...>
----
(more chunks…)
# Examples
<user_query id="example-1">
USER QUERY:
How do I start the server?
</user_query>
<context id="example-1">
CONTEXT:
----
CHUNK id=2 source=README.md
TEXT: Run 'srv start --port 8080' to start the HTTP server. Requires Go 1.22+.
----
</context>
<assistant_response id="example-1">
- Start the server with: "srv start --port 8080". Requires Go 1.22+. [1]
Sources:
[1] (chunk 2) README.md
</assistant_response>
<user_query id="example-2">
USER QUERY:
What's the roadmap?
</user_query>
<context id="example-2">
CONTEXT:
(no relevant chunks)
</context>
<assistant_response id="example-2">
I don't know based on the provided context.
If you add the roadmap document or link, I can summarize it.
</assistant_response>
`
// DefaultUserPromptTmpl is the default [text/template] used by
// [BuildUserPrompt] to render the user prompt. It receives a tmplData value:
// {{.Query}} is the trimmed user query, and {{.Chunks}} the retrieved
// chunkView entries. When no chunks are present it emits the literal
// "(no relevant chunks)" placeholder the system prompt expects.
const DefaultUserPromptTmpl = `USER QUERY:
{{.Query}}
CONTEXT:
{{- if .Chunks }}
{{- range .Chunks }}
----
CHUNK id={{.ID}} source={{.Source}}
TEXT: {{.Content}}
{{- end }}
----
{{- else }}
(no relevant chunks)
{{- end }}`
// promptConfig holds the options applied to BuildUserPrompt.
type promptConfig struct {
	// userTmpl is the user prompt template text; defaults to DefaultUserPromptTmpl.
	userTmpl string
}
// chunkView is the per-chunk view exposed to the user prompt template.
type chunkView struct {
	ID      int    // chunk identifier; from MetaFunc, or the chunk's index as fallback
	Source  string // source path or URL; "unknown" when metadata lacks it
	Content string // whitespace-trimmed chunk text
}
// tmplData is the root data value passed to the user prompt template.
type tmplData struct {
	Query  string      // whitespace-trimmed user query
	Chunks []chunkView // retrieved chunks, in search-result order
}
// MetaFunc extracts the source path and chunk id from a search result's raw
// JSON metadata. Implementations may return the zero values ("" and 0) to let
// BuildUserPrompt substitute its defaults.
type MetaFunc func(raw json.RawMessage) (source string, id int)
// PromptOpt is a functional option that mutates the prompt configuration.
type PromptOpt func(*promptConfig)
// WithUserPromptTmpl overrides the default user prompt template with the
// given text/template source.
func WithUserPromptTmpl(tmpl string) PromptOpt {
	return func(cfg *promptConfig) {
		cfg.userTmpl = tmpl
	}
}
// BuildUserPrompt renders the user prompt template with the given query and
// retrieved chunks.
//
// If no template is provided via [WithUserPromptTmpl], [DefaultUserPromptTmpl]
// is used. metaFn, when non-nil, extracts the source path and chunk id from
// each result's raw metadata; a missing source falls back to "unknown" and a
// missing (zero) id falls back to the chunk's index.
func BuildUserPrompt(query string, chunks []vecdb.SearchResult, metaFn MetaFunc, opts ...PromptOpt) (string, error) {
	c := &promptConfig{
		userTmpl: DefaultUserPromptTmpl,
	}

	for _, o := range opts {
		o(c)
	}

	td := tmplData{
		Query:  strings.TrimSpace(query),
		Chunks: make([]chunkView, 0, len(chunks)),
	}

	for i, ch := range chunks {
		src, id := "", 0
		if metaFn != nil {
			src, id = metaFn(ch.Meta)
		}

		// Substitute defaults when metadata is absent or incomplete.
		src = cmp.Or(src, "unknown")
		id = cmp.Or(id, i)

		td.Chunks = append(td.Chunks, chunkView{
			ID:      id,
			Source:  src,
			Content: strings.TrimSpace(ch.Content),
		})
	}

	t, err := template.New("user_prompt").Parse(c.userTmpl)
	if err != nil {
		// %w (not %v) so callers can unwrap the underlying template error
		// with errors.Is/errors.As.
		return "", fmt.Errorf("template parse error: %w", err)
	}

	var buf bytes.Buffer
	if err := t.Execute(&buf, td); err != nil {
		return "", fmt.Errorf("template execution error: %w", err)
	}

	return buf.String(), nil
}