• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

kshard / chatter / 16538940978

26 Jul 2025 10:35AM UTC coverage: 67.584% (+44.9%) from 22.674%
16538940978

Pull #53

github

fogfish
update license
Pull Request #53: Enable multi-content I/O within prompts & responses

596 of 837 new or added lines in 27 files covered. (71.21%)

2 existing lines in 2 files now uncovered.

884 of 1308 relevant lines covered (67.58%)

0.72 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

73.42
/content.go
1
//
2
// Copyright (C) 2024 Dmitry Kolesnikov
3
//
4
// This file may be modified and distributed under the terms
5
// of the MIT license.  See the LICENSE file for details.
6
// https://github.com/kshard/chatter
7
//
8

9
package chatter
10

11
import (
12
        "encoding/json"
13
        "fmt"
14
        "strconv"
15
        "strings"
16
)
17

18
// Content is the core building block for I/O with LLMs.
// It defines either the input prompt or the result of an LLM execution.
// For example,
//   - a prompt is content: either simple plain [Text] or semistructured [Prompt];
//   - the LLM replies with generated [Text], [Vector] or [Invoke] instructions;
//   - invocation of external tools is orchestrated using [Json] content;
//   - etc.
//
// The content itself is encapsulated in a sequence of [Message] forming a conversation.
type Content interface {
	// Every content variant renders itself as text.
	String() string
}
28

29
// Text is a plain text either part of prompt or LLM's reply.
30
// For simplicity of library's api, text is also representing
31
// a [Message] (HKT1(Message)), allowing it to be used directly as input (prompt) to LLM
32
type Text string
33

34
func (t Text) HKT1(Message)   {}
×
35
func (t Text) String() string { return string(t) }
×
36

NEW
37
func (t Text) MarshalJSON() ([]byte, error) {
×
NEW
38
        return json.Marshal(struct {
×
NEW
39
                Text string `json:"text,omitempty"`
×
NEW
40
        }{Text: string(t)})
×
UNCOV
41
}
×
42

43
// Json is a structured object (JSON object) that can be used as input to LLMs
// or as a reply from LLMs.
//
// Json is a key abstraction for LLMs integration with external tools.
// It is used to pass structured data from the LLM to the tool and vice versa,
// supporting invocation and answering the results.
type Json struct {
	// ID uniquely identifies the Json object, used for tracking in the
	// conversation and correlating input with output (invocations with answers).
	ID string `json:"id,omitempty"`

	// Source identifies the origin of the Json object,
	// e.g. the name of the tool that produced the output.
	Source string `json:"source,omitempty"`

	// Value holds the raw JSON object.
	Value json.RawMessage `json:"bag,omitempty"`
}

// String renders the raw JSON value verbatim.
func (j Json) String() string { return string(j.Value) }
65

66
//------------------------------------------------------------------------------
67

68
// Task is part of the [Prompt] that defines the task to be solved by LLM.
69
type Task string
70

71
func (t Task) HKT1(Message)   {}
×
72
func (t Task) String() string { return string(t) }
1✔
73

74
// Guide is part of the [Prompt] that guides LLM on how to complete the task.
75
type Guide struct {
76
        Note string   `json:"note,omitempty"`
77
        Text []string `json:"guide,omitempty"`
78
}
79

80
func (g Guide) String() string {
1✔
81
        seq := make([]string, 0)
1✔
82
        if len(g.Note) > 0 {
1✔
83
                seq = append(seq, Sentence(g.Note))
×
84
        }
×
85
        for _, t := range g.Text {
2✔
86
                seq = append(seq, Sentence(t))
1✔
87
        }
1✔
88

89
        return strings.Join(seq, "\n")
1✔
90
}
91

92
// Rules is part of the [Prompt] that defines the rules and requirements to be
93
// followed by LLM. Use it to give as much information as possible to ensure
94
// your response does not use any incorrect assumptions.
95
type Rules struct {
96
        Note string   `json:"note,omitempty"`
97
        Text []string `json:"rules,omitempty"`
98
}
99

100
func (r Rules) String() string {
1✔
101
        seq := make([]string, 0)
1✔
102
        if len(r.Note) > 0 {
2✔
103
                seq = append(seq, sentence(r.Note, ":"))
1✔
104
        }
1✔
105
        for i, t := range r.Text {
2✔
106
                seq = append(seq, strconv.Itoa(i+1)+". "+Sentence(t))
1✔
107
        }
1✔
108

109
        return strings.Join(seq, "\n")
1✔
110
}
111

112
// Feedback is part of the [Prompt] that gives feedback to LLM on previous
113
// completion of the task (e.g. errors).
114
type Feedback struct {
115
        Note string   `json:"note,omitempty"`
116
        Text []string `json:"feedback,omitempty"`
117
}
118

119
func (f Feedback) String() string {
1✔
120
        seq := make([]string, 0)
1✔
121
        if len(f.Note) > 0 {
2✔
122
                seq = append(seq, sentence(f.Note, ":"))
1✔
123
        }
1✔
124
        for _, t := range f.Text {
2✔
125
                seq = append(seq, "- "+Sentence(t))
1✔
126
        }
1✔
127

128
        return strings.Join(seq, "\n")
1✔
129
}
130

131
func (f Feedback) Error() string { return f.String() }
1✔
132

133
// Example is the part of the [Prompt] that shows how to complete the task.
type Example struct {
	Input string `json:"input,omitempty"`
	Reply string `json:"reply,omitempty"`
}

// String renders the example as an input/expected-output pair,
// terminated with a blank line.
func (e Example) String() string {
	return "Example Input:\n" + e.Input + "\nExpected Output:\n" + e.Reply + "\n\n"
}
142

143
// Context is part of the [Prompt] that provides additional information
144
// required to complete the task.
145
type Context struct {
146
        Note string   `json:"note,omitempty"`
147
        Text []string `json:"context,omitempty"`
148
}
149

150
func (c Context) String() string {
1✔
151
        seq := make([]string, 0)
1✔
152
        if len(c.Note) > 0 {
2✔
153
                seq = append(seq, sentence(c.Note, ":"))
1✔
154
        }
1✔
155
        for _, t := range c.Text {
2✔
156
                seq = append(seq, "- "+Sentence(t))
1✔
157
        }
1✔
158

159
        return strings.Join(seq, "\n")
1✔
160
}
161

162
// Input is part of the [Prompt] that provides input data required to
163
// complete the task.
164
type Input struct {
165
        Note string   `json:"note,omitempty"`
166
        Text []string `json:"input,omitempty"`
167
}
168

169
func (i Input) String() string {
1✔
170
        seq := make([]string, 0)
1✔
171
        if len(i.Note) > 0 {
2✔
172
                seq = append(seq, sentence(i.Note, ":"))
1✔
173
        }
1✔
174
        for _, t := range i.Text {
2✔
175
                seq = append(seq, "- "+Sentence(t))
1✔
176
        }
1✔
177

178
        return strings.Join(seq, "\n")
1✔
179
}
180

181
// Blob is part of the [Prompt] that provides unformatted input data required to
182
// complete the task.
183
type Blob struct {
184
        Note string `json:"text,omitempty"`
185
        Text string `json:"blob,omitempty"`
186
}
187

188
func (b Blob) String() string {
1✔
189
        var sb strings.Builder
1✔
190
        if len(b.Note) > 0 {
2✔
191
                sb.WriteString(sentence(b.Note, ":"))
1✔
192
                sb.WriteString("\n")
1✔
193
        }
1✔
194
        sb.WriteString(b.Text)
1✔
195
        sb.WriteString("\n")
1✔
196

1✔
197
        return sb.String()
1✔
198
}
199

200
//------------------------------------------------------------------------------
201

202
// Invoke is a special content bloc defining interaction with external functions.
203
// Invoke is generated by LLMs when execution of external tools is required.
204
//
205
// It is expected that client code will use [Reply.Invoke] to process
206
// the invocation and call the function with the name and arguments.
207
//
208
// [Answer] is returned with the results of the function call.
209
type Invoke struct {
210
        // Unique identifier of the tool model wants to use.
211
        // The name is used to lookup the tool in the registry.
212
        Cmd string `json:"name"`
213

214
        // Arguments to the tool, which are passed as a JSON object.
215
        Args Json `json:"args"`
216

217
        // Original LLM message that triggered the invocation, as defined by the providers API.
218
        // The message is used to maintain the converstation history and context.
219
        Message any `json:"-"`
220
}
221

NEW
222
func (inv Invoke) String() string  { return fmt.Sprintf("invoke @%s", inv.Cmd) }
×
NEW
223
func (inv Invoke) RawMessage() any { return inv.Message }
×
224

225
//------------------------------------------------------------------------------
226

227
// Vector is a sequence of float32 numbers representing the embedding vector.
type Vector []float32

// String renders the vector using the default slice formatting, e.g. "[1 2 3]".
// The conversion to []float32 matters: without it fmt would call String
// again and recurse forever.
func (v Vector) String() string { return fmt.Sprint([]float32(v)) }

// MarshalJSON encodes the vector as {"vector": [...]} so the content kind
// is distinguishable on the wire.
func (v Vector) MarshalJSON() ([]byte, error) {
	type envelope struct {
		Vector []float32 `json:"vector,omitempty"`
	}
	return json.Marshal(envelope{Vector: v})
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc