• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

kshard / chatter / 16538940978

26 Jul 2025 10:35AM UTC coverage: 67.584% (+44.9%) from 22.674%
16538940978

Pull #53

github

fogfish
update license
Pull Request #53: Enable multi-content I/O within prompts & responses

596 of 837 new or added lines in 27 files covered. (71.21%)

2 existing lines in 2 files now uncovered.

884 of 1308 relevant lines covered (67.58%)

0.72 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/message.go
1
//
2
// Copyright (C) 2025 Dmitry Kolesnikov
3
//
4
// This file may be modified and distributed under the terms
5
// of the MIT license.  See the LICENSE file for details.
6
// https://github.com/kshard/chatter
7
//
8

9
package chatter
10

11
import (
12
        "encoding/json"
13
        "fmt"
14
        "strings"
15
)
16

17
// Message is a single element of a conversation with an LLM; a sequence
// of messages forms the conversation, its memory, or its history.
//
// Every message is composed of [Content] blocks. Input messages act as
// prompts; output messages are the replies.
type Message interface {
	fmt.Stringer
	HKT1(Message)
}
26

27
// Stage identifies the state of the interaction with the LLM.
type Stage string

const (
	// LLM_RETURN: the LLM produced a result to return.
	LLM_RETURN Stage = "return"

	// LLM_INCOMPLETE: the LLM produced a result but it was truncated
	// (e.g. max tokens reached or a stop sequence hit).
	LLM_INCOMPLETE Stage = "incomplete"

	// LLM_INVOKE: the LLM requests invocation of external commands/tools.
	LLM_INVOKE Stage = "invoke"

	// LLM_ERROR: the LLM aborted execution due to an error.
	LLM_ERROR Stage = "error"
)
43

44
// The reply from LLMs
45
type Reply struct {
46
        Stage   Stage     `json:"stage"`
47
        Usage   Usage     `json:"usage"`
48
        Content []Content `json:"content"`
49
}
50

51
var _ Message = (*Reply)(nil)
52

NEW
53
// HKT1 is the Message interface tag method; it carries no behavior and
// exists so that *Reply can be used wherever a Message is expected.
func (*Reply) HKT1(Message) {}
×
54

55
func (reply Reply) String() string {
×
56
        seq := make([]string, 0)
×
57
        for _, c := range reply.Content {
×
58
                switch v := (c).(type) {
×
59
                case Text:
×
60
                        seq = append(seq, v.String())
×
61
                }
62
        }
63
        return strings.Join(seq, "")
×
64
}
65

66
// Helper function to invoke external tools
67
func (reply Reply) Invoke(f func(string, json.RawMessage) (json.RawMessage, error)) (Answer, error) {
×
68
        if reply.Stage != LLM_INVOKE {
×
69
                return Answer{}, nil
×
70
        }
×
71

72
        answer := Answer{Yield: make([]Json, 0)}
×
73
        for _, inv := range reply.Content {
×
74
                switch v := inv.(type) {
×
75
                case Invoke:
×
NEW
76
                        val, err := f(v.Cmd, v.Args.Value)
×
77
                        if err != nil {
×
78
                                return answer, err
×
79
                        }
×
NEW
80
                        answer.Yield = append(answer.Yield, Json{ID: v.Args.ID, Source: v.Cmd, Value: val})
×
81
                }
82
        }
83

84
        return answer, nil
×
85
}
86

87
// Answer from external tools
88
type Answer struct {
89
        Yield []Json `json:"yield,omitempty"`
90
}
91

92
var _ Message = (*Reply)(nil)
93

NEW
94
// HKT1 is the Message interface tag method; it carries no behavior and
// exists so that *Answer can be used wherever a Message is expected.
func (*Answer) HKT1(Message) {}
×
95

96
// String implements fmt.Stringer for Answer; tool results have no
// textual rendering, so it always returns the empty string.
func (Answer) String() string { return "" }
×
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc