• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

kshard / chatter / 16538940978

26 Jul 2025 10:35AM UTC coverage: 67.584% (+44.9%) from 22.674%
16538940978

Pull #53

github

fogfish
update license
Pull Request #53: Enable multi-content I/O within prompts & responses

596 of 837 new or added lines in 27 files covered. (71.21%)

2 existing lines in 2 files now uncovered.

884 of 1308 relevant lines covered (67.58%)

0.72 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/provider/bedrock/foundation/llama/types.go
1
//
2
// Copyright (C) 2024 - 2025 Dmitry Kolesnikov
3
//
4
// This file may be modified and distributed under the terms
5
// of the MIT license.  See the LICENSE file for details.
6
// https://github.com/kshard/chatter
7
//
8

9
package llama
10

11
import (
12
        "strings"
13

14
        "github.com/kshard/chatter/aio/provider"
15
        "github.com/kshard/chatter/provider/bedrock"
16
)
17

18
// Prompt-template markers and role names used when rendering a
// conversation into a single Llama prompt string. The special tokens
// (<|begin_of_text|>, <|start_header_id|>, <|eot_id|>, ...) follow the
// Llama 3 chat template format.
const (
	begin_of_text   = "<|begin_of_text|>"   // opens the whole prompt
	start_header_id = "\n<|start_header_id|>" // opens a role header
	end_header_id   = "<|end_header_id|>\n"   // closes a role header
	end_of_turn     = "\n<|eot_id|>\n"        // marks the end of one turn
	system          = "system"                // role name: system instructions
	assistant       = "assistant"             // role name: model output
	human           = "user"                  // role name: user input
)
27

28
// input is the request payload marshaled to JSON for a Bedrock
// invocation of a Llama foundation model. Field names mirror the
// Bedrock Llama request schema via the JSON tags.
type input struct {
	Prompt      string  `json:"prompt"`                // fully rendered prompt text (template markers included)
	Temperature float64 `json:"temperature,omitempty"` // sampling temperature; omitted when zero
	TopP        float64 `json:"top_p,omitempty"`       // nucleus-sampling cutoff; omitted when zero
	MaxTokens   int     `json:"max_gen_len,omitempty"` // cap on generated tokens (Bedrock's "max_gen_len")
}
34

35
// reply is the JSON response body returned by Bedrock for a Llama
// invocation, per the Bedrock Llama response schema.
type reply struct {
	Text             string `json:"generation"`             // generated completion text
	UsedPromptTokens int    `json:"prompt_token_count"`     // tokens consumed by the prompt
	UsedTextTokens   int    `json:"generation_token_count"` // tokens produced in the completion
	StopReason       string `json:"stop_reason"`            // why generation stopped — exact values per Bedrock docs, TODO confirm
}
41

42
// encoder builds the outgoing request: it accumulates the rendered
// prompt text and carries the request parameters while they are being
// assembled.
// NOTE(review): its methods are defined elsewhere in the package —
// presumably satisfying the provider encoder contract; confirm against
// the call sites of provider.New.
type encoder struct {
	w   strings.Builder // prompt text under construction
	req input           // request payload being assembled
}
46

47
// decoder is the stateless response-decoding half of the provider pair;
// a zero value is passed to provider.New in New.
type decoder struct{}
48

49
// Llama is an alias for the generic provider specialized to this
// package's request (input) and response (reply) payload types.
type Llama = provider.Provider[*input, *reply]
50

NEW
51
func New(model string, opts ...bedrock.Option) (*Llama, error) {
×
NEW
52
        service, err := bedrock.New[*input, *reply](model, opts...)
×
NEW
53
        if err != nil {
×
NEW
54
                return nil, err
×
NEW
55
        }
×
56

NEW
57
        return provider.New(factory, decoder{}, service), nil
×
58
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc