• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

kshard / chatter / 13421360470

19 Feb 2025 08:04PM UTC coverage: 28.588% (-0.2%) from 28.833%
13421360470

push

github

web-flow
Update LLMs api with latest version (#23)

8 of 117 new or added lines in 5 files covered. (6.84%)

4 existing lines in 2 files now uncovered.

251 of 878 relevant lines covered (28.59%)

0.31 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

75.0
/cache/cache.go
1
//
2
// Copyright (C) 2025 Dmitry Kolesnikov
3
//
4
// This file may be modified and distributed under the terms
5
// of the MIT license.  See the LICENSE file for details.
6
// https://github.com/kshard/chatter
7
//
8

9
package cache
10

11
import (
	"context"
	"crypto/sha1"
	"errors"
	"fmt"
	"log/slog"

	"github.com/kshard/chatter"
)
19

20
// Creates caching layer for LLM client.
21
//
22
// Use github.com/akrylysov/pogreb to cache chatter on local file systems:
23
//
24
//        llm, err := /* create LLM client */
25
//        db, err := pogreb.Open("llm.cache", nil)
26
//        text := cache.New(db, llm)
27
func New(cache Cache, chatter chatter.Chatter) *Client {
1✔
28
        return &Client{
1✔
29
                Chatter: chatter,
1✔
30
                cache:   cache,
1✔
31
        }
1✔
32
}
1✔
33

34
func (c *Client) HashKey(prompt string) []byte {
1✔
35
        hash := sha1.New()
1✔
36
        hash.Write([]byte(prompt))
1✔
37
        return hash.Sum(nil)
1✔
38
}
1✔
39

40
func (c *Client) Prompt(ctx context.Context, prompt []fmt.Stringer, opts ...chatter.Opt) (chatter.Reply, error) {
1✔
41
        if len(prompt) == 0 {
1✔
NEW
42
                return chatter.Reply{}, fmt.Errorf("bad request, empty prompt")
×
UNCOV
43
        }
×
44

45
        hkey := c.HashKey(prompt[len(prompt)-1].String())
1✔
46
        val, err := c.cache.Get(hkey)
1✔
47
        if err != nil {
1✔
NEW
48
                return chatter.Reply{}, err
×
49
        }
×
50

51
        if len(val) != 0 {
2✔
52
                return chatter.Reply{Text: string(val)}, nil
1✔
53
        }
1✔
54

55
        reply, err := c.Chatter.Prompt(ctx, prompt, opts...)
1✔
56
        if err != nil {
1✔
NEW
57
                return chatter.Reply{}, err
×
58
        }
×
59

60
        err = c.cache.Put(hkey, []byte(reply.Text))
1✔
61
        if err != nil {
1✔
62
                slog.Warn("failed to cache LLM reply", "err", err)
×
63
        }
×
64

65
        return reply, nil
1✔
66
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc