• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

0plus1 / sottovoce / #4

10 Dec 2025 01:17PM UTC coverage: 72.956% (+0.4%) from 72.589%
#4

push

coveralls-python

0plus1
Add speech synthesis

89 of 133 new or added lines in 7 files covered. (66.92%)

6 existing lines in 2 files now uncovered.

232 of 318 relevant lines covered (72.96%)

0.73 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

90.91
/src/llm_client.py
1
from __future__ import annotations
1✔
2

3
from dataclasses import dataclass
1✔
4
from pathlib import Path
1✔
5
from typing import Any, Dict, List, Optional
1✔
6

7
import requests
1✔
8

9

10
@dataclass
class LLMConfig:
    """Connection settings for an OpenAI-compatible chat endpoint."""

    # Full URL of the chat-completions endpoint (passed to requests.post).
    endpoint: str
    # Model identifier sent in the request payload.
    model: str
    # Per-request timeout in seconds forwarded to requests.post.
    timeout: float = 60.0
15

16

17
class LLMClient:
1✔
18
    # Minimal OpenAI-compatible chat client.
19

20
    def __init__(self, config: LLMConfig):
1✔
21
        self.config = config
1✔
22
        self.system_prompt: Optional[str] = None
1✔
23

24
    def load_system_prompt(self, path: Path) -> None:
1✔
NEW
25
        self.system_prompt = path.read_text(encoding="utf-8")
×
26

27
    def complete(self, prompt: str) -> str:
1✔
28
        messages: List[Dict[str, str]] = []
1✔
29
        if self.system_prompt:
1✔
30
            messages.append({"role": "system", "content": self.system_prompt})
1✔
31
        messages.append({"role": "user", "content": prompt})
1✔
32
        payload: Dict[str, Any] = {
1✔
33
            "model": self.config.model,
34
            "messages": messages,
35
            "stream": False,
36
        }
37
        response = requests.post(
1✔
38
            self.config.endpoint,
39
            json=payload,
40
            timeout=self.config.timeout,
41
        )
42
        response.raise_for_status()
1✔
43
        data = response.json()
1✔
44
        # Expected OpenAI-compatible structure
45
        choices = data.get("choices", [])
1✔
46
        if not choices:
1✔
47
            raise ValueError("LLM response missing choices")
×
48
        message = choices[0].get("message", {})
1✔
49
        content = message.get("content")
1✔
50
        if not isinstance(content, str):
1✔
51
            raise ValueError("LLM response content missing or invalid")
×
52
        return content
1✔
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc