• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

scope3data / scope3ai-py / 13396554446

18 Feb 2025 05:28PM UTC coverage: 96.179% (+15.6%) from 80.557%
13396554446

Pull #91

github

45362b
web-flow
Merge a1470984a into 37d564f57
Pull Request #91: docs: minor readme edits

2542 of 2643 relevant lines covered (96.18%)

3.84 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

96.49
/scope3ai/tracers/huggingface/translation.py
1
import time
4✔
2
from dataclasses import asdict, dataclass
4✔
3
from typing import Any, Callable, Optional, Tuple, Union
4✔
4

5
import tiktoken
4✔
6
from aiohttp import ClientResponse
4✔
7
from huggingface_hub import (  # type: ignore[import-untyped]
4✔
8
    AsyncInferenceClient,
9
    InferenceClient,
10
)
11
from huggingface_hub import TranslationOutput as _TranslationOutput
4✔
12
from requests import Response
4✔
13

14
from scope3ai.api.types import ImpactRow, Scope3AIContext
4✔
15
from scope3ai.api.typesgen import Task
4✔
16
from scope3ai.lib import Scope3AI
4✔
17
from scope3ai.response_interceptor.aiohttp_interceptor import aiohttp_response_capture
4✔
18
from scope3ai.response_interceptor.requests_interceptor import requests_response_capture
4✔
19

20
HUGGING_FACE_TRANSLATION_TASK = "translation"
4✔
21

22

23
@dataclass
class TranslationOutput(_TranslationOutput):
    """Hugging Face translation output extended with Scope3AI tracing context.

    Subclasses ``huggingface_hub.TranslationOutput`` and adds a single optional
    field so callers receive the upstream result plus the impact metadata
    attached by the wrappers in this module.
    """

    # Impact-tracking context attached after the impact row is submitted;
    # None until a wrapper assigns it.
    scope3ai: Optional[Scope3AIContext] = None
26

27

28
def _hugging_face_translation_get_impact_row(
    timer_start: Any,
    model: Any,
    response: Any,
    http_response: Optional[Union[ClientResponse, Response]],
    args: Any,
    kwargs: Any,
) -> Tuple[TranslationOutput, ImpactRow]:
    """Build the Scope3AI impact row for a completed translation call.

    Args:
        timer_start: ``time.perf_counter()`` value captured before the call.
        model: Model identifier used for the request.
        response: The upstream ``TranslationOutput`` returned by the client.
        http_response: Captured raw HTTP response, when available; its
            ``x-compute-time`` / ``x-compute-characters`` headers take
            precedence over locally measured values.
        args: Positional arguments of the wrapped call (``args[0]`` is the
            input text, when given positionally).
        kwargs: Keyword arguments of the wrapped call.

    Returns:
        A pair of the wrapped ``TranslationOutput`` (scope3ai not yet set)
        and the ``ImpactRow`` describing the request.
    """
    duration = time.perf_counter() - timer_start
    tokenizer = tiktoken.get_encoding("cl100k_base")
    prompt_tokens = 0
    # Truthiness check kept deliberately: a falsy Response (e.g. 4xx/5xx)
    # falls back to locally measured values, matching prior behavior.
    if http_response:
        duration = http_response.headers.get("x-compute-time") or duration
        prompt_tokens = http_response.headers.get("x-compute-characters")
    if not prompt_tokens:
        # No usable header value: estimate from the input text instead.
        text = args[0] if len(args) > 0 else kwargs.get("text", "")
        prompt_tokens = len(tokenizer.encode(text)) if text != "" else 0
    completion_tokens = len(tokenizer.encode(response.translation_text))
    impact = ImpactRow(
        model_id=model,
        task=Task.translation,
        input_tokens=int(prompt_tokens),
        output_tokens=completion_tokens,
        request_duration_ms=float(duration) * 1000,
    )
    wrapped_output = TranslationOutput(**asdict(response))
    return wrapped_output, impact
55

56

57
def huggingface_translation_wrapper_non_stream(
    wrapped: Callable, instance: InferenceClient, args: Any, kwargs: Any
) -> TranslationOutput:
    """Trace a synchronous Hugging Face translation call.

    Times the wrapped call, captures its underlying ``requests`` response,
    submits an impact row to Scope3AI, and returns the translation result
    with the resulting context attached.

    Args:
        wrapped: The original ``InferenceClient.translation`` callable.
        instance: The client instance, used to resolve a default model.
        args: Positional arguments forwarded to ``wrapped``.
        kwargs: Keyword arguments forwarded to ``wrapped``.

    Returns:
        A ``TranslationOutput`` whose ``scope3ai`` field holds the
        submitted impact context.
    """
    started_at = time.perf_counter()
    captured: Response | None = None
    with requests_response_capture() as recorder:
        raw_output = wrapped(*args, **kwargs)
        seen_responses = recorder.get()
        if seen_responses:
            # Keep only the most recent response for this request.
            captured = seen_responses[-1]
    model_id = kwargs.get("model") or instance.get_recommended_model(
        HUGGING_FACE_TRANSLATION_TASK
    )
    output, row = _hugging_face_translation_get_impact_row(
        started_at, model_id, raw_output, captured, args, kwargs
    )
    output.scope3ai = Scope3AI.get_instance().submit_impact(row)
    return output
76

77

78
async def huggingface_translation_wrapper_async_non_stream(
    wrapped: Callable, instance: AsyncInferenceClient, args: Any, kwargs: Any
) -> TranslationOutput:
    """Trace an asynchronous Hugging Face translation call.

    Async counterpart of ``huggingface_translation_wrapper_non_stream``:
    times the awaited call, captures its underlying ``aiohttp`` response,
    submits an impact row to Scope3AI, and returns the translation result
    with the resulting context attached.

    Args:
        wrapped: The original ``AsyncInferenceClient.translation`` coroutine.
        instance: The async client instance, used to resolve a default model.
        args: Positional arguments forwarded to ``wrapped``.
        kwargs: Keyword arguments forwarded to ``wrapped``.

    Returns:
        A ``TranslationOutput`` whose ``scope3ai`` field holds the
        submitted impact context.
    """
    started_at = time.perf_counter()
    captured: ClientResponse | None = None
    with aiohttp_response_capture() as recorder:
        raw_output = await wrapped(*args, **kwargs)
        seen_responses = recorder.get()
        if seen_responses:
            # Keep only the most recent response for this request.
            captured = seen_responses[-1]
    model_id = kwargs.get("model") or instance.get_recommended_model(
        HUGGING_FACE_TRANSLATION_TASK
    )
    output, row = _hugging_face_translation_get_impact_row(
        started_at, model_id, raw_output, captured, args, kwargs
    )
    output.scope3ai = await Scope3AI.get_instance().asubmit_impact(row)
    return output
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc