scope3data / scope3ai-py, build 12953930249

24 Jan 2025 04:50PM UTC coverage: 96.405% (+15.8%) from 80.557%

Pull Request #78: feat(api): synchronize api, fixes pyright issues and api example
Commit 89ada1 (web-flow) via github: Merge 1e5564797 into 0cfbba85d

69 of 71 new or added lines in 2 files covered. (97.18%)

48 existing lines in 10 files now uncovered.

2440 of 2531 relevant lines covered (96.4%)

3.85 hits per line
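As a quick sanity check (not part of the Coveralls output), the headline percentages follow directly from the raw counts; a minimal sketch in Python:

# Sanity check of the figures reported above.
covered, relevant = 2440, 2531
new_covered, new_added = 69, 71
print(f"overall coverage: {covered / relevant:.3%}")       # -> 96.405%
print(f"new/added lines:  {new_covered / new_added:.2%}")  # -> 97.18%
print(f"change vs. base:  {96.405 - 80.557:+.1f} percentage points")  # -> +15.8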

Source File

/scope3ai/tracers/huggingface/translation.py (96.61% covered)
import time
from dataclasses import asdict, dataclass
from typing import Any, Callable, Optional, Union

import tiktoken
from aiohttp import ClientResponse
from huggingface_hub import (  # type: ignore[import-untyped]
    AsyncInferenceClient,
    InferenceClient,
)
from huggingface_hub import TranslationOutput as _TranslationOutput
from requests import Response

from scope3ai.api.types import ImpactRow, Scope3AIContext
from scope3ai.api.typesgen import Task
from scope3ai.constants import PROVIDERS
from scope3ai.lib import Scope3AI
from scope3ai.response_interceptor.aiohttp_interceptor import aiohttp_response_capture
from scope3ai.response_interceptor.requests_interceptor import requests_response_capture

PROVIDER = PROVIDERS.HUGGINGFACE_HUB.value
HUGGING_FACE_TRANSLATION_TASK = "translation"


@dataclass
class TranslationOutput(_TranslationOutput):
    scope3ai: Optional[Scope3AIContext] = None


def _hugging_face_translation_get_impact_row(
    timer_start: Any,
    model: Any,
    response: Any,
    http_response: Optional[Union[ClientResponse, Response]],
    args: Any,
    kwargs: Any,
) -> tuple[TranslationOutput, ImpactRow]:
    encoder = tiktoken.get_encoding("cl100k_base")
    input_tokens = 0
    compute_time = time.perf_counter() - timer_start
    if http_response:
        compute_time = http_response.headers.get("x-compute-time") or compute_time
        input_tokens = http_response.headers.get("x-compute-characters")
    if not input_tokens:
        prompt = args[0] if len(args) > 0 else kwargs.get("text", "")  # UNCOV (not hit by tests)
        input_tokens = len(encoder.encode(prompt)) if prompt != "" else 0  # UNCOV (not hit by tests)
    output_tokens = len(encoder.encode(response.translation_text))
    scope3_row = ImpactRow(
        model_id=model,
        task=Task.translation,
        input_tokens=int(input_tokens),
        output_tokens=output_tokens,
        request_duration_ms=float(compute_time) * 1000,
        managed_service_id=PROVIDER,
    )
    result = TranslationOutput(**asdict(response))
    return result, scope3_row


def huggingface_translation_wrapper_non_stream(
    wrapped: Callable, instance: InferenceClient, args: Any, kwargs: Any
) -> TranslationOutput:
    timer_start = time.perf_counter()
    http_response: Response | None = None
    with requests_response_capture() as responses:
        response = wrapped(*args, **kwargs)
        http_responses = responses.get()
        if http_responses:
            http_response = http_responses[-1]
    model = kwargs.get("model") or instance.get_recommended_model(
        HUGGING_FACE_TRANSLATION_TASK
    )
    result, impact_row = _hugging_face_translation_get_impact_row(
        timer_start, model, response, http_response, args, kwargs
    )
    scope3_ctx = Scope3AI.get_instance().submit_impact(impact_row)
    result.scope3ai = scope3_ctx
    return result


async def huggingface_translation_wrapper_async_non_stream(
    wrapped: Callable, instance: AsyncInferenceClient, args: Any, kwargs: Any
) -> TranslationOutput:
    timer_start = time.perf_counter()
    http_response: ClientResponse | None = None
    with aiohttp_response_capture() as responses:
        response = await wrapped(*args, **kwargs)
        http_responses = responses.get()
        if http_responses:
            http_response = http_responses[-1]
    model = kwargs.get("model") or instance.get_recommended_model(
        HUGGING_FACE_TRANSLATION_TASK
    )
    result, impact_row = _hugging_face_translation_get_impact_row(
        timer_start, model, response, http_response, args, kwargs
    )
    scope3_ctx = await Scope3AI.get_instance().asubmit_impact(impact_row)
    result.scope3ai = scope3_ctx
    return result
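For context (not shown in this file), these wrappers are installed by the scope3ai Hugging Face tracer around InferenceClient.translation and its async counterpart. A minimal usage sketch, assuming an initializer such as Scope3AI.init() enables the tracer (hypothetical entry point; only Scope3AI, the wrappers, and the scope3ai attribute are confirmed by this file):

# Minimal usage sketch (assumed API surface, example model name).
from huggingface_hub import InferenceClient

from scope3ai.lib import Scope3AI

Scope3AI.init()  # assumed initializer that installs the Hugging Face tracer

client = InferenceClient()
result = client.translation("Hello world", model="t5-small")
print(result.translation_text)  # usual TranslationOutput field
print(result.scope3ai)          # Scope3AIContext attached by the wrapper above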