• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

scope3data / scope3ai-py / 12817474237

16 Jan 2025 08:58PM UTC coverage: 95.822% (+15.3%) from 80.557%
12817474237

Pull #66

github

dd0110
kevdevg
fix: fixing tracers tests
Pull Request #66: fix: fix async submit impact

55 of 56 new or added lines in 10 files covered. (98.21%)

45 existing lines in 9 files now uncovered.

2110 of 2202 relevant lines covered (95.82%)

3.83 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

94.74
/scope3ai/tracers/openai/translation.py
1
import logging
4✔
2
import time
4✔
3
from typing import Any, Callable, Optional, Union, Tuple
4✔
4

5
import tiktoken
4✔
6
from openai.resources.audio.translations import AsyncTranslations, Translations
4✔
7
from openai.types.audio.translation import Translation as _Translation
4✔
8
from openai.types.audio.translation_verbose import (
4✔
9
    TranslationVerbose as _TranslationVerbose,
10
)
11

12
from scope3ai.api.types import ImpactRow, Scope3AIContext, Task
4✔
13
from scope3ai.constants import PROVIDERS
4✔
14
from scope3ai.lib import Scope3AI
4✔
15

16
from .utils import _get_file_audio_duration
4✔
17

18
# Provider identifier reported to the Scope3 API for every impact row
# produced by this tracer.
PROVIDER = PROVIDERS.OPENAI.value

# Module-level logger (stdlib convention: named after the module).
logger = logging.getLogger(__name__)
21

22

23
class AnnotatedStr(str):
    """A ``str`` subclass used for plain-text translation responses so a
    ``scope3ai`` impact context can be attached to the returned value."""

    scope3ai: Optional[Scope3AIContext] = None
25

26

27
class Translation(_Translation):
    """OpenAI ``Translation`` extended with an optional Scope3AI impact
    context attached by the tracer wrappers below."""

    scope3ai: Optional[Scope3AIContext] = None
29

30

31
class TranslationVerbose(_TranslationVerbose):
    """OpenAI ``TranslationVerbose`` extended with an optional Scope3AI
    impact context attached by the tracer wrappers below."""

    scope3ai: Optional[Scope3AIContext] = None
33

34

35
def _openai_translation_get_impact_row(
    response: Any, request_latency: float, kwargs: dict
) -> Tuple[Union[Translation, TranslationVerbose, AnnotatedStr], ImpactRow]:
    """Build the Scope3 ``ImpactRow`` for an OpenAI audio-translation call.

    Args:
        response: Raw OpenAI response — a ``Translation``,
            ``TranslationVerbose`` or plain ``str`` depending on the
            requested ``response_format``.
        request_latency: Wall-clock request duration in milliseconds.
        kwargs: Keyword arguments of the original OpenAI call; must contain
            ``"model"`` and ``"file"``.

    Returns:
        ``(result, impact_row)`` where ``result`` is the response re-wrapped
        in a local subclass able to carry a ``scope3ai`` context. An
        unexpected response type is logged and returned unchanged.
    """
    model = kwargs["model"]
    # NOTE(review): cl100k_base is used as a fixed approximation regardless
    # of `model` — confirm this matches the tokenizer of the models traced.
    encoder = tiktoken.get_encoding("cl100k_base")

    # Estimate output tokens from the translated text; None when the
    # response shape is unknown.
    if isinstance(response, (_Translation, _TranslationVerbose)):
        output_tokens = len(encoder.encode(response.text))
    elif isinstance(response, str):
        output_tokens = len(encoder.encode(response))
    else:
        output_tokens = None

    options = {}
    duration = _get_file_audio_duration(kwargs["file"])
    if duration is not None:
        options["input_audio_seconds"] = int(duration)

    scope3_row = ImpactRow(
        model_id=model,
        managed_service_id=PROVIDER,
        output_tokens=output_tokens,
        request_duration_ms=request_latency,
        task=Task.translation,
        **options,
    )

    if isinstance(response, _Translation):
        result = Translation.model_construct(**response.model_dump())
    elif isinstance(response, _TranslationVerbose):
        result = TranslationVerbose.model_construct(**response.model_dump())
    elif isinstance(response, str):
        # Bug fix: wrap the actual response text. The original code called
        # AnnotatedStr(str), wrapping the `str` builtin itself, which
        # discarded the translation and returned "<class 'str'>".
        result = AnnotatedStr(response)
    else:
        logger.error(f"Unexpected response type: {type(response)}")
        return response, scope3_row
    return result, scope3_row
72

73

74
def openai_translation_wrapper(
    wrapped: Callable, instance: Translations, args: Any, kwargs: Any
) -> Union[Translation, TranslationVerbose, AnnotatedStr]:
    """Synchronous tracer for OpenAI audio translations.

    Calls the wrapped API method, measures its latency, submits the
    resulting impact row to Scope3AI, and attaches the returned context to
    the response object before handing it back to the caller.
    """
    started = time.perf_counter()
    response = wrapped(*args, **kwargs)
    latency_ms = (time.perf_counter() - started) * 1000
    result, impact_row = _openai_translation_get_impact_row(
        response, latency_ms, kwargs
    )
    result.scope3ai = Scope3AI.get_instance().submit_impact(impact_row)
    return result
86

87

88
async def openai_async_translation_wrapper(
    wrapped: Callable, instance: AsyncTranslations, args: Any, kwargs: Any
) -> Union[Translation, TranslationVerbose, AnnotatedStr]:
    """Asynchronous tracer for OpenAI audio translations.

    Awaits the wrapped API method, measures its latency, submits the
    resulting impact row via the async Scope3AI path, and attaches the
    returned context to the response object before handing it back.
    """
    started = time.perf_counter()
    response = await wrapped(*args, **kwargs)
    latency_ms = (time.perf_counter() - started) * 1000
    result, impact_row = _openai_translation_get_impact_row(
        response, latency_ms, kwargs
    )
    result.scope3ai = await Scope3AI.get_instance().asubmit_impact(impact_row)
    return result
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc