• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 25565075335

08 May 2026 03:47PM UTC coverage: 92.787% (-0.1%) from 92.887%
25565075335

push

github

web-flow
add OpenTelemetry backend for work unit reporting (#23284)

# Overview

Add a new `pants.backend.observability.opentelemetry` backend to report
work unit tracing to OpenTelemetry. The backend is based on
[shoalsoft-pants-opentelemetry-plugin](https://github.com/shoalsoft/shoalsoft-pants-opentelemetry-plugin)
with unnecessary compatibility code and "shoalsoft" branding removed.

Notes:
- This backend only reports Pants engine work units to OpenTelemetry; it
does not report tracing data for Pants rule code or Rust code.
- This backend does not support gRPC export due to fork safety issues
with the gRPC C library and Python. See
https://github.com/shoalsoft/shoalsoft-pants-opentelemetry-plugin/issues/84
and https://github.com/grpc/grpc/blob/master/doc/fork_support.md for
additional details.

# Lockfile

```
    Lockfile diff: 3rdparty/python/user_reqs.lock [python-default]

    ==                    Upgraded dependencies                     ==

      anyio                          4.12.1       -->   4.13.0
      certifi                        2026.1.4     -->   2026.4.22
      charset-normalizer             3.4.4        -->   3.4.7
      click                          8.3.1        -->   8.3.2
      cross-web                      0.4.1        -->   0.6.0
      cryptography                   46.0.5       -->   46.0.7
      graphql-core                   3.2.7        -->   3.2.8
      idna                           3.11         -->   3.12
      librt                          0.8.1        -->   0.9.0
      pydantic                       2.12.5       -->   2.13.3
      pydantic-core                  2.41.5       -->   2.46.3
      pygments                       2.19.2       -->   2.20.0
      pyjwt                          2.11.0       -->   2.12.1
      python-dotenv                  1.2.1        -->   1.2.2
      python-multipart               0.0.22       -->   0.0.26
      ujson                          5.11.0       -->   5.12.0

    ==                   ... (continued)

564 of 740 new or added lines in 12 files covered. (76.22%)

1 existing line in 1 file now uncovered.

92944 of 100169 relevant lines covered (92.79%)

4.02 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

98.36
/src/python/pants/backend/observability/opentelemetry/exception_logging_processor_test.py
1
# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
import datetime
1✔
5
import logging
1✔
6
from collections import defaultdict
1✔
7
from collections.abc import Mapping
1✔
8

9
import pytest
1✔
10

11
from pants.backend.observability.opentelemetry.exception_logging_processor import (
1✔
12
    ExceptionLoggingProcessor,
13
)
14
from pants.backend.observability.opentelemetry.processor import (
1✔
15
    IncompleteWorkunit,
16
    Level,
17
    Processor,
18
    ProcessorContext,
19
    Workunit,
20
)
21
from pants.util.frozendict import FrozenDict
1✔
22

23

24
class AlwaysRaisesExceptionProcessor(Processor):
    """A `Processor` stub whose every hook raises.

    Used to verify that `ExceptionLoggingProcessor` suppresses and counts
    exceptions from the processor it wraps.
    """

    def initialize(self) -> None:
        raise ValueError("initialize")

    def start_workunit(self, workunit: IncompleteWorkunit, *, context: ProcessorContext) -> None:
        raise ValueError("start_workunit")

    def complete_workunit(self, workunit: Workunit, *, context: ProcessorContext) -> None:
        raise ValueError("complete_workunit")

    def finish(
        self, timeout: datetime.timedelta | None = None, *, context: ProcessorContext
    ) -> None:
        raise ValueError("finish")
38

39

40
class MockProcessorContext(ProcessorContext):
    """A minimal `ProcessorContext` that reports no metrics."""

    def get_metrics(self) -> Mapping[str, int]:
        return {}
43

44

45
@pytest.fixture
def incomplete_workunit() -> IncompleteWorkunit:
    """An in-flight workunit whose start time is the current UTC time."""
    now = datetime.datetime.now(datetime.UTC)
    return IncompleteWorkunit(
        name="test-span",
        span_id="SOME_SPAN_ID",
        parent_ids=("A_PARENT_SPAN_ID",),
        level=Level.INFO,
        description="This is where the span is described.",
        start_time=now,
    )
56

57

58
@pytest.fixture
def workunit(incomplete_workunit: IncompleteWorkunit) -> Workunit:
    """A completed workunit mirroring `incomplete_workunit`, lasting 100 ms."""
    finished_at = incomplete_workunit.start_time + datetime.timedelta(milliseconds=100)
    return Workunit(
        name=incomplete_workunit.name,
        span_id=incomplete_workunit.span_id,
        parent_ids=incomplete_workunit.parent_ids,
        level=incomplete_workunit.level,
        description=incomplete_workunit.description,
        start_time=incomplete_workunit.start_time,
        end_time=finished_at,
        metadata=FrozenDict(),
    )
70

71

72
def test_exception_logging_processor(
    incomplete_workunit: IncompleteWorkunit, workunit: Workunit, caplog
) -> None:
    """At default log levels, the wrapper warns once on the first delegate
    exception, stays silent for subsequent ones, and warns with a total count
    at finish.

    Fix: the test name had a typo ("proessor"); pytest still collects the
    corrected `test_*` name.
    """
    processor = ExceptionLoggingProcessor(AlwaysRaisesExceptionProcessor(), name="test")
    context = MockProcessorContext()

    # The first swallowed exception emits a single WARNING describing the
    # suppression policy.
    assert len(caplog.record_tuples) == 0
    processor.initialize()
    assert len(caplog.record_tuples) == 1
    assert caplog.record_tuples[0][1] == logging.WARNING
    assert caplog.record_tuples[0][2] == (
        "Ignored an exception from the test workunit tracing handler. These exceptions will be logged "
        "at DEBUG level. No further warnings will be logged."
    )

    # Later failures are suppressed without any further warnings.
    caplog.clear()
    processor.start_workunit(workunit=incomplete_workunit, context=context)
    assert len(caplog.record_tuples) == 0

    caplog.clear()
    processor.complete_workunit(workunit=workunit, context=context)
    assert len(caplog.record_tuples) == 0

    # finish() reports the total number of ignored exceptions in one WARNING.
    caplog.clear()
    processor.finish(context=context)
    assert len(caplog.record_tuples) == 1
    assert caplog.record_tuples[0][1] == logging.WARNING
    assert (
        caplog.record_tuples[0][2] == "Ignored 4 exceptions from the test workunit tracing handler."
    )

    assert processor._exception_count == 4
104

105

106
def test_exceptions_logged_at_debug_level(
    incomplete_workunit: IncompleteWorkunit, workunit: Workunit, caplog
) -> None:
    """When capturing at DEBUG, each swallowed exception is logged at DEBUG in
    addition to the two WARNING summaries."""

    processor = ExceptionLoggingProcessor(AlwaysRaisesExceptionProcessor(), name="test")
    context = MockProcessorContext()

    # Drive the full lifecycle; every delegate call raises internally.
    with caplog.at_level(logging.DEBUG):
        processor.initialize()
        processor.start_workunit(workunit=incomplete_workunit, context=context)
        processor.complete_workunit(workunit=workunit, context=context)
        processor.finish(context=context)

    assert len(caplog.record_tuples) == 6

    # Tally records by log level: 2 WARNING summaries + 4 DEBUG tracebacks.
    level_tally: dict[int, int] = defaultdict(int)
    for _logger_name, level, _message in caplog.record_tuples:
        level_tally[level] += 1

    assert level_tally[logging.WARNING] == 2
    assert level_tally[logging.DEBUG] == 4
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc