Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions 3rdparty/python/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,13 @@ mypy~=1.19.1
mypy-typing-asserts==0.1.1
node-semver==0.9.0

# OpenTelemetry backend dependencies
opentelemetry-api==1.41.0
opentelemetry-exporter-otlp-proto-http==1.41.0
opentelemetry-sdk==1.41.0

# OpenTelemetry backend test dependencies
opentelemetry-proto==1.41.0

# These dependencies are for scripts that rules run in an external process (and for script tests).
elfdeps==0.2.0 # see: pants.backends.nfpm.native_libs.elfdeps
Expand Down
938 changes: 642 additions & 296 deletions 3rdparty/python/user_reqs.lock

Large diffs are not rendered by default.

7 changes: 6 additions & 1 deletion 3rdparty/python/user_reqs.lock.metadata
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"version": 6,
"version": 7,
"valid_for_interpreter_constraints": [
"CPython==3.14.*"
],
Expand All @@ -19,6 +19,10 @@
"mypy-typing-asserts==0.1.1",
"mypy~=1.19.1",
"node-semver==0.9.0",
"opentelemetry-api==1.41.0",
"opentelemetry-exporter-otlp-proto-http==1.41.0",
"opentelemetry-proto==1.41.0",
"opentelemetry-sdk==1.41.0",
"packaging==26.0",
"psutil==5.9.8",
"pydevd-pycharm==261.20362.36",
Expand Down Expand Up @@ -47,5 +51,6 @@
"sources": [],
"lock_style": "universal",
"complete_platforms": [],
"uploaded_prior_to": null,
"description": "This lockfile was generated by Pants. To regenerate, run: pants generate-lockfiles --resolve=python-default"
}
6 changes: 6 additions & 0 deletions docs/notes/2.33.x.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ Thank you to [Klaviyo](https://www.klaviyo.com/) for their Platinum tier support

### Highlights

- `pants.backend.observability.opentelemetry` backend for reporting work unit tracing to OpenTelemetry

### Deprecations

### General
Expand All @@ -24,6 +26,10 @@ Thank you to [Klaviyo](https://www.klaviyo.com/) for their Platinum tier support

The `docker_image` target now supports capturing file and directory artifacts from Docker builds using the `output_files` and `output_directories` fields. This supports workflows where a Dockerfile stage creates one or more build artifacts that should be consumed by other Pants targets, using the same behavior as the `shell_command` or `adhoc_tool` targets. This feature uses the BuildKit local output exporter and requires `[docker].use_buildx = true`.

#### NEW: OpenTelemetry

Add a new `pants.backend.observability.opentelemetry` backend to report work unit tracing to OpenTelemetry.

#### Helm

#### JVM
Expand Down
Empty file.
8 changes: 8 additions & 0 deletions src/python/pants/backend/observability/opentelemetry/BUILD
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

# Python sources for the OpenTelemetry observability backend.
python_sources()

# Tests for this backend (the *_test.py files in this directory).
python_tests(
    name="tests",
)
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import datetime
import logging
from contextlib import contextmanager
from typing import Generator

from pants.backend.observability.opentelemetry.processor import (
IncompleteWorkunit,
Processor,
ProcessorContext,
Workunit,
)

logger = logging.getLogger(__name__)


class ExceptionLoggingProcessor(Processor):
    """Wraps another `Processor`, swallowing its exceptions so tracing never fails a run.

    The first swallowed exception produces a single WARNING; every exception is
    logged (with traceback) at DEBUG; `finish` emits one summary WARNING when
    more than one exception was swallowed in total.
    """

    def __init__(self, processor: Processor, *, name: str) -> None:
        self._processor = processor
        # Human-readable handler name interpolated into log messages.
        self._name = name
        # Count of swallowed exceptions; gates the one-time warning and the summary.
        self._exception_count = 0

    @contextmanager
    def _wrapper(self) -> Generator[None]:
        """Run the wrapped statements, converting any exception into log records."""
        try:
            yield
        except Exception as ex:
            # Lazy %-style logging args: the message is only rendered when the
            # level is actually enabled (f-strings would format eagerly).
            logger.debug(
                "An exception occurred while processing a workunit in the %s workunit tracing handler: %s",
                self._name,
                ex,
                exc_info=True,
            )
            if self._exception_count == 0:
                logger.warning(
                    "Ignored an exception from the %s workunit tracing handler. These exceptions will be logged "
                    "at DEBUG level. No further warnings will be logged.",
                    self._name,
                )
            self._exception_count += 1

    def initialize(self) -> None:
        """Delegate to the wrapped processor's `initialize`, ignoring failures."""
        with self._wrapper():
            self._processor.initialize()

    def start_workunit(self, workunit: IncompleteWorkunit, *, context: ProcessorContext) -> None:
        """Delegate `start_workunit`, ignoring failures."""
        with self._wrapper():
            self._processor.start_workunit(workunit=workunit, context=context)

    def complete_workunit(self, workunit: Workunit, *, context: ProcessorContext) -> None:
        """Delegate `complete_workunit`, ignoring failures."""
        with self._wrapper():
            self._processor.complete_workunit(workunit=workunit, context=context)

    def finish(
        self, timeout: datetime.timedelta | None = None, *, context: ProcessorContext
    ) -> None:
        """Delegate `finish`, then summarize how many exceptions were swallowed."""
        with self._wrapper():
            self._processor.finish(timeout=timeout, context=context)
        if self._exception_count > 1:
            logger.warning(
                "Ignored %d exceptions from the %s workunit tracing handler.",
                self._exception_count,
                self._name,
            )
Original file line number Diff line number Diff line change
@@ -0,0 +1,127 @@
# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

import datetime
import logging
from collections import defaultdict
from collections.abc import Mapping

import pytest

from pants.backend.observability.opentelemetry.exception_logging_processor import (
ExceptionLoggingProcessor,
)
from pants.backend.observability.opentelemetry.processor import (
IncompleteWorkunit,
Level,
Processor,
ProcessorContext,
Workunit,
)
from pants.util.frozendict import FrozenDict


class AlwaysRaisesExceptionProcessor(Processor):
    """Test double whose every `Processor` hook raises, to exercise exception handling."""

    @staticmethod
    def _boom(tag: str) -> None:
        # Each hook raises a ValueError carrying the hook's own name.
        raise ValueError(tag)

    def initialize(self) -> None:
        self._boom("initialize")

    def start_workunit(self, workunit: IncompleteWorkunit, *, context: ProcessorContext) -> None:
        self._boom("start_workunit")

    def complete_workunit(self, workunit: Workunit, *, context: ProcessorContext) -> None:
        self._boom("complete_workunit")

    def finish(
        self, timeout: datetime.timedelta | None = None, *, context: ProcessorContext
    ) -> None:
        self._boom("finish")


class MockProcessorContext(ProcessorContext):
    # Stub context: these tests never inspect metrics, so always report none.
    def get_metrics(self) -> Mapping[str, int]:
        return {}


@pytest.fixture
def incomplete_workunit() -> IncompleteWorkunit:
    """An in-flight workunit with a fixed span id, one parent, and INFO level."""
    return IncompleteWorkunit(
        name="test-span",
        span_id="SOME_SPAN_ID",
        parent_ids=("A_PARENT_SPAN_ID",),
        level=Level.INFO,
        description="This is where the span is described.",
        start_time=datetime.datetime.now(datetime.UTC),
    )


@pytest.fixture
def workunit(incomplete_workunit: IncompleteWorkunit) -> Workunit:
    """The completed counterpart of `incomplete_workunit`, ending 100ms after start."""
    started = incomplete_workunit.start_time
    return Workunit(
        name=incomplete_workunit.name,
        span_id=incomplete_workunit.span_id,
        parent_ids=incomplete_workunit.parent_ids,
        level=incomplete_workunit.level,
        description=incomplete_workunit.description,
        start_time=started,
        end_time=started + datetime.timedelta(milliseconds=100),
        metadata=FrozenDict(),
    )


def test_exception_logging_processor(  # renamed: fixes "proessor" typo in the original name
    incomplete_workunit: IncompleteWorkunit, workunit: Workunit, caplog
) -> None:
    """One warning on the first swallowed exception, one summary warning at finish."""
    processor = ExceptionLoggingProcessor(AlwaysRaisesExceptionProcessor(), name="test")
    context = MockProcessorContext()

    # The first exception (from initialize) triggers the one-time warning.
    assert len(caplog.record_tuples) == 0
    processor.initialize()
    assert len(caplog.record_tuples) == 1
    assert caplog.record_tuples[0][1] == logging.WARNING
    assert caplog.record_tuples[0][2] == (
        "Ignored an exception from the test workunit tracing handler. These exceptions will be logged "
        "at DEBUG level. No further warnings will be logged."
    )

    # Later exceptions are swallowed without further WARNINGs (DEBUG is not captured here).
    caplog.clear()
    processor.start_workunit(workunit=incomplete_workunit, context=context)
    assert len(caplog.record_tuples) == 0

    caplog.clear()
    processor.complete_workunit(workunit=workunit, context=context)
    assert len(caplog.record_tuples) == 0

    # finish() raises too, then emits one summary covering all 4 exceptions.
    caplog.clear()
    processor.finish(context=context)
    assert len(caplog.record_tuples) == 1
    assert caplog.record_tuples[0][1] == logging.WARNING
    assert (
        caplog.record_tuples[0][2] == "Ignored 4 exceptions from the test workunit tracing handler."
    )

    assert processor._exception_count == 4


def test_exceptions_logged_at_debug_level(
    incomplete_workunit: IncompleteWorkunit, workunit: Workunit, caplog
) -> None:
    """With the capture level at DEBUG, every swallowed exception is logged at DEBUG."""
    processor = ExceptionLoggingProcessor(AlwaysRaisesExceptionProcessor(), name="test")
    context = MockProcessorContext()

    with caplog.at_level(logging.DEBUG):
        processor.initialize()
        processor.start_workunit(workunit=incomplete_workunit, context=context)
        processor.complete_workunit(workunit=workunit, context=context)
        processor.finish(context=context)

    # 4 DEBUG records (one per swallowed exception) plus the first-exception
    # warning and the finish-time summary warning.
    assert len(caplog.record_tuples) == 6
    levels = [level for _, level, _ in caplog.record_tuples]
    assert levels.count(logging.WARNING) == 2
    assert levels.count(logging.DEBUG) == 4
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Copyright 2026 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import urllib.parse
from dataclasses import dataclass
from typing import Mapping


@dataclass(frozen=True)
class OtlpParameters:
    """Connection parameters for an OTLP exporter (all fields optional)."""

    # Base OTLP endpoint; the per-signal path is appended to it.
    endpoint: str | None
    # Explicit traces endpoint; takes precedence over `endpoint` when set.
    traces_endpoint: str | None
    certificate_file: str | None
    client_key_file: str | None
    client_certificate_file: str | None
    headers: Mapping[str, str] | None
    timeout: int | None
    compression: str | None

    def resolve_traces_endpoint(self) -> str:
        """Return the URL to which trace data should be sent.

        Precedence: an explicit `traces_endpoint` wins; otherwise `endpoint`
        with the standard `/v1/traces` signal path appended; otherwise a
        localhost default.
        """
        if self.traces_endpoint:
            return self.traces_endpoint

        if not self.endpoint:
            # NOTE(review): 4317 is the conventional OTLP/gRPC port; the
            # OTLP/HTTP default per the spec is port 4318 — confirm intended.
            return "http://localhost:4317"

        url = urllib.parse.urlparse(self.endpoint)
        scheme = url.scheme if url.scheme else "http"
        # Strip any trailing slash before appending the signal path. The
        # previous logic *added* a trailing slash and then appended
        # "/v1/traces", always producing a double slash ("…//v1/traces").
        path = f"{url.path.rstrip('/')}/v1/traces"
        return url._replace(scheme=scheme, path=path).geturl()
Loading
Loading