Skip to content

Commit 39036e5

Browse files
authored
Merge branch 'main' into TID-rule
2 parents af7883b + afcf8d7 commit 39036e5

32 files changed

Lines changed: 1119 additions & 181 deletions

File tree

CHANGELOG.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
1414

1515
- Enabled the flake8-tidy-imports plugin rules for the ruff linter. These rules raise warnings for relative imports in modules. The banned-api and banned-module-level-imports rules have not been configured yet; they can be configured as needed in the future
1616
([#5019](https://github.com/open-telemetry/opentelemetry-python/pull/5019))
17+
- `opentelemetry-sdk`: Add `process` resource detector support to declarative file configuration via `detection_development.detectors[].process`
18+
([#5001](https://github.com/open-telemetry/opentelemetry-python/pull/5001))
19+
- `opentelemetry-sdk`: Add shared `_parse_headers` helper for declarative config OTLP exporters
20+
([#5021](https://github.com/open-telemetry/opentelemetry-python/pull/5021))
1721
- `opentelemetry-api`: Replace a broad exception in attribute cleaning tests to satisfy pylint in the `lint-opentelemetry-api` CI job
1822
- `opentelemetry-sdk`: Add `create_resource` and `create_propagator`/`configure_propagator` to declarative file configuration, enabling Resource and propagator instantiation from config files without reading env vars
1923
([#4979](https://github.com/open-telemetry/opentelemetry-python/pull/4979))
@@ -43,6 +47,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
4347
([#4910](https://github.com/open-telemetry/opentelemetry-python/pull/4910))
4448
- Add configurable `max_export_batch_size` to OTLP HTTP metrics exporter
4549
([#4576](https://github.com/open-telemetry/opentelemetry-python/pull/4576))
50+
- `opentelemetry-sdk`: Implement experimental Meter configurator
51+
([#4966](https://github.com/open-telemetry/opentelemetry-python/pull/4966))
52+
- `opentelemetry-exporter-otlp-proto-http`: use consistent protobuf for export request
53+
([#5015](https://github.com/open-telemetry/opentelemetry-python/pull/5015))
54+
- `opentelemetry-sdk`: cache TracerConfig into the tracer, this changes an internal interface. Only one Tracer with the same instrumentation scope will be created
55+
([#5007](https://github.com/open-telemetry/opentelemetry-python/pull/5007))
4656

4757
## Version 1.40.0/0.61b0 (2026-03-04)
4858

docs/conf.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -228,11 +228,11 @@
228228
scm_web = "https://github.com/" + REPO + "blob/" + branch
229229

230230
# Store variables in the epilogue so they are globally available.
231-
rst_epilog = """
232-
.. |SCM_WEB| replace:: {s}
233-
.. |SCM_RAW_WEB| replace:: {sr}
234-
.. |SCM_BRANCH| replace:: {b}
235-
""".format(s=scm_web, sr=scm_raw_web, b=branch)
231+
rst_epilog = f"""
232+
.. |SCM_WEB| replace:: {scm_web}
233+
.. |SCM_RAW_WEB| replace:: {scm_raw_web}
234+
.. |SCM_BRANCH| replace:: {branch}
235+
"""
236236

237237
# used to have links to repo files
238238
extlinks = {

exporter/opentelemetry-exporter-otlp-proto-common/src/opentelemetry/exporter/otlp/proto/common/_internal/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,7 @@ def _get_resource_data(
170170
resource_class(
171171
**{
172172
"resource": collector_resource,
173-
"scope_{}".format(name): scope_data.values(),
173+
f"scope_{name}": scope_data.values(),
174174
}
175175
)
176176
)

exporter/opentelemetry-exporter-otlp-proto-grpc/src/opentelemetry/exporter/otlp/proto/grpc/exporter.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -155,9 +155,7 @@
155155
class InvalidCompressionValueException(Exception):
156156
def __init__(self, environ_key: str, environ_value: str):
157157
super().__init__(
158-
'Invalid value "{}" for compression envvar {}'.format(
159-
environ_value, environ_key
160-
)
158+
f'Invalid value "{environ_value}" for compression envvar {environ_key}'
161159
)
162160

163161

exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/_common/__init__.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,6 @@
1919

2020
from opentelemetry.sdk.environment_variables import (
2121
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_CREDENTIAL_PROVIDER,
22-
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER,
23-
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER,
24-
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER,
2522
)
2623
from opentelemetry.util._importlib_metadata import entry_points
2724

@@ -36,9 +33,9 @@ def _is_retryable(resp: requests.Response) -> bool:
3633

3734
def _load_session_from_envvar(
3835
cred_envvar: Literal[
39-
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER,
40-
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER,
41-
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER,
36+
"OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER",
37+
"OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER",
38+
"OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER",
4239
],
4340
) -> Optional[requests.Session]:
4441
_credential_env = environ.get(

exporter/opentelemetry-exporter-otlp-proto-http/src/opentelemetry/exporter/otlp/proto/http/metric_exporter/__init__.py

Lines changed: 25 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@
5050
_is_retryable,
5151
_load_session_from_envvar,
5252
)
53-
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ( # noqa: F401
53+
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import (
5454
ExportMetricsServiceRequest,
5555
)
5656
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
@@ -60,7 +60,7 @@
6060
KeyValue,
6161
KeyValueList,
6262
)
63-
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2 # noqa: F401
63+
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2
6464
from opentelemetry.proto.resource.v1.resource_pb2 import Resource # noqa: F401
6565
from opentelemetry.proto.resource.v1.resource_pb2 import (
6666
Resource as PB2Resource,
@@ -243,18 +243,19 @@ def _export(
243243

244244
def _export_with_retries(
245245
self,
246-
serialized_data: bytes,
246+
export_request: ExportMetricsServiceRequest,
247247
deadline_sec: float,
248248
) -> MetricExportResult:
249249
"""Export serialized data with retry logic until success, non-transient error, or exponential backoff maxed out.
250250
251251
Args:
252-
serialized_data: serialized metrics data to export
252+
export_request: ExportMetricsServiceRequest object containing metrics data to export
253253
deadline_sec: timestamp deadline for the export
254254
255255
Returns:
256256
MetricExportResult: SUCCESS if export succeeded, FAILURE otherwise
257257
"""
258+
serialized_data = export_request.SerializeToString()
258259
for retry_num in range(_MAX_RETRYS):
259260
# multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
260261
backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
@@ -310,23 +311,21 @@ def export(
310311
_logger.warning("Exporter already shutdown, ignoring batch")
311312
return MetricExportResult.FAILURE
312313

313-
serialized_data = encode_metrics(metrics_data)
314+
export_request = encode_metrics(metrics_data)
314315
deadline_sec = time() + self._timeout
315316

316317
# If no batch size configured, export as single batch with retries as configured
317318
if self._max_export_batch_size is None:
318-
return self._export_with_retries(
319-
serialized_data.SerializeToString(), deadline_sec
320-
)
319+
return self._export_with_retries(export_request, deadline_sec)
321320

322321
# Else, export in batches of configured size
323-
split_metrics_batches = list(
324-
_split_metrics_data(serialized_data, self._max_export_batch_size)
322+
batched_export_requests = _split_metrics_data(
323+
export_request, self._max_export_batch_size
325324
)
326325

327-
for split_metrics_data in split_metrics_batches:
326+
for split_metrics_data in batched_export_requests:
328327
export_result = self._export_with_retries(
329-
split_metrics_data.SerializeToString(),
328+
split_metrics_data,
330329
deadline_sec,
331330
)
332331
if export_result != MetricExportResult.SUCCESS:
@@ -353,18 +352,18 @@ def force_flush(self, timeout_millis: float = 10_000) -> bool:
353352

354353

355354
def _split_metrics_data(
356-
metrics_data: pb2.MetricsData,
355+
metrics_data: ExportMetricsServiceRequest,
357356
max_export_batch_size: int | None = None,
358-
) -> Iterable[pb2.MetricsData]:
359-
"""Splits metrics data into several MetricsData (copies protobuf originals),
357+
) -> Iterable[ExportMetricsServiceRequest]:
358+
"""Splits metrics data into several ExportMetricsServiceRequest (copies protobuf originals),
360359
based on configured data point max export batch size.
361360
362361
Args:
363362
metrics_data: metrics object based on HTTP protocol buffer definition
364363
365364
Returns:
366-
Iterable[pb2.MetricsData]: An iterable of pb2.MetricsData objects containing
367-
pb2.ResourceMetrics, pb2.ScopeMetrics, pb2.Metrics, and data points
365+
Iterable[ExportMetricsServiceRequest]: An iterable of ExportMetricsServiceRequest objects containing
366+
ExportMetricsServiceRequest.ResourceMetrics, ExportMetricsServiceRequest.ScopeMetrics, ExportMetricsServiceRequest.Metrics, and data points
368367
"""
369368
if not max_export_batch_size:
370369
return metrics_data
@@ -430,7 +429,7 @@ def _split_metrics_data(
430429
batch_size += 1
431430

432431
if batch_size >= max_export_batch_size:
433-
yield pb2.MetricsData(
432+
yield ExportMetricsServiceRequest(
434433
resource_metrics=_get_split_resource_metrics_pb2(
435434
split_resource_metrics
436435
)
@@ -444,6 +443,11 @@ def _split_metrics_data(
444443

445444
# Rebuild metric dict generically using same approach as initial creation
446445
field_name = metric.WhichOneof("data")
446+
if field_name is None:
447+
_logger.warning(
448+
"Tried to split and export an unsupported metric type. Skipping."
449+
)
450+
continue
447451
data_container = getattr(metric, field_name)
448452
metric_dict = {
449453
"name": metric.name,
@@ -491,7 +495,7 @@ def _split_metrics_data(
491495
split_resource_metrics.pop()
492496

493497
if batch_size > 0:
494-
yield pb2.MetricsData(
498+
yield ExportMetricsServiceRequest(
495499
resource_metrics=_get_split_resource_metrics_pb2(
496500
split_resource_metrics
497501
)
@@ -553,13 +557,13 @@ def _get_split_resource_metrics_pb2(
553557
new_resource_metrics = pb2.ResourceMetrics(
554558
resource=resource_metrics.get("resource"),
555559
scope_metrics=[],
556-
schema_url=resource_metrics.get("schema_url"),
560+
schema_url=resource_metrics.get("schema_url") or "",
557561
)
558562
for scope_metrics in resource_metrics.get("scope_metrics", []):
559563
new_scope_metrics = pb2.ScopeMetrics(
560564
scope=scope_metrics.get("scope"),
561565
metrics=[],
562-
schema_url=scope_metrics.get("schema_url"),
566+
schema_url=scope_metrics.get("schema_url") or "",
563567
)
564568

565569
for metric in scope_metrics.get("metrics", []):

0 commit comments

Comments
 (0)