diff --git a/CHANGELOG.md b/CHANGELOG.md index 09ad03d825..66fa83df4c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ([#4260](https://github.com/open-telemetry/opentelemetry-python/pull/4260)) - semantic-conventions: Bump to 1.29.0 ([#4337](https://github.com/open-telemetry/opentelemetry-python/pull/4337)) +- sdk: Execute mypy on opentelemetry-sdk/src + ([#4352](https://github.com/open-telemetry/opentelemetry-python/pull/4352)) ## Version 1.28.0/0.49b0 (2024-11-05) @@ -124,20 +126,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Improve resource field structure for LogRecords ([#3972](https://github.com/open-telemetry/opentelemetry-python/pull/3972)) - Update Semantic Conventions code generation scripts: - - fix namespace exclusion that resulted in dropping `os` and `net` namespaces. + - fix namespace exclusion that resulted in dropping `os` and `net` namespaces. - add `Final` decorator to constants to prevent collisions - enable mypy and fix detected issues - allow to drop specific attributes in preparation for Semantic Conventions v1.26.0 - ([#3973](https://github.com/open-telemetry/opentelemetry-python/pull/3966)) + ([#3973](https://github.com/open-telemetry/opentelemetry-python/pull/3966)) - Update semantic conventions to version 1.26.0. 
([#3964](https://github.com/open-telemetry/opentelemetry-python/pull/3964)) - Use semconv exception attributes for record exceptions in spans ([#3979](https://github.com/open-telemetry/opentelemetry-python/pull/3979)) -- Fix _encode_events assumes events.attributes.dropped exists +- Fix \_encode_events assumes events.attributes.dropped exists ([#3965](https://github.com/open-telemetry/opentelemetry-python/pull/3965)) - Validate links at span creation ([#3991](https://github.com/open-telemetry/opentelemetry-python/pull/3991)) -- Add attributes field in `MeterProvider.get_meter` and `InstrumentationScope` +- Add attributes field in `MeterProvider.get_meter` and `InstrumentationScope` ([#4015](https://github.com/open-telemetry/opentelemetry-python/pull/4015)) - Fix inaccessible `SCHEMA_URL` constants in `opentelemetry-semantic-conventions` ([#4069](https://github.com/open-telemetry/opentelemetry-python/pull/4069)) @@ -167,10 +169,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `SpanAttributes`, `ResourceAttributes`, and `MetricInstruments` are deprecated. - Attribute and metric definitions are now grouped by the namespace. - Stable attributes and metrics are moved to `opentelemetry.semconv.attributes` - and `opentelemetry.semconv.metrics` modules. + and `opentelemetry.semconv.metrics` modules. - Stable and experimental attributes and metrics are defined under - `opentelemetry.semconv._incubating` import path. - ([#3586](https://github.com/open-telemetry/opentelemetry-python/pull/3586)) + `opentelemetry.semconv._incubating` import path. 
+ ([#3586](https://github.com/open-telemetry/opentelemetry-python/pull/3586)) - Rename test objects to avoid pytest warnings ([#3823] (https://github.com/open-telemetry/opentelemetry-python/pull/3823)) - Add span flags to OTLP spans and links @@ -182,11 +184,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fix prometheus metric name and unit conversion ([#3924](https://github.com/open-telemetry/opentelemetry-python/pull/3924)) - this is a breaking change to prometheus metric names so they comply with the - [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/v1.33.0/specification/compatibility/prometheus_and_openmetrics.md#otlp-metric-points-to-prometheus). + [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/v1.33.0/specification/compatibility/prometheus_and_openmetrics.md#otlp-metric-points-to-prometheus). - you can temporarily opt-out of the unit normalization by setting the environment variable - `OTEL_PYTHON_EXPERIMENTAL_DISABLE_PROMETHEUS_UNIT_NORMALIZATION=true` + `OTEL_PYTHON_EXPERIMENTAL_DISABLE_PROMETHEUS_UNIT_NORMALIZATION=true` - common unit abbreviations are converted to Prometheus conventions (`s` -> `seconds`), - following the [collector's implementation](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/c0b51136575aa7ba89326d18edb4549e7e1bbdb9/pkg/translator/prometheus/normalize_name.go#L108) + following the [collector's implementation](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/c0b51136575aa7ba89326d18edb4549e7e1bbdb9/pkg/translator/prometheus/normalize_name.go#L108) - repeated `_` are replaced with a single `_` - unit annotations (enclosed in curly braces like `{requests}`) are stripped away - units with slash are converted e.g. `m/s` -> `meters_per_second`. 
@@ -272,7 +274,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Version 1.21.0/0.42b0 (2023-11-01) - Fix `SumAggregation` - ([#3390](https://github.com/open-telemetry/opentelemetry-python/pull/3390)) +  ([#3390](https://github.com/open-telemetry/opentelemetry-python/pull/3390)) - Fix handling of empty metric collection cycles ([#3335](https://github.com/open-telemetry/opentelemetry-python/pull/3335)) - Fix error when no LoggerProvider configured for LoggingHandler @@ -292,7 +294,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Implement Process Resource detector ([#3472](https://github.com/open-telemetry/opentelemetry-python/pull/3472)) - ## Version 1.20.0/0.41b0 (2023-09-04) - Modify Prometheus exporter to translate non-monotonic Sums into Gauges @@ -321,7 +322,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Default LogRecord observed_timestamp to current timestamp [#3377](https://github.com/open-telemetry/opentelemetry-python/pull/3377)) - ## Version 1.18.0/0.39b0 (2023-05-19) - Select histogram aggregation with an environment variable @@ -341,7 +341,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add benchmark tests for metrics ([#3267](https://github.com/open-telemetry/opentelemetry-python/pull/3267)) - ## Version 1.17.0/0.38b0 (2023-03-22) - Implement LowMemory temporality @@ -361,7 +360,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Version 1.16.0/0.37b0 (2023-02-17) -- Change ``__all__`` to be statically defined. +- Change `__all__` to be statically defined. 
([#3143](https://github.com/open-telemetry/opentelemetry-python/pull/3143)) - Remove the ability to set a global metric prefix for Prometheus exporter ([#3137](https://github.com/open-telemetry/opentelemetry-python/pull/3137)) @@ -401,7 +400,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ([#3027](https://github.com/open-telemetry/opentelemetry-python/pull/3027)) - Update logging to include logging api as per specification ([#3038](https://github.com/open-telemetry/opentelemetry-python/pull/3038)) -- Fix: Avoid generator in metrics _ViewInstrumentMatch.collect() +- Fix: Avoid generator in metrics \_ViewInstrumentMatch.collect() ([#3035](https://github.com/open-telemetry/opentelemetry-python/pull/3035) - [exporter-otlp-proto-grpc] add user agent string ([#3009](https://github.com/open-telemetry/opentelemetry-python/pull/3009)) @@ -1689,7 +1688,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Remove dependency on 'backoff' library ([#3679](https://github.com/open-telemetry/opentelemetry-python/pull/3679)) - - Make create_gauge non-abstract method ([#3817](https://github.com/open-telemetry/opentelemetry-python/pull/3817)) - Make `tracer.start_as_current_span()` decorator work with async functions diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/error_handler/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/error_handler/__init__.py index 3e0e778f1a..619c24be2d 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/error_handler/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/error_handler/__init__.py @@ -61,6 +61,7 @@ def _handle(self, error: Exception, *args, **kwargs): from abc import ABC, abstractmethod from logging import getLogger +from typing import Optional from opentelemetry.util._importlib_metadata import entry_points @@ -69,7 +70,7 @@ def _handle(self, error: Exception, *args, **kwargs): class ErrorHandler(ABC): @abstractmethod - def _handle(self, error: Exception, 
*args, **kwargs): + def _handle(self, error: Exception, *args, **kwargs) -> None: # type: ignore """ Handle an exception """ @@ -83,7 +84,7 @@ class _DefaultErrorHandler(ErrorHandler): """ # pylint: disable=useless-return - def _handle(self, error: Exception, *args, **kwargs): + def _handle(self, error: Exception, *args, **kwargs) -> None: # type: ignore logger.exception("Error handled by default error handler: ") return None @@ -105,26 +106,26 @@ def __new__(cls) -> "GlobalErrorHandler": return cls._instance - def __enter__(self): + def __enter__(self) -> None: pass # pylint: disable=no-self-use - def __exit__(self, exc_type, exc_value, traceback): - if exc_value is None: + def __exit__(self, exc_type, exc_value, traceback) -> Optional[bool]: # type: ignore + if exc_value is None: # type: ignore return None plugin_handled = False - error_handler_entry_points = entry_points( + error_handler_entry_points = entry_points( # type: ignore group="opentelemetry_error_handler" ) - for error_handler_entry_point in error_handler_entry_points: - error_handler_class = error_handler_entry_point.load() + for error_handler_entry_point in error_handler_entry_points: # type: ignore + error_handler_class = error_handler_entry_point.load() # type: ignore - if issubclass(error_handler_class, exc_value.__class__): + if issubclass(error_handler_class, exc_value.__class__): # type: ignore try: - error_handler_class()._handle(exc_value) + error_handler_class()._handle(exc_value) # type: ignore plugin_handled = True # pylint: disable=broad-exception-caught @@ -133,11 +134,11 @@ def __exit__(self, exc_type, exc_value, traceback): "%s error while handling error" " %s by error handler %s", error_handling_error.__class__.__name__, - exc_value.__class__.__name__, - error_handler_class.__name__, + exc_value.__class__.__name__, # type: ignore + error_handler_class.__name__, # type: ignore ) if not plugin_handled: - _DefaultErrorHandler()._handle(exc_value) + 
_DefaultErrorHandler()._handle(exc_value) # type: ignore return True diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exemplar/exemplar_reservoir.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exemplar/exemplar_reservoir.py index 22d1ee9f75..d4feb877c2 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exemplar/exemplar_reservoir.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exemplar/exemplar_reservoir.py @@ -129,7 +129,7 @@ def collect(self, point_attributes: Attributes) -> Optional[Exemplar]: { k: v for k, v in self.__attributes.items() - if k not in point_attributes + if k not in point_attributes # type: ignore } if self.__attributes else None @@ -162,8 +162,8 @@ class BucketIndexError(ValueError): class FixedSizeExemplarReservoirABC(ExemplarReservoir): """Abstract class for a reservoir with fixed size.""" - def __init__(self, size: int, **kwargs) -> None: - super().__init__(**kwargs) + def __init__(self, size: int, **kwargs) -> None: # type: ignore + super().__init__(**kwargs) # type: ignore self._size: int = size self._reservoir_storage: Mapping[int, ExemplarBucket] = defaultdict( ExemplarBucket @@ -184,7 +184,7 @@ def collect(self, point_attributes: Attributes) -> List[Exemplar]: exemplars = [ e for e in ( - bucket.collect(point_attributes) + bucket.collect(point_attributes) # type: ignore for _, bucket in sorted(self._reservoir_storage.items()) ) if e is not None @@ -257,8 +257,8 @@ class SimpleFixedSizeExemplarReservoir(FixedSizeExemplarReservoirABC): https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#simplefixedsizeexemplarreservoir """ - def __init__(self, size: int = 1, **kwargs) -> None: - super().__init__(size, **kwargs) + def __init__(self, size: int = 1, **kwargs) -> None: # type: ignore + super().__init__(size, **kwargs) # type: ignore self._measurements_seen: int = 0 def _reset(self) -> None: @@ -292,8 +292,8 @@ class 
AlignedHistogramBucketExemplarReservoir(FixedSizeExemplarReservoirABC): https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/sdk.md#alignedhistogrambucketexemplarreservoir """ - def __init__(self, boundaries: Sequence[float], **kwargs) -> None: - super().__init__(len(boundaries) + 1, **kwargs) + def __init__(self, boundaries: Sequence[float], **kwargs) -> None: # type: ignore + super().__init__(len(boundaries) + 1, **kwargs) # type: ignore self._boundaries: Sequence[float] = boundaries def offer( diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py index e8a9332608..fd19732f79 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py @@ -13,13 +13,14 @@ # limitations under the License. from math import ceil, log2 +from typing import List class Buckets: # No method of this class is protected by locks because instances of this # class are only used in methods that are protected by locks themselves. 
- def __init__(self): + def __init__(self): # type: ignore self._counts = [0] # The term index refers to the number of the exponential histogram bucket @@ -69,16 +70,16 @@ def index_base(self, value: int) -> None: self.__index_base = value @property - def counts(self): + def counts(self) -> List[int]: return self._counts - def get_offset_counts(self): + def get_offset_counts(self) -> List[int]: bias = self.__index_base - self.__index_start return self._counts[-bias:] + self._counts[:-bias] def grow(self, needed: int, max_size: int) -> None: - size = len(self._counts) - bias = self.__index_base - self.__index_start + size: int = len(self._counts) + bias: int = self.__index_base - self.__index_start old_positive_limit = size - bias # 2 ** ceil(log2(needed)) finds the smallest power of two that is larger @@ -91,9 +92,9 @@ def grow(self, needed: int, max_size: int) -> None: # 2 ** ceil(log2(6)) == 8 # 2 ** ceil(log2(7)) == 8 # 2 ** ceil(log2(8)) == 8 - new_size = min(2 ** ceil(log2(needed)), max_size) + new_size: int = min(2 ** ceil(log2(needed)), max_size) # type: ignore - new_positive_limit = new_size - bias + new_positive_limit: int = new_size - bias tmp = [0] * new_size tmp[new_positive_limit:] = self._counts[old_positive_limit:] @@ -176,15 +177,15 @@ def increment_bucket(self, bucket_index: int, increment: int = 1) -> None: self._counts[bucket_index] += increment def copy_empty(self) -> "Buckets": - copy = Buckets() + copy = Buckets() # type: ignore # pylint: disable=no-member # pylint: disable=protected-access # pylint: disable=attribute-defined-outside-init # pylint: disable=invalid-name - copy._Buckets__index_base = self._Buckets__index_base - copy._Buckets__index_start = self._Buckets__index_start - copy._Buckets__index_end = self._Buckets__index_end + copy._Buckets__index_base = self._Buckets__index_base # type: ignore + copy._Buckets__index_start = self._Buckets__index_start # type: ignore + copy._Buckets__index_end = self._Buckets__index_end # type: ignore 
copy._counts = [0 for _ in self._counts] return copy diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__init__.py index 387b1d1444..eb56ac91eb 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__init__.py @@ -21,18 +21,18 @@ class Mapping(ABC): """ # pylint: disable=no-member - def __new__(cls, scale: int): - with cls._mappings_lock: + def __new__(cls, scale: int) -> "Mapping": + with cls._mappings_lock: # type: ignore # cls._mappings and cls._mappings_lock are implemented in each of # the child classes as a dictionary and a lock, respectively. They # are not instantiated here because that would lead to both child # classes having the same instance of cls._mappings and # cls._mappings_lock. 
- if scale not in cls._mappings: - cls._mappings[scale] = super().__new__(cls) - cls._mappings[scale]._init(scale) + if scale not in cls._mappings: # type: ignore + cls._mappings[scale] = super().__new__(cls) # type: ignore + cls._mappings[scale]._init(scale) # type: ignore - return cls._mappings[scale] + return cls._mappings[scale] # type: ignore @abstractmethod def _init(self, scale: int) -> None: @@ -40,11 +40,11 @@ def _init(self, scale: int) -> None: if scale > self._get_max_scale(): # pylint: disable=broad-exception-raised - raise Exception(f"scale is larger than {self._max_scale}") + raise Exception(f"scale is larger than {self._max_scale}") # type: ignore if scale < self._get_min_scale(): # pylint: disable=broad-exception-raised - raise Exception(f"scale is smaller than {self._min_scale}") + raise Exception(f"scale is smaller than {self._min_scale}") # type: ignore # The size of the exponential histogram buckets is determined by a # parameter known as scale, larger values of scale will produce smaller diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/exponent_mapping.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/exponent_mapping.py index 297bb7a483..27887c97a2 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/exponent_mapping.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/exponent_mapping.py @@ -36,24 +36,24 @@ class ExponentMapping(Mapping): # Reference implementation here: # https://github.com/open-telemetry/opentelemetry-go/blob/0e6f9c29c10d6078e8131418e1d1d166c7195d61/sdk/metric/aggregator/exponential/mapping/exponent/exponent.go - _mappings = {} + _mappings = {} # type: ignore _mappings_lock = Lock() _min_scale = -10 _max_scale = 0 - def _get_min_scale(self): + def _get_min_scale(self) -> int: # _min_scale defines the point at which the exponential mapping 
# function becomes useless for 64-bit floats. With scale -10, ignoring # subnormal values, bucket indices range from -1 to 1. return -10 - def _get_max_scale(self): + def _get_max_scale(self) -> int: # _max_scale is the largest scale supported by exponential mapping. Use # a logarithm mapping for larger scales. return 0 - def _init(self, scale: int): + def _init(self, scale: int) -> None: # pylint: disable=attribute-defined-outside-init super()._init(scale) @@ -64,15 +64,15 @@ def _init(self, scale: int): # bucket with this index covers the range # (base ** index, base (index + 1)], including MIN_NORMAL_VALUE. This # is the smallest valid index that contains at least one normal value. - index = MIN_NORMAL_EXPONENT >> -self._scale + index = MIN_NORMAL_EXPONENT >> -self._scale # type: ignore if -self._scale < 2: # For scales -1 and 0, the maximum value 2 ** -1022 is a # power-of-two multiple, meaning base ** index == MIN_NORMAL_VALUE. # Subtracting 1 so that base ** (index + 1) == MIN_NORMAL_VALUE. - index -= 1 + index -= 1 # type: ignore - self._min_normal_lower_boundary_index = index + self._min_normal_lower_boundary_index = index # type: ignore # self._max_normal_lower_boundary_index is the index such that # base**index equals the greatest representable lower boundary. An @@ -83,12 +83,12 @@ def _init(self, scale: int): # represented. One greater than this index corresponds with the bucket # containing values > 2 ** 1024. 
self._max_normal_lower_boundary_index = ( - MAX_NORMAL_EXPONENT >> -self._scale + MAX_NORMAL_EXPONENT >> -self._scale # type: ignore ) def map_to_index(self, value: float) -> int: if value < MIN_NORMAL_VALUE: - return self._min_normal_lower_boundary_index + return self._min_normal_lower_boundary_index # type: ignore exponent = get_ieee_754_exponent(value) @@ -128,10 +128,10 @@ def map_to_index(self, value: float) -> int: return (exponent + correction) >> -self._scale def get_lower_boundary(self, index: int) -> float: - if index < self._min_normal_lower_boundary_index: + if index < self._min_normal_lower_boundary_index: # type: ignore raise MappingUnderflowError() - if index > self._max_normal_lower_boundary_index: + if index > self._max_normal_lower_boundary_index: # type: ignore raise MappingOverflowError() return ldexp(1, index << -self._scale) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/ieee_754.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/ieee_754.py index d4b7e86148..c0bc2e7154 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/ieee_754.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/ieee_754.py @@ -29,7 +29,7 @@ # from 1 to 2046. To calculate the exponent value, 1023 (the bias) is # subtracted from the exponent, so the exponent value range is from -1022 to # +1023. -EXPONENT_BIAS = (2 ** (EXPONENT_WIDTH - 1)) - 1 +EXPONENT_BIAS = (2 ** (EXPONENT_WIDTH - 1)) - 1 # type: ignore # All the exponent mask bits are set to 1 for the 11 exponent bits. EXPONENT_MASK = ((1 << EXPONENT_WIDTH) - 1) << MANTISSA_WIDTH @@ -39,8 +39,8 @@ # For normal floating point numbers, the exponent can have a value in the # range [-1022, 1023]. 
-MIN_NORMAL_EXPONENT = -EXPONENT_BIAS + 1 -MAX_NORMAL_EXPONENT = EXPONENT_BIAS +MIN_NORMAL_EXPONENT = -EXPONENT_BIAS + 1 # type: ignore +MAX_NORMAL_EXPONENT = EXPONENT_BIAS # type: ignore # The smallest possible normal value is 2.2250738585072014e-308. # This value is the result of using the smallest possible number in the @@ -65,7 +65,7 @@ def get_ieee_754_exponent(value: float) -> int: Gets the exponent of the IEEE 754 representation of a float. """ - return ( + return int(( ( # This step gives the integer that corresponds to the IEEE 754 # representation of a float. For example, consider @@ -101,7 +101,7 @@ def get_ieee_754_exponent(value: float) -> int: # exponent. # This step subtracts the exponent bias from the IEEE 754 value, # leaving the actual exponent value. - ) - EXPONENT_BIAS + ) - EXPONENT_BIAS) # type: ignore # For the example this means: # 2046 - EXPONENT_BIAS == 1023 # As mentioned in a comment above, the largest value for the exponent is diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/logarithm_mapping.py b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/logarithm_mapping.py index e73f3a81e2..b3416a55fc 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/logarithm_mapping.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/logarithm_mapping.py @@ -14,6 +14,7 @@ from math import exp, floor, ldexp, log from threading import Lock +from typing import Any, Dict from opentelemetry.sdk.metrics._internal.exponential_histogram.mapping import ( Mapping, @@ -35,25 +36,25 @@ class LogarithmMapping(Mapping): # Reference implementation here: # https://github.com/open-telemetry/opentelemetry-go/blob/0e6f9c29c10d6078e8131418e1d1d166c7195d61/sdk/metric/aggregator/exponential/mapping/logarithm/logarithm.go - _mappings = {} + _mappings: Dict[int, Any] = {} _mappings_lock = Lock() 
_min_scale = 1 _max_scale = 20 - def _get_min_scale(self): + def _get_min_scale(self) -> int: # _min_scale ensures that ExponentMapping is used for zero and negative # scale values. return self._min_scale - def _get_max_scale(self): + def _get_max_scale(self) -> int: # FIXME The Go implementation uses a value of 20 here, find out the # right value for this implementation, more information here: # https://github.com/lightstep/otel-launcher-go/blob/c9ca8483be067a39ab306b09060446e7fda65f35/lightstep/sdk/metric/aggregator/histogram/structure/README.md#mapping-function # https://github.com/open-telemetry/opentelemetry-go/blob/0e6f9c29c10d6078e8131418e1d1d166c7195d61/sdk/metric/aggregator/exponential/mapping/logarithm/logarithm.go#L32-L45 return self._max_scale - def _init(self, scale: int): + def _init(self, scale: int) -> None: # pylint: disable=attribute-defined-outside-init super()._init(scale) @@ -80,7 +81,7 @@ def _init(self, scale: int): # (MIN_NORMAL_VALUE, MIN_NORMAL_VALUE * base]. One less than this index # corresponds with the bucket containing values <= MIN_NORMAL_VALUE. self._min_normal_lower_boundary_index = ( - MIN_NORMAL_EXPONENT << self._scale + MIN_NORMAL_EXPONENT << self._scale # type: ignore ) # self._max_normal_lower_boundary_index is the index such that @@ -92,7 +93,7 @@ def _init(self, scale: int): # represented. One greater than this index corresponds with the bucket # containing values > 2 ** 1024. self._max_normal_lower_boundary_index = ( - (MAX_NORMAL_EXPONENT + 1) << self._scale + (MAX_NORMAL_EXPONENT + 1) << self._scale # type: ignore ) - 1 def map_to_index(self, value: float) -> int: @@ -102,30 +103,30 @@ def map_to_index(self, value: float) -> int: # value is subnormal if value <= MIN_NORMAL_VALUE: - return self._min_normal_lower_boundary_index - 1 + return self._min_normal_lower_boundary_index - 1 # type: ignore # value is an exact power of two. 
if get_ieee_754_mantissa(value) == 0: exponent = get_ieee_754_exponent(value) return (exponent << self._scale) - 1 - return min( + return min( # type: ignore floor(log(value) * self._scale_factor), - self._max_normal_lower_boundary_index, + self._max_normal_lower_boundary_index, # type: ignore ) def get_lower_boundary(self, index: int) -> float: - if index >= self._max_normal_lower_boundary_index: - if index == self._max_normal_lower_boundary_index: + if index >= self._max_normal_lower_boundary_index: # type: ignore + if index == self._max_normal_lower_boundary_index: # type: ignore return 2 * exp( (index - (1 << self._scale)) / self._scale_factor ) raise MappingOverflowError() - if index <= self._min_normal_lower_boundary_index: - if index == self._min_normal_lower_boundary_index: + if index <= self._min_normal_lower_boundary_index: # type: ignore + if index == self._min_normal_lower_boundary_index: # type: ignore return MIN_NORMAL_VALUE - if index == self._min_normal_lower_boundary_index - 1: + if index == self._min_normal_lower_boundary_index - 1: # type: ignore return ( exp((index + (1 << self._scale)) / self._scale_factor) / 2 ) diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py index b144e071a8..5a83bb328a 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/__init__.py @@ -136,6 +136,7 @@ def force_flush(self, timeout_millis: int = 30000) -> bool: Returns: False if the timeout is exceeded, True otherwise. """ + return False # Temporary fix until https://github.com/PyCQA/pylint/issues/4098 is resolved @@ -150,7 +151,7 @@ class SynchronousMultiSpanProcessor(SpanProcessor): _span_processors: Tuple[SpanProcessor, ...] - def __init__(self): + def __init__(self) -> None: # use a tuple to avoid race conditions when adding a new span and # iterating through it on "on_start" and "on_end". 
self._span_processors = () @@ -236,10 +237,10 @@ def _submit_and_await( func: Callable[[SpanProcessor], Callable[..., None]], *args: Any, **kwargs: Any, - ): + ) -> None: futures = [] for sp in self._span_processors: - future = self._executor.submit(func(sp), *args, **kwargs) + future = self._executor.submit(func(sp), *args, **kwargs) # type: ignore futures.append(future) for future in futures: future.result() @@ -344,19 +345,19 @@ def dropped_attributes(self) -> int: return 0 -def _check_span_ended(func): - def wrapper(self, *args, **kwargs): +def _check_span_ended(func): # type: ignore + def wrapper(self, *args, **kwargs): # type: ignore already_ended = False - with self._lock: # pylint: disable=protected-access - if self._end_time is None: # pylint: disable=protected-access - func(self, *args, **kwargs) + with self._lock: # type: ignore # pylint: disable=protected-access + if self._end_time is None: # type: ignore # pylint: disable=protected-access + func(self, *args, **kwargs) # type: ignore else: already_ended = True if already_ended: - logger.warning("Tried calling %s on an ended span.", func.__name__) + logger.warning("Tried calling %s on an ended span.", func.__name__) # type: ignore - return wrapper + return wrapper # type: ignore def _is_valid_link(context: SpanContext, attributes: types.Attributes) -> bool: @@ -428,11 +429,11 @@ def dropped_links(self) -> int: def name(self) -> str: return self._name - def get_span_context(self): + def get_span_context(self) -> Optional[trace_api.SpanContext]: return self._context @property - def context(self): + def context(self) -> Optional[trace_api.SpanContext]: return self._context @property @@ -472,7 +473,7 @@ def resource(self) -> Resource: return self._resource @property - @deprecated( + @deprecated( # type: ignore version="1.11.1", reason="You should use instrumentation_scope" ) def instrumentation_info(self) -> Optional[InstrumentationInfo]: @@ -482,7 +483,7 @@ def instrumentation_info(self) -> 
Optional[InstrumentationInfo]: def instrumentation_scope(self) -> Optional[InstrumentationScope]: return self._instrumentation_scope - def to_json(self, indent: int = 4): + def to_json(self, indent: int = 4) -> str: parent_id = None if self.parent is not None: parent_id = f"0x{trace_api.format_span_id(self.parent.span_id)}" @@ -501,7 +502,7 @@ def to_json(self, indent: int = 4): if self._status.description: status["description"] = self._status.description - f_span = { + f_span = { # type: ignore "name": self._name, "context": ( self._format_context(self._context) if self._context else None @@ -511,13 +512,13 @@ def to_json(self, indent: int = 4): "start_time": start_time, "end_time": end_time, "status": status, - "attributes": self._format_attributes(self._attributes), - "events": self._format_events(self._events), - "links": self._format_links(self._links), - "resource": json.loads(self.resource.to_json()), + "attributes": self._format_attributes(self._attributes), # type: ignore + "events": self._format_events(self._events), # type: ignore + "links": self._format_links(self._links), # type: ignore + "resource": json.loads(self.resource.to_json()), # type: ignore } - return json.dumps(f_span, indent=indent) + return json.dumps(f_span, indent=indent) # type: ignore @staticmethod def _format_context(context: SpanContext) -> Dict[str, str]: @@ -528,20 +529,20 @@ def _format_context(context: SpanContext) -> Dict[str, str]: } @staticmethod - def _format_attributes( + def _format_attributes( # type: ignore attributes: types.Attributes, ) -> Optional[Dict[str, Any]]: if attributes is not None and not isinstance(attributes, dict): - return dict(attributes) + return dict(attributes) # type: ignore return attributes @staticmethod - def _format_events(events: Sequence[Event]) -> List[Dict[str, Any]]: - return [ - { + def _format_events(events: Sequence[Event]) -> List[Dict[str, Any]]: # type: ignore + return [ # type: ignore + { # type: ignore "name": event.name, 
"timestamp": util.ns_to_iso_str(event.timestamp), - "attributes": Span._format_attributes( # pylint: disable=protected-access + "attributes": Span._format_attributes( # type: ignore # pylint: disable=protected-access event.attributes ), } @@ -549,13 +550,13 @@ def _format_events(events: Sequence[Event]) -> List[Dict[str, Any]]: ] @staticmethod - def _format_links(links: Sequence[trace_api.Link]) -> List[Dict[str, Any]]: - return [ - { - "context": Span._format_context( # pylint: disable=protected-access + def _format_links(links: Sequence[trace_api.Link]) -> List[Dict[str, Any]]: # type: ignore + return [ # type: ignore + { # type: ignore + "context": Span._format_context(# pylint: disable=protected-access link.context ), - "attributes": Span._format_attributes( # pylint: disable=protected-access + "attributes": Span._format_attributes( # type: ignore # pylint: disable=protected-access link.attributes ), } @@ -681,7 +682,7 @@ def __init__( self.max_attribute_length, ) - def __repr__(self): + def __repr__(self) -> str: return f"{type(self).__name__}(max_span_attributes={self.max_span_attributes}, max_events_attributes={self.max_event_attributes}, max_link_attributes={self.max_link_attributes}, max_attributes={self.max_attributes}, max_events={self.max_events}, max_links={self.max_links}, max_attribute_length={self.max_attribute_length})" @classmethod @@ -754,7 +755,7 @@ class Span(trace_api.Span, ReadableSpan): limits: `SpanLimits` instance that was passed to the `TracerProvider` """ - def __new__(cls, *args, **kwargs): + def __new__(cls, *args, **kwargs): # type: ignore if cls is Span: raise TypeError("Span must be instantiated via a tracer.") return super().__new__(cls) @@ -776,7 +777,7 @@ def __init__( instrumentation_info: Optional[InstrumentationInfo] = None, record_exception: bool = True, set_status_on_exception: bool = True, - limits=_UnsetLimits, + limits: SpanLimits = _UnsetLimits, instrumentation_scope: Optional[InstrumentationScope] = None, ) -> None: if 
resource is None: @@ -803,7 +804,7 @@ def __init__( immutable=False, max_value_len=self._limits.max_span_attribute_length, ) - self._events = self._new_events() + self._events = self._new_events() # type: ignore if events: for event in events: event._attributes = BoundedAttributes( @@ -811,19 +812,19 @@ def __init__( event.attributes, max_value_len=self._limits.max_attribute_length, ) - self._events.append(event) + self._events.append(event) # type: ignore self._links = self._new_links(links) - def __repr__(self): + def __repr__(self) -> str: return f'{type(self).__name__}(name="{self._name}", context={self._context})' - def _new_events(self): - return BoundedList(self._limits.max_events) + def _new_events(self): # type: ignore + return BoundedList(self._limits.max_events) # type: ignore - def _new_links(self, links: Sequence[trace_api.Link]): + def _new_links(self, links: Sequence[trace_api.Link]): # type: ignore if not links: - return BoundedList(self._limits.max_links) + return BoundedList(self._limits.max_links) # type: ignore valid_links = [] for link in links: @@ -836,9 +837,9 @@ def _new_links(self, links: Sequence[trace_api.Link]): ) valid_links.append(link) - return BoundedList.from_seq(self._limits.max_links, valid_links) + return BoundedList.from_seq(self._limits.max_links, valid_links) # type: ignore - def get_span_context(self): + def get_span_context(self): # type: ignore return self._context def set_attributes( @@ -850,14 +851,14 @@ def set_attributes( return for key, value in attributes.items(): - self._attributes[key] = value + self._attributes[key] = value # type: ignore def set_attribute(self, key: str, value: types.AttributeValue) -> None: return self.set_attributes({key: value}) - @_check_span_ended + @_check_span_ended # type: ignore def _add_event(self, event: EventBase) -> None: - self._events.append(event) + self._events.append(event) # type: ignore def add_event( self, @@ -878,9 +879,9 @@ def add_event( ) ) - @_check_span_ended + 
@_check_span_ended # type: ignore def _add_link(self, link: trace_api.Link) -> None: - self._links.append(link) + self._links.append(link) # type: ignore def add_link( self, @@ -946,14 +947,14 @@ def end(self, end_time: Optional[int] = None) -> None: self._span_processor.on_end(self._readable_span()) - @_check_span_ended + @_check_span_ended # type: ignore def update_name(self, name: str) -> None: self._name = name def is_recording(self) -> bool: return self._end_time is None - @_check_span_ended + @_check_span_ended # type: ignore def set_status( self, status: typing.Union[Status, StatusCode], @@ -998,10 +999,14 @@ def __exit__( # Records status if span is used as context manager # i.e. with tracer.start_span() as span: if self._set_status_on_exception: + if exc_type is not None: + description = f"{exc_type.__name__}: {exc_val}" + else: + description = f"{exc_val}" self.set_status( Status( status_code=StatusCode.ERROR, - description=f"{exc_type.__name__}: {exc_val}", + description=description, ) ) @@ -1127,7 +1132,7 @@ def start_span( # pylint: disable=too-many-locals # is_valid determines root span if parent_span_context is None or not parent_span_context.is_valid: - parent_span_context = None + parent_span_context = None # type: ignore trace_id = self.id_generator.generate_trace_id() else: trace_id = parent_span_context.trace_id @@ -1164,10 +1169,10 @@ def start_span( # pylint: disable=too-many-locals parent=parent_span_context, sampler=self.sampler, resource=self.resource, - attributes=sampling_result.attributes.copy(), + attributes=sampling_result.attributes.copy(), # type: ignore span_processor=self.span_processor, kind=kind, - links=links, + links=links, # type: ignore instrumentation_info=self.instrumentation_info, record_exception=record_exception, set_status_on_exception=set_status_on_exception, @@ -1176,7 +1181,7 @@ def start_span( # pylint: disable=too-many-locals ) span.start(start_time=start_time, parent_context=context) else: - span = 
trace_api.NonRecordingSpan(context=span_context) + span = trace_api.NonRecordingSpan(context=span_context) # type: ignore return span @@ -1200,7 +1205,7 @@ def __init__( if id_generator is None: self.id_generator = RandomIdGenerator() else: - self.id_generator = id_generator + self.id_generator = id_generator # type: ignore if resource is None: self._resource = Resource.create({}) else: @@ -1277,7 +1282,7 @@ def add_span_processor(self, span_processor: SpanProcessor) -> None: # SynchronousMultiSpanProcessor and ConcurrentMultiSpanProcessor. self._active_span_processor.add_span_processor(span_processor) - def shutdown(self): + def shutdown(self) -> None: """Shut down the span processors added to the tracer provider.""" self._active_span_processor.shutdown() if self._atexit_handler is not None: diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py b/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py index fb6990a007..c362a0f195 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/trace/sampling.py @@ -160,10 +160,10 @@ class Decision(enum.Enum): # IsRecording() == true AND Sampled flag` MUST be set. 
RECORD_AND_SAMPLE = 2 - def is_recording(self): + def is_recording(self) -> bool: return self in (Decision.RECORD_ONLY, Decision.RECORD_AND_SAMPLE) - def is_sampled(self): + def is_sampled(self) -> bool: return self is Decision.RECORD_AND_SAMPLE @@ -179,7 +179,7 @@ class SamplingResult: """ def __repr__(self) -> str: - return f"{type(self).__name__}({str(self.decision)}, attributes={str(self.attributes)})" + return f"{type(self).__name__}({str(self.decision)}, attributes={str(self.attributes)})" # type: ignore def __init__( self, @@ -189,7 +189,7 @@ def __init__( ) -> None: self.decision = decision if attributes is None: - self.attributes = MappingProxyType({}) + self.attributes = MappingProxyType({}) # type: ignore else: self.attributes = MappingProxyType(attributes) self.trace_state = trace_state @@ -373,7 +373,7 @@ def should_sample( links=links, ) - def get_description(self): + def get_description(self) -> str: return f"ParentBased{{root:{self._root.get_description()},remoteParentSampled:{self._remote_parent_sampled.get_description()},remoteParentNotSampled:{self._remote_parent_not_sampled.get_description()},localParentSampled:{self._local_parent_sampled.get_description()},localParentNotSampled:{self._local_parent_not_sampled.get_description()}}}" @@ -396,22 +396,22 @@ def __init__(self, rate: float): class _AlwaysOff(StaticSampler): - def __init__(self, _): + def __init__(self, _): # type: ignore super().__init__(Decision.DROP) class _AlwaysOn(StaticSampler): - def __init__(self, _): + def __init__(self, _): # type: ignore super().__init__(Decision.RECORD_AND_SAMPLE) class _ParentBasedAlwaysOff(ParentBased): - def __init__(self, _): + def __init__(self, _): # type: ignore super().__init__(ALWAYS_OFF) class _ParentBasedAlwaysOn(ParentBased): - def __init__(self, _): + def __init__(self, _): # type: ignore super().__init__(ALWAYS_ON) @@ -435,13 +435,13 @@ def _get_from_env_or_default() -> Sampler: if trace_sampler in ("traceidratio", "parentbased_traceidratio"): 
try: - rate = float(os.getenv(OTEL_TRACES_SAMPLER_ARG)) + rate = float(os.getenv(OTEL_TRACES_SAMPLER_ARG, "1.0")) except (ValueError, TypeError): _logger.warning("Could not convert TRACES_SAMPLER_ARG to float.") rate = 1.0 - return _KNOWN_SAMPLERS[trace_sampler](rate) + return _KNOWN_SAMPLERS[trace_sampler](rate) # type: ignore - return _KNOWN_SAMPLERS[trace_sampler] + return _KNOWN_SAMPLERS[trace_sampler] # type: ignore def _get_parent_trace_state( diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/util/__init__.pyi b/opentelemetry-sdk/src/opentelemetry/sdk/util/__init__.pyi index e949ad4ebc..2a9bb64546 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/util/__init__.pyi +++ b/opentelemetry-sdk/src/opentelemetry/sdk/util/__init__.pyi @@ -49,8 +49,8 @@ class BoundedList(Sequence[_T]): @overload def __getitem__(self, s: slice) -> Sequence[_T]: ... def __len__(self) -> int: ... - def append(self, item: _T): ... - def extend(self, seq: Sequence[_T]): ... + def append(self, item: _T) -> None: ... + def extend(self, seq: Sequence[_T]) -> None: ... @classmethod def from_seq(cls, maxlen: int, seq: Iterable[_T]) -> BoundedList[_T]: ... 
# pylint: disable=undefined-variable diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py b/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py index a6fd7d7f66..0849e22b67 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/util/instrumentation.py @@ -29,7 +29,7 @@ class InstrumentationInfo: __slots__ = ("_name", "_version", "_schema_url") - @deprecated(version="1.11.1", reason="You should use InstrumentationScope") + @deprecated(version="1.11.1", reason="You should use InstrumentationScope") # type: ignore def __init__( self, name: str, @@ -42,26 +42,26 @@ def __init__( schema_url = "" self._schema_url = schema_url - def __repr__(self): + def __repr__(self) -> str: return f"{type(self).__name__}({self._name}, {self._version}, {self._schema_url})" - def __hash__(self): + def __hash__(self) -> int: return hash((self._name, self._version, self._schema_url)) - def __eq__(self, value): + def __eq__(self, value: object) -> bool: return type(value) is type(self) and ( self._name, self._version, self._schema_url, - ) == (value._name, value._version, value._schema_url) + ) == (value._name, value._version, value._schema_url) # type: ignore - def __lt__(self, value): + def __lt__(self, value: object) -> bool: if type(value) is not type(self): return NotImplemented - return (self._name, self._version, self._schema_url) < ( - value._name, - value._version, - value._schema_url, + return (self._name, self._version, self._schema_url) < ( # type: ignore + value._name, # type: ignore + value._version, # type: ignore + value._schema_url, # type: ignore ) @property @@ -153,14 +153,14 @@ def name(self) -> str: def attributes(self) -> Attributes: return self._attributes - def to_json(self, indent=4) -> str: + def to_json(self, indent: int=4) -> str: return dumps( - { + { # type: ignore "name": self._name, "version": self._version, "schema_url": self._schema_url, - "attributes": ( 
- dict(self._attributes) if bool(self._attributes) else None + "attributes": ( # type: ignore + dict(self._attributes) if bool(self._attributes) else None # type: ignore ), }, indent=indent, diff --git a/tox.ini b/tox.ini index f9a4c8e091..554b74e92e 100644 --- a/tox.ini +++ b/tox.ini @@ -231,6 +231,7 @@ commands = coverage: {toxinidir}/scripts/coverage.sh mypy: mypy --version + mypy: mypy --install-types --non-interactive --namespace-packages --explicit-package-bases opentelemetry-sdk/src/ mypy: mypy --install-types --non-interactive --namespace-packages --explicit-package-bases opentelemetry-api/src/opentelemetry/ mypy: mypy --install-types --non-interactive --namespace-packages --explicit-package-bases opentelemetry-sdk/src/opentelemetry/sdk/resources mypy: mypy --install-types --non-interactive --namespace-packages --explicit-package-bases opentelemetry-semantic-conventions/src/opentelemetry/semconv/