diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 8253d330b95b811..874b985d2ff8b3d 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -260,7 +260,7 @@ def clear_caches(): except KeyError: pass else: - for f in typing._cleanups: + for f in typing._CACHE_CLEANUPS: f() try: diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index fbca198aab5180f..361077e47e0833a 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -24,7 +24,7 @@ class Example: class Forward: ... def clear_typing_caches(): - for f in typing._cleanups: + for f in typing._CACHE_CLEANUPS: f() diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index f10b0aea3cd7b9c..0d3e2726f6e43df 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -71,7 +71,7 @@ def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): raise self.failureException(message) def clear_caches(self): - for f in typing._cleanups: + for f in typing._CACHE_CLEANUPS: f() @@ -6398,10 +6398,10 @@ def blah(): @cpython_only # gh-98713 def test_overload_on_compiled_functions(self): - with patch("typing._overload_registry", + with patch("typing._OVERLOAD_REGISTRY", defaultdict(lambda: defaultdict(dict))): # The registry starts out empty: - self.assertEqual(typing._overload_registry, {}) + self.assertEqual(typing._OVERLOAD_REGISTRY, {}) # This should just not fail: overload(sum) @@ -6430,14 +6430,14 @@ def blah(): return blah, [overload1, overload2] # Make sure we don't clear the global overload registry - @patch("typing._overload_registry", + @patch("typing._OVERLOAD_REGISTRY", defaultdict(lambda: defaultdict(dict))) def test_overload_registry(self): # The registry starts out empty - self.assertEqual(typing._overload_registry, {}) + self.assertEqual(typing._OVERLOAD_REGISTRY, {}) impl, overloads = self.set_up_overloads() - self.assertNotEqual(typing._overload_registry, {}) + self.assertNotEqual(typing._OVERLOAD_REGISTRY, {}) 
self.assertEqual(list(get_overloads(impl)), overloads) def some_other_func(): pass @@ -6452,13 +6452,13 @@ def not_overloaded(): pass # Make sure that after we clear all overloads, the registry is # completely empty. clear_overloads() - self.assertEqual(typing._overload_registry, {}) + self.assertEqual(typing._OVERLOAD_REGISTRY, {}) self.assertEqual(get_overloads(impl), []) # Querying a function with no overloads shouldn't change the registry. def the_only_one(): pass self.assertEqual(get_overloads(the_only_one), []) - self.assertEqual(typing._overload_registry, {}) + self.assertEqual(typing._OVERLOAD_REGISTRY, {}) def test_overload_registry_repeated(self): for _ in range(2): diff --git a/Lib/typing.py b/Lib/typing.py index 434574559e04fcb..538ea8d6343adda 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -256,8 +256,8 @@ def _type_repr(obj): return repr(obj) -def _collect_type_parameters(args, *, enforce_default_ordering: bool = True): - """Collect all type parameters in args +def _collect_type_parameters(params, *, enforce_default_ordering: bool = True): + """Collect all type parameters in params in order of first appearance (lexicographic order). For example:: @@ -272,37 +272,39 @@ def _collect_type_parameters(args, *, enforce_default_ordering: bool = True): # or after TypeVarTuple type_var_tuple_encountered = False parameters = [] - for t in args: - if isinstance(t, type): + for param in params: + if isinstance(param, type): # We don't want __parameters__ descriptor of a bare Python class. pass - elif isinstance(t, tuple): + elif isinstance(param, tuple): # `t` might be a tuple, when `ParamSpec` is substituted with # `[T, int]`, or `[int, *Ts]`, etc. 
- for x in t: - for collected in _collect_type_parameters([x]): + for elem in param: + for collected in _collect_type_parameters([elem]): if collected not in parameters: parameters.append(collected) - elif hasattr(t, '__typing_subst__'): - if t not in parameters: + elif hasattr(param, '__typing_subst__'): + if param not in parameters: if enforce_default_ordering: - if type_var_tuple_encountered and t.has_default(): + if type_var_tuple_encountered and param.has_default(): raise TypeError('Type parameter with a default' ' follows TypeVarTuple') - if t.has_default(): + elif param.has_default(): default_encountered = True elif default_encountered: - raise TypeError(f'Type parameter {t!r} without a default' - ' follows type parameter with a default') + raise TypeError( + f'Type parameter {param!r} without a default' + ' follows type parameter with a default' + ) - parameters.append(t) + parameters.append(param) else: - if _is_unpacked_typevartuple(t): + if _is_unpacked_typevartuple(param): type_var_tuple_encountered = True - for x in getattr(t, '__parameters__', ()): - if x not in parameters: - parameters.append(x) + for elem in getattr(param, '__parameters__', ()): + if elem not in parameters: + parameters.append(elem) return tuple(parameters) @@ -333,20 +335,25 @@ def _check_generic_specialization(cls, arguments): else: expect_val = expected_len - raise TypeError(f"Too {'many' if actual_len > expected_len else 'few'} arguments" - f" for {cls}; actual {actual_len}, expected {expect_val}") + raise TypeError( + f"Too {'many' if actual_len > expected_len else 'few'} arguments" + f" for {cls}; actual {actual_len}, expected {expect_val}" + ) def _unpack_args(*args): newargs = [] for arg in args: - subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) - if subargs is not None and not (subargs and subargs[-1] is ...): - newargs.extend(subargs) - else: - newargs.append(arg) + match getattr(arg, '__typing_unpacked_tuple_args__', None): + case [*_, 
types.EllipsisType()]: + newargs.append(arg) + case tuple() as subargs: + newargs.extend(subargs) + case _: + newargs.append(arg) return newargs + def _deduplicate(params, *, unhashable_fallback=False): # Weed out strict duplicates, preserving the first of each occurrence. try: @@ -357,13 +364,15 @@ def _deduplicate(params, *, unhashable_fallback=False): # Happens for cases like `Annotated[dict, {'x': IntValidator()}]` return _deduplicate_unhashable(params) + def _deduplicate_unhashable(unhashable_params): new_unhashable = [] - for t in unhashable_params: - if t not in new_unhashable: - new_unhashable.append(t) + for param in unhashable_params: + if param not in new_unhashable: + new_unhashable.append(param) return new_unhashable + def _compare_args_orderless(first_args, second_args): first_unhashable = _deduplicate_unhashable(first_args) second_unhashable = _deduplicate_unhashable(second_args) @@ -375,6 +384,7 @@ def _compare_args_orderless(first_args, second_args): return False return not t + def _remove_dups_flatten(parameters): """Internal helper for Union creation and substitution. @@ -382,11 +392,11 @@ def _remove_dups_flatten(parameters): """ # Flatten out Union[Union[...], ...]. 
params = [] - for p in parameters: - if isinstance(p, (_UnionGenericAlias, types.UnionType)): - params.extend(p.__args__) + for param in parameters: + if isinstance(param, (_UnionGenericAlias, types.UnionType)): + params.extend(param.__args__) else: - params.append(p) + params.append(param) return tuple(_deduplicate(params, unhashable_fallback=True)) @@ -394,16 +404,16 @@ def _remove_dups_flatten(parameters): def _flatten_literal_params(parameters): """Internal helper for Literal creation: flatten Literals among parameters.""" params = [] - for p in parameters: - if isinstance(p, _LiteralGenericAlias): - params.extend(p.__args__) + for param in parameters: + if isinstance(param, _LiteralGenericAlias): + params.extend(param.__args__) else: - params.append(p) + params.append(param) return tuple(params) -_cleanups = [] -_caches = {} +_CACHES = {} +_CACHE_CLEANUPS = [] def _tp_cache(func=None, /, *, typed=False): @@ -413,19 +423,19 @@ def _tp_cache(func=None, /, *, typed=False): """ def decorator(func): # The callback 'inner' references the newly created lru_cache - # indirectly by performing a lookup in the global '_caches' dictionary. + # indirectly by performing a lookup in the global '_CACHES' dictionary. # This breaks a reference that can be problematic when combined with # C API extensions that leak references to types. See GH-98253. cache = functools.lru_cache(typed=typed)(func) - _caches[func] = cache - _cleanups.append(cache.cache_clear) + _CACHES[func] = cache + _CACHE_CLEANUPS.append(cache.cache_clear) del cache @functools.wraps(func) def inner(*args, **kwds): try: - return _caches[func](*args, **kwds) + return _CACHES[func](*args, **kwds) except TypeError: pass # All real errors (not unhashable args) are raised below. 
return func(*args, **kwds) @@ -459,7 +469,9 @@ def __repr__(self): _sentinel = _Sentinel() -def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=frozenset()): +def _eval_type( + typ, globalns, localns, type_params=_sentinel, *, recursive_guard=frozenset() + ): """Evaluate all forward references in the given type t. For use of globalns and localns see the docstring for get_type_hints(). @@ -469,37 +481,39 @@ def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=f if type_params is _sentinel: _deprecation_warning_for_no_type_params_passed("typing._eval_type") type_params = () - if isinstance(t, ForwardRef): - return t._evaluate(globalns, localns, type_params, recursive_guard=recursive_guard) - if isinstance(t, (_GenericAlias, GenericAlias, types.UnionType)): - if isinstance(t, GenericAlias): + if isinstance(typ, ForwardRef): + return typ._evaluate( + globalns, localns, type_params, recursive_guard=recursive_guard + ) + if isinstance(typ, (_GenericAlias, GenericAlias, types.UnionType)): + if isinstance(typ, GenericAlias): args = tuple( ForwardRef(arg) if isinstance(arg, str) else arg - for arg in t.__args__ + for arg in typ.__args__ ) - is_unpacked = t.__unpacked__ - if _should_unflatten_callable_args(t, args): - t = t.__origin__[(args[:-1], args[-1])] + is_unpacked = typ.__unpacked__ + if _should_unflatten_callable_args(typ, args): + typ = typ.__origin__[(args[:-1], args[-1])] else: - t = t.__origin__[args] + typ = typ.__origin__[args] if is_unpacked: - t = Unpack[t] + typ = Unpack[typ] - ev_args = tuple( + evaluated_args = tuple( _eval_type( - a, globalns, localns, type_params, recursive_guard=recursive_guard + arg, globalns, localns, type_params, recursive_guard=recursive_guard ) - for a in t.__args__ + for arg in typ.__args__ ) - if ev_args == t.__args__: - return t - if isinstance(t, GenericAlias): - return GenericAlias(t.__origin__, ev_args) - if isinstance(t, types.UnionType): - return 
functools.reduce(operator.or_, ev_args) + if evaluated_args == typ.__args__: + return typ + if isinstance(typ, GenericAlias): + return GenericAlias(typ.__origin__, evaluated_args) + if isinstance(typ, types.UnionType): + return functools.reduce(operator.or_, evaluated_args) else: - return t.copy_with(ev_args) - return t + return typ.copy_with(evaluated_args) + return typ class _Final: @@ -507,8 +521,8 @@ class _Final: __slots__ = ('__weakref__',) - def __init_subclass__(cls, /, *args, **kwds): - if '_root' not in kwds: + def __init_subclass__(cls, /, *args, **kwargs): + if '_root' not in kwargs: raise TypeError("Cannot subclass special typing classes") @@ -533,12 +547,12 @@ def __iter__(self): raise TypeError() # Internal indicator of special typing constructs. # See __doc__ instance attribute for specific docs. class _SpecialForm(_Final, _NotIterable, _root=True): - __slots__ = ('_name', '__doc__', '_getitem') + __slots__ = ('_name', '__doc__', '_getitem_callback') - def __init__(self, getitem): - self._getitem = getitem - self._name = getitem.__name__ - self.__doc__ = getitem.__doc__ + def __init__(self, getitem_callback): + self._getitem_callback = getitem_callback + self._name = getitem_callback.__name__ + self.__doc__ = getitem_callback.__doc__ def __getattr__(self, item): if item in {'__name__', '__qualname__'}: @@ -572,14 +586,14 @@ def __subclasscheck__(self, cls): @_tp_cache def __getitem__(self, parameters): - return self._getitem(self, parameters) + return self._getitem_callback(self, parameters) class _TypedCacheSpecialForm(_SpecialForm, _root=True): def __getitem__(self, parameters): if not isinstance(parameters, tuple): parameters = (parameters,) - return self._getitem(self, *parameters) + return self._getitem_callback(self, *parameters) class _AnyMeta(type): @@ -630,6 +644,7 @@ def stop() -> NoReturn: """ raise TypeError(f"{self} is not subscriptable") + # This is semantically identical to NoReturn, but it is implemented # separately so that type 
checkers can distinguish between the two # if they want. @@ -727,6 +742,7 @@ class Starship: item = _type_check(parameters, f'{self} accepts only single type.', allow_special_forms=True) return _GenericAlias(self, (item,)) + @_SpecialForm def Final(self, parameters): """Special typing construct to indicate final names to type checkers. @@ -749,6 +765,7 @@ class FastConnector(Connection): item = _type_check(parameters, f'{self} accepts only single type.', allow_special_forms=True) return _GenericAlias(self, (item,)) + @_SpecialForm def Union(self, parameters): """Union type; Union[X, Y] means either X or Y. @@ -785,7 +802,7 @@ def Union(self, parameters): if not isinstance(parameters, tuple): parameters = (parameters,) msg = "Union[arg, ...]: each arg must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) + parameters = tuple(_type_check(param, msg) for param in parameters) parameters = _remove_dups_flatten(parameters) if len(parameters) == 1: return parameters[0] @@ -793,6 +810,7 @@ def Union(self, parameters): return _UnionGenericAlias(self, parameters, name="Optional") return _UnionGenericAlias(self, parameters) + def _make_union(left, right): """Used from the C implementation of TypeVar. 
@@ -802,12 +820,14 @@ def _make_union(left, right): """ return Union[left, right] + @_SpecialForm def Optional(self, parameters): """Optional[X] is equivalent to Union[X, None].""" arg = _type_check(parameters, f"{self} requires a single type.") return Union[arg, type(None)] + @_TypedCacheSpecialForm @_tp_cache(typed=True) def Literal(self, *parameters): @@ -836,7 +856,9 @@ def open_helper(file: str, mode: MODE) -> str: parameters = _flatten_literal_params(parameters) try: - parameters = tuple(p for p, _ in _deduplicate(list(_value_and_type_iter(parameters)))) + parameters = tuple( + param for param, _ in _deduplicate(list(_value_and_type_iter(parameters))) + ) except TypeError: # unhashable parameters pass @@ -878,11 +900,14 @@ def Concatenate(self, parameters): raise TypeError("Cannot take a Concatenate of no types.") if not isinstance(parameters, tuple): parameters = (parameters,) - if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)): + if not isinstance(parameters[-1], (ParamSpec, types.EllipsisType)): raise TypeError("The last parameter to Concatenate should be a " "ParamSpec variable or ellipsis.") msg = "Concatenate[arg, ...]: each arg must be a type." 
- parameters = (*(_type_check(p, msg) for p in parameters[:-1]), parameters[-1]) + parameters = ( + *(_type_check(param, msg) for param in parameters[:-1]), + parameters[-1] + ) return _ConcatenateGenericAlias(self, parameters) @@ -1134,10 +1159,10 @@ def _typevartuple_prepare_subst(self, alias, args): if isinstance(param, TypeVarTuple): raise TypeError(f"More than one TypeVarTuple parameter in {alias}") - alen = len(args) - plen = len(params) + num_args = len(args) + num_params = len(params) left = typevartuple_index - right = plen - typevartuple_index - 1 + right = num_params - typevartuple_index - 1 var_tuple_index = None fillarg = None for k, arg in enumerate(args): @@ -1150,21 +1175,23 @@ def _typevartuple_prepare_subst(self, alias, args): fillarg = subargs[0] if var_tuple_index is not None: left = min(left, var_tuple_index) - right = min(right, alen - var_tuple_index - 1) - elif left + right > alen: - raise TypeError(f"Too few arguments for {alias};" - f" actual {alen}, expected at least {plen-1}") - if left == alen - right and self.has_default(): + right = min(right, num_args - var_tuple_index - 1) + elif left + right > num_args: + raise TypeError( + f"Too few arguments for {alias};" + f" actual {num_args}, expected at least {num_params-1}" + ) + if left == num_args - right and self.has_default(): replacement = _unpack_args(self.__default__) else: - replacement = args[left: alen - right] + replacement = args[left: num_args - right] return ( *args[:left], - *([fillarg]*(typevartuple_index - left)), + *([fillarg] * (typevartuple_index - left)), replacement, - *([fillarg]*(plen - right - left - typevartuple_index - 1)), - *args[alen - right:], + *([fillarg] * (num_params - right - left - typevartuple_index - 1)), + *args[num_args - right:], ) @@ -1208,7 +1235,7 @@ def _generic_class_getitem(cls, args): if not isinstance(args, tuple): args = (args,) - args = tuple(_type_convert(p) for p in args) + args = tuple(_type_convert(arg) for arg in args) 
is_generic_or_protocol = cls in (Generic, Protocol) if is_generic_or_protocol: @@ -1284,6 +1311,7 @@ def _generic_init_subclass(cls, *args, **kwargs): def _is_dunder(attr): return attr.startswith('__') and attr.endswith('__') + class _BaseGenericAlias(_Final, _root=True): """The central part of the internal API. @@ -1325,21 +1353,21 @@ def __mro_entries__(self, bases): # appear exactly once in the final bases tuple. If we let it appear # multiple times, we risk "can't form a consistent MRO" errors. i = bases.index(self) - for b in bases[i+1:]: - if isinstance(b, _BaseGenericAlias): + for base in bases[i+1:]: + if isinstance(base, _BaseGenericAlias): break - if not isinstance(b, type): - meth = getattr(b, "__mro_entries__", None) + if not isinstance(base, type): + meth = getattr(base, "__mro_entries__", None) new_bases = meth(bases) if meth else None if ( - isinstance(new_bases, tuple) and - any( - isinstance(b2, type) and issubclass(b2, Generic) - for b2 in new_bases + isinstance(new_bases, tuple) + and any( + isinstance(new_base, type) and issubclass(new_base, Generic) + for new_base in new_bases ) ): break - elif issubclass(b, Generic): + elif issubclass(base, Generic): break else: res.append(Generic) @@ -1356,7 +1384,7 @@ def __getattr__(self, attr): raise AttributeError(attr) def __setattr__(self, attr, val): - if _is_dunder(attr) or attr in {'_name', '_inst', '_nparams', '_defaults'}: + if _is_dunder(attr) or attr in {'_name', '_inst', '_num_params', '_defaults'}: super().__setattr__(attr, val) else: setattr(self.__origin__, attr, val) @@ -1414,8 +1442,10 @@ def __init__(self, origin, args, *, inst=True, name=None): super().__init__(origin, inst=inst, name=name) if not isinstance(args, tuple): args = (args,) - self.__args__ = tuple(... if a is _TypingEllipsis else - a for a in args) + self.__args__ = tuple( + ... 
if arg is _TypingEllipsis else arg + for arg in args + ) enforce_default_ordering = origin in (Generic, Protocol) self.__parameters__ = _collect_type_parameters( args, @@ -1465,10 +1495,9 @@ def __getitem__(self, args): # Preprocess `args`. if not isinstance(args, tuple): args = (args,) - args = _unpack_args(*(_type_convert(p) for p in args)) + args = _unpack_args(*(_type_convert(arg) for arg in args)) new_args = self._determine_new_args(args) - r = self.copy_with(new_args) - return r + return self.copy_with(new_args) def _determine_new_args(self, args): # Determines new __args__ for __getitem__. @@ -1491,11 +1520,14 @@ def _determine_new_args(self, args): prepare = getattr(param, '__typing_prepare_subst__', None) if prepare is not None: args = prepare(self, args) - alen = len(args) - plen = len(params) - if alen != plen: - raise TypeError(f"Too {'many' if alen > plen else 'few'} arguments for {self};" - f" actual {alen}, expected {plen}") + num_args = len(args) + num_params = len(params) + if num_args != num_params: + raise TypeError( + f"Too {'many' if num_args > num_params else 'few'}" + f" arguments for {self};" + f" actual {num_args}, expected {num_params}" + ) new_arg_by_param = dict(zip(params, args)) return tuple(self._make_substitution(self.__args__, new_arg_by_param)) @@ -1523,7 +1555,7 @@ def _make_substitution(self, args, new_arg_by_param): subargs.append(new_arg_by_param[x]) new_arg = old_arg[tuple(subargs)] - if self.__origin__ == collections.abc.Callable and isinstance(new_arg, tuple): + if self.__origin__ is collections.abc.Callable and isinstance(new_arg, tuple): # Consider the following `Callable`. # C = Callable[[int], str] # Here, `C.__args__` should be (int, str) - NOT ([int], str). @@ -1570,7 +1602,7 @@ def __repr__(self): else: name = _type_repr(self.__origin__) if self.__args__: - args = ", ".join([_type_repr(a) for a in self.__args__]) + args = ", ".join([_type_repr(arg) for arg in self.__args__]) else: # To ensure the repr is eval-able. 
args = "()" @@ -1596,8 +1628,8 @@ def __mro_entries__(self, bases): if Protocol in bases: return () i = bases.index(self) - for b in bases[i+1:]: - if isinstance(b, _BaseGenericAlias) and b is not self: + for base in bases[i+1:]: + if isinstance(base, _BaseGenericAlias) and base is not self: return () return (self.__origin__,) @@ -1605,16 +1637,16 @@ def __iter__(self): yield Unpack[self] -# _nparams is the number of accepted parameters, e.g. 0 for Hashable, +# _num_params is the number of accepted parameters, e.g. 0 for Hashable, # 1 for List and 2 for Dict. It may be -1 if variable number of # parameters are accepted (needs custom __getitem__). class _SpecialGenericAlias(_NotIterable, _BaseGenericAlias, _root=True): - def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()): if name is None: name = origin.__name__ super().__init__(origin, inst=inst, name=name) - self._nparams = nparams + def __init__(self, origin, num_params, *, inst=True, name=None, defaults=()): if name is None: name = origin.__name__ super().__init__(origin, inst=inst, name=name) + self._num_params = num_params self._defaults = defaults if origin.__module__ == 'builtins': self.__doc__ = f'A generic version of {origin.__qualname__}.' else: self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.' @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) + def __getitem__(self, args): + if not isinstance(args, tuple): + args = (args,) msg = "Parameters to generic types must be types." 
- params = tuple(_type_check(p, msg) for p in params) + args = tuple(_type_check(p, msg) for p in args) if (self._defaults - and len(params) < self._nparams - and len(params) + len(self._defaults) >= self._nparams + and len(args) < self._num_params + and len(args) + len(self._defaults) >= self._num_params ): - params = (*params, *self._defaults[len(params) - self._nparams:]) - actual_len = len(params) + args = (*args, *self._defaults[len(args) - self._num_params:]) + num_args = len(args) - if actual_len != self._nparams: + if num_args != self._num_params: if self._defaults: - expected = f"at least {self._nparams - len(self._defaults)}" + expected = f"at least {self._num_params - len(self._defaults)}" else: - expected = str(self._nparams) - if not self._nparams: + expected = str(self._num_params) + if not self._num_params: raise TypeError(f"{self} is not a generic class") - raise TypeError(f"Too {'many' if actual_len > self._nparams else 'few'} arguments for {self};" - f" actual {actual_len}, expected {expected}") - return self.copy_with(params) + raise TypeError(f"Too {'many' if num_args > self._num_params else 'few'} arguments for {self};" + f" actual {num_args}, expected {expected}") + return self.copy_with(args) def copy_with(self, params): return _GenericAlias(self.__origin__, params, @@ -1675,9 +1707,11 @@ def __repr__(self): args = self.__args__ if len(args) == 2 and _is_param_expr(args[0]): return super().__repr__() - return (f'typing.Callable' - f'[[{", ".join([_type_repr(a) for a in args[:-1]])}], ' - f'{_type_repr(args[-1])}]') + return ( + f'typing.Callable' + f'[[{", ".join([_type_repr(arg) for arg in args[:-1]])}], ' + f'{_type_repr(args[-1])}]' + ) def __reduce__(self): args = self.__args__ @@ -1691,23 +1725,23 @@ def copy_with(self, params): return _CallableGenericAlias(self.__origin__, params, name=self._name, inst=self._inst) - def __getitem__(self, params): - if not isinstance(params, tuple) or len(params) != 2: + def __getitem__(self, args): + if 
not isinstance(args, tuple) or len(args) != 2: raise TypeError("Callable must be used as " "Callable[[arg, ...], result].") - args, result = params + args, result = args # This relaxes what args can be on purpose to allow things like # PEP 612 ParamSpec. Responsibility for whether a user is using # Callable[...] properly is deferred to static type checkers. if isinstance(args, list): - params = (tuple(args), result) + args = (tuple(args), result) else: - params = (args, result) - return self.__getitem_inner__(params) + args = (args, result) + return self.__getitem_inner__(args) @_tp_cache - def __getitem_inner__(self, params): - args, result = params + def __getitem_inner__(self, args): + args, result = args msg = "Callable[args, result]: result must be a type." result = _type_check(result, msg) if args is Ellipsis: @@ -1715,22 +1749,22 @@ def __getitem_inner__(self, params): if not isinstance(args, tuple): args = (args,) args = tuple(_type_convert(arg) for arg in args) - params = args + (result,) - return self.copy_with(params) + args = args + (result,) + return self.copy_with(args) class _TupleType(_SpecialGenericAlias, _root=True): @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if len(params) >= 2 and params[-1] is ...: + def __getitem__(self, args): + if not isinstance(args, tuple): + args = (args,) + if len(args) >= 2 and args[-1] is ...: msg = "Tuple[t, ...]: t must be a type." - params = tuple(_type_check(p, msg) for p in params[:-1]) - return self.copy_with((*params, _TypingEllipsis)) + args = tuple(_type_check(p, msg) for p in args[:-1]) + return self.copy_with((*args, _TypingEllipsis)) msg = "Tuple[t0, t1, ...]: each t must be a type." 
- params = tuple(_type_check(p, msg) for p in params) - return self.copy_with(params) + args = tuple(_type_check(p, msg) for p in args) + return self.copy_with(args) class _UnionGenericAlias(_NotIterable, _GenericAlias, _root=True): @@ -1771,7 +1805,8 @@ def __reduce__(self): def _value_and_type_iter(parameters): - return ((p, type(p)) for p in parameters) + for param in parameters: + yield param, type(param) class _LiteralGenericAlias(_GenericAlias, _root=True): @@ -1952,6 +1987,7 @@ def _caller(depth=1, default='__main__'): pass return None + def _allow_reckless_class_checks(depth=2): """Allow instance and class checks for special stdlib modules. @@ -1978,7 +2014,8 @@ def _lazy_load_getattr_static(): return getattr_static -_cleanups.append(_lazy_load_getattr_static.cache_clear) +_CACHE_CLEANUPS.append(_lazy_load_getattr_static.cache_clear) + def _pickle_psargs(psargs): return ParamSpecArgs, (psargs.__origin__,) @@ -2171,7 +2208,7 @@ def __init_subclass__(cls, *args, **kwargs): # Determine if this is a protocol or a concrete subclass. if not cls.__dict__.get('_is_protocol', False): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) + cls._is_protocol = any(base is Protocol for base in cls.__bases__) # Set (or override) the protocol subclass hook. if '__subclasshook__' not in cls.__dict__: @@ -2200,9 +2237,9 @@ def __init__(self, origin, metadata): super().__init__(origin, origin, name='Annotated') self.__metadata__ = metadata - def copy_with(self, params): - assert len(params) == 1 - new_type = params[0] + def copy_with(self, args): + assert len(args) == 1 + new_type = args[0] return _AnnotatedAlias(new_type, self.__metadata__) def __repr__(self): @@ -2236,7 +2273,7 @@ def __mro_entries__(self, bases): @_TypedCacheSpecialForm @_tp_cache(typed=True) -def Annotated(self, *params): +def Annotated(self, *args): """Add context-specific metadata to a type. 
Example: Annotated[int, runtime_check.Unsigned] indicates to the @@ -2283,16 +2320,16 @@ def Annotated(self, *params): where T1, T2 etc. are TypeVars, which would be invalid, because only one type should be passed to Annotated. """ - if len(params) < 2: + if len(args) < 2: raise TypeError("Annotated[...] should be used " "with at least two arguments (a type and an " "annotation).") - if _is_unpacked_typevartuple(params[0]): + if _is_unpacked_typevartuple(args[0]): raise TypeError("Annotated[...] should not be used with an " "unpacked TypeVarTuple") msg = "Annotated[t, ...]: t must be a type." - origin = _type_check(params[0], msg, allow_special_forms=True) - metadata = tuple(params[1:]) + origin = _type_check(args[0], msg, allow_special_forms=True) + metadata = tuple(args[1:]) return _AnnotatedAlias(origin, metadata) @@ -2365,9 +2402,10 @@ def greet(name: str) -> None: return val -_allowed_types = (types.FunctionType, types.BuiltinFunctionType, - types.MethodType, types.ModuleType, - WrapperDescriptorType, MethodWrapperType, MethodDescriptorType) +_GET_TYPE_HINTS_ALLOWED_TYPES = ( + types.FunctionType, types.BuiltinFunctionType, types.MethodType, types.ModuleType, + WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, +) def get_type_hints(obj, globalns=None, localns=None, include_extras=False): @@ -2408,12 +2446,14 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False): hints = {} for base in reversed(obj.__mro__): if globalns is None: - base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {}) + base_globals = getattr( + sys.modules.get(base.__module__, None), '__dict__', {} + ) else: base_globals = globalns - ann = base.__dict__.get('__annotations__', {}) - if isinstance(ann, types.GetSetDescriptorType): - ann = {} + annotations = base.__dict__.get('__annotations__', {}) + if isinstance(annotations, types.GetSetDescriptorType): + annotations = {} base_locals = dict(vars(base)) if localns is None else localns 
if localns is None and globalns is None: # This is surprising, but required. Before Python 3.10, @@ -2423,14 +2463,22 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False): # *base_globals* first rather than *base_locals*. # This only affects ForwardRefs. base_globals, base_locals = base_locals, base_globals - for name, value in ann.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = ForwardRef(value, is_argument=False, is_class=True) - value = _eval_type(value, base_globals, base_locals, base.__type_params__) - hints[name] = value - return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()} + for name, annotation in annotations.items(): + if annotation is None: + annotation = type(None) + if isinstance(annotation, str): + annotation = ForwardRef( + annotation, is_argument=False, is_class=True + ) + annotation = _eval_type( + annotation, base_globals, base_locals, base.__type_params__ + ) + hints[name] = annotation + if include_extras: + return hints + return { + name: _strip_annotations(annotation) for name, annotation in hints.items() + } if globalns is None: if isinstance(obj, types.ModuleType): @@ -2448,54 +2496,55 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False): hints = getattr(obj, '__annotations__', None) if hints is None: # Return empty annotations for something that _could_ have them. 
- if isinstance(obj, _allowed_types): + if isinstance(obj, _GET_TYPE_HINTS_ALLOWED_TYPES): return {} else: raise TypeError('{!r} is not a module, class, method, ' 'or function.'.format(obj)) hints = dict(hints) type_params = getattr(obj, "__type_params__", ()) - for name, value in hints.items(): - if value is None: - value = type(None) - if isinstance(value, str): + for name, annotation in hints.items(): + if annotation is None: + annotation = type(None) + elif isinstance(annotation, str): # class-level forward refs were handled above, this must be either # a module-level annotation or a function argument annotation - value = ForwardRef( - value, + annotation = ForwardRef( + annotation, is_argument=not isinstance(obj, types.ModuleType), is_class=False, ) - hints[name] = _eval_type(value, globalns, localns, type_params) + hints[name] = _eval_type(annotation, globalns, localns, type_params) return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()} -def _strip_annotations(t): +def _strip_annotations(typ): """Strip the annotations from a given type.""" - if isinstance(t, _AnnotatedAlias): - return _strip_annotations(t.__origin__) - if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly): - return _strip_annotations(t.__args__[0]) - if isinstance(t, _GenericAlias): - stripped_args = tuple(_strip_annotations(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - return t.copy_with(stripped_args) - if isinstance(t, GenericAlias): - stripped_args = tuple(_strip_annotations(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - return GenericAlias(t.__origin__, stripped_args) - if isinstance(t, types.UnionType): - stripped_args = tuple(_strip_annotations(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - return functools.reduce(operator.or_, stripped_args) - - return t - - -def get_origin(tp): + match typ: + case _AnnotatedAlias(): + return 
_strip_annotations(typ.__origin__) + case object(__origin__=origin) if origin in {Required, NotRequired, ReadOnly}: + return _strip_annotations(typ.__args__[0]) + case _GenericAlias(__args__=args): + stripped_args = tuple(_strip_annotations(arg) for arg in args) + if stripped_args == args: + return typ + return typ.copy_with(stripped_args) + case GenericAlias(__args__=args): + stripped_args = tuple(_strip_annotations(arg) for arg in args) + if stripped_args == args: + return typ + return GenericAlias(typ.__origin__, stripped_args) + case types.UnionType(__args__=args): + stripped_args = tuple(_strip_annotations(arg) for arg in args) + if stripped_args == args: + return typ + return functools.reduce(operator.or_, stripped_args) + case _: + return typ + + +def get_origin(typ): """Get the unsubscripted version of a type. This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, @@ -2513,19 +2562,20 @@ def get_origin(tp): >>> assert get_origin(List[Tuple[T, T]][int]) is list >>> assert get_origin(P.args) is P """ - if isinstance(tp, _AnnotatedAlias): - return Annotated - if isinstance(tp, (_BaseGenericAlias, GenericAlias, - ParamSpecArgs, ParamSpecKwargs)): - return tp.__origin__ - if tp is Generic: - return Generic - if isinstance(tp, types.UnionType): - return types.UnionType - return None - - -def get_args(tp): + match typ: + case _AnnotatedAlias(): + return Annotated + case _BaseGenericAlias() | GenericAlias() | ParamSpecArgs() | ParamSpecKwargs(): + return typ.__origin__ + case types.UnionType(): + return types.UnionType + case typ if typ is Generic: + return Generic + case _: + return None + + +def get_args(typ): """Get type arguments with all substitutions performed. For unions, basic simplifications used by Union constructor are performed. 
@@ -2539,19 +2589,21 @@ def get_args(tp): >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) >>> assert get_args(Callable[[], T][int]) == ([], int) """ - if isinstance(tp, _AnnotatedAlias): - return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (_GenericAlias, GenericAlias)): - res = tp.__args__ - if _should_unflatten_callable_args(tp, res): - res = (list(res[:-1]), res[-1]) - return res - if isinstance(tp, types.UnionType): - return tp.__args__ - return () - - -def is_typeddict(tp): + match typ: + case _AnnotatedAlias(__origin__=origin, __metadata__=metadata): + return (origin,) + metadata + case _GenericAlias() | GenericAlias(): + res = typ.__args__ + if _should_unflatten_callable_args(typ, res): + res = (list(res[:-1]), res[-1]) + return res + case types.UnionType(__args__=args): + return args + case _: + return () + + +def is_typeddict(typ): """Check if an annotation is a TypedDict class. For example:: @@ -2566,7 +2618,7 @@ def is_typeddict(tp): >>> is_typeddict(dict) False """ - return isinstance(tp, _TypedDictMeta) + return isinstance(typ, _TypedDictMeta) _ASSERT_NEVER_REPR_MAX_LENGTH = 100 @@ -2597,7 +2649,7 @@ def int_or_str(arg: int | str) -> None: raise AssertionError(f"Expected code to be unreachable, but got: {value}") -def no_type_check(arg): +def no_type_check(cls_or_func): """Decorator to indicate that annotations are not type hints. The argument must be a class or function; if it is a class, it @@ -2606,13 +2658,13 @@ def no_type_check(arg): This mutates the function(s) or class(es) in place. 
""" - if isinstance(arg, type): - for key in dir(arg): - obj = getattr(arg, key) + if isinstance(cls_or_func, type): + for key in dir(cls_or_func): + obj = getattr(cls_or_func, key) if ( not hasattr(obj, '__qualname__') - or obj.__qualname__ != f'{arg.__qualname__}.{obj.__name__}' - or getattr(obj, '__module__', None) != arg.__module__ + or obj.__qualname__ != f'{cls_or_func.__qualname__}.{obj.__name__}' + or getattr(obj, '__module__', None) != cls_or_func.__module__ ): # We only modify objects that are defined in this type directly. # If classes / methods are nested in multiple layers, @@ -2627,10 +2679,10 @@ def no_type_check(arg): if isinstance(obj, type): no_type_check(obj) try: - arg.__no_type_check__ = True + cls_or_func.__no_type_check__ = True except TypeError: # built-in classes pass - return arg + return cls_or_func def no_type_check_decorator(decorator): @@ -2650,7 +2702,7 @@ def wrapped_decorator(*args, **kwds): return wrapped_decorator -def _overload_dummy(*args, **kwds): +def _overload_dummy(*args, **kwargs): """Helper for @overload to raise when called.""" raise NotImplementedError( "You should not call an overloaded function. " @@ -2660,7 +2712,7 @@ def _overload_dummy(*args, **kwds): # {module: {qualname: {firstlineno: func}}} -_overload_registry = defaultdict(functools.partial(defaultdict, dict)) +_OVERLOAD_REGISTRY = defaultdict(functools.partial(defaultdict, dict)) def overload(func): @@ -2697,7 +2749,7 @@ def utf8(value): # classmethod and staticmethod f = getattr(func, "__func__", func) try: - _overload_registry[f.__module__][f.__qualname__][f.__code__.co_firstlineno] = func + _OVERLOAD_REGISTRY[f.__module__][f.__qualname__][f.__code__.co_firstlineno] = func except AttributeError: # Not a normal function; ignore. 
pass @@ -2708,9 +2760,9 @@ def get_overloads(func): """Return all defined overloads for *func* as a sequence.""" # classmethod and staticmethod f = getattr(func, "__func__", func) - if f.__module__ not in _overload_registry: + if f.__module__ not in _OVERLOAD_REGISTRY: return [] - mod_dict = _overload_registry[f.__module__] + mod_dict = _OVERLOAD_REGISTRY[f.__module__] if f.__qualname__ not in mod_dict: return [] return list(mod_dict[f.__qualname__].values()) @@ -2718,10 +2770,10 @@ def get_overloads(func): def clear_overloads(): """Clear all overloads in the registry.""" - _overload_registry.clear() + _OVERLOAD_REGISTRY.clear() -def final(f): +def final(cls_or_method): """Decorator to indicate final methods and final classes. Use this decorator to indicate to type checkers that the decorated @@ -2748,13 +2800,13 @@ class Other(Leaf): # Error reported by type checker object to allow runtime introspection. """ try: - f.__final__ = True + cls_or_method.__final__ = True except (AttributeError, TypeError): # Skip the attribute silently if it is not writable. # AttributeError happens if the object has __slots__ or a # read-only property, TypeError if it's a builtin class. pass - return f + return cls_or_method # Some unconstrained type variables. These were initially used by the container types. 
@@ -2954,25 +3006,28 @@ def _make_nmtuple(name, types, module, defaults = ()): # attributes prohibited to set in NamedTuple class syntax -_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__', - '_fields', '_field_defaults', - '_make', '_replace', '_asdict', '_source'}) +_PROHIBITED_NAMEDTUPLE_FIELDS = frozenset({ + '__new__', '__init__', '__slots__', '__getnewargs__', '_fields', + '_field_defaults', '_make', '_replace', '_asdict', '_source', +}) -_special = frozenset({'__module__', '__name__', '__annotations__'}) +_SPECIAL_CLASS_ATTRIBUTES = frozenset({ + '__module__', '__name__', '__annotations__', +}) class NamedTupleMeta(type): - def __new__(cls, typename, bases, ns): + def __new__(cls, typename, bases, namespace): assert _NamedTuple in bases for base in bases: if base is not _NamedTuple and base is not Generic: raise TypeError( 'can only inherit from a NamedTuple type and Generic') bases = tuple(tuple if base is _NamedTuple else base for base in bases) - types = ns.get('__annotations__', {}) + types = namespace.get('__annotations__', {}) default_names = [] for field_name in types: - if field_name in ns: + if field_name in namespace: default_names.append(field_name) elif default_names: raise TypeError(f"Non-default namedtuple field {field_name} " @@ -2980,17 +3035,17 @@ def __new__(cls, typename, bases, ns): f"{'s' if len(default_names) > 1 else ''} " f"{', '.join(default_names)}") nm_tpl = _make_nmtuple(typename, types.items(), - defaults=[ns[n] for n in default_names], - module=ns['__module__']) + defaults=[namespace[n] for n in default_names], + module=namespace['__module__']) nm_tpl.__bases__ = bases if Generic in bases: class_getitem = _generic_class_getitem nm_tpl.__class_getitem__ = classmethod(class_getitem) # update from user namespace without overriding special namedtuple attributes - for key, val in ns.items(): - if key in _prohibited: + for key, val in namespace.items(): + if key in _PROHIBITED_NAMEDTUPLE_FIELDS: raise 
AttributeError("Cannot overwrite NamedTuple attribute " + key) - elif key not in _special: + elif key not in _SPECIAL_CLASS_ATTRIBUTES: if key not in nm_tpl._fields: setattr(nm_tpl, key, val) try: @@ -3074,12 +3129,15 @@ class Employee(NamedTuple): nt.__orig_bases__ = (NamedTuple,) return nt + _NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {}) + def _namedtuple_mro_entries(bases): assert NamedTuple in bases return (_NamedTuple,) + NamedTuple.__mro_entries__ = _namedtuple_mro_entries @@ -3106,7 +3164,7 @@ def _get_typeddict_qualifiers(annotation_type): class _TypedDictMeta(type): - def __new__(cls, name, bases, ns, total=True): + def __new__(cls, name, bases, namespace, total=True): """Create a new typed dict class object. This method is called when TypedDict is subclassed, @@ -3119,18 +3177,18 @@ def __new__(cls, name, bases, ns, total=True): raise TypeError('cannot inherit from both a TypedDict type ' 'and a non-TypedDict base class') - if any(issubclass(b, Generic) for b in bases): + if any(issubclass(base, Generic) for base in bases): generic_base = (Generic,) else: generic_base = () - tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns) + tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), namespace) if not hasattr(tp_dict, '__orig_bases__'): tp_dict.__orig_bases__ = bases annotations = {} - own_annotations = ns.get('__annotations__', {}) + own_annotations = namespace.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" own_annotations = { n: _type_check(tp, msg, module=tp_dict.__module__) @@ -3277,22 +3335,23 @@ class DatabaseUser(TypedDict): warnings._deprecated(deprecated_thing, message=deprecation_msg, remove=(3, 15)) fields = {} - ns = {'__annotations__': dict(fields)} + namespace = {'__annotations__': dict(fields)} module = _caller() if module is not None: # Setting correct module is necessary to make typed dict classes pickleable. 
- ns['__module__'] = module + namespace['__module__'] = module - td = _TypedDictMeta(typename, (), ns, total=total) + td = _TypedDictMeta(typename, (), namespace, total=total) td.__orig_bases__ = (TypedDict,) return td + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) TypedDict.__mro_entries__ = lambda bases: (_TypedDict,) @_SpecialForm -def Required(self, parameters): +def Required(self, args): """Special typing construct to mark a TypedDict key as required. This is mainly useful for total=False TypedDicts. @@ -3311,12 +3370,12 @@ class Movie(TypedDict, total=False): There is no runtime checking that a required key is actually provided when instantiating a related TypedDict. """ - item = _type_check(parameters, f'{self._name} accepts only a single type.') + item = _type_check(args, f'{self._name} accepts only a single type.') return _GenericAlias(self, (item,)) @_SpecialForm -def NotRequired(self, parameters): +def NotRequired(self, args): """Special typing construct to mark a TypedDict key as potentially missing. For example:: @@ -3330,12 +3389,12 @@ class Movie(TypedDict): year=1999, ) """ - item = _type_check(parameters, f'{self._name} accepts only a single type.') + item = _type_check(args, f'{self._name} accepts only a single type.') return _GenericAlias(self, (item,)) @_SpecialForm -def ReadOnly(self, parameters): +def ReadOnly(self, args): """A special typing construct to mark an item of a TypedDict as read-only. For example:: @@ -3350,7 +3409,7 @@ def mutate_movie(m: Movie) -> None: There is no runtime checking for this property. """ - item = _type_check(parameters, f'{self._name} accepts only a single type.') + item = _type_check(args, f'{self._name} accepts only a single type.') return _GenericAlias(self, (item,)) @@ -3378,12 +3437,12 @@ def name_by_id(user_id: UserId) -> str: __call__ = _idfunc - def __init__(self, name, tp): + def __init__(self, name, typ): self.__qualname__ = name if '.' 
in name: name = name.rpartition('.')[-1] self.__name__ = name - self.__supertype__ = tp + self.__supertype__ = typ def_mod = _caller() if def_mod != 'typing': self.__module__ = def_mod @@ -3554,7 +3613,7 @@ def encoding(self) -> str: @property @abstractmethod - def errors(self) -> Optional[str]: + def errors(self) -> str | None: pass @property @@ -3667,8 +3726,8 @@ class CustomerModel(ModelBase): See PEP 681 for more details. """ - def decorator(cls_or_fn): - cls_or_fn.__dataclass_transform__ = { + def decorator(cls_or_function): + cls_or_function.__dataclass_transform__ = { "eq_default": eq_default, "order_default": order_default, "kw_only_default": kw_only_default, @@ -3676,7 +3735,7 @@ def decorator(cls_or_fn): "field_specifiers": field_specifiers, "kwargs": kwargs, } - return cls_or_fn + return cls_or_function return decorator @@ -3718,7 +3777,7 @@ def method(self) -> None: return method -def is_protocol(tp: type, /) -> bool: +def is_protocol(typ: type, /) -> bool: """Return True if the given type is a Protocol. Example:: @@ -3733,13 +3792,13 @@ def is_protocol(tp: type, /) -> bool: False """ return ( - isinstance(tp, type) - and getattr(tp, '_is_protocol', False) - and tp != Protocol + isinstance(typ, type) + and getattr(typ, '_is_protocol', False) + and typ != Protocol ) -def get_protocol_members(tp: type, /) -> frozenset[str]: +def get_protocol_members(typ: type, /) -> frozenset[str]: """Return the set of members defined in a Protocol. Example:: @@ -3753,9 +3812,9 @@ def get_protocol_members(tp: type, /) -> frozenset[str]: Raise a TypeError for arguments that are not Protocols. 
""" - if not is_protocol(tp): - raise TypeError(f'{tp!r} is not a Protocol') - return frozenset(tp.__protocol_attrs__) + if not is_protocol(typ): + raise TypeError(f'{typ!r} is not a Protocol') + return frozenset(typ.__protocol_attrs__) def __getattr__(attr): @@ -3764,23 +3823,27 @@ def __getattr__(attr): Soft-deprecated objects which are costly to create are only created on-demand here. """ - if attr in {"Pattern", "Match"}: - import re - obj = _alias(getattr(re, attr), 1) - elif attr in {"ContextManager", "AsyncContextManager"}: - import contextlib - obj = _alias(getattr(contextlib, f"Abstract{attr}"), 2, name=attr, defaults=(bool | None,)) - elif attr == "_collect_parameters": - import warnings + match attr: + case "Pattern" | "Match": + import re + obj = _alias(getattr(re, attr), 1) + + case "ContextManager" | "AsyncContextManager": + import contextlib + obj = _alias(getattr(contextlib, f"Abstract{attr}"), 2, name=attr, defaults=(bool | None,)) + + case "_collect_parameters": + import warnings + depr_message = ( + "The private _collect_parameters function is deprecated and will be" + " removed in a future version of Python. Any use of private functions" + " is discouraged and may break in the future." + ) + warnings.warn(depr_message, category=DeprecationWarning, stacklevel=2) + obj = _collect_type_parameters + + case _: + raise AttributeError(f"module {__name__!r} has no attribute {attr!r}") - depr_message = ( - "The private _collect_parameters function is deprecated and will be" - " removed in a future version of Python. Any use of private functions" - " is discouraged and may break in the future." - ) - warnings.warn(depr_message, category=DeprecationWarning, stacklevel=2) - obj = _collect_type_parameters - else: - raise AttributeError(f"module {__name__!r} has no attribute {attr!r}") globals()[attr] = obj return obj