From 211d74252b506bf0aa7ced5053428004a57c2ae9 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Fri, 14 Jul 2023 14:13:38 +0300 Subject: [PATCH 01/88] Update dev version to 1.6.0+dev (#15671) Created the [release-1.5 branch](https://github.com/python/mypy/tree/release-1.5), updating version --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index 42cda2fc7794..512890ce7d2b 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". -__version__ = "1.5.0+dev" +__version__ = "1.6.0+dev" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From a538cc98d54031f25e44787a90649ea909877f12 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Fri, 14 Jul 2023 14:33:32 +0300 Subject: [PATCH 02/88] fix cherry-pick-typeshed (#15672) It should exclude test_cases too --- misc/cherry-pick-typeshed.py | 1 + 1 file changed, 1 insertion(+) diff --git a/misc/cherry-pick-typeshed.py b/misc/cherry-pick-typeshed.py index af08009c2a8f..7e3b8b56e65f 100644 --- a/misc/cherry-pick-typeshed.py +++ b/misc/cherry-pick-typeshed.py @@ -53,6 +53,7 @@ def main() -> None: "--index", "--directory=mypy/typeshed", "--exclude=**/tests/**", + "--exclude=**/test_cases/**", diff_file, ], check=True, From 1958cb62f4de7492fb154323f3fdb7a0b6b51fa7 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 14 Jul 2023 20:18:54 +0200 Subject: [PATCH 03/88] Remove `--py2` argument (#15670) --- mypy/defaults.py | 2 -- mypy/main.py | 8 -------- mypy/test/helpers.py | 2 +- 3 files changed, 1 insertion(+), 11 deletions(-) diff --git a/mypy/defaults.py b/mypy/defaults.py index 2a881975a27c..1bd87de74bc9 
100644 --- a/mypy/defaults.py +++ b/mypy/defaults.py @@ -3,8 +3,6 @@ import os from typing import Final -PYTHON2_VERSION: Final = (2, 7) - # Earliest fully supported Python 3.x version. Used as the default Python # version in tests. Mypy wheels should be built starting with this version, # and CI tests should be run on this version (and later versions). diff --git a/mypy/main.py b/mypy/main.py index f6e617e4d84f..6173fd6fc1a8 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -594,14 +594,6 @@ def add_invertible_flag( help="Type check code assuming it will be running on Python x.y", dest="special-opts:python_version", ) - platform_group.add_argument( - "-2", - "--py2", - dest="special-opts:python_version", - action="store_const", - const=defaults.PYTHON2_VERSION, - help="Use Python 2 mode (same as --python-version 2.7)", - ) platform_group.add_argument( "--platform", action="store", diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index d2c92614048a..d1850219e60a 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -345,7 +345,7 @@ def parse_options( options.force_union_syntax = True # Allow custom python version to override testfile_pyversion. 
- if all(flag.split("=")[0] not in ["--python-version", "-2", "--py2"] for flag in flag_list): + if all(flag.split("=")[0] != "--python-version" for flag in flag_list): options.python_version = testfile_pyversion(testcase.file) if testcase.config.getoption("--mypy-verbose"): From 14743a1cdd2a07ecc56ce01cc9d54130fb32931e Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sat, 15 Jul 2023 10:51:04 +0300 Subject: [PATCH 04/88] Bump minimum Python type check target version to 3.7 (#15668) --- mypy/checkexpr.py | 2 +- mypy/checkstrformat.py | 15 ------ mypy/defaults.py | 2 +- mypy/messages.py | 13 +---- mypy/semanal.py | 18 ++----- mypy/semanal_namedtuple.py | 3 -- mypy/semanal_pass1.py | 7 ++- test-data/unit/README.md | 2 +- test-data/unit/check-async-await.test | 40 ++------------- test-data/unit/check-class-namedtuple.test | 58 ++-------------------- test-data/unit/check-fastparse.test | 1 - test-data/unit/check-flags.test | 12 ++--- test-data/unit/check-formatting.test | 7 --- test-data/unit/check-generic-alias.test | 4 -- test-data/unit/check-generics.test | 2 +- test-data/unit/check-incremental.test | 1 - test-data/unit/check-inference.test | 1 - test-data/unit/check-modules.test | 21 -------- test-data/unit/check-namedtuple.test | 13 +---- test-data/unit/check-narrowing.test | 1 - test-data/unit/check-newsemanal.test | 12 ----- test-data/unit/check-newsyntax.test | 41 ++++----------- test-data/unit/check-singledispatch.test | 14 ------ test-data/unit/check-tuples.test | 3 +- test-data/unit/check-typeddict.test | 16 +----- test-data/unit/check-underscores.test | 6 --- test-data/unit/check-union-or-syntax.test | 11 ++-- test-data/unit/check-unreachable-code.test | 6 +-- test-data/unit/cmdline.test | 22 ++++---- test-data/unit/daemon.test | 20 ++++---- test-data/unit/parse-errors.test | 11 +--- test-data/unit/pythoneval.test | 2 +- test-data/unit/reports.test | 2 +- 33 files changed, 75 insertions(+), 314 deletions(-) diff --git a/mypy/checkexpr.py 
b/mypy/checkexpr.py index 46a5e35f320d..62e2298ba59d 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3072,7 +3072,7 @@ def visit_op_expr(self, e: OpExpr) -> Type: # Expressions of form [...] * e get special type inference. return self.check_list_multiply(e) if e.op == "%": - if isinstance(e.left, BytesExpr) and self.chk.options.python_version >= (3, 5): + if isinstance(e.left, BytesExpr): return self.strfrm_checker.check_str_interpolation(e.left, e.right) if isinstance(e.left, StrExpr): return self.strfrm_checker.check_str_interpolation(e.left, e.right) diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py index cda603be086b..eeb9e7633756 100644 --- a/mypy/checkstrformat.py +++ b/mypy/checkstrformat.py @@ -682,14 +682,6 @@ def check_str_interpolation(self, expr: FormatStringExpr, replacements: Expressi self.exprchk.accept(expr) specifiers = parse_conversion_specifiers(expr.value) has_mapping_keys = self.analyze_conversion_specifiers(specifiers, expr) - if isinstance(expr, BytesExpr) and self.chk.options.python_version < (3, 5): - self.msg.fail( - "Bytes formatting is only supported in Python 3.5 and later", - replacements, - code=codes.STRING_FORMATTING, - ) - return AnyType(TypeOfAny.from_error) - if has_mapping_keys is None: pass # Error was reported elif has_mapping_keys: @@ -1023,13 +1015,6 @@ def conversion_type( NUMERIC_TYPES = NUMERIC_TYPES_NEW if format_call else NUMERIC_TYPES_OLD INT_TYPES = REQUIRE_INT_NEW if format_call else REQUIRE_INT_OLD if p == "b" and not format_call: - if self.chk.options.python_version < (3, 5): - self.msg.fail( - 'Format character "b" is only supported in Python 3.5 and later', - context, - code=codes.STRING_FORMATTING, - ) - return None if not isinstance(expr, BytesExpr): self.msg.fail( 'Format character "b" is only supported on bytes patterns', diff --git a/mypy/defaults.py b/mypy/defaults.py index 1bd87de74bc9..6a09a61a461e 100644 --- a/mypy/defaults.py +++ b/mypy/defaults.py @@ -10,7 +10,7 @@ # Earliest 
Python 3.x version supported via --python-version 3.x. To run # mypy, at least version PYTHON3_VERSION is needed. -PYTHON3_VERSION_MIN: Final = (3, 4) +PYTHON3_VERSION_MIN: Final = (3, 7) # Keep in sync with typeshed's python support CACHE_DIR: Final = ".mypy_cache" CONFIG_FILE: Final = ["mypy.ini", ".mypy.ini"] diff --git a/mypy/messages.py b/mypy/messages.py index ae7fba1473ac..8b88cc1678a4 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1728,7 +1728,6 @@ def need_annotation_for_var( self, node: SymbolNode, context: Context, python_version: tuple[int, int] | None = None ) -> None: hint = "" - has_variable_annotations = not python_version or python_version >= (3, 6) pep604_supported = not python_version or python_version >= (3, 10) # type to recommend the user adds recommended_type = None @@ -1749,18 +1748,10 @@ def need_annotation_for_var( type_dec = f"{type_dec}, {type_dec}" recommended_type = f"{alias}[{type_dec}]" if recommended_type is not None: - if has_variable_annotations: - hint = f' (hint: "{node.name}: {recommended_type} = ...")' - else: - hint = f' (hint: "{node.name} = ... 
# type: {recommended_type}")' - - if has_variable_annotations: - needed = "annotation" - else: - needed = "comment" + hint = f' (hint: "{node.name}: {recommended_type} = ...")' self.fail( - f'Need type {needed} for "{unmangle(node.name)}"{hint}', + f'Need type annotation for "{unmangle(node.name)}"{hint}', context, code=codes.VAR_ANNOTATED, ) diff --git a/mypy/semanal.py b/mypy/semanal.py index f4f281e7a77a..5b1aea4239f5 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2521,12 +2521,7 @@ def visit_import_from(self, imp: ImportFrom) -> None: elif fullname in self.missing_modules: missing_submodule = True # If it is still not resolved, check for a module level __getattr__ - if ( - module - and not node - and (module.is_stub or self.options.python_version >= (3, 7)) - and "__getattr__" in module.names - ): + if module and not node and "__getattr__" in module.names: # We store the fullname of the original definition so that we can # detect whether two imported names refer to the same thing. fullname = module_id + "." 
+ id @@ -5446,11 +5441,8 @@ def visit_yield_expr(self, e: YieldExpr) -> None: blocker=True, ) elif self.function_stack[-1].is_coroutine: - if self.options.python_version < (3, 6): - self.fail('"yield" in async function', e, serious=True, blocker=True) - else: - self.function_stack[-1].is_generator = True - self.function_stack[-1].is_async_generator = True + self.function_stack[-1].is_generator = True + self.function_stack[-1].is_async_generator = True else: self.function_stack[-1].is_generator = True if e.expr: @@ -5721,9 +5713,7 @@ def get_module_symbol(self, node: MypyFile, name: str) -> SymbolTableNode | None sym = SymbolTableNode(GDEF, self.modules[fullname]) elif self.is_incomplete_namespace(module): self.record_incomplete_ref() - elif "__getattr__" in names and ( - node.is_stub or self.options.python_version >= (3, 7) - ): + elif "__getattr__" in names: gvar = self.create_getattr_var(names["__getattr__"], name, fullname) if gvar: sym = SymbolTableNode(GDEF, gvar) diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 42f7b10f3333..51ea90e07f3d 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -142,9 +142,6 @@ def check_namedtuple_classdef( * valid statements or None, if any of the types are not ready. 
""" - if self.options.python_version < (3, 6) and not is_stub_file: - self.fail("NamedTuple class syntax is only supported in Python 3.6", defn) - return [], [], {}, [] if len(defn.base_type_exprs) > 1: self.fail("NamedTuple should be a single base", defn) items: list[str] = [] diff --git a/mypy/semanal_pass1.py b/mypy/semanal_pass1.py index 2df06feacca8..aaa01969217a 100644 --- a/mypy/semanal_pass1.py +++ b/mypy/semanal_pass1.py @@ -45,10 +45,9 @@ class SemanticAnalyzerPreAnalysis(TraverserVisitor): import sys - def do_stuff(): - # type: () -> None: - if sys.python_version < (3,): - import xyz # Only available in Python 2 + def do_stuff() -> None: + if sys.version_info >= (3, 10): + import xyz # Only available in Python 3.10+ xyz.whatever() ... diff --git a/test-data/unit/README.md b/test-data/unit/README.md index f2c727b43543..5a9416603541 100644 --- a/test-data/unit/README.md +++ b/test-data/unit/README.md @@ -12,7 +12,7 @@ feature you added. If you added a new `check-*.test` file, it will be autodiscov Add the test in this format anywhere in the file: [case testNewSyntaxBasics] - # flags: --python-version 3.6 + # flags: --python-version 3.10 x: int x = 5 y: int = 5 diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index bcf55d84ff26..3b7ef53b6bd6 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -183,7 +183,6 @@ async def f() -> None: [typing fixtures/typing-async.pyi] [case testAsyncForComprehension] -# flags: --python-version 3.6 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple T = TypeVar('T') @@ -223,7 +222,6 @@ async def generatorexp(obj: Iterable[int]): [typing fixtures/typing-async.pyi] [case testAsyncForComprehensionErrors] -# flags: --python-version 3.6 from typing import Generic, Iterable, TypeVar, AsyncIterator, Tuple T = TypeVar('T') @@ -240,16 +238,10 @@ class asyncify(Generic[T], AsyncIterator[T]): raise StopAsyncIteration async def 
wrong_iterable(obj: Iterable[int]): - [i async for i in obj] - [i for i in asyncify(obj)] - {i: i async for i in obj} - {i: i for i in asyncify(obj)} - -[out] -main:18: error: "Iterable[int]" has no attribute "__aiter__" (not async iterable) -main:19: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) -main:20: error: "Iterable[int]" has no attribute "__aiter__" (not async iterable) -main:21: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) + [i async for i in obj] # E: "Iterable[int]" has no attribute "__aiter__" (not async iterable) + [i for i in asyncify(obj)] # E: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) + {i: i async for i in obj} # E: "Iterable[int]" has no attribute "__aiter__" (not async iterable) + {i: i for i in asyncify(obj)} # E: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] @@ -340,17 +332,6 @@ async def f() -> None: [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] -[case testNoYieldInAsyncDef] -# flags: --python-version 3.5 - -async def f(): - yield None # E: "yield" in async function -async def g(): - yield # E: "yield" in async function -async def h(): - x = yield # E: "yield" in async function -[builtins fixtures/async_await.pyi] - [case testNoYieldFromInAsyncDef] async def f(): @@ -422,7 +403,6 @@ def f() -> Generator[int, str, int]: -- --------------------------------------------------------------------- [case testAsyncGenerator] -# flags: --python-version 3.6 from typing import AsyncGenerator, Generator async def f() -> int: @@ -450,7 +430,6 @@ async def wrong_return() -> Generator[int, None, None]: # E: The return type of [typing fixtures/typing-async.pyi] [case testAsyncGeneratorReturnIterator] -# flags: --python-version 3.6 from typing import AsyncIterator async def gen() -> AsyncIterator[int]: @@ -466,7 +445,6 
@@ async def use_gen() -> None: [typing fixtures/typing-async.pyi] [case testAsyncGeneratorManualIter] -# flags: --python-version 3.6 from typing import AsyncGenerator async def genfunc() -> AsyncGenerator[int, None]: @@ -484,7 +462,6 @@ async def user() -> None: [typing fixtures/typing-async.pyi] [case testAsyncGeneratorAsend] -# flags: --python-version 3.6 from typing import AsyncGenerator async def f() -> None: @@ -505,7 +482,6 @@ async def h() -> None: [typing fixtures/typing-async.pyi] [case testAsyncGeneratorAthrow] -# flags: --python-version 3.6 from typing import AsyncGenerator async def gen() -> AsyncGenerator[str, int]: @@ -524,7 +500,6 @@ async def h() -> None: [typing fixtures/typing-async.pyi] [case testAsyncGeneratorNoSyncIteration] -# flags: --python-version 3.6 from typing import AsyncGenerator async def gen() -> AsyncGenerator[int, None]: @@ -532,17 +507,13 @@ async def gen() -> AsyncGenerator[int, None]: yield i def h() -> None: - for i in gen(): + for i in gen(): # E: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) pass [builtins fixtures/dict.pyi] [typing fixtures/typing-async.pyi] -[out] -main:9: error: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"? 
(not iterable) - [case testAsyncGeneratorNoYieldFrom] -# flags: --python-version 3.6 from typing import AsyncGenerator async def f() -> AsyncGenerator[int, None]: @@ -555,7 +526,6 @@ async def gen() -> AsyncGenerator[int, None]: [typing fixtures/typing-async.pyi] [case testAsyncGeneratorNoReturnWithValue] -# flags: --python-version 3.6 from typing import AsyncGenerator async def return_int() -> AsyncGenerator[int, None]: diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index ab2f5f3f6b48..1916cb41bb74 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -1,13 +1,4 @@ -[case testNewNamedTupleOldPythonVersion] -# flags: --python-version 3.5 -from typing import NamedTuple - -class E(NamedTuple): # E: NamedTuple class syntax is only supported in Python 3.6 - pass -[builtins fixtures/tuple.pyi] - [case testNewNamedTupleNoUnderscoreFields] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -17,7 +8,6 @@ class X(NamedTuple): [builtins fixtures/tuple.pyi] [case testNewNamedTupleAccessingAttributes] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -31,7 +21,6 @@ x.z # E: "X" has no attribute "z" [builtins fixtures/tuple.pyi] [case testNewNamedTupleAttributesAreReadOnly] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -47,7 +36,6 @@ a.x = 5 # E: Property "x" defined in "X" is read-only [builtins fixtures/tuple.pyi] [case testNewNamedTupleCreateWithPositionalArguments] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -62,7 +50,6 @@ x = X(1, '2', 3) # E: Too many arguments for "X" [builtins fixtures/tuple.pyi] [case testNewNamedTupleShouldBeSingleBase] -# flags: --python-version 3.6 from typing import NamedTuple class A: ... 
@@ -71,7 +58,6 @@ class X(NamedTuple, A): # E: NamedTuple should be a single base [builtins fixtures/tuple.pyi] [case testCreateNewNamedTupleWithKeywordArguments] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -85,7 +71,6 @@ x = X(y='x') # E: Missing positional argument "x" in call to "X" [builtins fixtures/tuple.pyi] [case testNewNamedTupleCreateAndUseAsTuple] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -98,7 +83,6 @@ a, b, c = x # E: Need more than 2 values to unpack (3 expected) [builtins fixtures/tuple.pyi] [case testNewNamedTupleWithItemTypes] -# flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): @@ -116,7 +100,6 @@ if int(): [builtins fixtures/tuple.pyi] [case testNewNamedTupleConstructorArgumentTypes] -# flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): @@ -130,7 +113,6 @@ N(b='x', a=1) [builtins fixtures/tuple.pyi] [case testNewNamedTupleAsBaseClass] -# flags: --python-version 3.6 from typing import NamedTuple class N(NamedTuple): @@ -151,7 +133,6 @@ if int(): [builtins fixtures/tuple.pyi] [case testNewNamedTupleSelfTypeWithNamedTupleAsBase] -# flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): @@ -172,7 +153,6 @@ class B(A): [out] [case testNewNamedTupleTypeReferenceToClassDerivedFrom] -# flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): @@ -194,7 +174,6 @@ class B(A): [builtins fixtures/tuple.pyi] [case testNewNamedTupleSubtyping] -# flags: --python-version 3.6 from typing import NamedTuple, Tuple class A(NamedTuple): @@ -222,7 +201,6 @@ if int(): [builtins fixtures/tuple.pyi] [case testNewNamedTupleSimpleTypeInference] -# flags: --python-version 3.6 from typing import NamedTuple, Tuple class A(NamedTuple): @@ -239,7 +217,6 @@ a = (1,) # E: Incompatible types in assignment (expression has type "Tuple[int] [builtins fixtures/list.pyi] [case 
testNewNamedTupleMissingClassAttribute] -# flags: --python-version 3.6 from typing import NamedTuple class MyNamedTuple(NamedTuple): @@ -250,7 +227,6 @@ MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x" [builtins fixtures/tuple.pyi] [case testNewNamedTupleEmptyItems] -# flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): @@ -258,7 +234,6 @@ class A(NamedTuple): [builtins fixtures/tuple.pyi] [case testNewNamedTupleForwardRef] -# flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): @@ -271,7 +246,6 @@ a = A(1) # E: Argument 1 to "A" has incompatible type "int"; expected "B" [builtins fixtures/tuple.pyi] [case testNewNamedTupleProperty36] -# flags: --python-version 3.6 from typing import NamedTuple class A(NamedTuple): @@ -288,7 +262,6 @@ C(2).b [builtins fixtures/property.pyi] [case testNewNamedTupleAsDict] -# flags: --python-version 3.6 from typing import NamedTuple, Any class X(NamedTuple): @@ -301,7 +274,6 @@ reveal_type(x._asdict()) # N: Revealed type is "builtins.dict[builtins.str, Any [builtins fixtures/dict.pyi] [case testNewNamedTupleReplaceTyped] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -315,7 +287,6 @@ x._replace(y=5) # E: Argument "y" to "_replace" of "X" has incompatible type "i [builtins fixtures/tuple.pyi] [case testNewNamedTupleFields] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -333,7 +304,6 @@ reveal_type(X.__annotations__) # N: Revealed type is "typing.Mapping[builtins.s [builtins fixtures/dict.pyi] [case testNewNamedTupleUnit] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -345,7 +315,6 @@ x._fields[0] # E: Tuple index out of range [builtins fixtures/tuple.pyi] [case testNewNamedTupleJoinNamedTuple] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -360,7 +329,6 @@ reveal_type([X(3, 'b'), Y(1, 'a')]) # N: Revealed type is 
"builtins.list[Tuple[ [builtins fixtures/list.pyi] [case testNewNamedTupleJoinTuple] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -373,7 +341,6 @@ reveal_type([X(1, 'a'), (3, 'b')]) # N: Revealed type is "builtins.list[Tuple[b [builtins fixtures/list.pyi] [case testNewNamedTupleWithTooManyArguments] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -383,25 +350,17 @@ class X(NamedTuple): [builtins fixtures/tuple.pyi] [case testNewNamedTupleWithInvalidItems2] -# flags: --python-version 3.6 import typing class X(typing.NamedTuple): x: int - y = 1 - x.x: int + y = 1 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" + x.x: int # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" z: str = 'z' - aa: int - -[out] -main:6: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" -main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" -main:9: error: Non-default NamedTuple fields cannot follow default fields - + aa: int # E: Non-default NamedTuple fields cannot follow default fields [builtins fixtures/list.pyi] [case testNewNamedTupleWithoutTypesSpecified] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -410,7 +369,6 @@ class X(NamedTuple): [builtins fixtures/tuple.pyi] [case testTypeUsingTypeCNamedTuple] -# flags: --python-version 3.6 from typing import NamedTuple, Type class N(NamedTuple): @@ -418,13 +376,10 @@ class N(NamedTuple): y: str def f(a: Type[N]): - a() + a() # E: Missing positional arguments "x", "y" in call to "N" [builtins fixtures/list.pyi] -[out] -main:9: error: Missing positional arguments "x", "y" in call to "N" [case testNewNamedTupleWithDefaults] -# flags: --python-version 3.6 from typing import List, NamedTuple, Optional class X(NamedTuple): @@ -464,7 +419,7 @@ 
UserDefined(1) # E: Argument 1 to "UserDefined" has incompatible type "int"; ex [builtins fixtures/list.pyi] [case testNewNamedTupleWithDefaultsStrictOptional] -# flags: --strict-optional --python-version 3.6 +# flags: --strict-optional from typing import List, NamedTuple, Optional class HasNone(NamedTuple): @@ -483,7 +438,6 @@ class CannotBeNone(NamedTuple): [builtins fixtures/list.pyi] [case testNewNamedTupleWrongType] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -492,7 +446,6 @@ class X(NamedTuple): [builtins fixtures/tuple.pyi] [case testNewNamedTupleErrorInDefault] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): @@ -500,7 +453,6 @@ class X(NamedTuple): [builtins fixtures/tuple.pyi] [case testNewNamedTupleInheritance] -# flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test index 132a34503b89..534967b1edbf 100644 --- a/test-data/unit/check-fastparse.test +++ b/test-data/unit/check-fastparse.test @@ -31,7 +31,6 @@ def f(x): # E: Invalid type comment or annotation pass [case testFastParseInvalidTypes3] -# flags: --python-version 3.6 # All of these should not crash from typing import Callable, Tuple, Iterable diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index c356028f6620..3750c44ed7f3 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1733,7 +1733,7 @@ def h() -> List[Any]: # E: Explicit "Any" is not allowed [builtins fixtures/list.pyi] [case testDisallowAnyExplicitVarDeclaration] -# flags: --python-version 3.6 --disallow-any-explicit +# flags: --disallow-any-explicit from typing import Any v: Any = '' # E: Explicit "Any" is not allowed w = '' # type: Any # E: Explicit "Any" is not allowed @@ -1741,7 +1741,7 @@ class X: y = '' # type: Any # E: Explicit "Any" is not allowed [case 
testDisallowAnyExplicitGenericVarDeclaration] -# flags: --python-version 3.6 --disallow-any-explicit +# flags: --disallow-any-explicit from typing import Any, List v: List[Any] = [] # E: Explicit "Any" is not allowed [builtins fixtures/list.pyi] @@ -1836,7 +1836,7 @@ N = TypedDict('N', {'x': str, 'y': List}) # no error [builtins fixtures/dict.pyi] [case testDisallowAnyGenericsTupleNoTypeParams] -# flags: --python-version 3.6 --disallow-any-generics +# flags: --disallow-any-generics from typing import Tuple def f(s: Tuple) -> None: pass # E: Missing type parameters for generic type "Tuple" @@ -1877,7 +1877,7 @@ def g(l: L[str]) -> None: pass # no error [builtins fixtures/list.pyi] [case testDisallowAnyGenericsGenericAlias] -# flags: --python-version 3.6 --disallow-any-generics +# flags: --disallow-any-generics from typing import TypeVar, Tuple T = TypeVar('T') @@ -1892,7 +1892,7 @@ x: A = ('a', 'b', 1) # E: Missing type parameters for generic type "A" [builtins fixtures/tuple.pyi] [case testDisallowAnyGenericsPlainList] -# flags: --python-version 3.6 --disallow-any-generics +# flags: --disallow-any-generics from typing import List def f(l: List) -> None: pass # E: Missing type parameters for generic type "List" @@ -1905,7 +1905,7 @@ y: List = [] # E: Missing type parameters for generic type "List" [builtins fixtures/list.pyi] [case testDisallowAnyGenericsCustomGenericClass] -# flags: --python-version 3.6 --disallow-any-generics +# flags: --disallow-any-generics from typing import Generic, TypeVar, Any T = TypeVar('T') diff --git a/test-data/unit/check-formatting.test b/test-data/unit/check-formatting.test index f63abbb33034..7d23c2e199f1 100644 --- a/test-data/unit/check-formatting.test +++ b/test-data/unit/check-formatting.test @@ -103,7 +103,6 @@ a = None # type: Any [typing fixtures/typing-medium.pyi] [case testStringInterpolationC] -# flags: --python-version 3.6 '%c' % 1 '%c' % 1.0 # E: "%c" requires int or char (expression has type "float") '%c' % 's' @@ 
-232,18 +231,12 @@ t5: Iterable[str] = ('A', 'B') -- Bytes interpolation -- -------------------- - -[case testBytesInterpolationBefore35] -# flags: --python-version 3.4 -b'%b' % 1 # E: Unsupported left operand type for % ("bytes") - [case testBytesInterpolation] b'%b' % 1 # E: Incompatible types in string interpolation (expression has type "int", placeholder has type "bytes") b'%b' % b'1' b'%a' % 3 [case testBytesInterpolationC] -# flags: --python-version 3.6 b'%c' % 1 b'%c' % 1.0 # E: "%c" requires an integer in range(256) or a single byte (expression has type "float") b'%c' % 's' # E: "%c" requires an integer in range(256) or a single byte (expression has type "str") diff --git a/test-data/unit/check-generic-alias.test b/test-data/unit/check-generic-alias.test index 574a57607d11..8c90b5adba34 100644 --- a/test-data/unit/check-generic-alias.test +++ b/test-data/unit/check-generic-alias.test @@ -200,7 +200,6 @@ t23: collections.abc.ValuesView[str] [case testGenericBuiltinTupleTyping] -# flags: --python-version 3.6 from typing import Tuple t01: Tuple = () @@ -248,7 +247,6 @@ reveal_type(tuple[int, ...]()) # N: Revealed type is "builtins.tuple[builtins.i [builtins fixtures/tuple.pyi] [case testTypeAliasWithBuiltinTupleInStub] -# flags: --python-version 3.6 import m reveal_type(m.a) # N: Revealed type is "builtins.tuple[builtins.int, ...]" reveal_type(m.b) # N: Revealed type is "Tuple[builtins.int, builtins.str]" @@ -261,7 +259,6 @@ b: B [builtins fixtures/tuple.pyi] [case testTypeAliasWithBuiltinListInStub] -# flags: --python-version 3.6 import m reveal_type(m.a) # N: Revealed type is "builtins.list[builtins.int]" reveal_type(m.b) # N: Revealed type is "builtins.list[builtins.list[builtins.int]]" @@ -280,7 +277,6 @@ d: type[str] [case testTypeAliasWithBuiltinListAliasInStub] -# flags: --python-version 3.6 import m reveal_type(m.a()[0]) # N: Revealed type is "builtins.int" diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 
42e3d23eddb9..90d46c217451 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -596,7 +596,7 @@ main:13: error: Argument 2 to "Node" has incompatible type "int"; expected "str" -- Error formatting is a bit different (and probably better) with new analyzer [case testGenericTypeAliasesWrongAliases] -# flags: --show-column-numbers --python-version 3.6 --no-strict-optional +# flags: --show-column-numbers --no-strict-optional from typing import TypeVar, Generic, List, Callable, Tuple, Union T = TypeVar('T') S = TypeVar('S') diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index cd009887a5b5..d8461fc78815 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -5508,7 +5508,6 @@ class Foo: class C: pass [case testIncrementalNestedNamedTuple] -# flags: --python-version 3.6 import a [file a.py] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index ee13cb3830fc..3c4a0943556a 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -913,7 +913,6 @@ def call(c: Callable[[int], Any], i: int) -> None: [out] [case testCallableMeetAndJoin] -# flags: --python-version 3.6 from typing import Callable, Any, TypeVar class A: ... diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index fc3daff64fbd..4992b6589bb3 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -2067,16 +2067,6 @@ def __getattr__(name): ... [builtins fixtures/module.pyi] -[case testModuleLevelGetattrNotStub36] -# flags: --python-version 3.6 -import has_getattr -reveal_type(has_getattr.any_attribute) # E: Module has no attribute "any_attribute" \ - # N: Revealed type is "Any" -[file has_getattr.py] -def __getattr__(name) -> str: ... 
- -[builtins fixtures/module.pyi] - [case testModuleLevelGetattrNotStub37] # flags: --python-version 3.7 @@ -2111,17 +2101,6 @@ def __getattr__(name: str) -> int: ... [builtins fixtures/module.pyi] -[case testModuleLevelGetattrImportFromNotStub36] -# flags: --python-version 3.6 -from non_stub import name # E: Module "non_stub" has no attribute "name" -reveal_type(name) # N: Revealed type is "Any" - -[file non_stub.py] -from typing import Any -def __getattr__(name: str) -> Any: ... - -[builtins fixtures/module.pyi] - [case testModuleLevelGetattrImportFromNotStub37] # flags: --python-version 3.7 from non_stub import name diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index 83cc8c099deb..d69b924971e1 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -38,18 +38,7 @@ x.y x.z # E: "X" has no attribute "z" [builtins fixtures/tuple.pyi] -[case testNamedTupleClassPython35] -# flags: --python-version 3.5 -from typing import NamedTuple - -class A(NamedTuple): - x = 3 # type: int -[builtins fixtures/tuple.pyi] -[out] -main:4: error: NamedTuple class syntax is only supported in Python 3.6 - -[case testNamedTupleClassInStubPython35] -# flags: --python-version 3.5 +[case testNamedTupleClassInStub] import foo [file foo.pyi] diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index c329ccf840a8..f06af0057f0f 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -1138,7 +1138,6 @@ reveal_type(x) # N: Revealed type is "builtins.bool" [builtins fixtures/primitives.pyi] [case testNarrowingTypedDictUsingEnumLiteral] -# flags: --python-version 3.6 from typing import Union from typing_extensions import TypedDict, Literal from enum import Enum diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index 77a1553d4715..8300957ee511 100644 --- a/test-data/unit/check-newsemanal.test +++ 
b/test-data/unit/check-newsemanal.test @@ -2571,18 +2571,6 @@ import n [file n.pyi] class C: pass -[case testNewAnalyzerModuleGetAttrInPython36] -# flags: --python-version 3.6 -import m -import n - -x: m.n.C # E: Name "m.n.C" is not defined -y: n.D # E: Name "n.D" is not defined -[file m.py] -import n -[file n.py] -def __getattr__(x): pass - [case testNewAnalyzerModuleGetAttrInPython37] # flags: --python-version 3.7 import m diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test index cfcbfc598c51..0815d7af1933 100644 --- a/test-data/unit/check-newsyntax.test +++ b/test-data/unit/check-newsyntax.test @@ -1,15 +1,8 @@ -[case testNewSyntaxRequire36] -# flags: --python-version 3.5 -x: int = 5 # E: Variable annotation syntax is only supported in Python 3.6 and greater -[out] - [case testNewSyntaxSyntaxError] -# flags: --python-version 3.6 x: int: int # E: invalid syntax [out] [case testNewSyntaxBasics] -# flags: --python-version 3.6 x: int x = 5 y: int = 5 @@ -19,11 +12,10 @@ a = 5 # E: Incompatible types in assignment (expression has type "int", variabl b: str = 5 # E: Incompatible types in assignment (expression has type "int", variable has type "str") zzz: int -zzz: str # E: Name "zzz" already defined on line 10 +zzz: str # E: Name "zzz" already defined on line 9 [out] [case testNewSyntaxWithDict] -# flags: --python-version 3.6 from typing import Dict, Any d: Dict[int, str] = {} @@ -34,7 +26,6 @@ d['ab'] = 'ab' # E: Invalid index type "str" for "Dict[int, str]"; expected typ [out] [case testNewSyntaxWithRevealType] -# flags: --python-version 3.6 from typing import Dict def tst_local(dct: Dict[int, T]) -> Dict[T, int]: @@ -46,7 +37,6 @@ reveal_type(tst_local({1: 'a'})) # N: Revealed type is "builtins.dict[builtins. 
[out] [case testNewSyntaxWithInstanceVars] -# flags: --python-version 3.6 class TstInstance: a: str def __init__(self) -> None: @@ -59,20 +49,20 @@ TstInstance().a = 'ab' [out] [case testNewSyntaxWithClassVars] -# flags: --strict-optional --python-version 3.6 +# flags: --strict-optional class CCC: a: str = None # E: Incompatible types in assignment (expression has type "None", variable has type "str") [out] [case testNewSyntaxWithStrictOptional] -# flags: --strict-optional --python-version 3.6 +# flags: --strict-optional strict: int strict = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") strict2: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxWithStrictOptionalFunctions] -# flags: --strict-optional --python-version 3.6 +# flags: --strict-optional def f() -> None: x: int if int(): @@ -80,7 +70,7 @@ def f() -> None: [out] [case testNewSyntaxWithStrictOptionalClasses] -# flags: --strict-optional --python-version 3.6 +# flags: --strict-optional class C: def meth(self) -> None: x: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") @@ -88,25 +78,18 @@ class C: [out] [case testNewSyntaxSpecialAssign] -# flags: --python-version 3.6 class X: x: str x[0]: int x.x: int [out] -main:4: error: Unexpected type declaration -main:4: error: Unsupported target for indexed assignment ("str") -main:5: error: Type cannot be declared in assignment to non-self attribute -main:5: error: "str" has no attribute "x" - -[case testNewSyntaxAsyncComprehensionError] -# flags: --python-version 3.5 -async def f(): - results = [i async for i in aiter() if i % 2] # E: Async comprehensions are only supported in Python 3.6 and greater +main:3: error: Unexpected type declaration +main:3: error: Unsupported target for indexed assignment ("str") +main:4: error: Type cannot be declared in assignment to non-self attribute 
+main:4: error: "str" has no attribute "x" [case testNewSyntaxFStringBasics] -# flags: --python-version 3.6 f'foobar' f'{"foobar"}' f'foo{"bar"}' @@ -118,22 +101,19 @@ a = f'{"foobar"}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsOk] -# flags: --python-version 3.6 f'.{1 + 1}.' f'.{1 + 1}.{"foo" + "bar"}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsErrors] -# flags: --python-version 3.6 f'{1 + ""}' f'.{1 + ""}' [builtins fixtures/f_string.pyi] [out] +main:1: error: Unsupported operand types for + ("int" and "str") main:2: error: Unsupported operand types for + ("int" and "str") -main:3: error: Unsupported operand types for + ("int" and "str") [case testNewSyntaxFStringParseFormatOptions] -# flags: --python-version 3.6 value = 10.5142 width = 10 precision = 4 @@ -141,7 +121,6 @@ f'result: {value:{width}.{precision}}' [builtins fixtures/f_string.pyi] [case testNewSyntaxFStringSingleField] -# flags: --python-version 3.6 v = 1 reveal_type(f'{v}') # N: Revealed type is "builtins.str" reveal_type(f'{1}') # N: Revealed type is "builtins.str" diff --git a/test-data/unit/check-singledispatch.test b/test-data/unit/check-singledispatch.test index 1bc34c6fdaab..1adec1575b7e 100644 --- a/test-data/unit/check-singledispatch.test +++ b/test-data/unit/check-singledispatch.test @@ -80,20 +80,6 @@ def g(arg: int) -> None: # E: Argument to register "str" is incompatible with ty [builtins fixtures/args.pyi] -[case testDispatchBasedOnTypeAnnotationsRequires37-xfail] -# flags: --python-version 3.6 -# the docs for singledispatch say that register didn't accept type annotations until python 3.7 -from functools import singledispatch - -@singledispatch -def f(arg) -> None: - pass -@f.register -def g(arg: int) -> None: # E: Singledispatch based on type annotations is only supported in Python 3.7 and greater - pass - -[builtins fixtures/args.pyi] - [case testTypePassedAsArgumentToRegister] from functools import singledispatch diff --git 
a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 5cb89a6854be..f64d24a4ed6b 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1455,8 +1455,7 @@ x7, x8, y7, y8 = *points2, *points3 # E: Contiguous iterable with same type expe x9, y9, x10, y10, z5 = *points2, 1, *points2 # E: Contiguous iterable with same type expected [builtins fixtures/tuple.pyi] -[case testAssignEmptyPy36] -# flags: --python-version 3.6 +[case testAssignEmpty] () = [] [case testAssignEmptyBogus] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 739d1ba6eb75..983fa8c17aec 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -87,7 +87,6 @@ D = TypedDict('D', { -- Define TypedDict (Class syntax) [case testCanCreateTypedDictWithClass] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): @@ -99,7 +98,6 @@ reveal_type(p) # N: Revealed type is "TypedDict('__main__.Point', {'x': builtin [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithSubclass] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1D(TypedDict): @@ -113,7 +111,6 @@ reveal_type(p) # N: Revealed type is "TypedDict('__main__.Point2D', {'x': built [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithSubclass2] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1D(TypedDict): @@ -126,7 +123,6 @@ reveal_type(p) # N: Revealed type is "TypedDict('__main__.Point2D', {'x': built [builtins fixtures/dict.pyi] [case testCanCreateTypedDictClassEmpty] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class EmptyDict(TypedDict): @@ -138,10 +134,7 @@ reveal_type(p) # N: Revealed type is "TypedDict('__main__.EmptyDict', {})" [case testCanCreateTypedDictWithClassOldVersion] -# flags: --python-version 3.5 - -# Test that we can use class-syntax to merge TypedDicts even in -# 
versions without type annotations +# Test that we can use class-syntax to merge function-based TypedDicts from mypy_extensions import TypedDict @@ -165,7 +158,6 @@ foo({'name': 'lol', 'year': 2009, 'based_on': 0}) # E: Incompatible types (expr -- Define TypedDict (Class syntax errors) [case testCannotCreateTypedDictWithClassOtherBases] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class A: pass @@ -195,7 +187,6 @@ class C(TypedDict, TypedDict): # E: Duplicate base class "TypedDict" [typing fixtures/typing-typeddict.pyi] [case testCannotCreateTypedDictWithClassWithOtherStuff] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): @@ -251,7 +242,6 @@ Point = TypedDict('Point', {'x': int, 'y': int, '_fallback': object}) [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithClassUnderscores] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Point(TypedDict): @@ -263,7 +253,6 @@ reveal_type(p) # N: Revealed type is "TypedDict('__main__.Point', {'x': builtins [builtins fixtures/dict.pyi] [case testCannotCreateTypedDictWithDuplicateKey1] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Bad(TypedDict): @@ -291,7 +280,6 @@ reveal_type(d2) # N: Revealed type is "TypedDict('__main__.D2', {'x': builtins.s [typing fixtures/typing-typeddict.pyi] [case testCanCreateTypedDictWithClassOverwriting] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1(TypedDict): @@ -306,7 +294,6 @@ reveal_type(b) # N: Revealed type is "TypedDict('__main__.Bad', {'x': builtins.i [builtins fixtures/dict.pyi] [case testCanCreateTypedDictWithClassOverwriting2] -# flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1(TypedDict): @@ -1774,7 +1761,6 @@ reveal_type(td.pop('c')) # E: TypedDict "TDA" has no key "c" \ [typing fixtures/typing-typeddict.pyi] [case testCanCreateTypedDictWithTypingExtensions] -# flags: --python-version 3.6 from 
typing_extensions import TypedDict class Point(TypedDict): diff --git a/test-data/unit/check-underscores.test b/test-data/unit/check-underscores.test index ac9fad2ca792..2a789b3314f3 100644 --- a/test-data/unit/check-underscores.test +++ b/test-data/unit/check-underscores.test @@ -1,10 +1,4 @@ -[case testUnderscoresRequire36] -# flags: --python-version 3.5 -x = 1000_000 # E: Underscores in numeric literals are only supported in Python 3.6 and greater -[out] - [case testUnderscoresBasics] -# flags: --python-version 3.6 x: int x = 1000_000 x = 0x_FF_FF_FF_FF diff --git a/test-data/unit/check-union-or-syntax.test b/test-data/unit/check-union-or-syntax.test index 58526cfd0623..f342d0ca34a5 100644 --- a/test-data/unit/check-union-or-syntax.test +++ b/test-data/unit/check-union-or-syntax.test @@ -66,8 +66,8 @@ x: List[int | str] reveal_type(x) # N: Revealed type is "builtins.list[Union[builtins.int, builtins.str]]" [builtins fixtures/list.pyi] -[case testUnionOrSyntaxWithQuotedFunctionTypes] -# flags: --python-version 3.4 +[case testUnionOrSyntaxWithQuotedFunctionTypesPre310] +# flags: --python-version 3.9 from typing import Union def f(x: 'Union[int, str, None]') -> 'Union[int, None]': reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.str, None]" @@ -79,8 +79,8 @@ def g(x: "int | str | None") -> "int | None": return 42 reveal_type(g) # N: Revealed type is "def (x: Union[builtins.int, builtins.str, None]) -> Union[builtins.int, None]" -[case testUnionOrSyntaxWithQuotedVariableTypes] -# flags: --python-version 3.6 +[case testUnionOrSyntaxWithQuotedVariableTypesPre310] +# flags: --python-version 3.9 y: "int | str" = 42 reveal_type(y) # N: Revealed type is "Union[builtins.int, builtins.str]" @@ -124,7 +124,6 @@ cast(str | int, 'x') # E: Cast target is not a type [typing fixtures/typing-full.pyi] [case testUnionOrSyntaxInComment] -# flags: --python-version 3.6 x = 1 # type: int | str [case testUnionOrSyntaxFutureImport] @@ -138,7 +137,7 @@ x: int | None x: 
int | None # E: X | Y syntax for unions requires Python 3.10 [case testUnionOrSyntaxInStubFile] -# flags: --python-version 3.6 +# flags: --python-version 3.9 from lib import x [file lib.pyi] x: int | None diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 1db2a16e2e1c..82ff35f53702 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -422,9 +422,9 @@ x = 1 [out] [case testCustomSysVersionInfo] -# flags: --python-version 3.5 +# flags: --python-version 3.11 import sys -if sys.version_info == (3, 5): +if sys.version_info == (3, 11): x = "foo" else: x = 3 @@ -433,7 +433,7 @@ reveal_type(x) # N: Revealed type is "builtins.str" [out] [case testCustomSysVersionInfo2] -# flags: --python-version 3.5 +# flags: --python-version 3.11 import sys if sys.version_info == (3, 6): x = "foo" diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 6e9fdf6dab65..42f0ee8a9ec6 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -296,7 +296,7 @@ mypy.ini: [mypy]: ignore_missing_imports: Not a boolean: nah [file mypy.ini] \[mypy] \[mypy-*] -python_version = 3.4 +python_version = 3.11 [out] mypy.ini: [mypy-*]: Per-module sections should only specify per-module flags (python_version) == Return code: 0 @@ -592,7 +592,7 @@ main.py:1: error: Cannot find implementation or library stub for module named "a \[tool.mypy] python_version = 3.10 [out] -pyproject.toml: [mypy]: python_version: Python 3.1 is not supported (must be 3.4 or higher). You may need to put quotes around your Python version +pyproject.toml: [mypy]: python_version: Python 3.1 is not supported (must be 3.7 or higher). 
You may need to put quotes around your Python version == Return code: 0 [case testPythonVersionTooOld10] @@ -604,13 +604,13 @@ python_version = 1.0 mypy.ini: [mypy]: python_version: Python major version '1' out of range (must be 3) == Return code: 0 -[case testPythonVersionTooOld33] +[case testPythonVersionTooOld36] # cmd: mypy -c pass [file mypy.ini] \[mypy] -python_version = 3.3 +python_version = 3.6 [out] -mypy.ini: [mypy]: python_version: Python 3.3 is not supported (must be 3.4 or higher) +mypy.ini: [mypy]: python_version: Python 3.6 is not supported (must be 3.7 or higher) == Return code: 0 [case testPythonVersionTooNew40] @@ -633,18 +633,18 @@ usage: mypy [-h] [-v] [-V] [more options; see below] mypy: error: Mypy no longer supports checking Python 2 code. Consider pinning to mypy<0.980 if you need to check Python 2 code. == Return code: 2 -[case testPythonVersionAccepted34] +[case testPythonVersionAccepted37] # cmd: mypy -c pass [file mypy.ini] \[mypy] -python_version = 3.4 +python_version = 3.7 [out] -[case testPythonVersionAccepted36] +[case testPythonVersionAccepted311] # cmd: mypy -c pass [file mypy.ini] \[mypy] -python_version = 3.6 +python_version = 3.11 [out] -- This should be a dumping ground for tests of plugins that are sensitive to @@ -676,11 +676,11 @@ int_pow.py:10: note: Revealed type is "builtins.int" int_pow.py:11: note: Revealed type is "Any" == Return code: 0 -[case testDisallowAnyGenericsBuiltinCollections] +[case testDisallowAnyGenericsBuiltinCollectionsPre39] # cmd: mypy m.py [file mypy.ini] \[mypy] -python_version=3.6 +python_version = 3.8 \[mypy-m] disallow_any_generics = True diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index f208b4e78e54..18a03a92207d 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -159,18 +159,18 @@ def plugin(version): return Dummy [case testDaemonRunRestartGlobs] -- Ensure dmypy is not restarted if the configuration doesn't change and it contains globs -- Note: 
Backslash path separator in output is replaced with forward slash so the same test succeeds on Windows as well -$ dmypy run -- foo --follow-imports=error --python-version=3.6 +$ dmypy run -- foo --follow-imports=error Daemon started foo/lol.py:1: error: Name "fail" is not defined Found 1 error in 1 file (checked 3 source files) == Return code: 1 -$ dmypy run -- foo --follow-imports=error --python-version=3.6 +$ dmypy run -- foo --follow-imports=error foo/lol.py:1: error: Name "fail" is not defined Found 1 error in 1 file (checked 3 source files) == Return code: 1 $ {python} -c "print('[mypy]')" >mypy.ini $ {python} -c "print('ignore_errors=True')" >>mypy.ini -$ dmypy run -- foo --follow-imports=error --python-version=3.6 +$ dmypy run -- foo --follow-imports=error Restarting: configuration changed Daemon stopped Daemon started @@ -264,7 +264,7 @@ $ dmypy stop Daemon stopped [case testDaemonWarningSuccessExitCode-posix] -$ dmypy run -- foo.py --follow-imports=error +$ dmypy run -- foo.py --follow-imports=error --python-version=3.11 Daemon started foo.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs Success: no issues found in 1 source file @@ -282,13 +282,13 @@ def foo(): [case testDaemonQuickstart] $ {python} -c "print('x=1')" >foo.py $ {python} -c "print('x=1')" >bar.py -$ mypy --local-partial-types --cache-fine-grained --follow-imports=error --no-sqlite-cache --python-version=3.6 -- foo.py bar.py +$ mypy --local-partial-types --cache-fine-grained --follow-imports=error --no-sqlite-cache --python-version=3.11 -- foo.py bar.py Success: no issues found in 2 source files -$ {python} -c "import shutil; shutil.copy('.mypy_cache/3.6/bar.meta.json', 'asdf.json')" +$ {python} -c "import shutil; shutil.copy('.mypy_cache/3.11/bar.meta.json', 'asdf.json')" -- update bar's timestamp but don't change the file $ {python} -c "import time;time.sleep(1)" $ {python} -c "print('x=1')" >bar.py -$ dmypy run -- foo.py bar.py 
--follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.6 +$ dmypy run -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.11 Daemon started Success: no issues found in 2 source files $ dmypy status --fswatcher-dump-file test.json @@ -296,11 +296,11 @@ Daemon is up and running $ dmypy stop Daemon stopped -- copy the original bar cache file back so that the mtime mismatches -$ {python} -c "import shutil; shutil.copy('asdf.json', '.mypy_cache/3.6/bar.meta.json')" +$ {python} -c "import shutil; shutil.copy('asdf.json', '.mypy_cache/3.11/bar.meta.json')" -- sleep guarantees timestamp changes $ {python} -c "import time;time.sleep(1)" $ {python} -c "print('lol')" >foo.py -$ dmypy run --log-file=log -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.6 --quickstart-file test.json +$ dmypy run --log-file=log -- foo.py bar.py --follow-imports=error --use-fine-grained-cache --no-sqlite-cache --python-version=3.11 --quickstart-file test.json Daemon started foo.py:1: error: Name "lol" is not defined Found 1 error in 1 file (checked 2 source files) @@ -309,7 +309,7 @@ Found 1 error in 1 file (checked 2 source files) $ {python} -c "import sys; sys.stdout.write(open('log').read())" -- make sure the meta file didn't get updated. we use this as an imperfect proxy for -- whether the source file got rehashed, which we don't want it to have been. 
-$ {python} -c "x = open('.mypy_cache/3.6/bar.meta.json').read(); y = open('asdf.json').read(); assert x == y" +$ {python} -c "x = open('.mypy_cache/3.11/bar.meta.json').read(); y = open('asdf.json').read(); assert x == y" [case testDaemonSuggest] $ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary diff --git a/test-data/unit/parse-errors.test b/test-data/unit/parse-errors.test index 33cf9b4f91b4..c6b1c00a6169 100644 --- a/test-data/unit/parse-errors.test +++ b/test-data/unit/parse-errors.test @@ -273,17 +273,10 @@ file:3: error: Syntax error in type comment file:3: error: Inconsistent use of "*" in function signature file:3: error: Inconsistent use of "**" in function signature -[case testPrintStatementInPython35] -# flags: --python-version 3.5 +[case testPrintStatementInPython3] print 1 [out] -file:2: error: Missing parentheses in call to 'print' - -[case testPrintStatementInPython37] -# flags: --python-version 3.7 -print 1 -[out] -file:2: error: Missing parentheses in call to 'print'. Did you mean print(1)? +file:1: error: Missing parentheses in call to 'print'. Did you mean print(1)? 
[case testInvalidConditionInConditionalExpression] 1 if 2, 3 else 4 diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index abc0f6a464a9..289005b36d9a 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -867,7 +867,7 @@ _program.py:20: error: Argument 1 to "tst" has incompatible type "defaultdict[st _program.py:24: error: Invalid index type "str" for "MyDDict[Dict[, ]]"; expected type "int" [case testNoSubcriptionOfStdlibCollections] -# flags: --python-version 3.6 +# flags: --python-version 3.7 import collections from collections import Counter from typing import TypeVar diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test index 50dabb1fdea9..a6cde503ca09 100644 --- a/test-data/unit/reports.test +++ b/test-data/unit/reports.test @@ -311,7 +311,7 @@ Total 0 14 100.00% [case testAnyExpressionsReportTypesOfAny] -# cmd: mypy --python-version=3.6 --any-exprs-report report n.py +# cmd: mypy --any-exprs-report report n.py [file n.py] from typing import Any, List From d7f9f06710cec4f0bb3cd432786264fba4809897 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Sat, 15 Jul 2023 11:38:50 +0100 Subject: [PATCH 05/88] Sync typeshed Source commit: https://github.com/python/typeshed/commit/a83e55990ca7f9b9f93271b9087a3f433f54d94a --- mypy/typeshed/stdlib/_ctypes.pyi | 6 +- mypy/typeshed/stdlib/_decimal.pyi | 1 + mypy/typeshed/stdlib/_weakref.pyi | 1 + mypy/typeshed/stdlib/asyncio/events.pyi | 1 + mypy/typeshed/stdlib/asyncio/taskgroups.pyi | 8 +- mypy/typeshed/stdlib/builtins.pyi | 108 +++++++++++++++++- mypy/typeshed/stdlib/collections/__init__.pyi | 23 +++- mypy/typeshed/stdlib/datetime.pyi | 4 + mypy/typeshed/stdlib/doctest.pyi | 3 + mypy/typeshed/stdlib/email/charset.pyi | 9 +- mypy/typeshed/stdlib/email/utils.pyi | 2 +- mypy/typeshed/stdlib/errno.pyi | 14 ++- mypy/typeshed/stdlib/functools.pyi | 40 ++++--- mypy/typeshed/stdlib/ipaddress.pyi | 1 + mypy/typeshed/stdlib/json/__init__.pyi | 9 -- 
mypy/typeshed/stdlib/linecache.pyi | 6 +- mypy/typeshed/stdlib/pathlib.pyi | 1 + mypy/typeshed/stdlib/plistlib.pyi | 1 + mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 20 ++++ mypy/typeshed/stdlib/statistics.pyi | 1 + mypy/typeshed/stdlib/tkinter/__init__.pyi | 6 +- mypy/typeshed/stdlib/tkinter/ttk.pyi | 4 +- mypy/typeshed/stdlib/typing.pyi | 1 + mypy/typeshed/stdlib/unittest/mock.pyi | 27 ++++- mypy/typeshed/stdlib/uuid.pyi | 40 +++---- 25 files changed, 264 insertions(+), 73 deletions(-) diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 756ee86d3342..25d604218a00 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -151,7 +151,11 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - raw: bytes # Note: only available if _CT == c_char + # Note: only available if _CT == c_char + @property + def raw(self) -> bytes: ... + @raw.setter + def raw(self, value: ReadableBuffer) -> None: ... value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT diff --git a/mypy/typeshed/stdlib/_decimal.pyi b/mypy/typeshed/stdlib/_decimal.pyi index 60c609456954..9a90760bd2c2 100644 --- a/mypy/typeshed/stdlib/_decimal.pyi +++ b/mypy/typeshed/stdlib/_decimal.pyi @@ -73,6 +73,7 @@ class Decimal: def from_float(cls, __f: float) -> Self: ... def __bool__(self) -> bool: ... def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __hash__(self) -> int: ... def as_tuple(self) -> DecimalTuple: ... def as_integer_ratio(self) -> tuple[int, int]: ... def to_eng_string(self, context: Context | None = None) -> str: ... 
diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi index b6044fac4628..2402d0bfe721 100644 --- a/mypy/typeshed/stdlib/_weakref.pyi +++ b/mypy/typeshed/stdlib/_weakref.pyi @@ -22,6 +22,7 @@ class ReferenceType(Generic[_T]): __callback__: Callable[[ReferenceType[_T]], Any] def __new__(cls, __o: _T, __callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ... def __call__(self) -> _T | None: ... + def __hash__(self) -> int: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index 2054f6e522a1..b1b0fcfa5fd7 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -86,6 +86,7 @@ class TimerHandle(Handle): loop: AbstractEventLoop, context: Context | None = None, ) -> None: ... + def __hash__(self) -> int: ... def when(self) -> float: ... def __lt__(self, other: TimerHandle) -> bool: ... def __le__(self, other: TimerHandle) -> bool: ... diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi index 08ea8f66559c..47d9bb2f699e 100644 --- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi @@ -1,5 +1,4 @@ -# This only exists in 3.11+. See VERSIONS. - +import sys from contextvars import Context from types import TracebackType from typing import TypeVar @@ -8,7 +7,10 @@ from typing_extensions import Self from . 
import _CoroutineLike from .tasks import Task -__all__ = ["TaskGroup"] +if sys.version_info >= (3, 12): + __all__ = ("TaskGroup",) +else: + __all__ = ["TaskGroup"] _T = TypeVar("_T") diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 7415a1b7680d..ea917bddb799 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -56,6 +56,7 @@ from typing import ( # noqa: Y022 from typing_extensions import ( Concatenate, Literal, + LiteralString, ParamSpec, Self, SupportsIndex, @@ -315,6 +316,7 @@ class int: def __float__(self) -> float: ... def __int__(self) -> int: ... def __abs__(self) -> int: ... + def __hash__(self) -> int: ... def __bool__(self) -> bool: ... def __index__(self) -> int: ... @@ -378,6 +380,7 @@ class float: def __int__(self) -> int: ... def __float__(self) -> float: ... def __abs__(self) -> float: ... + def __hash__(self) -> int: ... def __bool__(self) -> bool: ... class complex: @@ -417,6 +420,7 @@ class complex: def __neg__(self) -> complex: ... def __pos__(self) -> complex: ... def __abs__(self) -> float: ... + def __hash__(self) -> int: ... def __bool__(self) -> bool: ... if sys.version_info >= (3, 11): def __complex__(self) -> complex: ... @@ -432,8 +436,17 @@ class str(Sequence[str]): def __new__(cls, object: object = ...) -> Self: ... @overload def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... + @overload + def capitalize(self: LiteralString) -> LiteralString: ... + @overload def capitalize(self) -> str: ... # type: ignore[misc] + @overload + def casefold(self: LiteralString) -> LiteralString: ... + @overload def casefold(self) -> str: ... # type: ignore[misc] + @overload + def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... 
# type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @@ -441,11 +454,20 @@ class str(Sequence[str]): self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): + @overload + def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... + @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: + @overload + def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... + @overload def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload def format(self, *args: object, **kwargs: object) -> str: ... def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -461,32 +483,91 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... + @overload + def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... + @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] + @overload + def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def lower(self: LiteralString) -> LiteralString: ... + @overload def lower(self) -> str: ... 
# type: ignore[misc] + @overload + def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def replace( + self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 + ) -> LiteralString: ... + @overload def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): + @overload + def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... + @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... + @overload def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... 
+ @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... + @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... + @overload + def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def swapcase(self: LiteralString) -> LiteralString: ... + @overload def swapcase(self) -> str: ... # type: ignore[misc] + @overload + def title(self: LiteralString) -> LiteralString: ... + @overload def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... + @overload + def upper(self: LiteralString) -> LiteralString: ... + @overload def upper(self) -> str: ... # type: ignore[misc] + @overload + def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... + @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload @@ -497,6 +578,9 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... + @overload + def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ... 
+ @overload def __add__(self, __value: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __key: str) -> bool: ... # type: ignore[override] @@ -504,13 +588,26 @@ class str(Sequence[str]): def __ge__(self, __value: str) -> bool: ... def __getitem__(self, __key: SupportsIndex | slice) -> str: ... def __gt__(self, __value: str) -> bool: ... + def __hash__(self) -> int: ... + @overload + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __value: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __value: str) -> bool: ... + @overload + def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... + @overload def __mod__(self, __value: Any) -> str: ... + @overload + def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... + @overload def __mul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __value: object) -> bool: ... + @overload + def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... + @overload def __rmul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... @@ -597,6 +694,7 @@ class bytes(Sequence[int]): def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... + def __hash__(self) -> int: ... @overload def __getitem__(self, __key: SupportsIndex) -> int: ... @overload @@ -1004,7 +1102,13 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + @overload + def __or__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ... 
+ @overload def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ... + @overload def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... # dict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] @@ -1665,11 +1769,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index e56baf8b52c9..36d79101908d 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -83,8 +83,14 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... if sys.version_info >= (3, 9): + @overload + def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... + @overload def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... - def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... 
# type: ignore[misc] + @overload # type: ignore[misc] + def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... + @overload # type: ignore[misc] + def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @@ -391,6 +397,15 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): def __missing__(self, __key: _KT) -> _VT: ... def __copy__(self) -> Self: ... def copy(self) -> Self: ... + if sys.version_info >= (3, 9): + @overload + def __or__(self, __value: Mapping[_KT, _VT]) -> Self: ... + @overload + def __or__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, __value: Mapping[_KT, _VT]) -> Self: ... + @overload + def __ror__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ... class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): maps: list[MutableMapping[_KT, _VT]] @@ -425,7 +440,13 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): @overload def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> ChainMap[_T, _S]: ... if sys.version_info >= (3, 9): + @overload + def __or__(self, other: Mapping[_KT, _VT]) -> Self: ... + @overload def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, other: Mapping[_KT, _VT]) -> Self: ... + @overload def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... 
# ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index 2bb2264c97b1..00d511915f20 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -35,6 +35,7 @@ class timezone(tzinfo): def tzname(self, __dt: datetime | None) -> str: ... def utcoffset(self, __dt: datetime | None) -> timedelta: ... def dst(self, __dt: datetime | None) -> None: ... + def __hash__(self) -> int: ... if sys.version_info >= (3, 11): UTC: timezone @@ -106,6 +107,7 @@ class date: @overload def __sub__(self, __value: date) -> timedelta: ... + def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... if sys.version_info >= (3, 9): @@ -143,6 +145,7 @@ class time: def __lt__(self, __value: time) -> bool: ... def __ge__(self, __value: time) -> bool: ... def __gt__(self, __value: time) -> bool: ... + def __hash__(self) -> int: ... def isoformat(self, timespec: str = ...) -> str: ... @classmethod def fromisoformat(cls, __time_string: str) -> Self: ... @@ -217,6 +220,7 @@ class timedelta: def __ge__(self, __value: timedelta) -> bool: ... def __gt__(self, __value: timedelta) -> bool: ... def __bool__(self) -> bool: ... + def __hash__(self) -> int: ... class datetime(date): min: ClassVar[datetime] diff --git a/mypy/typeshed/stdlib/doctest.pyi b/mypy/typeshed/stdlib/doctest.pyi index 88d066fdc23c..f3c05781ad92 100644 --- a/mypy/typeshed/stdlib/doctest.pyi +++ b/mypy/typeshed/stdlib/doctest.pyi @@ -85,6 +85,7 @@ class Example: indent: int = 0, options: dict[int, bool] | None = None, ) -> None: ... + def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... class DocTest: @@ -103,6 +104,7 @@ class DocTest: lineno: int | None, docstring: str | None, ) -> None: ... + def __hash__(self) -> int: ... def __lt__(self, other: DocTest) -> bool: ... 
def __eq__(self, other: object) -> bool: ... @@ -210,6 +212,7 @@ class DocTestCase(unittest.TestCase): ) -> None: ... def runTest(self) -> None: ... def format_failure(self, err: str) -> str: ... + def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... class SkipDocTestCase(DocTestCase): diff --git a/mypy/typeshed/stdlib/email/charset.pyi b/mypy/typeshed/stdlib/email/charset.pyi index e612847c75b6..d61950a26424 100644 --- a/mypy/typeshed/stdlib/email/charset.pyi +++ b/mypy/typeshed/stdlib/email/charset.pyi @@ -1,4 +1,6 @@ -from collections.abc import Iterator +from collections.abc import Callable, Iterator +from email.message import Message +from typing import overload __all__ = ["Charset", "add_alias", "add_charset", "add_codec"] @@ -14,10 +16,13 @@ class Charset: input_codec: str | None output_codec: str | None def __init__(self, input_charset: str = "us-ascii") -> None: ... - def get_body_encoding(self) -> str: ... + def get_body_encoding(self) -> str | Callable[[Message], None]: ... def get_output_charset(self) -> str | None: ... def header_encode(self, string: str) -> str: ... def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str]: ... + @overload + def body_encode(self, string: None) -> None: ... + @overload def body_encode(self, string: str) -> str: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __value: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/email/utils.pyi b/mypy/typeshed/stdlib/email/utils.pyi index ed63b6b32312..186e768050be 100644 --- a/mypy/typeshed/stdlib/email/utils.pyi +++ b/mypy/typeshed/stdlib/email/utils.pyi @@ -60,7 +60,7 @@ else: def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ... -def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... +def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... 
# May return list[str]. See issue #10431 for details. def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ... def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... diff --git a/mypy/typeshed/stdlib/errno.pyi b/mypy/typeshed/stdlib/errno.pyi index 28874d44ff5f..84d2b44a6a61 100644 --- a/mypy/typeshed/stdlib/errno.pyi +++ b/mypy/typeshed/stdlib/errno.pyi @@ -91,9 +91,15 @@ ECANCELED: int # undocumented ENOTRECOVERABLE: int # undocumented EOWNERDEAD: int # undocumented +if sys.platform == "sunos5" or sys.platform == "solaris": # noqa: Y008 + ELOCKUNMAPPED: int + ENOTACTIVE: int + if sys.platform != "win32": ENOTBLK: int EMULTIHOP: int + +if sys.platform == "darwin": # All of the below are undocumented EAUTH: int EBADARCH: int @@ -112,9 +118,8 @@ if sys.platform != "win32": EPWROFF: int ERPCMISMATCH: int ESHLIBVERS: int - - if sys.platform != "darwin" or sys.version_info >= (3, 11): - EQFULL: int # undocumented + if sys.version_info >= (3, 11): + EQFULL: int if sys.platform != "darwin": EDEADLOCK: int @@ -164,9 +169,6 @@ if sys.platform != "win32" and sys.platform != "darwin": ENOKEY: int ENOMEDIUM: int ERFKILL: int - EL: int - ELOCKUNMAPPED: int - ENOTACTIVE: int if sys.platform == "win32": # All of these are undocumented diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 1b4e59b7c120..8adc3d82292e 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -1,9 +1,9 @@ import sys import types -from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems +from _typeshed import SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload -from typing_extensions import Literal, Self, 
TypeAlias, TypedDict, final +from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -28,10 +28,12 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 9): __all__ += ["cache"] -_AnyCallable: TypeAlias = Callable[..., object] - _T = TypeVar("_T") _S = TypeVar("_S") +_PWrapped = ParamSpec("_PWrapped") +_RWrapped = TypeVar("_RWrapped") +_PWrapper = ParamSpec("_PWrapper") +_RWapper = TypeVar("_RWapper") @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... @@ -85,31 +87,41 @@ else: ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] +class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWapper]): + __wrapped__: Callable[_PWrapped, _RWrapped] + def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWapper: ... + # as with ``Callable``, we'll assume that these attributes exist + __name__: str + __qualname__: str + +class _Wrapper(Generic[_PWrapped, _RWrapped]): + def __call__(self, f: Callable[_PWrapper, _RWapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... + if sys.version_info >= (3, 12): def update_wrapper( - wrapper: _T, - wrapped: _AnyCallable, + wrapper: Callable[_PWrapper, _RWapper], + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> _T: ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... def wraps( - wrapped: _AnyCallable, + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> IdentityFunction: ... + ) -> _Wrapper[_PWrapped, _RWrapped]: ... 
else: def update_wrapper( - wrapper: _T, - wrapped: _AnyCallable, + wrapper: Callable[_PWrapper, _RWapper], + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _T: ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... def wraps( - wrapped: _AnyCallable, + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> IdentityFunction: ... + ) -> _Wrapper[_PWrapped, _RWrapped]: ... def total_ordering(cls: type[_T]) -> type[_T]: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index 7a4146885b29..fc42cf03e2bb 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -34,6 +34,7 @@ class _IPAddressBase: class _BaseAddress(_IPAddressBase, SupportsInt): def __init__(self, address: object) -> None: ... def __add__(self, other: int) -> Self: ... + def __hash__(self) -> int: ... def __int__(self) -> int: ... def __sub__(self, other: int) -> Self: ... 
if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/json/__init__.pyi b/mypy/typeshed/stdlib/json/__init__.pyi index dc0cdff926d4..63e9718ee151 100644 --- a/mypy/typeshed/stdlib/json/__init__.pyi +++ b/mypy/typeshed/stdlib/json/__init__.pyi @@ -1,4 +1,3 @@ -import sys from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable from typing import Any @@ -7,8 +6,6 @@ from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDeco from .encoder import JSONEncoder as JSONEncoder __all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", "JSONEncoder"] -if sys.version_info >= (3, 12): - __all__ += ["AttrDict"] def dumps( obj: Any, @@ -62,9 +59,3 @@ def load( **kwds: Any, ) -> Any: ... def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented - -if sys.version_info >= (3, 12): - class AttrDict(dict[str, Any]): - def __getattr__(self, name: str) -> Any: ... - def __setattr__(self, name: str, value: Any) -> None: ... - def __delattr__(self, name: str) -> None: ... diff --git a/mypy/typeshed/stdlib/linecache.pyi b/mypy/typeshed/stdlib/linecache.pyi index 8e317dd38990..2e050e13b621 100644 --- a/mypy/typeshed/stdlib/linecache.pyi +++ b/mypy/typeshed/stdlib/linecache.pyi @@ -1,5 +1,6 @@ import sys -from typing import Any, Protocol +from collections.abc import Callable +from typing import Any from typing_extensions import TypeAlias if sys.version_info >= (3, 9): @@ -10,8 +11,7 @@ else: _ModuleGlobals: TypeAlias = dict[str, Any] _ModuleMetadata: TypeAlias = tuple[int, float | None, list[str], str] -class _SourceLoader(Protocol): - def __call__(self) -> str | None: ... 
+_SourceLoader: TypeAlias = tuple[Callable[[], str | None]] cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 3c2ae0fe7ab1..a509ec3af9f2 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -39,6 +39,7 @@ class PurePath(PathLike[str]): @property def stem(self) -> str: ... def __new__(cls, *args: StrPath) -> Self: ... + def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... def __fspath__(self) -> str: ... def __lt__(self, other: PurePath) -> bool: ... diff --git a/mypy/typeshed/stdlib/plistlib.pyi b/mypy/typeshed/stdlib/plistlib.pyi index 5b76c935f76e..bd5525484514 100644 --- a/mypy/typeshed/stdlib/plistlib.pyi +++ b/mypy/typeshed/stdlib/plistlib.pyi @@ -102,6 +102,7 @@ if sys.version_info >= (3, 8): def __init__(self, data: int) -> None: ... def __index__(self) -> int: ... def __reduce__(self) -> tuple[type[Self], tuple[int]]: ... + def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... 
class InvalidFileException(ValueError): diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index 24974f787c62..cff0f5e5ff1d 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -196,6 +196,25 @@ if sys.version_info >= (3, 11): SQLITE_WARNING: int SQLITE_WARNING_AUTOINDEX: int +if sys.version_info >= (3, 12): + LEGACY_TRANSACTION_CONTROL: int + SQLITE_DBCONFIG_DEFENSIVE: int + SQLITE_DBCONFIG_DQS_DDL: int + SQLITE_DBCONFIG_DQS_DML: int + SQLITE_DBCONFIG_ENABLE_FKEY: int + SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: int + SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION: int + SQLITE_DBCONFIG_ENABLE_QPSG: int + SQLITE_DBCONFIG_ENABLE_TRIGGER: int + SQLITE_DBCONFIG_ENABLE_VIEW: int + SQLITE_DBCONFIG_LEGACY_ALTER_TABLE: int + SQLITE_DBCONFIG_LEGACY_FILE_FORMAT: int + SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: int + SQLITE_DBCONFIG_RESET_DATABASE: int + SQLITE_DBCONFIG_TRIGGER_EQP: int + SQLITE_DBCONFIG_TRUSTED_SCHEMA: int + SQLITE_DBCONFIG_WRITABLE_SCHEMA: int + # Can take or return anything depending on what's in the registry. @overload def adapt(__obj: Any, __proto: Any) -> Any: ... @@ -419,6 +438,7 @@ class Row: def __getitem__(self, __key: int | str) -> Any: ... @overload def __getitem__(self, __key: slice) -> tuple[Any, ...]: ... + def __hash__(self) -> int: ... def __iter__(self) -> Iterator[Any]: ... def __len__(self) -> int: ... # These return NotImplemented for anything that is not a Row. diff --git a/mypy/typeshed/stdlib/statistics.pyi b/mypy/typeshed/stdlib/statistics.pyi index af5fcec6ad0c..07174f4531b9 100644 --- a/mypy/typeshed/stdlib/statistics.pyi +++ b/mypy/typeshed/stdlib/statistics.pyi @@ -113,6 +113,7 @@ if sys.version_info >= (3, 8): __radd__ = __add__ def __rsub__(self, x2: float | NormalDist) -> NormalDist: ... __rmul__ = __mul__ + def __hash__(self) -> int: ... 
if sys.version_info >= (3, 12): def correlation( diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi index 3291b0c9dd98..a03c48c039dd 100644 --- a/mypy/typeshed/stdlib/tkinter/__init__.pyi +++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi @@ -500,7 +500,7 @@ class Misc: bbox = grid_bbox def grid_columnconfigure( self, - index: _GridIndex, + index: _GridIndex | list[int] | tuple[int, ...], cnf: _GridIndexInfo = {}, *, minsize: _ScreenUnits = ..., @@ -510,7 +510,7 @@ class Misc: ) -> _GridIndexInfo | Any: ... # can be None but annoying to check def grid_rowconfigure( self, - index: _GridIndex, + index: _GridIndex | list[int] | tuple[int, ...], cnf: _GridIndexInfo = {}, *, minsize: _ScreenUnits = ..., @@ -1633,6 +1633,7 @@ class Canvas(Widget, XView, YView): activefill: str = ..., activestipple: str = ..., anchor: _Anchor = ..., + angle: float | str = ..., disabledfill: str = ..., disabledstipple: str = ..., fill: str = ..., @@ -1653,6 +1654,7 @@ class Canvas(Widget, XView, YView): activefill: str = ..., activestipple: str = ..., anchor: _Anchor = ..., + angle: float | str = ..., disabledfill: str = ..., disabledstipple: str = ..., fill: str = ..., diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi index 009fdf51a440..d73566fc0917 100644 --- a/mypy/typeshed/stdlib/tkinter/ttk.pyi +++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -961,7 +961,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): master: tkinter.Misc | None = None, *, class_: str = ..., - columns: str | list[str] | tuple[str, ...] = ..., + columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ..., cursor: tkinter._Cursor = ..., displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., height: int = ..., @@ -983,7 +983,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): self, cnf: dict[str, Any] | None = None, *, - columns: str | list[str] | tuple[str, ...] 
= ..., + columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ..., cursor: tkinter._Cursor = ..., displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., height: int = ..., diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 2c5f820ea365..7496a0920690 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -921,6 +921,7 @@ class ForwardRef: def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... if sys.version_info >= (3, 11): def __or__(self, other: Any) -> _SpecialForm: ... def __ror__(self, other: Any) -> _SpecialForm: ... diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 0ed0701cc450..db1cc7d9bfc9 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -234,6 +234,8 @@ class _patch(Generic[_T]): def copy(self) -> _patch[_T]: ... @overload def __call__(self, func: _TT) -> _TT: ... + # If new==DEFAULT, this should add a MagicMock parameter to the function + # arguments. See the _patch_default_new class below for this functionality. @overload def __call__(self, func: Callable[_P, _R]) -> Callable[_P, _R]: ... if sys.version_info >= (3, 8): @@ -257,6 +259,22 @@ class _patch(Generic[_T]): def start(self) -> _T: ... def stop(self) -> None: ... +if sys.version_info >= (3, 8): + _Mock: TypeAlias = MagicMock | AsyncMock +else: + _Mock: TypeAlias = MagicMock + +# This class does not exist at runtime, it's a hack to make this work: +# @patch("foo") +# def bar(..., mock: MagicMock) -> None: ... +class _patch_default_new(_patch[_Mock]): + @overload + def __call__(self, func: _TT) -> _TT: ... 
+ # Can't use the following as ParamSpec is only allowed as last parameter: + # def __call__(self, func: Callable[_P, _R]) -> Callable[Concatenate[_P, MagicMock], _R]: ... + @overload + def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + class _patch_dict: in_dict: Any values: Any @@ -273,11 +291,8 @@ class _patch_dict: start: Any stop: Any -if sys.version_info >= (3, 8): - _Mock: TypeAlias = MagicMock | AsyncMock -else: - _Mock: TypeAlias = MagicMock - +# This class does not exist at runtime, it's a hack to add methods to the +# patch() function. class _patcher: TEST_PREFIX: str dict: type[_patch_dict] @@ -307,7 +322,7 @@ class _patcher: autospec: Any | None = ..., new_callable: Any | None = ..., **kwargs: Any, - ) -> _patch[_Mock]: ... + ) -> _patch_default_new: ... @overload @staticmethod def object( # type: ignore[misc] diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi index 74ce4ebd6b47..fd87646531a6 100644 --- a/mypy/typeshed/stdlib/uuid.pyi +++ b/mypy/typeshed/stdlib/uuid.pyi @@ -1,11 +1,9 @@ +import builtins import sys from _typeshed import Unused from enum import Enum from typing_extensions import TypeAlias -# Because UUID has properties called int and bytes we need to rename these temporarily. -_Int: TypeAlias = int -_Bytes: TypeAlias = bytes _FieldsType: TypeAlias = tuple[int, int, int, int, int, int] class SafeUUID(Enum): @@ -17,49 +15,49 @@ class UUID: def __init__( self, hex: str | None = None, - bytes: _Bytes | None = None, - bytes_le: _Bytes | None = None, + bytes: builtins.bytes | None = None, + bytes_le: builtins.bytes | None = None, fields: _FieldsType | None = None, - int: _Int | None = None, - version: _Int | None = None, + int: builtins.int | None = None, + version: builtins.int | None = None, *, is_safe: SafeUUID = ..., ) -> None: ... @property def is_safe(self) -> SafeUUID: ... @property - def bytes(self) -> _Bytes: ... + def bytes(self) -> builtins.bytes: ... 
@property - def bytes_le(self) -> _Bytes: ... + def bytes_le(self) -> builtins.bytes: ... @property - def clock_seq(self) -> _Int: ... + def clock_seq(self) -> builtins.int: ... @property - def clock_seq_hi_variant(self) -> _Int: ... + def clock_seq_hi_variant(self) -> builtins.int: ... @property - def clock_seq_low(self) -> _Int: ... + def clock_seq_low(self) -> builtins.int: ... @property def fields(self) -> _FieldsType: ... @property def hex(self) -> str: ... @property - def int(self) -> _Int: ... + def int(self) -> builtins.int: ... @property - def node(self) -> _Int: ... + def node(self) -> builtins.int: ... @property - def time(self) -> _Int: ... + def time(self) -> builtins.int: ... @property - def time_hi_version(self) -> _Int: ... + def time_hi_version(self) -> builtins.int: ... @property - def time_low(self) -> _Int: ... + def time_low(self) -> builtins.int: ... @property - def time_mid(self) -> _Int: ... + def time_mid(self) -> builtins.int: ... @property def urn(self) -> str: ... @property def variant(self) -> str: ... @property - def version(self) -> _Int | None: ... - def __int__(self) -> _Int: ... + def version(self) -> builtins.int | None: ... + def __int__(self) -> builtins.int: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: UUID) -> bool: ... def __le__(self, other: UUID) -> bool: ... @@ -72,7 +70,7 @@ if sys.version_info >= (3, 9): else: def getnode(*, getters: Unused = None) -> int: ... # undocumented -def uuid1(node: _Int | None = None, clock_seq: _Int | None = None) -> UUID: ... +def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ... if sys.version_info >= (3, 12): def uuid3(namespace: UUID, name: str | bytes) -> UUID: ... 
From 2f6b6e66ced53a0412c46851afb963f35a012f79 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 26 Sep 2022 12:55:07 -0700 Subject: [PATCH 06/88] Remove use of LiteralString in builtins (#13743) --- mypy/typeshed/stdlib/builtins.pyi | 93 ------------------------------- 1 file changed, 93 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index ea917bddb799..3de404b76652 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -56,7 +56,6 @@ from typing import ( # noqa: Y022 from typing_extensions import ( Concatenate, Literal, - LiteralString, ParamSpec, Self, SupportsIndex, @@ -436,17 +435,8 @@ class str(Sequence[str]): def __new__(cls, object: object = ...) -> Self: ... @overload def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... - @overload - def capitalize(self: LiteralString) -> LiteralString: ... - @overload def capitalize(self) -> str: ... # type: ignore[misc] - @overload - def casefold(self: LiteralString) -> LiteralString: ... - @overload def casefold(self) -> str: ... # type: ignore[misc] - @overload - def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @@ -454,20 +444,11 @@ class str(Sequence[str]): self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): - @overload - def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... 
# type: ignore[misc] else: - @overload - def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... - @overload def format(self, *args: object, **kwargs: object) -> str: ... def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -483,91 +464,32 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... - @overload - def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... - @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] - @overload - def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def lower(self: LiteralString) -> LiteralString: ... - @overload def lower(self) -> str: ... # type: ignore[misc] - @overload - def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def replace( - self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 - ) -> LiteralString: ... 
- @overload def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): - @overload - def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... - @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] - @overload - def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... - @overload def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] - @overload - def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... 
# type: ignore[misc] - @overload - def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... - @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - @overload - def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def swapcase(self: LiteralString) -> LiteralString: ... - @overload def swapcase(self) -> str: ... # type: ignore[misc] - @overload - def title(self: LiteralString) -> LiteralString: ... - @overload def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... - @overload - def upper(self: LiteralString) -> LiteralString: ... - @overload def upper(self) -> str: ... # type: ignore[misc] - @overload - def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... - @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload @@ -578,9 +500,6 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... - @overload - def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ... - @overload def __add__(self, __value: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __key: str) -> bool: ... # type: ignore[override] @@ -589,25 +508,13 @@ class str(Sequence[str]): def __getitem__(self, __key: SupportsIndex | slice) -> str: ... def __gt__(self, __value: str) -> bool: ... def __hash__(self) -> int: ... - @overload - def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... - @overload def __iter__(self) -> Iterator[str]: ... 
# type: ignore[misc] def __le__(self, __value: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __value: str) -> bool: ... - @overload - def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... - @overload def __mod__(self, __value: Any) -> str: ... - @overload - def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... - @overload def __mul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __value: object) -> bool: ... - @overload - def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... - @overload def __rmul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... From 120af30e706a0e0e29faf915c56c5b6781c9b204 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 29 Oct 2022 12:47:21 -0700 Subject: [PATCH 07/88] Revert sum literal integer change (#13961) This is allegedly causing large performance problems, see 13821 typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing to undo. Patching this in typeshed also feels weird, since there's a more general soundness issue. If a typevar has a bound or constraint, we might not want to solve it to a Literal. If we can confirm the performance regression or fix the unsoundness within mypy, I might pursue upstreaming this in typeshed. 
(Reminder: add this to the sync_typeshed script once merged) --- mypy/typeshed/stdlib/builtins.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 3de404b76652..d6ca39049c77 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1676,11 +1676,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... From 1866d28f156c413ce8cc9fec0b317a7d02c28565 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Mon, 1 May 2023 20:34:55 +0100 Subject: [PATCH 08/88] Revert typeshed ctypes change Since the plugin provides superior type checking: https://github.com/python/mypy/pull/13987#issuecomment-1310863427 A manual cherry-pick of e437cdf. --- mypy/typeshed/stdlib/_ctypes.pyi | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 25d604218a00..756ee86d3342 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -151,11 +151,7 @@ class Array(Generic[_CT], _CData): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - # Note: only available if _CT == c_char - @property - def raw(self) -> bytes: ... 
- @raw.setter - def raw(self, value: ReadableBuffer) -> None: ... + raw: bytes # Note: only available if _CT == c_char value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT From 3240da455e06a292669de22b0ef313dad43cb094 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Sat, 4 Mar 2023 13:14:11 +0000 Subject: [PATCH 09/88] Revert use of `ParamSpec` for `functools.wraps` --- mypy/typeshed/stdlib/functools.pyi | 40 +++++++++++------------------- 1 file changed, 14 insertions(+), 26 deletions(-) diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 8adc3d82292e..1b4e59b7c120 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -1,9 +1,9 @@ import sys import types -from _typeshed import SupportsAllComparisons, SupportsItems +from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload -from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final +from typing_extensions import Literal, Self, TypeAlias, TypedDict, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -28,12 +28,10 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 9): __all__ += ["cache"] +_AnyCallable: TypeAlias = Callable[..., object] + _T = TypeVar("_T") _S = TypeVar("_S") -_PWrapped = ParamSpec("_PWrapped") -_RWrapped = TypeVar("_RWrapped") -_PWrapper = ParamSpec("_PWrapper") -_RWapper = TypeVar("_RWapper") @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... 
@@ -87,41 +85,31 @@ else: ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] -class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWapper]): - __wrapped__: Callable[_PWrapped, _RWrapped] - def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWapper: ... - # as with ``Callable``, we'll assume that these attributes exist - __name__: str - __qualname__: str - -class _Wrapper(Generic[_PWrapped, _RWrapped]): - def __call__(self, f: Callable[_PWrapper, _RWapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... - if sys.version_info >= (3, 12): def update_wrapper( - wrapper: Callable[_PWrapper, _RWapper], - wrapped: Callable[_PWrapped, _RWrapped], + wrapper: _T, + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... + ) -> _T: ... def wraps( - wrapped: Callable[_PWrapped, _RWrapped], + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: ... + ) -> IdentityFunction: ... else: def update_wrapper( - wrapper: Callable[_PWrapper, _RWapper], - wrapped: Callable[_PWrapped, _RWrapped], + wrapper: _T, + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... + ) -> _T: ... def wraps( - wrapped: Callable[_PWrapped, _RWrapped], + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: ... + ) -> IdentityFunction: ... def total_ordering(cls: type[_T]) -> type[_T]: ... 
def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... From 3f601c3641ecde3557520ddc64a18baa40b12e35 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Mon, 17 Jul 2023 02:09:28 +0100 Subject: [PATCH 10/88] Remove unneeded `--strict-optional` flags from test cases (#15684) --- test-data/unit/check-abstract.test | 17 --------- test-data/unit/check-basic.test | 1 - test-data/unit/check-class-namedtuple.test | 1 - test-data/unit/check-classes.test | 36 ++++++++----------- test-data/unit/check-columns.test | 1 - test-data/unit/check-custom-plugin.test | 2 +- test-data/unit/check-dataclasses.test | 8 ++--- test-data/unit/check-enum.test | 4 --- test-data/unit/check-errorcodes.test | 7 +--- test-data/unit/check-expressions.test | 5 ++- test-data/unit/check-flags.test | 2 +- test-data/unit/check-functions.test | 2 -- test-data/unit/check-generics.test | 7 ++-- test-data/unit/check-incremental.test | 13 ++----- test-data/unit/check-inference-context.test | 13 ------- test-data/unit/check-inference.test | 19 +++------- test-data/unit/check-inline-config.test | 4 +-- test-data/unit/check-isinstance.test | 15 -------- test-data/unit/check-kwargs.test | 1 - test-data/unit/check-lists.test | 1 - test-data/unit/check-literal.test | 6 ---- test-data/unit/check-modules.test | 1 - test-data/unit/check-narrowing.test | 13 ++----- test-data/unit/check-native-int.test | 2 -- test-data/unit/check-newsyntax.test | 4 --- test-data/unit/check-overloading.test | 11 ++---- .../unit/check-parameter-specification.test | 1 - test-data/unit/check-plugin-attrs.test | 2 -- test-data/unit/check-protocols.test | 19 ++++------ test-data/unit/check-python310.test | 5 --- test-data/unit/check-python38.test | 10 +++--- test-data/unit/check-recursive-types.test | 4 --- test-data/unit/check-serialize.test | 1 - test-data/unit/check-type-aliases.test | 1 - test-data/unit/check-typeddict.test | 8 ----- test-data/unit/check-typeguard.test | 1 - 
test-data/unit/check-unions.test | 5 --- test-data/unit/check-unreachable-code.test | 1 - test-data/unit/check-varargs.test | 1 - test-data/unit/check-warnings.test | 2 +- test-data/unit/deps.test | 1 - test-data/unit/fine-grained-suggest.test | 17 --------- test-data/unit/fine-grained.test | 22 ++++-------- test-data/unit/pythoneval.test | 9 +++-- 44 files changed, 58 insertions(+), 248 deletions(-) diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index dc64476beda6..299074050baa 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -1125,7 +1125,6 @@ b.y = 1 -- ----------------------------------------------- [case testEmptyBodyProhibitedFunction] -# flags: --strict-optional from typing import overload, Union def func1(x: str) -> int: pass # E: Missing return statement @@ -1148,7 +1147,6 @@ def func5(x: Union[int, str]) -> Union[int, str]: # E: Missing return statement """Some function.""" [case testEmptyBodyProhibitedMethodNonAbstract] -# flags: --strict-optional from typing import overload, Union class A: @@ -1183,7 +1181,6 @@ class C: [builtins fixtures/classmethod.pyi] [case testEmptyBodyProhibitedPropertyNonAbstract] -# flags: --strict-optional class A: @property def x(self) -> int: ... # E: Missing return statement @@ -1212,7 +1209,6 @@ class C: [builtins fixtures/property.pyi] [case testEmptyBodyNoteABCMeta] -# flags: --strict-optional from abc import ABC class A(ABC): @@ -1221,7 +1217,6 @@ class A(ABC): ... 
[case testEmptyBodyAllowedFunctionStub] -# flags: --strict-optional import stub [file stub.pyi] from typing import overload, Union @@ -1232,7 +1227,6 @@ def func3(x: str) -> int: """Some function.""" [case testEmptyBodyAllowedMethodNonAbstractStub] -# flags: --strict-optional import stub [file stub.pyi] from typing import overload, Union @@ -1254,7 +1248,6 @@ class B: [builtins fixtures/classmethod.pyi] [case testEmptyBodyAllowedPropertyNonAbstractStub] -# flags: --strict-optional import stub [file stub.pyi] class A: @@ -1285,7 +1278,6 @@ class C: [builtins fixtures/property.pyi] [case testEmptyBodyAllowedMethodAbstract] -# flags: --strict-optional from typing import overload, Union from abc import abstractmethod @@ -1333,7 +1325,6 @@ class C: [builtins fixtures/classmethod.pyi] [case testEmptyBodyAllowedPropertyAbstract] -# flags: --strict-optional from abc import abstractmethod class A: @property @@ -1372,7 +1363,6 @@ class C: [builtins fixtures/property.pyi] [case testEmptyBodyImplicitlyAbstractProtocol] -# flags: --strict-optional from typing import Protocol, overload, Union class P1(Protocol): @@ -1413,7 +1403,6 @@ C3() [builtins fixtures/classmethod.pyi] [case testEmptyBodyImplicitlyAbstractProtocolProperty] -# flags: --strict-optional from typing import Protocol class P1(Protocol): @@ -1443,7 +1432,6 @@ C2() [builtins fixtures/property.pyi] [case testEmptyBodyImplicitlyAbstractProtocolStub] -# flags: --strict-optional from stub import P1, P2, P3, P4 class B1(P1): ... @@ -1479,7 +1467,6 @@ class P4(Protocol): [builtins fixtures/classmethod.pyi] [case testEmptyBodyUnsafeAbstractSuper] -# flags: --strict-optional from stub import StubProto, StubAbstract from typing import Protocol from abc import abstractmethod @@ -1528,7 +1515,6 @@ class StubAbstract: def meth(self) -> int: ... 
[case testEmptyBodyUnsafeAbstractSuperProperty] -# flags: --strict-optional from stub import StubProto, StubAbstract from typing import Protocol from abc import abstractmethod @@ -1586,7 +1572,6 @@ class StubAbstract: [builtins fixtures/property.pyi] [case testEmptyBodyUnsafeAbstractSuperOverloads] -# flags: --strict-optional from stub import StubProto from typing import Protocol, overload, Union @@ -1671,7 +1656,6 @@ class SubAbstract(Abstract): return super().meth() [case testEmptyBodyNoSuperWarningOptionalReturn] -# flags: --strict-optional from typing import Protocol, Optional from abc import abstractmethod @@ -1689,7 +1673,6 @@ class SubAbstract(Abstract): return super().meth() [case testEmptyBodyTypeCheckingOnly] -# flags: --strict-optional from typing import TYPE_CHECKING class C: diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test index 408c3599672b..61a7160ce4f4 100644 --- a/test-data/unit/check-basic.test +++ b/test-data/unit/check-basic.test @@ -385,7 +385,6 @@ y = x # E: Incompatible types in assignment (expression has type "Dict[str, int] [builtins fixtures/dict.pyi] [case testDistinctTypes] -# flags: --strict-optional import b [file a.py] diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index 1916cb41bb74..a095f212b900 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -419,7 +419,6 @@ UserDefined(1) # E: Argument 1 to "UserDefined" has incompatible type "int"; ex [builtins fixtures/list.pyi] [case testNewNamedTupleWithDefaultsStrictOptional] -# flags: --strict-optional from typing import List, NamedTuple, Optional class HasNone(NamedTuple): diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 957eb9214d7c..b9e65ef4ad20 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -935,7 +935,6 @@ if int(): b = D2() [case 
testConstructorJoinsWithCustomMetaclass] -# flags: --strict-optional from typing import TypeVar import abc @@ -1629,7 +1628,6 @@ a = A() reveal_type(a.f) # N: Revealed type is "__main__.D" [case testAccessingDescriptorFromClass] -# flags: --strict-optional from d import D, Base class A(Base): f = D() @@ -1647,7 +1645,6 @@ class D: [builtins fixtures/bool.pyi] [case testAccessingDescriptorFromClassWrongBase] -# flags: --strict-optional from d import D, Base class A: f = D() @@ -1664,13 +1661,13 @@ class D: def __get__(self, inst: Base, own: Type[Base]) -> str: pass [builtins fixtures/bool.pyi] [out] -main:5: error: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "Type[Base]" -main:5: note: Revealed type is "d.D" -main:6: error: No overload variant of "__get__" of "D" matches argument types "A", "Type[A]" -main:6: note: Possible overload variants: -main:6: note: def __get__(self, inst: None, own: Type[Base]) -> D -main:6: note: def __get__(self, inst: Base, own: Type[Base]) -> str -main:6: note: Revealed type is "Any" +main:4: error: Argument 2 to "__get__" of "D" has incompatible type "Type[A]"; expected "Type[Base]" +main:4: note: Revealed type is "d.D" +main:5: error: No overload variant of "__get__" of "D" matches argument types "A", "Type[A]" +main:5: note: Possible overload variants: +main:5: note: def __get__(self, inst: None, own: Type[Base]) -> D +main:5: note: def __get__(self, inst: Base, own: Type[Base]) -> str +main:5: note: Revealed type is "Any" [case testAccessingGenericNonDataDescriptor] from typing import TypeVar, Type, Generic, Any @@ -1702,7 +1699,6 @@ a.g = '' a.g = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testAccessingGenericDescriptorFromClass] -# flags: --strict-optional from d import D class A: f = D(10) # type: D[A, int] @@ -1724,7 +1720,6 @@ class D(Generic[T, V]): [builtins fixtures/bool.pyi] [case testAccessingGenericDescriptorFromInferredClass] -# flags: 
--strict-optional from typing import Type from d import D class A: @@ -1745,11 +1740,10 @@ class D(Generic[T, V]): def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [out] -main:8: note: Revealed type is "d.D[__main__.A, builtins.int]" -main:9: note: Revealed type is "d.D[__main__.A, builtins.str]" +main:7: note: Revealed type is "d.D[__main__.A, builtins.int]" +main:8: note: Revealed type is "d.D[__main__.A, builtins.str]" [case testAccessingGenericDescriptorFromClassBadOverload] -# flags: --strict-optional from d import D class A: f = D(10) # type: D[A, int] @@ -1766,11 +1760,11 @@ class D(Generic[T, V]): def __get__(self, inst: T, own: Type[T]) -> V: pass [builtins fixtures/bool.pyi] [out] -main:5: error: No overload variant of "__get__" of "D" matches argument types "None", "Type[A]" -main:5: note: Possible overload variants: -main:5: note: def __get__(self, inst: None, own: None) -> D[A, int] -main:5: note: def __get__(self, inst: A, own: Type[A]) -> int -main:5: note: Revealed type is "Any" +main:4: error: No overload variant of "__get__" of "D" matches argument types "None", "Type[A]" +main:4: note: Possible overload variants: +main:4: note: def __get__(self, inst: None, own: None) -> D[A, int] +main:4: note: def __get__(self, inst: A, own: Type[A]) -> int +main:4: note: Revealed type is "Any" [case testAccessingNonDataDescriptorSubclass] from typing import Any @@ -6484,7 +6478,6 @@ def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... [out] [case testOptionalDescriptorsBinder] -# flags: --strict-optional from typing import Type, TypeVar, Optional T = TypeVar('T') @@ -6698,7 +6691,6 @@ class C(Generic[T]): [builtins fixtures/isinstancelist.pyi] [case testIsInstanceTypeSubclass] -# flags: --strict-optional from typing import Type, Optional class Base: ... 
class One(Base): diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test index 9d9a7d9ac039..44524b9df943 100644 --- a/test-data/unit/check-columns.test +++ b/test-data/unit/check-columns.test @@ -27,7 +27,6 @@ A().f(1, 1) # E:10: Argument 2 to "f" of "A" has incompatible type "int"; expect (A().f(1, 'hello', 'hi')) # E:2: Too many arguments for "f" of "A" [case testColumnsInvalidArgumentType] -# flags: --strict-optional def f(x: int, y: str) -> None: ... def g(*x: int) -> None: pass def h(**x: int) -> None: pass diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index ec5bce219dbd..9a0668f98c21 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -802,7 +802,7 @@ else: plugins=/test-data/unit/plugins/union_method.py [case testGetMethodHooksOnUnionsStrictOptional] -# flags: --config-file tmp/mypy.ini --strict-optional +# flags: --config-file tmp/mypy.ini from typing import Union class Foo: diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index adcaa60a5b19..3866442230bf 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1127,7 +1127,6 @@ class Foo: [case testNoComplainFieldNoneStrict] # flags: --python-version 3.7 -# flags: --strict-optional from dataclasses import dataclass, field from typing import Optional @@ -1264,7 +1263,7 @@ class Deferred: pass [builtins fixtures/dataclasses.pyi] [case testDeferredDataclassInitSignatureSubclass] -# flags: --strict-optional --python-version 3.7 +# flags: --python-version 3.7 from dataclasses import dataclass from typing import Optional @@ -1745,7 +1744,7 @@ reveal_type(Child2[int, A]([A()], [1]).b) # N: Revealed type is "builtins.list[ [builtins fixtures/dataclasses.pyi] [case testDataclassInheritOptionalType] -# flags: --python-version 3.7 --strict-optional +# flags: --python-version 3.7 from dataclasses import 
dataclass from typing import Any, Callable, Generic, TypeVar, List, Optional @@ -1979,7 +1978,6 @@ B = List[C] [builtins fixtures/dataclasses.pyi] [case testDataclassSelfType] -# flags: --strict-optional from dataclasses import dataclass from typing import Self, TypeVar, Generic, Optional @@ -2104,7 +2102,6 @@ a2 = replace(a, q='42') # E: Argument "q" to "replace" of "A" has incompatible reveal_type(a2) # N: Revealed type is "__main__.A" [case testReplaceUnion] -# flags: --strict-optional from typing import Generic, Union, TypeVar from dataclasses import dataclass, replace, InitVar @@ -2136,7 +2133,6 @@ _ = replace(a_or_b, y=42, init_var=42) # E: Argument "y" to "replace" of "Union [builtins fixtures/dataclasses.pyi] [case testReplaceUnionOfTypeVar] -# flags: --strict-optional from typing import Generic, Union, TypeVar from dataclasses import dataclass, replace diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index ce7e173f635d..6779ae266454 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -953,7 +953,6 @@ else: [builtins fixtures/bool.pyi] [case testEnumReachabilityWithNone] -# flags: --strict-optional from enum import Enum from typing import Optional @@ -1016,7 +1015,6 @@ reveal_type(x3) # N: Revealed type is "Union[__main__.Foo, __main__.Bar]" [builtins fixtures/bool.pyi] [case testEnumReachabilityPEP484ExampleWithFinal] -# flags: --strict-optional from typing import Union from typing_extensions import Final from enum import Enum @@ -1063,7 +1061,6 @@ def process(response: Union[str, Reason] = '') -> str: [case testEnumReachabilityPEP484ExampleSingleton] -# flags: --strict-optional from typing import Union from typing_extensions import Final from enum import Enum @@ -1088,7 +1085,6 @@ def func(x: Union[int, None, Empty] = _empty) -> int: [builtins fixtures/primitives.pyi] [case testEnumReachabilityPEP484ExampleSingletonWithMethod] -# flags: --strict-optional from typing import Union from 
typing_extensions import Final from enum import Enum diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 1e7dc9364855..1efbab7de322 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -732,7 +732,6 @@ class InvalidReturn: [builtins fixtures/bool.pyi] [case testErrorCodeOverloadedOperatorMethod] -# flags: --strict-optional from typing import Optional, overload class A: @@ -758,7 +757,6 @@ class C: x - C() # type: ignore[operator] [case testErrorCodeMultiLineBinaryOperatorOperand] -# flags: --strict-optional from typing import Optional class C: pass @@ -897,7 +895,6 @@ if any_or_object: [builtins fixtures/list.pyi] [case testTruthyFunctions] -# flags: --strict-optional def f(): pass if f: # E: Function "f" could always be true in boolean context [truthy-function] @@ -907,7 +904,7 @@ if not f: # E: Function "f" could always be true in boolean context [truthy-fu conditional_result = 'foo' if f else 'bar' # E: Function "f" could always be true in boolean context [truthy-function] [case testTruthyIterable] -# flags: --strict-optional --enable-error-code truthy-iterable +# flags: --enable-error-code truthy-iterable from typing import Iterable def func(var: Iterable[str]) -> None: if var: # E: "var" has type "Iterable[str]" which can always be true in boolean context. Consider using "Collection[str]" instead. [truthy-iterable] @@ -995,7 +992,6 @@ var: int = "" # E: Incompatible types in assignment (expression has type "str", show_error_codes = True [case testErrorCodeUnsafeSuper_no_empty] -# flags: --strict-optional from abc import abstractmethod class Base: @@ -1008,7 +1004,6 @@ class Sub(Base): [builtins fixtures/exception.pyi] [case testDedicatedErrorCodeForEmpty_no_empty] -# flags: --strict-optional from typing import Optional def foo() -> int: ... # E: Missing return statement [empty-body] def bar() -> None: ... 
diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 8231b0a3265f..40ee28830b21 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1210,7 +1210,6 @@ a[:None] [builtins fixtures/slice.pyi] [case testNoneSliceBoundsWithStrictOptional] -# flags: --strict-optional from typing import Any a: Any a[None:1] @@ -2049,7 +2048,7 @@ x is 42 [typing fixtures/typing-full.pyi] [case testStrictEqualityStrictOptional] -# flags: --strict-equality --strict-optional +# flags: --strict-equality x: str if x is not None: # OK even with strict-optional @@ -2065,7 +2064,7 @@ if x is not None: # OK without strict-optional [builtins fixtures/bool.pyi] [case testStrictEqualityEqNoOptionalOverlap] -# flags: --strict-equality --strict-optional +# flags: --strict-equality from typing import Optional x: Optional[str] diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 3750c44ed7f3..e21157eae991 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -427,7 +427,7 @@ async def h() -> NoReturn: # E: Implicit return in function which does not retu [typing fixtures/typing-async.pyi] [case testNoWarnNoReturn] -# flags: --no-warn-no-return --strict-optional +# flags: --no-warn-no-return import typing def implicit_optional_return(arg) -> typing.Optional[str]: diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 0de4798ea1f5..a8722d8190b9 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2545,7 +2545,6 @@ reveal_type(bar(None)) # N: Revealed type is "None" [out] [case testNoComplainOverloadNoneStrict] -# flags: --strict-optional from typing import overload, Optional @overload def bar(x: None) -> None: @@ -2574,7 +2573,6 @@ xx: Optional[int] = X(x_in) [out] [case testNoComplainInferredNoneStrict] -# flags: --strict-optional from typing import TypeVar, Optional T = 
TypeVar('T') def X(val: T) -> T: ... diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 90d46c217451..34588bfceb3d 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -2323,7 +2323,6 @@ class B(A): [builtins fixtures/classmethod.pyi] [case testSubclassingGenericSelfClassMethodOptional] -# flags: --strict-optional from typing import TypeVar, Type, Optional AT = TypeVar('AT', bound='A') @@ -2935,7 +2934,7 @@ reveal_type(dec(id)) # N: Revealed type is "def [S] (S`1) -> builtins.list[S`1] [builtins fixtures/list.pyi] [case testInferenceAgainstGenericCallableGenericProtocol] -# flags: --strict-optional --new-type-inference +# flags: --new-type-inference from typing import TypeVar, Protocol, Generic, Optional T = TypeVar('T') @@ -2951,7 +2950,7 @@ reveal_type(lift(g)) # N: Revealed type is "def [T] (Union[T`1, None]) -> Union [builtins fixtures/list.pyi] [case testInferenceAgainstGenericSplitOrder] -# flags: --strict-optional --new-type-inference +# flags: --new-type-inference from typing import TypeVar, Callable, List S = TypeVar('S') @@ -2966,7 +2965,7 @@ reveal_type(dec(id, id)) # N: Revealed type is "def (builtins.int) -> builtins. 
[builtins fixtures/list.pyi] [case testInferenceAgainstGenericSplitOrderGeneric] -# flags: --strict-optional --new-type-inference +# flags: --new-type-inference from typing import TypeVar, Callable, Tuple S = TypeVar('S') diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index d8461fc78815..80f5e4e7d12d 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -2901,7 +2901,6 @@ tmp/main.py:2: error: Expression has type "Any" tmp/main.py:2: error: Expression has type "Any" [case testIncrementalStrictOptional] -# flags: --strict-optional import a 1 + a.foo() [file a.py] @@ -2911,8 +2910,8 @@ from typing import Optional def foo() -> Optional[int]: return 0 [out1] [out2] -main:3: error: Unsupported operand types for + ("int" and "None") -main:3: note: Right operand is of type "Optional[int]" +main:2: error: Unsupported operand types for + ("int" and "None") +main:2: note: Right operand is of type "Optional[int]" [case testAttrsIncrementalSubclassingCached] from a import A @@ -3457,7 +3456,6 @@ main:2: error: Cannot find implementation or library stub for module named "a" main:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports [case testIncrementalInheritanceAddAnnotation] -# flags: --strict-optional import a [file a.py] import b @@ -5757,7 +5755,6 @@ class C: [builtins fixtures/tuple.pyi] [case testNamedTupleUpdateNonRecursiveToRecursiveCoarse] -# flags: --strict-optional import c [file a.py] from b import M @@ -5800,7 +5797,6 @@ tmp/c.py:5: error: Incompatible types in assignment (expression has type "Option tmp/c.py:7: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int, fallback=b.M], None], builtins.int, fallback=a.N]" [case testTupleTypeUpdateNonRecursiveToRecursiveCoarse] -# flags: --strict-optional import c [file a.py] from b import M @@ -5833,7 +5829,6 @@ tmp/c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], 
builtins tmp/c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int") [case testTypeAliasUpdateNonRecursiveToRecursiveCoarse] -# flags: --strict-optional import c [file a.py] from b import M @@ -5866,7 +5861,6 @@ tmp/c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins tmp/c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int") [case testTypedDictUpdateNonRecursiveToRecursiveCoarse] -# flags: --strict-optional import c [file a.py] from b import M @@ -6061,7 +6055,6 @@ tmp/m.py:9: note: Got: tmp/m.py:9: note: def update() -> str [case testAbstractBodyTurnsEmptyCoarse] -# flags: --strict-optional from b import Base class Sub(Base): @@ -6081,7 +6074,7 @@ class Base: def meth(self) -> int: ... [out] [out2] -main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe +main:5: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe [case testNoCrashDoubleReexportFunctionEmpty] import m diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index 59f515490964..ba36c1548532 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -925,7 +925,6 @@ reveal_type(f(None)) # N: Revealed type is "Union[None, builtins.list[builtins.i [builtins fixtures/list.pyi] [case testUnionWithGenericTypeItemContextAndStrictOptional] -# flags: --strict-optional from typing import TypeVar, Union, List T = TypeVar('T') @@ -953,7 +952,6 @@ reveal_type(c.f(None)) # N: Revealed type is "Union[builtins.list[builtins.int], [builtins fixtures/list.pyi] [case testGenericMethodCalledInGenericContext] -# flags: --strict-optional from typing import TypeVar, Generic _KT = TypeVar('_KT') @@ -1221,7 +1219,6 @@ x: Iterable[Union[A, B]] = f(B()) [builtins fixtures/list.pyi] [case testWideOuterContextOptional] -# 
flags: --strict-optional from typing import Optional, Type, TypeVar class Custom: @@ -1235,7 +1232,6 @@ def b(x: T) -> Optional[T]: return a(x) [case testWideOuterContextOptionalGenericReturn] -# flags: --strict-optional from typing import Optional, Type, TypeVar, Iterable class Custom: @@ -1249,7 +1245,6 @@ def b(x: T) -> Iterable[Optional[T]]: return a(x) [case testWideOuterContextOptionalMethod] -# flags: --strict-optional from typing import Optional, Type, TypeVar class A: pass @@ -1282,7 +1277,6 @@ def bar(xs: List[S]) -> S: [builtins fixtures/list.pyi] [case testWideOuterContextOptionalTypeVarReturn] -# flags: --strict-optional from typing import Callable, Iterable, List, Optional, TypeVar class C: @@ -1298,7 +1292,6 @@ def g(l: List[C], x: str) -> Optional[C]: [builtins fixtures/list.pyi] [case testWideOuterContextOptionalTypeVarReturnLambda] -# flags: --strict-optional from typing import Callable, Iterable, List, Optional, TypeVar class C: @@ -1335,7 +1328,6 @@ y: List[str] = f([]) \ [builtins fixtures/list.pyi] [case testWideOuterContextNoArgs] -# flags: --strict-optional from typing import TypeVar, Optional T = TypeVar('T', bound=int) @@ -1344,7 +1336,6 @@ def f(x: Optional[T] = None) -> T: ... 
y: str = f() [case testWideOuterContextNoArgsError] -# flags: --strict-optional from typing import TypeVar, Optional, List T = TypeVar('T', bound=int) @@ -1427,7 +1418,6 @@ bar({1: 2}) [builtins fixtures/dict.pyi] [case testOptionalTypeNarrowedByGenericCall] -# flags: --strict-optional from typing import Dict, Optional d: Dict[str, str] = {} @@ -1439,7 +1429,6 @@ def foo(arg: Optional[str] = None) -> None: [builtins fixtures/dict.pyi] [case testOptionalTypeNarrowedByGenericCall2] -# flags: --strict-optional from typing import Dict, Optional d: Dict[str, str] = {} @@ -1451,7 +1440,6 @@ if x: [builtins fixtures/dict.pyi] [case testOptionalTypeNarrowedByGenericCall3] -# flags: --strict-optional from typing import Generic, TypeVar, Union T = TypeVar("T") @@ -1464,7 +1452,6 @@ def foo(arg: Union[str, int]) -> None: [builtins fixtures/isinstance.pyi] [case testOptionalTypeNarrowedByGenericCall4] -# flags: --strict-optional from typing import Optional, List, Generic, TypeVar T = TypeVar("T", covariant=True) diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 3c4a0943556a..e0f29a19ec1d 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1402,7 +1402,6 @@ f(b) g(b) [case testLambdaDefaultContext] -# flags: --strict-optional from typing import Callable def f(a: Callable[..., None] = lambda *a, **k: None): pass @@ -1811,7 +1810,6 @@ reveal_type(C().a) # N: Revealed type is "builtins.dict[builtins.int, builtins. 
[builtins fixtures/dict.pyi] [case testInferAttributeInitializedToNoneAndAssigned] -# flags: --strict-optional class C: def __init__(self) -> None: self.a = None @@ -1858,7 +1856,6 @@ reveal_type(C().a) # N: Revealed type is "builtins.dict[Any, Any]" [builtins fixtures/dict.pyi] [case testInferAttributeInitializedToNoneAndAssignedOtherMethod] -# flags: --strict-optional class C: def __init__(self) -> None: self.a = None @@ -1891,7 +1888,6 @@ reveal_type(C().a) # N: Revealed type is "builtins.dict[Any, Any]" [builtins fixtures/dict.pyi] [case testInferAttributeInitializedToNoneAndAssignedClassBody] -# flags: --strict-optional class C: a = None def __init__(self) -> None: @@ -2538,7 +2534,6 @@ if bool(): [out] [case testDontMarkUnreachableAfterInferenceUninhabited2] -# flags: --strict-optional from typing import TypeVar, Optional T = TypeVar('T') def f(x: Optional[T] = None) -> T: pass @@ -2609,7 +2604,7 @@ x = '' reveal_type(x) # N: Revealed type is "builtins.str" [case testLocalPartialTypesWithGlobalInitializedToNoneStrictOptional] -# flags: --local-partial-types --strict-optional +# flags: --local-partial-types x = None def f() -> None: @@ -2761,7 +2756,7 @@ class B(A): reveal_type(B.x) # N: Revealed type is "None" [case testLocalPartialTypesWithInheritance2] -# flags: --local-partial-types --strict-optional +# flags: --local-partial-types class A: x: str @@ -2769,7 +2764,7 @@ class B(A): x = None # E: Incompatible types in assignment (expression has type "None", base class "A" defined the type as "str") [case testLocalPartialTypesWithAnyBaseClass] -# flags: --local-partial-types --strict-optional +# flags: --local-partial-types from typing import Any A: Any @@ -2781,7 +2776,7 @@ class C(B): y = None [case testLocalPartialTypesInMultipleMroItems] -# flags: --local-partial-types --strict-optional +# flags: --local-partial-types from typing import Optional class A: @@ -3106,7 +3101,6 @@ class B(A): x = 2 # E: Incompatible types in assignment (expression has type 
"int", base class "A" defined the type as "str") [case testInheritedAttributeStrictOptional] -# flags: --strict-optional class A: x: str @@ -3209,7 +3203,6 @@ x: Inv[int] reveal_type(f(x)) # N: Revealed type is "builtins.int" [case testOptionalTypeVarAgainstOptional] -# flags: --strict-optional from typing import Optional, TypeVar, Iterable, Iterator, List _T = TypeVar('_T') @@ -3256,7 +3249,6 @@ reveal_type(b) # N: Revealed type is "collections.defaultdict[builtins.int, buil [builtins fixtures/dict.pyi] [case testPartialDefaultDictListValueStrictOptional] -# flags: --strict-optional from collections import defaultdict a = defaultdict(list) a['x'].append(1) @@ -3333,7 +3325,6 @@ def g() -> None: pass reveal_type(f(g)) # N: Revealed type is "None" [case testInferCallableReturningNone2] -# flags: --strict-optional from typing import Callable, TypeVar T = TypeVar("T") @@ -3404,7 +3395,6 @@ def collection_from_dict_value(model: Type[T2]) -> None: [builtins fixtures/isinstancelist.pyi] [case testRegression11705_Strict] -# flags: --strict-optional # See: https://github.com/python/mypy/issues/11705 from typing import Dict, Optional, NamedTuple class C(NamedTuple): @@ -3454,7 +3444,6 @@ foo(("a", {"a": "b"}, "b")) [builtins fixtures/dict.pyi] [case testUseSupertypeAsInferenceContext] -# flags: --strict-optional from typing import List, Optional class B: diff --git a/test-data/unit/check-inline-config.test b/test-data/unit/check-inline-config.test index 71030b5c9b97..bedba811d95b 100644 --- a/test-data/unit/check-inline-config.test +++ b/test-data/unit/check-inline-config.test @@ -165,7 +165,6 @@ main:1: error: Unrecognized option: skip_file = True main:1: error: Setting "strict" not supported in inline configuration: specify it in a configuration file instead, or set individual inline flags (see "mypy -h" for the list of flags enabled in strict mode) [case testInlineErrorCodes] -# flags: --strict-optional # mypy: enable-error-code="ignore-without-code,truthy-bool" class 
Foo: pass @@ -175,7 +174,7 @@ if foo: ... # E: "__main__.foo" has type "Foo" which does not implement __bool_ 42 + "no" # type: ignore # E: "type: ignore" comment without error code (consider "type: ignore[operator]" instead) [case testInlineErrorCodesOverrideConfig] -# flags: --strict-optional --config-file tmp/mypy.ini +# flags: --config-file tmp/mypy.ini import foo import tests.bar import tests.baz @@ -243,7 +242,6 @@ class C: self.x = 1 [case testIgnoreErrorsWithUnsafeSuperCall_no_empty] -# flags: --strict-optional from m import C diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 3403e726d8b5..361d4db78752 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -1801,7 +1801,6 @@ if issubclass(fm, Bar): [builtins fixtures/isinstance.pyi] [case testIssubclassWithMetaclassesStrictOptional] -# flags: --strict-optional class FooMetaclass(type): ... class BarMetaclass(type): ... class Foo(metaclass=FooMetaclass): ... 
@@ -1906,7 +1905,6 @@ def narrow_any_to_str_then_reassign_to_int() -> None: [builtins fixtures/isinstance.pyi] [case testNarrowTypeAfterInList] -# flags: --strict-optional from typing import List, Optional x: List[int] @@ -1924,7 +1922,6 @@ else: [out] [case testNarrowTypeAfterInListOfOptional] -# flags: --strict-optional from typing import List, Optional x: List[Optional[int]] @@ -1938,7 +1935,6 @@ else: [out] [case testNarrowTypeAfterInListNonOverlapping] -# flags: --strict-optional from typing import List, Optional x: List[str] @@ -1952,7 +1948,6 @@ else: [out] [case testNarrowTypeAfterInListNested] -# flags: --strict-optional from typing import List, Optional, Any x: Optional[int] @@ -1967,7 +1962,6 @@ if x in nested_any: [out] [case testNarrowTypeAfterInTuple] -# flags: --strict-optional from typing import Optional class A: pass class B(A): pass @@ -1982,7 +1976,6 @@ else: [out] [case testNarrowTypeAfterInNamedTuple] -# flags: --strict-optional from typing import NamedTuple, Optional class NT(NamedTuple): x: int @@ -1998,7 +1991,6 @@ else: [out] [case testNarrowTypeAfterInDict] -# flags: --strict-optional from typing import Dict, Optional x: Dict[str, int] y: Optional[str] @@ -2015,7 +2007,6 @@ else: [out] [case testNarrowTypeAfterInNoAnyOrObject] -# flags: --strict-optional from typing import Any, List, Optional x: List[Any] z: List[object] @@ -2035,7 +2026,6 @@ else: [out] [case testNarrowTypeAfterInUserDefined] -# flags: --strict-optional from typing import Container, Optional class C(Container[int]): @@ -2057,7 +2047,6 @@ else: [out] [case testNarrowTypeAfterInSet] -# flags: --strict-optional from typing import Optional, Set s: Set[str] @@ -2074,7 +2063,6 @@ else: [out] [case testNarrowTypeAfterInTypedDict] -# flags: --strict-optional from typing import Optional from mypy_extensions import TypedDict class TD(TypedDict): @@ -2150,7 +2138,6 @@ else: [builtins fixtures/isinstance.pyi] [case testIsInstanceInitialNoneCheckSkipsImpossibleCasesNoStrictOptional] 
-# flags: --strict-optional from typing import Optional, Union class A: pass @@ -2197,7 +2184,6 @@ def foo2(x: Optional[str]) -> None: [builtins fixtures/isinstance.pyi] [case testNoneCheckDoesNotNarrowWhenUsingTypeVars] -# flags: --strict-optional # Note: this test (and the following one) are testing checker.conditional_type_map: # if you set the 'prohibit_none_typevar_overlap' keyword argument to False when calling @@ -2249,7 +2235,6 @@ def bar(x: Union[List[str], List[int], None]) -> None: [builtins fixtures/isinstancelist.pyi] [case testNoneAndGenericTypesOverlapStrictOptional] -# flags: --strict-optional from typing import Union, Optional, List # This test is the same as the one above, except for strict-optional. diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index b3ee47aa6fdf..4beac047e278 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -350,7 +350,6 @@ class A: pass [builtins fixtures/dict.pyi] [case testInvalidTypeForKeywordVarArg] -# flags: --strict-optional from typing import Dict, Any, Optional class A: pass def f(**kwargs: 'A') -> None: pass diff --git a/test-data/unit/check-lists.test b/test-data/unit/check-lists.test index 9809024afdbb..77acdafd3319 100644 --- a/test-data/unit/check-lists.test +++ b/test-data/unit/check-lists.test @@ -89,7 +89,6 @@ reveal_type(c) # N: Revealed type is "builtins.list[builtins.int]" [builtins fixtures/list.pyi] [case testComprehensionShadowBinder] -# flags: --strict-optional def foo(x: object) -> None: if isinstance(x, str): [reveal_type(x) for x in [1, 2, 3]] # N: Revealed type is "builtins.int" diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index abdbf733a679..f63f4026c4b6 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -659,7 +659,6 @@ def foo(b: Literal[T]) -> Tuple[T]: pass # E: Parameter 1 of Literal[...] 
is i -- [case testLiteralMultipleValues] -# flags: --strict-optional from typing_extensions import Literal a: Literal[1, 2, 3] b: Literal["a", "b", "c"] @@ -689,7 +688,6 @@ reveal_type(b) # N: Revealed type is "Union[Literal[1], Literal[2], Literal[3]] [out] [case testLiteralNestedUsage] -# flags: --strict-optional from typing_extensions import Literal a: Literal[Literal[3], 4, Literal["foo"]] @@ -818,7 +816,6 @@ foo(c) # E: Argument 1 to "foo" has incompatible type "Literal[4, 'foo']"; expe [out] [case testLiteralCheckSubtypingStrictOptional] -# flags: --strict-optional from typing import Any, NoReturn from typing_extensions import Literal @@ -1807,7 +1804,6 @@ reveal_type(unify(f6)) # N: Revealed type is "None" [out] [case testLiteralMeetsWithStrictOptional] -# flags: --strict-optional from typing import TypeVar, Callable, Union from typing_extensions import Literal @@ -1834,7 +1830,6 @@ reveal_type(unify(func)) # N: Revealed type is "" -- [case testLiteralIntelligentIndexingTuples] -# flags: --strict-optional from typing import Tuple, NamedTuple, Optional, Final from typing_extensions import Literal @@ -2247,7 +2242,6 @@ force4(reveal_type(f.instancevar4)) # N: Revealed type is "None" [out] [case testLiteralFinalErasureInMutableDatastructures1] -# flags: --strict-optional from typing_extensions import Final var1: Final = [0, None] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 4992b6589bb3..bdf860cba89d 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -567,7 +567,6 @@ x = 1 x = 1 [case testAssignToFuncDefViaImport] -# flags: --strict-optional # Errors differ with the new analyzer. 
(Old analyzer gave error on the # input, which is maybe better, but no error about f, which seems diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index f06af0057f0f..22014d4c645c 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -747,7 +747,6 @@ def test3(switch: FlipFlopEnum) -> None: [builtins fixtures/primitives.pyi] [case testNarrowingEqualityRequiresExplicitStrLiteral] -# flags: --strict-optional from typing_extensions import Literal, Final A_final: Final = "A" @@ -794,7 +793,6 @@ reveal_type(x_union) # N: Revealed type is "Union[Literal['A'], Literal['B' [builtins fixtures/primitives.pyi] [case testNarrowingEqualityRequiresExplicitEnumLiteral] -# flags: --strict-optional from typing import Union from typing_extensions import Literal, Final from enum import Enum @@ -879,7 +877,7 @@ else: [builtins fixtures/primitives.pyi] [case testNarrowingEqualityDisabledForCustomEqualityChain] -# flags: --strict-optional --strict-equality --warn-unreachable +# flags: --strict-equality --warn-unreachable from typing import Union from typing_extensions import Literal @@ -916,7 +914,7 @@ else: [builtins fixtures/primitives.pyi] [case testNarrowingUnreachableCases] -# flags: --strict-optional --strict-equality --warn-unreachable +# flags: --strict-equality --warn-unreachable from typing import Union from typing_extensions import Literal @@ -964,7 +962,7 @@ else: [builtins fixtures/primitives.pyi] [case testNarrowingUnreachableCases2] -# flags: --strict-optional --strict-equality --warn-unreachable +# flags: --strict-equality --warn-unreachable from typing import Union from typing_extensions import Literal @@ -1064,7 +1062,6 @@ else: [builtins fixtures/primitives.pyi] [case testNarrowingBooleanIdentityCheck] -# flags: --strict-optional from typing import Optional from typing_extensions import Literal @@ -1087,7 +1084,6 @@ else: [builtins fixtures/primitives.pyi] [case 
testNarrowingBooleanTruthiness] -# flags: --strict-optional from typing import Optional from typing_extensions import Literal @@ -1109,7 +1105,6 @@ reveal_type(opt_bool_val) # N: Revealed type is "Union[builtins.bool, None]" [builtins fixtures/primitives.pyi] [case testNarrowingBooleanBoolOp] -# flags: --strict-optional from typing import Optional from typing_extensions import Literal @@ -1161,7 +1156,6 @@ def f(d: Union[Foo, Bar]) -> None: [builtins fixtures/dict.pyi] [case testNarrowingUsingMetaclass] -# flags: --strict-optional from typing import Type class M(type): @@ -1181,7 +1175,6 @@ def f(t: Type[C]) -> None: reveal_type(t) # N: Revealed type is "Type[__main__.C]" [case testNarrowingUsingTypeVar] -# flags: --strict-optional from typing import Type, TypeVar class A: pass diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test index 1129512694f4..30314eebcb31 100644 --- a/test-data/unit/check-native-int.test +++ b/test-data/unit/check-native-int.test @@ -69,7 +69,6 @@ reveal_type(join(a, n64)) # N: Revealed type is "Any" [builtins fixtures/dict.pyi] [case testNativeIntMeets] -# flags: --strict-optional from typing import TypeVar, Callable, Any from mypy_extensions import i32, i64 @@ -130,7 +129,6 @@ reveal_type(y) # N: Revealed type is "builtins.int" [builtins fixtures/dict.pyi] [case testNativeIntFloatConversion] -# flags: --strict-optional from typing import TypeVar, Callable from mypy_extensions import i32 diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test index 0815d7af1933..3ed4c6d3d8e2 100644 --- a/test-data/unit/check-newsyntax.test +++ b/test-data/unit/check-newsyntax.test @@ -49,20 +49,17 @@ TstInstance().a = 'ab' [out] [case testNewSyntaxWithClassVars] -# flags: --strict-optional class CCC: a: str = None # E: Incompatible types in assignment (expression has type "None", variable has type "str") [out] [case testNewSyntaxWithStrictOptional] -# flags: --strict-optional strict: int 
strict = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") strict2: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") [out] [case testNewSyntaxWithStrictOptionalFunctions] -# flags: --strict-optional def f() -> None: x: int if int(): @@ -70,7 +67,6 @@ def f() -> None: [out] [case testNewSyntaxWithStrictOptionalClasses] -# flags: --strict-optional class C: def meth(self) -> None: x: int = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 4851cc96e6da..f49a15ada85c 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -3264,7 +3264,6 @@ f(x, B()) # E: Argument 1 to "f" has incompatible type "Union[A, B]"; expected [builtins fixtures/tuple.pyi] [case testOverloadInferUnionWithMixOfPositionalAndOptionalArgs] -# flags: --strict-optional from typing import overload, Union, Optional class A: ... 
@@ -3603,7 +3602,6 @@ reveal_type(g(b)) # N: Revealed type is "builtins.str" reveal_type(g(c)) # N: Revealed type is "builtins.str" [case testOverloadsAndNoneWithStrictOptional] -# flags: --strict-optional from typing import overload, Optional @overload @@ -3651,7 +3649,6 @@ reveal_type(mymap(f3, seq)) # N: Revealed type is "typing.Iterable[builtins.str [typing fixtures/typing-medium.pyi] [case testOverloadsNoneAndTypeVarsWithStrictOptional] -# flags: --strict-optional from typing import Callable, Iterable, TypeVar, overload, Optional T = TypeVar('T') @@ -3708,7 +3705,6 @@ def test_narrow_int() -> None: [typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional1] -# flags: --strict-optional from typing import overload, Union, NoReturn @overload @@ -3772,7 +3768,6 @@ def test_narrow_none() -> None: [typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional2] -# flags: --strict-optional from typing import overload, Union, TypeVar, NoReturn, Optional T = TypeVar('T') @@ -3836,7 +3831,6 @@ def test_narrow_none_v2() -> None: [typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional3] -# flags: --strict-optional from typing import overload, TypeVar, NoReturn, Optional @overload @@ -4648,7 +4642,6 @@ def none_second(x: int) -> int: return x [case testOverloadsWithNoneComingSecondIsOkInStrictOptional] -# flags: --strict-optional from typing import overload, Optional @overload @@ -4672,8 +4665,8 @@ def none_loose_impl(x: int) -> int: ... 
def none_loose_impl(x: int) -> int: return x [out] -main:22: error: Overloaded function implementation does not accept all possible arguments of signature 1 -main:22: error: Overloaded function implementation cannot produce return type of signature 1 +main:21: error: Overloaded function implementation does not accept all possible arguments of signature 1 +main:21: error: Overloaded function implementation cannot produce return type of signature 1 [case testTooManyUnionsException] from typing import overload, Union diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 3d05faed74f1..114fe1f8438a 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1307,7 +1307,6 @@ reveal_type(bar(C(fn=foo, x=1))) # N: Revealed type is "__main__.C[[x: builtins [builtins fixtures/paramspec.pyi] [case testParamSpecClassConstructor] -# flags: --strict-optional from typing import ParamSpec, Callable P = ParamSpec("P") diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test index 88a541c28ac2..913584224764 100644 --- a/test-data/unit/check-plugin-attrs.test +++ b/test-data/unit/check-plugin-attrs.test @@ -1199,7 +1199,6 @@ class C: [builtins fixtures/bool.pyi] [case testAttrsOptionalConverter] -# flags: --strict-optional import attr from attr.converters import optional from typing import Optional @@ -1219,7 +1218,6 @@ A(None, None) [builtins fixtures/plugin_attrs.pyi] [case testAttrsOptionalConverterNewPackage] -# flags: --strict-optional import attrs from attrs.converters import optional from typing import Optional diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 5d5ba54304a3..dba01be50fee 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -324,7 +324,6 @@ var: MyHashable = C() # E: Incompatible types in assignment (expression has 
typ # N: __my_hash__: expected "Callable[[], int]", got "None" [case testNoneDisablesProtocolSubclassingWithStrictOptional] -# flags: --strict-optional from typing import Protocol class MyHashable(Protocol): @@ -336,7 +335,6 @@ class C(MyHashable): (expression has type "None", base class "MyHashable" defined the type as "Callable[[MyHashable], int]") [case testProtocolsWithNoneAndStrictOptional] -# flags: --strict-optional from typing import Protocol class P(Protocol): x = 0 # type: int @@ -348,12 +346,12 @@ x: P = C() # Error! def f(x: P) -> None: pass f(C()) # Error! [out] -main:9: error: Incompatible types in assignment (expression has type "C", variable has type "P") -main:9: note: Following member(s) of "C" have conflicts: -main:9: note: x: expected "int", got "None" -main:11: error: Argument 1 to "f" has incompatible type "C"; expected "P" -main:11: note: Following member(s) of "C" have conflicts: -main:11: note: x: expected "int", got "None" +main:8: error: Incompatible types in assignment (expression has type "C", variable has type "P") +main:8: note: Following member(s) of "C" have conflicts: +main:8: note: x: expected "int", got "None" +main:10: error: Argument 1 to "f" has incompatible type "C"; expected "P" +main:10: note: Following member(s) of "C" have conflicts: +main:10: note: x: expected "int", got "None" -- Semanal errors in protocol types -- -------------------------------- @@ -2412,7 +2410,6 @@ x: P = None [out] [case testNoneSubtypeOfEmptyProtocolStrict] -# flags: --strict-optional from typing import Protocol class P(Protocol): pass @@ -2959,7 +2956,6 @@ class MyClass: [case testPartialAttributeNoneTypeStrictOptional] -# flags: --strict-optional from typing import Optional, Protocol, runtime_checkable @runtime_checkable @@ -3080,7 +3076,6 @@ def round(number: SupportsRound[_T], ndigits: int) -> _T: ... 
round(C(), 1) [case testEmptyBodyImplicitlyAbstractProtocol] -# flags: --strict-optional from typing import Protocol, overload, Union class P1(Protocol): @@ -3127,7 +3122,6 @@ C3() [builtins fixtures/classmethod.pyi] [case testEmptyBodyImplicitlyAbstractProtocolProperty] -# flags: --strict-optional from typing import Protocol class P1(Protocol): @@ -3222,7 +3216,6 @@ D() # E: Cannot instantiate abstract class "D" with abstract attribute "meth" [builtins fixtures/exception.pyi] [case testEmptyBodyNoneCompatibleProtocol] -# flags: --strict-optional from abc import abstractmethod from typing import Any, Optional, Protocol, Union, overload from typing_extensions import TypeAlias diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 6416fa02bbce..c07a90b49e63 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1140,7 +1140,6 @@ match m: reveal_type(a) [case testMatchRedefiningPatternGuard] -# flags: --strict-optional m: str match m: @@ -1382,7 +1381,6 @@ def f(x: int | str) -> int: # E: Missing return statement [builtins fixtures/isinstance.pyi] [case testMatchNarrowDownUnionPartially] -# flags: --strict-optional def f(x: int | str) -> None: match x: @@ -1493,7 +1491,6 @@ def f(x: A) -> None: reveal_type(y) # N: Revealed type is "Union[__main__., __main__.]" [case testMatchWithBreakAndContinue] -# flags: --strict-optional def f(x: int | str | None) -> None: i = int() while i: @@ -1626,7 +1623,6 @@ def func(e: Union[str, tuple[str]]) -> None: [builtins fixtures/tuple.pyi] [case testMatchTupleOptionalNoCrash] -# flags: --strict-optional foo: tuple[int] | None match foo: case x,: @@ -1865,7 +1861,6 @@ def f() -> None: reveal_type(y.a) # N: Revealed type is "builtins.int" [case testNarrowedVariableInNestedModifiedInMatch] -# flags: --strict-optional from typing import Optional def match_stmt_error1(x: Optional[str]) -> None: diff --git a/test-data/unit/check-python38.test 
b/test-data/unit/check-python38.test index 423daaf5ae8f..d83f29f2186a 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -223,7 +223,7 @@ h(arg=0) # E: Unexpected keyword argument "arg" for "h" i(arg=0) # E: Unexpected keyword argument "arg" [case testWalrus] -# flags: --strict-optional --python-version 3.8 +# flags: --python-version 3.8 from typing import NamedTuple, Optional, List from typing_extensions import Final @@ -427,7 +427,7 @@ else: [builtins fixtures/list.pyi] [case testWalrusConditionalTypeCheck] -# flags: --strict-optional --python-version 3.8 +# flags: --python-version 3.8 from typing import Optional maybe_str: Optional[str] @@ -729,7 +729,6 @@ def f1() -> None: [builtins fixtures/dict.pyi] [case testNarrowOnSelfInGeneric] -# flags: --strict-optional from typing import Generic, TypeVar, Optional T = TypeVar("T", int, str) @@ -741,8 +740,8 @@ class C(Generic[T]): reveal_type(y) return None [out] -main:10: note: Revealed type is "builtins.int" -main:10: note: Revealed type is "builtins.str" +main:9: note: Revealed type is "builtins.int" +main:9: note: Revealed type is "builtins.str" [case testTypeGuardWithPositionalOnlyArg] # flags: --python-version 3.8 @@ -778,7 +777,6 @@ class C: [builtins fixtures/list.pyi] [case testNarrowedVariableInNestedModifiedInWalrus] -# flags: --strict-optional from typing import Optional def walrus_with_nested_error(x: Optional[str]) -> None: diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index dc1ae448c0d1..84593933a2de 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -422,7 +422,6 @@ reveal_type(d) # N: Revealed type is "Any" [builtins fixtures/isinstancelist.pyi] [case testBasicRecursiveNamedTuple] -# flags: --strict-optional from typing import NamedTuple, Optional NT = NamedTuple("NT", [("x", Optional[NT]), ("y", int)]) @@ -457,7 +456,6 @@ reveal_type(f(tnt, nt)) # N: 
Revealed type is "builtins.tuple[Any, ...]" [builtins fixtures/tuple.pyi] [case testBasicRecursiveNamedTupleClass] -# flags: --strict-optional from typing import NamedTuple, Optional class NT(NamedTuple): @@ -684,7 +682,6 @@ itd2 = TD(x=0, y=TD(x=0, y=TD(x=0, y=None))) [typing fixtures/typing-typeddict.pyi] [case testRecursiveTypedDictMethods] -# flags: --strict-optional from typing import TypedDict class TD(TypedDict, total=False): @@ -787,7 +784,6 @@ reveal_type(std) # N: Revealed type is "TypedDict('__main__.STD', {'val': built [typing fixtures/typing-typeddict.pyi] [case testRecursiveClassLevelAlias] -# flags: --strict-optional from typing import Union, Sequence class A: diff --git a/test-data/unit/check-serialize.test b/test-data/unit/check-serialize.test index e5d1d6b170f9..81da94c0591c 100644 --- a/test-data/unit/check-serialize.test +++ b/test-data/unit/check-serialize.test @@ -740,7 +740,6 @@ main:4: note: Revealed type is "def (x: builtins.int) -> Tuple[builtins.int, fal -- [case testSerializeOptionalType] -# flags: --strict-optional import a [file a.py] import b diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 3bfcf6a9afea..42f22e89d6b7 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -305,7 +305,6 @@ reveal_type(y) # N: Revealed type is "Union[builtins.int, None]" [builtins fixtures/bool.pyi] [case testNoneAliasStrict] -# flags: --strict-optional from typing import Optional, Union void = type(None) x: int diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 983fa8c17aec..7de8e6416f35 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -595,7 +595,6 @@ reveal_type(f(g)) # N: Revealed type is "TypedDict({'x': builtins.int, 'y': bui [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithIncompatibleCommonKeysIsUninhabited] -# flags: --strict-optional from mypy_extensions 
import TypedDict from typing import TypeVar, Callable XYa = TypedDict('XYa', {'x': int, 'y': int}) @@ -619,7 +618,6 @@ reveal_type(f(g)) # N: Revealed type is "TypedDict({'x': builtins.int, 'z': bui # TODO: It would be more accurate for the meet to be TypedDict instead. [case testMeetOfTypedDictWithCompatibleMappingIsUninhabitedForNow] -# flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Mapping X = TypedDict('X', {'x': int}) @@ -631,7 +629,6 @@ reveal_type(f(g)) # N: Revealed type is "" [builtins fixtures/dict.pyi] [case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited] -# flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Mapping X = TypedDict('X', {'x': int}) @@ -643,7 +640,6 @@ reveal_type(f(g)) # N: Revealed type is "" [builtins fixtures/dict.pyi] [case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow] -# flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable, Iterable X = TypedDict('X', {'x': int}) @@ -677,7 +673,6 @@ reveal_type(f(g)) # N: Revealed type is "TypedDict({'x'?: builtins.int, 'y': bu [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithIncompatibleNonTotalAndTotal] -# flags: --strict-optional from mypy_extensions import TypedDict from typing import TypeVar, Callable XY = TypedDict('XY', {'x': int, 'y': int}, total=False) @@ -972,7 +967,6 @@ if int(): -- Other TypedDict methods [case testTypedDictGetMethod] -# flags: --strict-optional from mypy_extensions import TypedDict class A: pass D = TypedDict('D', {'x': int, 'y': str}) @@ -986,7 +980,6 @@ reveal_type(d.get('y', None)) # N: Revealed type is "Union[builtins.str, None]" [typing fixtures/typing-typeddict.pyi] [case testTypedDictGetMethodTypeContext] -# flags: --strict-optional from typing import List from mypy_extensions import TypedDict class A: pass @@ -1044,7 +1037,6 @@ p.get('x', 1 + 'y') # E: Unsupported operand 
types for + ("int" and "str") [typing fixtures/typing-typeddict.pyi] [case testTypedDictChainedGetWithEmptyDictDefault] -# flags: --strict-optional from mypy_extensions import TypedDict C = TypedDict('C', {'a': int}) D = TypedDict('D', {'x': C, 'y': str}) diff --git a/test-data/unit/check-typeguard.test b/test-data/unit/check-typeguard.test index a307e4c8b6a0..b3b168e5c7c6 100644 --- a/test-data/unit/check-typeguard.test +++ b/test-data/unit/check-typeguard.test @@ -248,7 +248,6 @@ def main1(a: object) -> None: [builtins fixtures/tuple.pyi] [case testTypeGuardOverload] -# flags: --strict-optional from typing import overload, Any, Callable, Iterable, Iterator, List, Optional, TypeVar from typing_extensions import TypeGuard diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index 28d83aa54ccc..f6fd27e59e4d 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -929,7 +929,6 @@ reveal_type(z) # N: Revealed type is "Union[builtins.int, __main__.A, builtins.s [out] [case testUnpackUnionNoCrashOnPartialNone] -# flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any @@ -944,7 +943,6 @@ if x: [out] [case testUnpackUnionNoCrashOnPartialNone2] -# flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any @@ -960,7 +958,6 @@ if x: [out] [case testUnpackUnionNoCrashOnPartialNoneBinder] -# flags: --strict-optional from typing import Dict, Tuple, List, Any x: object @@ -975,7 +972,6 @@ if x: [out] [case testUnpackUnionNoCrashOnPartialList] -# flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any @@ -1081,7 +1077,6 @@ def bar(a: T4, b: T4) -> T4: # test multi-level alias [builtins fixtures/ops.pyi] [case testJoinUnionWithUnionAndAny] -# flags: --strict-optional from typing import TypeVar, Union, Any T = TypeVar("T") def f(x: T, y: T) -> T: diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 
82ff35f53702..76ecd9f51e35 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -615,7 +615,6 @@ reveal_type(x) # N: Revealed type is "__main__.B" [typing fixtures/typing-medium.pyi] [case testUnreachableWhenSuperclassIsAny] -# flags: --strict-optional from typing import Any # This can happen if we're importing a class from a missing module diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 4da9e0e5033e..6e118597551f 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -630,7 +630,6 @@ if int(): [builtins fixtures/list.pyi] [case testCallerTupleVarArgsAndGenericCalleeVarArg] -# flags: --strict-optional from typing import TypeVar T = TypeVar('T') diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test index 10c7968be475..90f40777d6b7 100644 --- a/test-data/unit/check-warnings.test +++ b/test-data/unit/check-warnings.test @@ -207,7 +207,7 @@ def f() -> Any: return g() [out] [case testOKReturnAnyIfProperSubtype] -# flags: --warn-return-any --strict-optional +# flags: --warn-return-any from typing import Any, Optional class Test(object): diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index b43a2ace5eed..c3295b79e4ed 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -612,7 +612,6 @@ class A: -> , m.A.f, m.C [case testPartialNoneTypeAttributeCrash2] -# flags: --strict-optional class C: pass class A: diff --git a/test-data/unit/fine-grained-suggest.test b/test-data/unit/fine-grained-suggest.test index 47de16b8d765..02373091ad54 100644 --- a/test-data/unit/fine-grained-suggest.test +++ b/test-data/unit/fine-grained-suggest.test @@ -62,7 +62,6 @@ foo('3', '4') == [case testSuggestInferFunc1] -# flags: --strict-optional # suggest: foo.foo [file foo.py] def foo(arg, lol=None): @@ -85,7 +84,6 @@ def untyped(x) -> None: == [case testSuggestInferFunc2] -# flags: --strict-optional # 
suggest: foo.foo [file foo.py] def foo(arg): @@ -222,7 +220,6 @@ Foo('lol') == [case testSuggestInferMethod1] -# flags: --strict-optional # suggest: --no-any foo.Foo.foo [file foo.py] class Foo: @@ -248,7 +245,6 @@ def bar() -> None: == [case testSuggestInferMethod2] -# flags: --strict-optional # suggest: foo.Foo.foo [file foo.py] class Foo: @@ -275,7 +271,6 @@ def bar() -> None: == [case testSuggestInferMethod3] -# flags: --strict-optional # suggest2: foo.Foo.foo [file foo.py] class Foo: @@ -372,7 +367,6 @@ def has_nested(x): == [case testSuggestInferFunctionUnreachable] -# flags: --strict-optional # suggest: foo.foo [file foo.py] import sys @@ -390,7 +384,6 @@ foo('test') == [case testSuggestInferMethodStep2] -# flags: --strict-optional # suggest2: foo.Foo.foo [file foo.py] class Foo: @@ -417,7 +410,6 @@ def bar() -> None: (Union[str, int, None], Optional[int]) -> Union[int, str] [case testSuggestInferNestedMethod] -# flags: --strict-optional # suggest: foo.Foo.Bar.baz [file foo.py] class Foo: @@ -435,7 +427,6 @@ def bar() -> None: == [case testSuggestCallable] -# flags: --strict-optional # suggest: foo.foo # suggest: foo.bar # suggest: --flex-any=0.9 foo.bar @@ -483,7 +474,6 @@ No guesses that match criteria! 
== [case testSuggestNewSemanal] -# flags: --strict-optional # suggest: foo.Foo.foo # suggest: foo.foo [file foo.py] @@ -521,7 +511,6 @@ def baz() -> None: == [case testSuggestInferFuncDecorator1] -# flags: --strict-optional # suggest: foo.foo [file foo.py] from typing import TypeVar @@ -543,7 +532,6 @@ def bar() -> None: == [case testSuggestInferFuncDecorator2] -# flags: --strict-optional # suggest: foo.foo [file foo.py] from typing import TypeVar, Callable, Any @@ -565,7 +553,6 @@ def bar() -> None: == [case testSuggestInferFuncDecorator3] -# flags: --strict-optional # suggest: foo.foo [file foo.py] from typing import TypeVar, Callable, Any @@ -589,7 +576,6 @@ def bar() -> None: == [case testSuggestInferFuncDecorator4] -# flags: --strict-optional # suggest: foo.foo [file dec.py] from typing import TypeVar, Callable, Any @@ -616,7 +602,6 @@ def bar() -> None: == [case testSuggestFlexAny1] -# flags: --strict-optional # suggest: --flex-any=0.4 m.foo # suggest: --flex-any=0.7 m.foo # suggest: --flex-any=0.4 m.bar @@ -661,7 +646,6 @@ No guesses that match criteria! [case testSuggestFlexAny2] -# flags: --strict-optional # suggest: --flex-any=0.5 m.baz # suggest: --flex-any=0.0 m.baz # suggest: --flex-any=0.5 m.F.foo @@ -693,7 +677,6 @@ No guesses that match criteria! 
== [case testSuggestClassMethod] -# flags: --strict-optional # suggest: foo.F.bar # suggest: foo.F.baz # suggest: foo.F.eggs diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 11a8f03590f7..66c5ee46db2f 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -2083,7 +2083,6 @@ a.py:5: error: "list" expects 1 type argument, but 2 given == [case testPreviousErrorInOverloadedFunction] -# flags: --strict-optional import a [file a.py] from typing import overload @@ -3494,7 +3493,6 @@ def foo() -> None: b.py:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNamedTupleUpdateNonRecursiveToRecursiveFine] -# flags: --strict-optional import c [file a.py] from b import M @@ -3537,7 +3535,6 @@ c.py:5: error: Incompatible types in assignment (expression has type "Optional[N c.py:7: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int, fallback=b.M], None], builtins.int, fallback=a.N]" [case testTupleTypeUpdateNonRecursiveToRecursiveFine] -# flags: --strict-optional import c [file a.py] from b import M @@ -3570,7 +3567,6 @@ c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int") [case testTypeAliasUpdateNonRecursiveToRecursiveFine] -# flags: --strict-optional import c [file a.py] from b import M @@ -4699,7 +4695,6 @@ class B: main:7: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "str" [case testStrictOptionalModule] -# flags: --strict-optional import a a.y = a.x [file a.py] @@ -4712,10 +4707,9 @@ x: Optional[int] y: int [out] == -main:3: error: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int") +main:2: error: Incompatible types in assignment (expression has type "Optional[int]", variable has type "int") [case 
testStrictOptionalFunction] -# flags: --strict-optional import a from typing import Optional def f() -> None: @@ -4731,10 +4725,9 @@ def g(x: int) -> None: pass [out] == -main:6: error: Argument 1 to "g" has incompatible type "Optional[int]"; expected "int" +main:5: error: Argument 1 to "g" has incompatible type "Optional[int]"; expected "int" [case testStrictOptionalMethod] -# flags: --strict-optional import a from typing import Optional class C: @@ -4753,7 +4746,7 @@ class B: pass [out] == -main:7: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "int" +main:6: error: Argument 1 to "g" of "B" has incompatible type "Optional[int]"; expected "int" [case testPerFileStrictOptionalModule] import a @@ -7953,7 +7946,7 @@ class Foo(a.I): == [case testImplicitOptionalRefresh1] -# flags: --strict-optional --implicit-optional +# flags: --implicit-optional from x import f def foo(x: int = None) -> None: f() @@ -9793,7 +9786,6 @@ class ExampleClass(Generic[T]): [out] == [case testStrictNoneAttribute] -# flags: --strict-optional from typing import Generic, TypeVar T = TypeVar('T', int, str) @@ -10046,7 +10038,6 @@ class C(B): ... main.py:4: note: Revealed type is "def () -> builtins.str" [case testAbstractBodyTurnsEmpty] -# flags: --strict-optional from b import Base class Sub(Base): @@ -10066,10 +10057,9 @@ class Base: def meth(self) -> int: ... [out] == -main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe +main:5: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe [case testAbstractBodyTurnsEmptyProtocol] -# flags: --strict-optional from b import Base class Sub(Base): @@ -10086,7 +10076,7 @@ class Base(Protocol): def meth(self) -> int: ... 
[out] == -main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe +main:5: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe [case testPrettyMessageSorting] # flags: --pretty diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 289005b36d9a..754cb21c3ff8 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1649,7 +1649,6 @@ foo(list((list(""), ""))) [out] [case testNarrowTypeForDictKeys] -# flags: --strict-optional from typing import Dict, KeysView, Optional d: Dict[str, int] @@ -1667,10 +1666,10 @@ else: reveal_type(k) [out] -_testNarrowTypeForDictKeys.py:7: note: Revealed type is "builtins.str" -_testNarrowTypeForDictKeys.py:9: note: Revealed type is "Union[builtins.str, None]" -_testNarrowTypeForDictKeys.py:14: note: Revealed type is "builtins.str" -_testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, None]" +_testNarrowTypeForDictKeys.py:6: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:8: note: Revealed type is "Union[builtins.str, None]" +_testNarrowTypeForDictKeys.py:13: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:15: note: Revealed type is "Union[builtins.str, None]" [case testTypeAliasWithNewStyleUnion] # flags: --python-version 3.10 From b6b6624655826985f75dfd970e2c29f7690ce323 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Mon, 17 Jul 2023 15:27:52 -0400 Subject: [PATCH 11/88] tests: skip-path-normalization should be a testcase option (#15660) The "Skip path normalization" option applies to all [out]s of a test case, so it's more correct for it to be a "case" option rather than an "out" option. This also simplifies the parsing of "out" sections' args. 
--- mypy/test/data.py | 13 +++++++------ test-data/unit/check-literal.test | 8 ++++---- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/mypy/test/data.py b/mypy/test/data.py index 66dafaff775a..de0267daf918 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -65,7 +65,6 @@ def parse_test_case(case: DataDrivenTestCase) -> None: join = posixpath.join out_section_missing = case.suite.required_out_section - normalize_output = True files: list[tuple[str, str]] = [] # path and contents output_files: list[tuple[str, str | Pattern[str]]] = [] # output path and contents @@ -156,8 +155,6 @@ def _item_fail(msg: str) -> NoReturn: version_check = True for arg in args: - if arg == "skip-path-normalization": - normalize_output = False if arg.startswith("version"): compare_op = arg[7:9] if compare_op not in {">=", "=="}: @@ -185,7 +182,7 @@ def _item_fail(msg: str) -> NoReturn: version_check = sys.version_info[: len(version)] == version if version_check: tmp_output = [expand_variables(line) for line in item.data] - if os.path.sep == "\\" and normalize_output: + if os.path.sep == "\\" and case.normalize_output: tmp_output = [fix_win_path(line) for line in tmp_output] if item.id == "out" or item.id == "out1": output = tmp_output @@ -239,7 +236,6 @@ def _item_fail(msg: str) -> NoReturn: case.expected_rechecked_modules = rechecked_modules case.deleted_paths = deleted_paths case.triggered = triggered or [] - case.normalize_output = normalize_output case.expected_fine_grained_targets = targets case.test_modules = test_modules @@ -269,7 +265,7 @@ class DataDrivenTestCase(pytest.Item): # Whether or not we should normalize the output to standardize things like # forward vs backward slashes in file paths for Windows vs Linux. - normalize_output = True + normalize_output: bool # Extra attributes used by some tests. 
last_line: int @@ -281,10 +277,12 @@ def __init__( self, parent: DataSuiteCollector, suite: DataSuite, + *, file: str, name: str, writescache: bool, only_when: str, + normalize_output: bool, platform: str | None, skip: bool, xfail: bool, @@ -296,6 +294,7 @@ def __init__( self.file = file self.writescache = writescache self.only_when = only_when + self.normalize_output = normalize_output if (platform == "windows" and sys.platform != "win32") or ( platform == "posix" and sys.platform == "win32" ): @@ -651,6 +650,7 @@ def pytest_pycollect_makeitem(collector: Any, name: str, obj: object) -> Any | N r"(?P[a-zA-Z_0-9]+)" r"(?P-writescache)?" r"(?P-only_when_cache|-only_when_nocache)?" + r"(?P-skip_path_normalization)?" r"(-(?Pposix|windows))?" r"(?P-skip)?" r"(?P-xfail)?" @@ -694,6 +694,7 @@ def split_test_cases( platform=m.group("platform"), skip=bool(m.group("skip")), xfail=bool(m.group("xfail")), + normalize_output=not m.group("skip_path_normalization"), data=data, line=line_no, ) diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index f63f4026c4b6..4498b2ddc9cf 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -278,7 +278,7 @@ reveal_type(c_bytes_wrapper_alias) # N: Revealed type is "__main__.Wrap[Liter [builtins fixtures/tuple.pyi] [out] -[case testLiteralUnicodeWeirdCharacters] +[case testLiteralUnicodeWeirdCharacters-skip_path_normalization] from typing import Any from typing_extensions import Literal @@ -334,7 +334,7 @@ a1 = b3 a1 = c3 # E: Incompatible types in assignment (expression has type "Literal['¬b ∧ λ(p)']", variable has type "Literal['\x00¬b ∧ λ(p)']") [builtins fixtures/tuple.pyi] -[out skip-path-normalization] +[out] [case testLiteralRenamingImportWorks] from typing_extensions import Literal as Foo @@ -478,7 +478,7 @@ reveal_type(f5) # N: Revealed type is "def (x: Literal['foo']) -> Literal['foo' [builtins fixtures/tuple.pyi] [out] -[case testLiteralBasicStrUsageSlashes] +[case 
testLiteralBasicStrUsageSlashes-skip_path_normalization] from typing_extensions import Literal a: Literal[r"foo\nbar"] @@ -487,7 +487,7 @@ b: Literal["foo\nbar"] reveal_type(a) reveal_type(b) [builtins fixtures/tuple.pyi] -[out skip-path-normalization] +[out] main:6: note: Revealed type is "Literal['foo\\nbar']" main:7: note: Revealed type is "Literal['foo\nbar']" From 89ad125fa5a31a7c82f267e957d3c94da6b52f61 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Wed, 19 Jul 2023 01:07:53 +0100 Subject: [PATCH 12/88] Update commit hashes following typeshed sync (#15690) Followup to #15681 --- misc/sync-typeshed.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 3f870d574d38..36967f86262e 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -179,10 +179,10 @@ def main() -> None: print("Created typeshed sync commit.") commits_to_cherry_pick = [ - "9f4c0d8af", # LiteralString reverts - "56f434336", # sum reverts - "71c4269df", # ctypes reverts - "186fbb18e", # ParamSpec for functools.wraps + "2f6b6e66c", # LiteralString reverts + "120af30e7", # sum reverts + "1866d28f1", # ctypes reverts + "3240da455", # ParamSpec for functools.wraps ] for commit in commits_to_cherry_pick: try: From 88c0c644c78e2f31ed25a523ad1f74727f2d647f Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Thu, 20 Jul 2023 21:06:59 +0300 Subject: [PATCH 13/88] Update LICENSE with copyright year (#15727) --- LICENSE | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE b/LICENSE index 991496cb4878..55d01ee19ad8 100644 --- a/LICENSE +++ b/LICENSE @@ -4,8 +4,8 @@ Mypy (and mypyc) are licensed under the terms of the MIT license, reproduced bel The MIT License -Copyright (c) 2012-2022 Jukka Lehtosalo and contributors -Copyright (c) 2015-2022 Dropbox, Inc. +Copyright (c) 2012-2023 Jukka Lehtosalo and contributors +Copyright (c) 2015-2023 Dropbox, Inc. 
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), From 383137baaf36876dcf935c2f5be053b6419097d0 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Thu, 20 Jul 2023 22:38:20 +0300 Subject: [PATCH 14/88] Remove more unused scripts (#15728) --- misc/async_matrix.py | 149 ----------------------- misc/fix_annotate.py | 218 ---------------------------------- misc/remove-eol-whitespace.sh | 8 -- 3 files changed, 375 deletions(-) delete mode 100644 misc/async_matrix.py delete mode 100644 misc/fix_annotate.py delete mode 100644 misc/remove-eol-whitespace.sh diff --git a/misc/async_matrix.py b/misc/async_matrix.py deleted file mode 100644 index d4612dd81799..000000000000 --- a/misc/async_matrix.py +++ /dev/null @@ -1,149 +0,0 @@ -#!/usr/bin/env python3 -"""Test various combinations of generators/coroutines. - -This was used to cross-check the errors in the test case -testFullCoroutineMatrix in test-data/unit/check-async-await.test. -""" - -from __future__ import annotations - -import sys -from types import coroutine -from typing import Any, Awaitable, Generator, Iterator - -# The various things you might try to use in `await` or `yield from`. 
- - -def plain_generator() -> Generator[str, None, int]: - yield "a" - return 1 - - -async def plain_coroutine() -> int: - return 1 - - -@coroutine -def decorated_generator() -> Generator[str, None, int]: - yield "a" - return 1 - - -@coroutine -async def decorated_coroutine() -> int: - return 1 - - -class It(Iterator[str]): - stop = False - - def __iter__(self) -> It: - return self - - def __next__(self) -> str: - if self.stop: - raise StopIteration("end") - else: - self.stop = True - return "a" - - -def other_iterator() -> It: - return It() - - -class Aw(Awaitable[int]): - def __await__(self) -> Generator[str, Any, int]: - yield "a" - return 1 - - -def other_coroutine() -> Aw: - return Aw() - - -# The various contexts in which `await` or `yield from` might occur. - - -def plain_host_generator(func) -> Generator[str, None, None]: - yield "a" - x = 0 - f = func() - try: - x = yield from f # noqa: F841 - finally: - try: - f.close() - except AttributeError: - pass - - -async def plain_host_coroutine(func) -> None: - x = 0 - x = await func() # noqa: F841 - - -@coroutine -def decorated_host_generator(func) -> Generator[str, None, None]: - yield "a" - x = 0 - f = func() - try: - x = yield from f # noqa: F841 - finally: - try: - f.close() - except AttributeError: - pass - - -@coroutine -async def decorated_host_coroutine(func) -> None: - x = 0 - x = await func() # noqa: F841 - - -# Main driver. 
- - -def main() -> None: - verbose = "-v" in sys.argv - for host in [ - plain_host_generator, - plain_host_coroutine, - decorated_host_generator, - decorated_host_coroutine, - ]: - print() - print("==== Host:", host.__name__) - for func in [ - plain_generator, - plain_coroutine, - decorated_generator, - decorated_coroutine, - other_iterator, - other_coroutine, - ]: - print(" ---- Func:", func.__name__) - try: - f = host(func) - for i in range(10): - try: - x = f.send(None) - if verbose: - print(" yield:", x) - except StopIteration as e: - if verbose: - print(" stop:", e.value) - break - else: - if verbose: - print(" ???? still going") - except Exception as e: - print(" error:", repr(e)) - - -# Run main(). - -if __name__ == "__main__": - main() diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py deleted file mode 100644 index fc8ac27466d5..000000000000 --- a/misc/fix_annotate.py +++ /dev/null @@ -1,218 +0,0 @@ -"""Fixer for lib2to3 that inserts mypy annotations into all methods. - -The simplest way to run this is to copy it into lib2to3's "fixes" -subdirectory and then run "2to3 -f annotate" over your files. - -The fixer transforms e.g. - - def foo(self, bar, baz=12): - return bar + baz - -into - - def foo(self, bar, baz=12): - # type: (Any, int) -> Any - return bar + baz - -It does not do type inference but it recognizes some basic default -argument values such as numbers and strings (and assumes their type -implies the argument type). - -It also uses some basic heuristics to decide whether to ignore the -first argument: - - - always if it's named 'self' - - if there's a @classmethod decorator - -Finally, it knows that __init__() is supposed to return None. 
-""" - -from __future__ import annotations - -import os -import re -from lib2to3.fixer_base import BaseFix -from lib2to3.fixer_util import syms, token, touch_import -from lib2to3.patcomp import compile_pattern -from lib2to3.pytree import Leaf, Node - - -class FixAnnotate(BaseFix): - # This fixer is compatible with the bottom matcher. - BM_compatible = True - - # This fixer shouldn't run by default. - explicit = True - - # The pattern to match. - PATTERN = """ - funcdef< 'def' name=any parameters< '(' [args=any] ')' > ':' suite=any+ > - """ - - counter = None if not os.getenv("MAXFIXES") else int(os.getenv("MAXFIXES")) - - def transform(self, node, results): - if FixAnnotate.counter is not None: - if FixAnnotate.counter <= 0: - return - suite = results["suite"] - children = suite[0].children - - # NOTE: I've reverse-engineered the structure of the parse tree. - # It's always a list of nodes, the first of which contains the - # entire suite. Its children seem to be: - # - # [0] NEWLINE - # [1] INDENT - # [2...n-2] statements (the first may be a docstring) - # [n-1] DEDENT - # - # Comments before the suite are part of the INDENT's prefix. - # - # "Compact" functions (e.g. "def foo(x, y): return max(x, y)") - # have a different structure that isn't matched by PATTERN. - # - # print('-'*60) - # print(node) - # for i, ch in enumerate(children): - # print(i, repr(ch.prefix), repr(ch)) - # - # Check if there's already an annotation. - for ch in children: - if ch.prefix.lstrip().startswith("# type:"): - return # There's already a # type: comment here; don't change anything. - - # Compute the annotation - annot = self.make_annotation(node, results) - - # Insert '# type: {annot}' comment. - # For reference, see lib2to3/fixes/fix_tuple_params.py in stdlib. 
- if len(children) >= 2 and children[1].type == token.INDENT: - children[1].prefix = "{}# type: {}\n{}".format( - children[1].value, annot, children[1].prefix - ) - children[1].changed() - if FixAnnotate.counter is not None: - FixAnnotate.counter -= 1 - - # Also add 'from typing import Any' at the top. - if "Any" in annot: - touch_import("typing", "Any", node) - - def make_annotation(self, node, results): - name = results["name"] - assert isinstance(name, Leaf), repr(name) - assert name.type == token.NAME, repr(name) - decorators = self.get_decorators(node) - is_method = self.is_method(node) - if name.value == "__init__" or not self.has_return_exprs(node): - restype = "None" - else: - restype = "Any" - args = results.get("args") - argtypes = [] - if isinstance(args, Node): - children = args.children - elif isinstance(args, Leaf): - children = [args] - else: - children = [] - # Interpret children according to the following grammar: - # (('*'|'**')? NAME ['=' expr] ','?)* - stars = inferred_type = "" - in_default = False - at_start = True - for child in children: - if isinstance(child, Leaf): - if child.value in ("*", "**"): - stars += child.value - elif child.type == token.NAME and not in_default: - if not is_method or not at_start or "staticmethod" in decorators: - inferred_type = "Any" - else: - # Always skip the first argument if it's named 'self'. - # Always skip the first argument of a class method. - if child.value == "self" or "classmethod" in decorators: - pass - else: - inferred_type = "Any" - elif child.value == "=": - in_default = True - elif in_default and child.value != ",": - if child.type == token.NUMBER: - if re.match(r"\d+[lL]?$", child.value): - inferred_type = "int" - else: - inferred_type = "float" # TODO: complex? 
- elif child.type == token.STRING: - if child.value.startswith(("u", "U")): - inferred_type = "unicode" - else: - inferred_type = "str" - elif child.type == token.NAME and child.value in ("True", "False"): - inferred_type = "bool" - elif child.value == ",": - if inferred_type: - argtypes.append(stars + inferred_type) - # Reset - stars = inferred_type = "" - in_default = False - at_start = False - if inferred_type: - argtypes.append(stars + inferred_type) - return "(" + ", ".join(argtypes) + ") -> " + restype - - # The parse tree has a different shape when there is a single - # decorator vs. when there are multiple decorators. - DECORATED = "decorated< (d=decorator | decorators< dd=decorator+ >) funcdef >" - decorated = compile_pattern(DECORATED) - - def get_decorators(self, node): - """Return a list of decorators found on a function definition. - - This is a list of strings; only simple decorators - (e.g. @staticmethod) are returned. - - If the function is undecorated or only non-simple decorators - are found, return []. - """ - if node.parent is None: - return [] - results = {} - if not self.decorated.match(node.parent, results): - return [] - decorators = results.get("dd") or [results["d"]] - decs = [] - for d in decorators: - for child in d.children: - if isinstance(child, Leaf) and child.type == token.NAME: - decs.append(child.value) - return decs - - def is_method(self, node): - """Return whether the node occurs (directly) inside a class.""" - node = node.parent - while node is not None: - if node.type == syms.classdef: - return True - if node.type == syms.funcdef: - return False - node = node.parent - return False - - RETURN_EXPR = "return_stmt< 'return' any >" - return_expr = compile_pattern(RETURN_EXPR) - - def has_return_exprs(self, node): - """Traverse the tree below node looking for 'return expr'. - - Return True if at least 'return expr' is found, False if not. - (If both 'return' and 'return expr' are found, return True.) 
- """ - results = {} - if self.return_expr.match(node, results): - return True - return any( - child.type not in (syms.funcdef, syms.classdef) and self.has_return_exprs(child) - for child in node.children - ) diff --git a/misc/remove-eol-whitespace.sh b/misc/remove-eol-whitespace.sh deleted file mode 100644 index 5cf666997e34..000000000000 --- a/misc/remove-eol-whitespace.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh - -# Remove trailing whitespace from all non-binary files in a git repo. - -# From https://gist.github.com/dpaluy/3690668; originally from here: -# https://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240 - -git grep -I --name-only -z -e '' | xargs -0 sed -i -e 's/[ \t]\+\(\r\?\)$/\1/' From 14e7768c3bd8d1164e887ce3becba3459ebcfaa4 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Fri, 21 Jul 2023 22:27:36 +0300 Subject: [PATCH 15/88] Raise errors on unbound TypeVars with values (#15732) Completes a `TODO` item :) Refs https://github.com/python/mypy/issues/15724 --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/checker.py | 7 +++++++ test-data/unit/check-typevar-unbound.test | 3 +-- test-data/unit/deps-generics.test | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index f2873c7d58e4..724a1dd1f7d7 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1069,6 +1069,7 @@ def check_func_def( """Type check a function definition.""" # Expand type variables with value restrictions to ordinary types. 
expanded = self.expand_typevars(defn, typ) + original_typ = typ for item, typ in expanded: old_binder = self.binder self.binder = ConditionalTypeBinder() @@ -1126,6 +1127,12 @@ def check_func_def( message_registry.RETURN_TYPE_CANNOT_BE_CONTRAVARIANT, typ.ret_type ) self.check_unbound_return_typevar(typ) + elif ( + isinstance(original_typ.ret_type, TypeVarType) and original_typ.ret_type.values + ): + # Since type vars with values are expanded, the return type is changed + # to a raw value. This is a hack to get it back. + self.check_unbound_return_typevar(original_typ) # Check that Generator functions have the appropriate return type. if defn.is_generator: diff --git a/test-data/unit/check-typevar-unbound.test b/test-data/unit/check-typevar-unbound.test index d3e54c75e373..ed6beaa100db 100644 --- a/test-data/unit/check-typevar-unbound.test +++ b/test-data/unit/check-typevar-unbound.test @@ -15,8 +15,7 @@ def g() -> U: # E: A function returning TypeVar should receive at least one argu V = TypeVar('V', int, str) -# TODO: this should also give an error -def h() -> V: +def h() -> V: # E: A function returning TypeVar should receive at least one argument containing the same TypeVar ... 
[case testInnerFunctionTypeVar] diff --git a/test-data/unit/deps-generics.test b/test-data/unit/deps-generics.test index c78f3fad90c0..6baa57266d2f 100644 --- a/test-data/unit/deps-generics.test +++ b/test-data/unit/deps-generics.test @@ -159,7 +159,7 @@ class D: pass T = TypeVar('T', A, B) S = TypeVar('S', C, D) -def f(x: T) -> S: +def f(x: T, y: S) -> S: pass [out] -> , , m, m.A, m.f From 6bdcc92002a5e1a6feb1528d0221802f7514c836 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sat, 22 Jul 2023 21:20:36 +0100 Subject: [PATCH 16/88] stubtest: Fix `__mypy-replace` false positives (#15689) --- mypy/stubtest.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index f06faa962b07..906a8c923b37 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -496,7 +496,11 @@ def verify_typeinfo( ) # Check everything already defined on the stub class itself (i.e. not inherited) - to_check = set(stub.names) + # + # Filter out non-identifier names, as these are (hopefully always?) whacky/fictional things + # (like __mypy-replace or __mypy-post_init, etc.) 
that don't exist at runtime, + # and exist purely for internal mypy reasons + to_check = {name for name in stub.names if name.isidentifier()} # Check all public things on the runtime class to_check.update( m for m in vars(runtime) if not is_probably_private(m) and m not in IGNORABLE_CLASS_DUNDERS From d2022a0007c0eb176ccaf37a9aa54c958be7fb10 Mon Sep 17 00:00:00 2001 From: Ali Hamdan Date: Sun, 23 Jul 2023 01:43:44 +0200 Subject: [PATCH 17/88] Add `__match_args__` to dataclasses with no fields (#15749) --- mypy/plugins/dataclasses.py | 1 - test-data/unit/check-dataclasses.test | 5 +++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index a4babe7faf61..d782acf50af5 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -365,7 +365,6 @@ def transform(self) -> bool: and ( "__match_args__" not in info.names or info.names["__match_args__"].plugin_generated ) - and attributes and py_version >= (3, 10) ): str_type = self._api.named_type("builtins.str") diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 3866442230bf..1e01a72921f7 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1892,6 +1892,11 @@ class Two: bar: int t: Two reveal_type(t.__match_args__) # N: Revealed type is "Tuple[Literal['bar']]" +@dataclass +class Empty: + ... 
+e: Empty +reveal_type(e.__match_args__) # N: Revealed type is "Tuple[]" [builtins fixtures/dataclasses.pyi] [case testDataclassWithoutMatchArgs] From 01c6994ac01e3822fe89c5dc46f8bc8b656c8263 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Wed, 26 Jul 2023 03:34:51 -0400 Subject: [PATCH 18/88] Don't flag intentionally empty generators unreachable (#15722) Co-authored-by: Alex Waygood --- mypy/binder.py | 8 ------- mypy/checker.py | 25 ++++++++++++++++++++-- test-data/unit/check-unreachable-code.test | 16 ++++++++++++++ 3 files changed, 39 insertions(+), 10 deletions(-) diff --git a/mypy/binder.py b/mypy/binder.py index 37c0b6bb9006..8a68f24f661e 100644 --- a/mypy/binder.py +++ b/mypy/binder.py @@ -42,13 +42,6 @@ def __init__(self, id: int, conditional_frame: bool = False) -> None: self.types: dict[Key, Type] = {} self.unreachable = False self.conditional_frame = conditional_frame - - # Should be set only if we're entering a frame where it's not - # possible to accurately determine whether or not contained - # statements will be unreachable or not. - # - # Long-term, we should improve mypy to the point where we no longer - # need this field. 
self.suppress_unreachable_warnings = False def __repr__(self) -> str: @@ -174,7 +167,6 @@ def is_unreachable(self) -> bool: return any(f.unreachable for f in self.frames) def is_unreachable_warning_suppressed(self) -> bool: - # TODO: See todo in 'is_unreachable' return any(f.suppress_unreachable_warnings for f in self.frames) def cleanse(self, expr: Expression) -> None: diff --git a/mypy/checker.py b/mypy/checker.py index 724a1dd1f7d7..e0cd02e74573 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -132,6 +132,7 @@ Var, WhileStmt, WithStmt, + YieldExpr, is_final_node, ) from mypy.options import Options @@ -1241,13 +1242,17 @@ def check_func_def( new_frame.types[key] = narrowed_type self.binder.declarations[key] = old_binder.declarations[key] with self.scope.push_function(defn): - # We suppress reachability warnings when we use TypeVars with value + # We suppress reachability warnings for empty generator functions + # (return; yield) which have a "yield" that's unreachable by definition + # since it's only there to promote the function into a generator function. + # + # We also suppress reachability warnings when we use TypeVars with value # restrictions: we only want to report a warning if a certain statement is # marked as being suppressed in *all* of the expansions, but we currently # have no good way of doing this. # # TODO: Find a way of working around this limitation - if len(expanded) >= 2: + if _is_empty_generator_function(item) or len(expanded) >= 2: self.binder.suppress_unreachable_warnings() self.accept(item.body) unreachable = self.binder.is_unreachable() @@ -6968,6 +6973,22 @@ def is_literal_not_implemented(n: Expression) -> bool: return isinstance(n, NameExpr) and n.fullname == "builtins.NotImplemented" +def _is_empty_generator_function(func: FuncItem) -> bool: + """ + Checks whether a function's body is 'return; yield' (the yield being added only + to promote the function into a generator function). 
+ """ + body = func.body.body + return ( + len(body) == 2 + and isinstance(ret_stmt := body[0], ReturnStmt) + and (ret_stmt.expr is None or is_literal_none(ret_stmt.expr)) + and isinstance(expr_stmt := body[1], ExpressionStmt) + and isinstance(yield_expr := expr_stmt.expr, YieldExpr) + and (yield_expr.expr is None or is_literal_none(yield_expr.expr)) + ) + + def builtin_item_type(tp: Type) -> Type | None: """Get the item type of a builtin container. diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 76ecd9f51e35..7a6c2cbfd1c7 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -1446,3 +1446,19 @@ def f() -> None: Foo()['a'] = 'a' x = 0 # This should not be reported as unreachable [builtins fixtures/exception.pyi] + +[case testIntentionallyEmptyGeneratorFunction] +# flags: --warn-unreachable +from typing import Generator + +def f() -> Generator[None, None, None]: + return + yield + +[case testIntentionallyEmptyGeneratorFunction_None] +# flags: --warn-unreachable +from typing import Generator + +def f() -> Generator[None, None, None]: + return None + yield None From b901d21194400b856a88df62a3d7db871936a50d Mon Sep 17 00:00:00 2001 From: Marcel Johannesmann Date: Wed, 26 Jul 2023 20:50:13 +0200 Subject: [PATCH 19/88] docs: add missing verb (#15765) --- docs/source/cheat_sheet_py3.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 297427e72aca..fe5761ca6187 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -104,7 +104,7 @@ Functions print(value + "!" 
* excitement) # Note that arguments without a type are dynamically typed (treated as Any) - # and that functions without any annotations not checked + # and that functions without any annotations are not checked def untyped(x): x.anything() + 1 + "string" # no errors From a8467c43fb6423cc3f7f330f361e6b5af0bf284f Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Fri, 28 Jul 2023 14:59:18 +0300 Subject: [PATCH 20/88] [stubgen] Add required `...` rhs to `NamedTuple` fields with default values (#15680) Closes https://github.com/python/mypy/issues/15638 --- mypy/stubgen.py | 19 ++++++++++++- test-data/unit/stubgen.test | 56 +++++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+), 1 deletion(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 9084da2053cf..a77ee738d56f 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -102,6 +102,7 @@ OverloadedFuncDef, Statement, StrExpr, + TempNode, TupleExpr, TypeInfo, UnaryExpr, @@ -637,6 +638,7 @@ def __init__( self._state = EMPTY self._toplevel_names: list[str] = [] self._include_private = include_private + self._current_class: ClassDef | None = None self.import_tracker = ImportTracker() # Was the tree semantically analysed before? 
self.analyzed = analyzed @@ -886,6 +888,7 @@ def get_fullname(self, expr: Expression) -> str: return resolved_name def visit_class_def(self, o: ClassDef) -> None: + self._current_class = o self.method_names = find_method_names(o.defs.body) sep: int | None = None if not self._indent and self._state != EMPTY: @@ -922,6 +925,7 @@ def visit_class_def(self, o: ClassDef) -> None: else: self._state = CLASS self.method_names = set() + self._current_class = None def get_base_types(self, cdef: ClassDef) -> list[str]: """Get list of base classes for a class.""" @@ -1330,7 +1334,20 @@ def get_init( typename += f"[{final_arg}]" else: typename = self.get_str_type_of_node(rvalue) - return f"{self._indent}{lvalue}: {typename}\n" + initializer = self.get_assign_initializer(rvalue) + return f"{self._indent}{lvalue}: {typename}{initializer}\n" + + def get_assign_initializer(self, rvalue: Expression) -> str: + """Does this rvalue need some special initializer value?""" + if self._current_class and self._current_class.info: + # Current rules + # 1. Return `...` if we are dealing with `NamedTuple` and it has an existing default value + if self._current_class.info.is_named_tuple and not isinstance(rvalue, TempNode): + return " = ..." 
+ # TODO: support other possible cases, where initializer is important + + # By default, no initializer is required: + return "" def add(self, string: str) -> None: """Add text to generated stub.""" diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index b387aa840dc9..f6b71a994153 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -698,6 +698,62 @@ class Y(NamedTuple): a: int b: str +[case testNamedTupleClassSyntax_semanal] +from typing import NamedTuple + +class A(NamedTuple): + x: int + y: str = 'a' + +class B(A): + z1: str + z2 = 1 + z3: str = 'b' + +class RegularClass: + x: int + y: str = 'a' + class NestedNamedTuple(NamedTuple): + x: int + y: str = 'a' + z: str = 'b' +[out] +from typing import NamedTuple + +class A(NamedTuple): + x: int + y: str = ... + +class B(A): + z1: str + z2: int + z3: str + +class RegularClass: + x: int + y: str + class NestedNamedTuple(NamedTuple): + x: int + y: str = ... + z: str + + +[case testNestedClassInNamedTuple_semanal-xfail] +from typing import NamedTuple + +# TODO: make sure that nested classes in `NamedTuple` are supported: +class NamedTupleWithNestedClass(NamedTuple): + class Nested: + x: int + y: str = 'a' +[out] +from typing import NamedTuple + +class NamedTupleWithNestedClass(NamedTuple): + class Nested: + x: int + y: str + [case testEmptyNamedtuple] import collections, typing X = collections.namedtuple('X', []) From da1853ff7f764157511ece4305a11369f63353f5 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sat, 29 Jul 2023 11:05:41 +0300 Subject: [PATCH 21/88] Correctly narrow types for `tuple[type[X], ...]` (#15691) `flatten_types` forgot about the second way we represent `tuple` inside. 
Closes https://github.com/python/mypy/issues/15443 --------- Co-authored-by: Ilya Priven --- mypy/checker.py | 2 ++ test-data/unit/check-narrowing.test | 47 +++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) diff --git a/mypy/checker.py b/mypy/checker.py index e0cd02e74573..30dbdd01d972 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -7146,6 +7146,8 @@ def flatten_types(t: Type) -> list[Type]: t = get_proper_type(t) if isinstance(t, TupleType): return [b for a in t.items for b in flatten_types(a)] + elif is_named_instance(t, "builtins.tuple"): + return [t.args[0]] else: return [t] diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index 22014d4c645c..b763e0ff3b68 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -1261,3 +1261,50 @@ def g() -> None: def foo(): ... foo() [builtins fixtures/dict.pyi] + + +[case testNarrowingWithTupleOfTypes] +from typing import Tuple, Type + +class Base: ... + +class Impl1(Base): ... +class Impl2(Base): ... + +impls: Tuple[Type[Base], ...] = (Impl1, Impl2) +some: object + +if isinstance(some, impls): + reveal_type(some) # N: Revealed type is "__main__.Base" +else: + reveal_type(some) # N: Revealed type is "builtins.object" + +raw: Tuple[type, ...] +if isinstance(some, raw): + reveal_type(some) # N: Revealed type is "builtins.object" +else: + reveal_type(some) # N: Revealed type is "builtins.object" +[builtins fixtures/dict.pyi] + + +[case testNarrowingWithTupleOfTypesPy310Plus] +# flags: --python-version 3.10 +class Base: ... + +class Impl1(Base): ... +class Impl2(Base): ... + +some: int | Base + +impls: tuple[type[Base], ...] = (Impl1, Impl2) +if isinstance(some, impls): + reveal_type(some) # N: Revealed type is "__main__.Base" +else: + reveal_type(some) # N: Revealed type is "Union[builtins.int, __main__.Base]" + +raw: tuple[type, ...] 
+if isinstance(some, raw): + reveal_type(some) # N: Revealed type is "Union[builtins.int, __main__.Base]" +else: + reveal_type(some) # N: Revealed type is "Union[builtins.int, __main__.Base]" +[builtins fixtures/dict.pyi] From 14efdf2f1ec098b59b65796b3a37bd84210eca85 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sat, 29 Jul 2023 14:47:02 +0300 Subject: [PATCH 22/88] [stubtest] Test `NamedTuple` definitions with default fields (#15774) This is a test case for https://github.com/python/mypy/pull/15680 from `stubtest`'s point of view. --- mypy/test/teststubtest.py | 68 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 661d46e9fd8a..cd72bd9300d1 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -68,6 +68,7 @@ class Mapping(Generic[_K, _V]): ... class Match(Generic[AnyStr]): ... class Sequence(Iterable[_T_co]): ... class Tuple(Sequence[_T_co]): ... +class NamedTuple(tuple[Any, ...]): ... def overload(func: _T) -> _T: ... """ @@ -82,6 +83,7 @@ def overload(func: _T) -> _T: ... class object: __module__: str def __init__(self) -> None: pass + def __repr__(self) -> str: pass class type: ... class tuple(Sequence[T_co], Generic[T_co]): ... @@ -1599,6 +1601,72 @@ class Y(TypedDict): error=None, ) + @collect_cases + def test_named_tuple(self) -> Iterator[Case]: + yield Case( + stub="from typing import NamedTuple", + runtime="from typing import NamedTuple", + error=None, + ) + yield Case( + stub=""" + class X1(NamedTuple): + bar: int + foo: str = ... + """, + runtime=""" + class X1(NamedTuple): + bar: int + foo: str = 'a' + """, + error=None, + ) + yield Case( + stub=""" + class X2(NamedTuple): + bar: int + foo: str + """, + runtime=""" + class X2(NamedTuple): + bar: int + foo: str = 'a' + """, + # `__new__` will miss a default value for a `foo` parameter, + # but we don't generate special errors for `foo` missing `...` part. 
+ error="X2.__new__", + ) + + @collect_cases + def test_named_tuple_typing_and_collections(self) -> Iterator[Case]: + yield Case( + stub="from typing import NamedTuple", + runtime="from collections import namedtuple", + error=None, + ) + yield Case( + stub=""" + class X1(NamedTuple): + bar: int + foo: str = ... + """, + runtime=""" + X1 = namedtuple('X1', ['bar', 'foo'], defaults=['a']) + """, + error=None, + ) + yield Case( + stub=""" + class X2(NamedTuple): + bar: int + foo: str + """, + runtime=""" + X2 = namedtuple('X1', ['bar', 'foo'], defaults=['a']) + """, + error="X2.__new__", + ) + @collect_cases def test_type_var(self) -> Iterator[Case]: yield Case( From 6040b237e31978b7f6764266a3d162acb68c7884 Mon Sep 17 00:00:00 2001 From: Mark Byrne <31762852+mbyrnepr2@users.noreply.github.com> Date: Sat, 29 Jul 2023 18:55:19 +0200 Subject: [PATCH 23/88] Remove the Python 37 environment from the `tox.ini` (#15693) Remove the Python 37 environment from the `tox.ini` since Python 3.7 is now end of life. Co-authored-by: Alex Waygood --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index b5314114570b..8fc76aed7d0e 100644 --- a/tox.ini +++ b/tox.ini @@ -2,10 +2,10 @@ minversion = 4.4.4 skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True} envlist = - py37, py38, py39, py310, + py311, docs, lint, type, From 8792ff1b81c98644c2563d6526dcba633fba719c Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sat, 29 Jul 2023 22:33:04 +0300 Subject: [PATCH 24/88] Raise `RuntimeError` with better error messages (#15778) While working on https://github.com/python/mypy/pull/15776 I've noticed that some `RuntimeError` do not have enough metadata to understand what is going on. CI: https://github.com/python/mypy/actions/runs/5700479199/job/15450345887 This PR adds more context to error messages. 
--- mypy/erasetype.py | 2 +- mypy/nodes.py | 6 +++--- mypy/patterns.py | 2 +- mypy/server/astmerge.py | 4 ++-- mypy/types.py | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/mypy/erasetype.py b/mypy/erasetype.py index 6533d0c4e0f9..fbbb4f80b578 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -71,7 +71,7 @@ def visit_erased_type(self, t: ErasedType) -> ProperType: def visit_partial_type(self, t: PartialType) -> ProperType: # Should not get here. - raise RuntimeError() + raise RuntimeError("Cannot erase partial types") def visit_deleted_type(self, t: DeletedType) -> ProperType: return t diff --git a/mypy/nodes.py b/mypy/nodes.py index 2d763fc482d3..ebd222f4f253 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -203,7 +203,7 @@ def str_with_options(self, options: Options) -> str: return ans def accept(self, visitor: NodeVisitor[T]) -> T: - raise RuntimeError("Not implemented") + raise RuntimeError("Not implemented", type(self)) @trait @@ -213,7 +213,7 @@ class Statement(Node): __slots__ = () def accept(self, visitor: StatementVisitor[T]) -> T: - raise RuntimeError("Not implemented") + raise RuntimeError("Not implemented", type(self)) @trait @@ -223,7 +223,7 @@ class Expression(Node): __slots__ = () def accept(self, visitor: ExpressionVisitor[T]) -> T: - raise RuntimeError("Not implemented") + raise RuntimeError("Not implemented", type(self)) class FakeExpression(Expression): diff --git a/mypy/patterns.py b/mypy/patterns.py index 32c27d2a5b3c..839864ef5879 100644 --- a/mypy/patterns.py +++ b/mypy/patterns.py @@ -19,7 +19,7 @@ class Pattern(Node): __slots__ = () def accept(self, visitor: PatternVisitor[T]) -> T: - raise RuntimeError("Not implemented") + raise RuntimeError("Not implemented", type(self)) class AsPattern(Pattern): diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 5e3759227c7b..f58a4eedabc8 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -467,13 +467,13 @@ def visit_overloaded(self, t: 
Overloaded) -> None: def visit_erased_type(self, t: ErasedType) -> None: # This type should exist only temporarily during type inference - raise RuntimeError + raise RuntimeError("Cannot handle erased type") def visit_deleted_type(self, typ: DeletedType) -> None: pass def visit_partial_type(self, typ: PartialType) -> None: - raise RuntimeError + raise RuntimeError("Cannot handle partial type") def visit_tuple_type(self, typ: TupleType) -> None: for item in typ.items: diff --git a/mypy/types.py b/mypy/types.py index ba629a3553cf..9eeaa2cc4c3f 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -260,7 +260,7 @@ def can_be_false_default(self) -> bool: return True def accept(self, visitor: TypeVisitor[T]) -> T: - raise RuntimeError("Not implemented") + raise RuntimeError("Not implemented", type(self)) def __repr__(self) -> str: return self.accept(TypeStrVisitor(options=Options())) From 710ad44916fa89b430407c02a62a6df98f3a06f8 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sun, 30 Jul 2023 00:50:37 +0300 Subject: [PATCH 25/88] Better `tox` configuration (#15777) It solves two problems: 1. `fix_annotate` and `async_matrix` were removed in https://github.com/python/mypy/pull/15728 2. It is better to reuse stuff like `runtests.py` not to rewrite the same command we already have --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- runtests.py | 12 +++++++++++- tox.ini | 4 ++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/runtests.py b/runtests.py index 66fade81ffab..80ef8d814ee1 100755 --- a/runtests.py +++ b/runtests.py @@ -48,7 +48,17 @@ # time to run. 
cmds = { # Self type check - "self": [executable, "-m", "mypy", "--config-file", "mypy_self_check.ini", "-p", "mypy"], + "self": [ + executable, + "-m", + "mypy", + "--config-file", + "mypy_self_check.ini", + "-p", + "mypy", + "-p", + "mypyc", + ], # Lint "lint": ["pre-commit", "run", "--all-files"], # Fast test cases only (this is the bulk of the test suite) diff --git a/tox.ini b/tox.ini index 8fc76aed7d0e..5a728e27fec4 100644 --- a/tox.ini +++ b/tox.ini @@ -53,5 +53,5 @@ passenv = MYPY_FORCE_COLOR MYPY_FORCE_TERMINAL_WIDTH commands = - python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc - python -m mypy --config-file mypy_self_check.ini misc --exclude misc/fix_annotate.py --exclude misc/async_matrix.py --exclude misc/sync-typeshed.py + python runtests.py self + python -m mypy --config-file mypy_self_check.ini misc --exclude misc/sync-typeshed.py From 002502a0111852c360f2255830951473bcfec4a7 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 29 Jul 2023 15:13:21 -0700 Subject: [PATCH 26/88] Fix inference for attrs.fields (#15688) --- mypy/checker.py | 5 ++++- test-data/unit/check-plugin-attrs.test | 3 +++ test-data/unit/fixtures/plugin_attrs.pyi | 11 +++++++++-- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 30dbdd01d972..a8cb2b862fbc 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4632,7 +4632,10 @@ def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: if int_type: return iterator, int_type - if isinstance(iterable, TupleType): + if ( + isinstance(iterable, TupleType) + and iterable.partial_fallback.type.fullname == "builtins.tuple" + ): joined: Type = UninhabitedType() for item in iterable.items: joined = join_types(joined, item) diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test index 913584224764..7a7bcb65fe98 100644 --- a/test-data/unit/check-plugin-attrs.test +++ 
b/test-data/unit/check-plugin-attrs.test @@ -1568,6 +1568,9 @@ reveal_type(f(A)[0]) # N: Revealed type is "attr.Attribute[builtins.int]" reveal_type(f(A).b) # N: Revealed type is "attr.Attribute[builtins.int]" f(A).x # E: "____main___A_AttrsAttributes__" has no attribute "x" +for ff in f(A): + reveal_type(ff) # N: Revealed type is "attr.Attribute[Any]" + [builtins fixtures/plugin_attrs.pyi] [case testAttrsGenericFields] diff --git a/test-data/unit/fixtures/plugin_attrs.pyi b/test-data/unit/fixtures/plugin_attrs.pyi index f62104809e74..57e5ecd1b2bc 100644 --- a/test-data/unit/fixtures/plugin_attrs.pyi +++ b/test-data/unit/fixtures/plugin_attrs.pyi @@ -1,5 +1,5 @@ # Builtins stub used to support attrs plugin tests. -from typing import Union, overload +from typing import Union, overload, Generic, Sequence, TypeVar, Type, Iterable, Iterator class object: def __init__(self) -> None: pass @@ -24,6 +24,13 @@ class complex: class str: pass class ellipsis: pass -class tuple: pass class list: pass class dict: pass + +T = TypeVar("T") +Tco = TypeVar('Tco', covariant=True) +class tuple(Sequence[Tco], Generic[Tco]): + def __new__(cls: Type[T], iterable: Iterable[Tco] = ...) -> T: ... 
+ def __iter__(self) -> Iterator[Tco]: pass + def __contains__(self, item: object) -> bool: pass + def __getitem__(self, x: int) -> Tco: pass From d71afbf89437bdf34566f50923759ead2736d93a Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sun, 30 Jul 2023 16:53:47 +0300 Subject: [PATCH 27/88] Change `tuple[]` repr to `tuple[()]` (#15783) Closes https://github.com/python/mypy/issues/15782 --- mypy/messages.py | 5 +++-- mypy/test/testtypes.py | 4 ++-- mypy/types.py | 2 +- test-data/unit/check-async-await.test | 2 +- test-data/unit/check-dataclasses.test | 2 +- test-data/unit/check-namedtuple.test | 12 +++++++++++- test-data/unit/check-overloading.test | 2 +- test-data/unit/check-python310.test | 4 ++-- test-data/unit/check-tuples.test | 21 +++++++++++++++------ test-data/unit/check-type-aliases.test | 8 ++++---- test-data/unit/check-typevar-tuple.test | 2 +- test-data/unit/fine-grained.test | 2 +- test-data/unit/typexport-basic.test | 8 ++++---- 13 files changed, 47 insertions(+), 27 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 8b88cc1678a4..c9bf26f8952e 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2507,10 +2507,11 @@ def format_literal_value(typ: LiteralType) -> str: # Prefer the name of the fallback class (if not tuple), as it's more informative. if typ.partial_fallback.type.fullname != "builtins.tuple": return format(typ.partial_fallback) + type_items = format_list(typ.items) or "()" if options.use_lowercase_names(): - s = f"tuple[{format_list(typ.items)}]" + s = f"tuple[{type_items}]" else: - s = f"Tuple[{format_list(typ.items)}]" + s = f"Tuple[{type_items}]" return s elif isinstance(typ, TypedDictType): # If the TypedDictType is named, return the name diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index b1f21b3be79b..59457dfa5d3b 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -129,10 +129,10 @@ def test_callable_type_with_var_args(self) -> None: ) assert_equal(str(c3), "def (X? =, *Y?) 
-> Any") - def test_tuple_type(self) -> None: + def test_tuple_type_upper(self) -> None: options = Options() options.force_uppercase_builtins = True - assert_equal(TupleType([], self.fx.std_tuple).str_with_options(options), "Tuple[]") + assert_equal(TupleType([], self.fx.std_tuple).str_with_options(options), "Tuple[()]") assert_equal(TupleType([self.x], self.fx.std_tuple).str_with_options(options), "Tuple[X?]") assert_equal( TupleType( diff --git a/mypy/types.py b/mypy/types.py index 9eeaa2cc4c3f..d13cff00c06d 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3197,7 +3197,7 @@ def visit_overloaded(self, t: Overloaded) -> str: return f"Overload({', '.join(a)})" def visit_tuple_type(self, t: TupleType) -> str: - s = self.list_str(t.items) + s = self.list_str(t.items) or "()" tuple_name = "tuple" if self.options.use_lowercase_names() else "Tuple" if t.partial_fallback and t.partial_fallback.type: fallback_name = t.partial_fallback.type.fullname diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 3b7ef53b6bd6..af6c31624b96 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -475,7 +475,7 @@ async def gen() -> AsyncGenerator[int, str]: async def h() -> None: g = gen() - await g.asend(()) # E: Argument 1 to "asend" of "AsyncGenerator" has incompatible type "Tuple[]"; expected "str" + await g.asend(()) # E: Argument 1 to "asend" of "AsyncGenerator" has incompatible type "Tuple[()]"; expected "str" reveal_type(await g.asend('hello')) # N: Revealed type is "builtins.int" [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 1e01a72921f7..7881dfbcf1bb 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1896,7 +1896,7 @@ reveal_type(t.__match_args__) # N: Revealed type is "Tuple[Literal['bar']]" class Empty: ... 
e: Empty -reveal_type(e.__match_args__) # N: Revealed type is "Tuple[]" +reveal_type(e.__match_args__) # N: Revealed type is "Tuple[()]" [builtins fixtures/dataclasses.pyi] [case testDataclassWithoutMatchArgs] diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index d69b924971e1..6e3628060617 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -931,6 +931,16 @@ reveal_type(A().b) # N: Revealed type is "typing.NamedTuple" [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] + +[case testEmptyNamedTupleTypeRepr] +from typing import NamedTuple + +N = NamedTuple('N', []) +n: N +reveal_type(N) # N: Revealed type is "def () -> Tuple[(), fallback=__main__.N]" +reveal_type(n) # N: Revealed type is "Tuple[(), fallback=__main__.N]" +[builtins fixtures/tuple.pyi] + [case testNamedTupleWrongfile] from typing import NamedTuple from b import Type1 @@ -1036,7 +1046,7 @@ def good6() -> NamedTuple: def bad1() -> NamedTuple: return 1 # E: Incompatible return value type (got "int", expected "NamedTuple") def bad2() -> NamedTuple: - return () # E: Incompatible return value type (got "Tuple[]", expected "NamedTuple") + return () # E: Incompatible return value type (got "Tuple[()]", expected "NamedTuple") def bad3() -> NamedTuple: return (1, 2) # E: Incompatible return value type (got "Tuple[int, int]", expected "NamedTuple") diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index f49a15ada85c..89e5aea210b4 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -1145,7 +1145,7 @@ def f(x: str) -> None: pass f(1.1) f('') f(1) -f(()) # E: No overload variant of "f" matches argument type "Tuple[]" \ +f(()) # E: No overload variant of "f" matches argument type "Tuple[()]" \ # N: Possible overload variants: \ # N: def f(x: float) -> None \ # N: def f(x: str) -> None diff --git 
a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index c07a90b49e63..75293ce9d193 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1568,8 +1568,8 @@ class AnnAssign(stmt): value: str simple: int -reveal_type(AST.__match_args__) # N: Revealed type is "Tuple[]" -reveal_type(stmt.__match_args__) # N: Revealed type is "Tuple[]" +reveal_type(AST.__match_args__) # N: Revealed type is "Tuple[()]" +reveal_type(stmt.__match_args__) # N: Revealed type is "Tuple[()]" reveal_type(AnnAssign.__match_args__) # N: Revealed type is "Tuple[Literal['target']?, Literal['annotation']?, Literal['value']?, Literal['simple']?]" AnnAssign.__match_args__ = ('a', 'b', 'c', 'd') # E: Cannot assign to "__match_args__" diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index f64d24a4ed6b..cff261774663 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -143,7 +143,7 @@ t3 = None # type: Tuple[A, B] a, b, c = None, None, None # type: (A, B, C) if int(): - t2 = () # E: Incompatible types in assignment (expression has type "Tuple[]", variable has type "Tuple[A]") + t2 = () # E: Incompatible types in assignment (expression has type "Tuple[()]", variable has type "Tuple[A]") if int(): t2 = (a, a) # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "Tuple[A]") if int(): @@ -1244,9 +1244,9 @@ f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[Any, . 
from typing import Tuple def f(a: Tuple[()]) -> None: pass f(()) -f((1,)) # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[]" -f(('', '')) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[]" -f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[]" +f((1,)) # E: Argument 1 to "f" has incompatible type "Tuple[int]"; expected "Tuple[()]" +f(('', '')) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[()]" +f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[()]" [builtins fixtures/tuple.pyi] [case testNonliteralTupleIndex] @@ -1467,7 +1467,7 @@ from typing import Tuple t = ('',) * 2 reveal_type(t) # N: Revealed type is "Tuple[builtins.str, builtins.str]" t2 = ('',) * -1 -reveal_type(t2) # N: Revealed type is "Tuple[]" +reveal_type(t2) # N: Revealed type is "Tuple[()]" t3 = ('', 1) * 2 reveal_type(t3) # N: Revealed type is "Tuple[builtins.str, builtins.int, builtins.str, builtins.int]" def f() -> Tuple[str, ...]: @@ -1475,12 +1475,21 @@ def f() -> Tuple[str, ...]: reveal_type(f() * 2) # N: Revealed type is "builtins.tuple[builtins.str, ...]" [builtins fixtures/tuple.pyi] +[case testEmptyTupleTypeRepr] +from typing import Tuple + +def f() -> Tuple[()]: ... 
+ +reveal_type(f) # N: Revealed type is "def () -> Tuple[()]" +reveal_type(f()) # N: Revealed type is "Tuple[()]" +[builtins fixtures/tuple.pyi] + [case testMultiplyTupleByIntegerLiteralReverse] from typing import Tuple t = 2 * ('',) reveal_type(t) # N: Revealed type is "Tuple[builtins.str, builtins.str]" t2 = -1 * ('',) -reveal_type(t2) # N: Revealed type is "Tuple[]" +reveal_type(t2) # N: Revealed type is "Tuple[()]" t3 = 2 * ('', 1) reveal_type(t3) # N: Revealed type is "Tuple[builtins.str, builtins.int, builtins.str, builtins.int]" def f() -> Tuple[str, ...]: diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 42f22e89d6b7..3ca0c5ef0a4b 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -12,7 +12,7 @@ U = Union[int, str] def f(x: U) -> None: pass f(1) f('') -f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]" +f(()) # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Union[int, str]" [targets __main__, __main__.f] [builtins fixtures/tuple.pyi] @@ -64,7 +64,7 @@ from _m import U def f(x: U) -> None: pass f(1) f('x') -f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]" +f(()) # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Union[int, str]" [file _m.py] from typing import Union U = Union[int, str] @@ -170,11 +170,11 @@ f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "str" from typing import Tuple, Callable EmptyTuple = Tuple[()] x: EmptyTuple -reveal_type(x) # N: Revealed type is "Tuple[]" +reveal_type(x) # N: Revealed type is "Tuple[()]" EmptyTupleCallable = Callable[[Tuple[()]], None] f: EmptyTupleCallable -reveal_type(f) # N: Revealed type is "def (Tuple[])" +reveal_type(f) # N: Revealed type is "def (Tuple[()])" [builtins fixtures/list.pyi] [case testForwardTypeAlias] diff --git a/test-data/unit/check-typevar-tuple.test 
b/test-data/unit/check-typevar-tuple.test index 1024f90ee6b7..e822cea9304f 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -58,7 +58,7 @@ f_args3: Tuple[int, str, bool] reveal_type(f(f_args)) # N: Revealed type is "Tuple[builtins.str, builtins.str]" reveal_type(f(f_args2)) # N: Revealed type is "Tuple[builtins.str]" reveal_type(f(f_args3)) # N: Revealed type is "Tuple[builtins.str, builtins.str, builtins.bool]" -f(empty) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Tuple[int]" +f(empty) # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Tuple[int]" f(bad_args) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[int, str]" # TODO: This hits a crash where we assert len(templates.items) == 1. See visit_tuple_type # in mypy/constraints.py. diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 66c5ee46db2f..68f72a2aa992 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -8026,7 +8026,7 @@ A = NamedTuple('A', F) # type: ignore [builtins fixtures/list.pyi] [out] == -b.py:3: note: Revealed type is "Tuple[, fallback=a.A]" +b.py:3: note: Revealed type is "Tuple[(), fallback=a.A]" [case testImportOnTopOfAlias1] from a import A diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index 0dcd0098f177..cd2afe2c1c75 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -294,8 +294,8 @@ import typing x = () [builtins fixtures/primitives.pyi] [out] -NameExpr(2) : Tuple[] -TupleExpr(2) : Tuple[] +NameExpr(2) : Tuple[()] +TupleExpr(2) : Tuple[()] [case testInferTwoTypes] ## NameExpr @@ -313,8 +313,8 @@ def f() -> None: x = () [builtins fixtures/primitives.pyi] [out] -NameExpr(3) : Tuple[] -TupleExpr(3) : Tuple[] +NameExpr(3) : Tuple[()] +TupleExpr(3) : Tuple[()] -- Basic generics From cb813259c3b9dff6aaa8686793cf6a0634cf1f69 
Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sun, 30 Jul 2023 18:48:12 +0300 Subject: [PATCH 28/88] Update pre-commit deps (#15784) Closes https://github.com/python/mypy/pull/15526 --- .pre-commit-config.yaml | 4 ++-- test-requirements.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a56e1af938b8..7a4aada8d593 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,11 +6,11 @@ repos: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/hauntsaninja/black-pre-commit-mirror - rev: 23.3.0 # must match test-requirements.txt + rev: 23.7.0 # must match test-requirements.txt hooks: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.272 # must match test-requirements.txt + rev: v0.0.280 # must match test-requirements.txt hooks: - id: ruff args: [--exit-non-zero-on-fix] diff --git a/test-requirements.txt b/test-requirements.txt index 5340973a4de1..6f7bec0375ad 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,7 +1,7 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 -black==23.3.0 # must match version in .pre-commit-config.yaml +black==23.7.0 # must match version in .pre-commit-config.yaml filelock>=3.3.0 # lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014 lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' @@ -11,6 +11,6 @@ psutil>=4.0 pytest>=7.4.0 pytest-xdist>=1.34.0 pytest-cov>=2.10.0 -ruff==0.0.272 # must match version in .pre-commit-config.yaml +ruff==0.0.280 # must match version in .pre-commit-config.yaml setuptools>=65.5.1 tomli>=1.1.0 # needed even on py311+ so the self check passes with --python-version 3.7 From 54bc37ccade0476a1738b33cd34b6eb35d7124e1 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 1 Aug 2023 15:59:34 +0100 Subject: [PATCH 29/88] reduce frequency of pre-commit.ci autoupdate PRs 
(#15798) --- .pre-commit-config.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7a4aada8d593..8ee89cbb912f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,3 +14,5 @@ repos: hooks: - id: ruff args: [--exit-non-zero-on-fix] +ci: + autoupdate_schedule: quarterly From 2b613e5ba1ada5a44f88a90528af834bf9f770a7 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Thu, 3 Aug 2023 02:31:00 +0300 Subject: [PATCH 30/88] Fix type narrowing of `== None` and `in (None,)` conditions (#15760) --- mypy/checker.py | 10 +++++----- mypy/checkexpr.py | 9 +++++++-- mypy/plugins/common.py | 4 ++-- mypy/suggestions.py | 6 +++--- mypy/types_utils.py | 6 +++--- test-data/unit/check-narrowing.test | 26 ++++++++++++++++++++++++++ test-data/unit/fixtures/primitives.pyi | 3 ++- 7 files changed, 48 insertions(+), 16 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index a8cb2b862fbc..0c27da8b5ac8 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -216,7 +216,7 @@ is_literal_type, is_named_instance, ) -from mypy.types_utils import is_optional, remove_optional, store_argument_type, strip_type +from mypy.types_utils import is_overlapping_none, remove_optional, store_argument_type, strip_type from mypy.typetraverser import TypeTraverserVisitor from mypy.typevars import fill_typevars, fill_typevars_with_any, has_no_typevars from mypy.util import is_dunder, is_sunder, is_typeshed_file @@ -5660,13 +5660,13 @@ def has_no_custom_eq_checks(t: Type) -> bool: if left_index in narrowable_operand_index_to_hash: # We only try and narrow away 'None' for now - if is_optional(item_type): + if is_overlapping_none(item_type): collection_item_type = get_proper_type( builtin_item_type(iterable_type) ) if ( collection_item_type is not None - and not is_optional(collection_item_type) + and not is_overlapping_none(collection_item_type) and not ( isinstance(collection_item_type, Instance) and 
collection_item_type.type.fullname == "builtins.object" @@ -6073,7 +6073,7 @@ def refine_away_none_in_comparison( non_optional_types = [] for i in chain_indices: typ = operand_types[i] - if not is_optional(typ): + if not is_overlapping_none(typ): non_optional_types.append(typ) # Make sure we have a mixture of optional and non-optional types. @@ -6083,7 +6083,7 @@ def refine_away_none_in_comparison( if_map = {} for i in narrowable_operand_indices: expr_type = operand_types[i] - if not is_optional(expr_type): + if not is_overlapping_none(expr_type): continue if any(is_overlapping_erased_types(expr_type, t) for t in non_optional_types): if_map[operands[i]] = remove_optional(expr_type) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 62e2298ba59d..114cde8327e0 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -169,7 +169,12 @@ is_named_instance, split_with_prefix_and_suffix, ) -from mypy.types_utils import is_generic_instance, is_optional, is_self_type_like, remove_optional +from mypy.types_utils import ( + is_generic_instance, + is_overlapping_none, + is_self_type_like, + remove_optional, +) from mypy.typestate import type_state from mypy.typevars import fill_typevars from mypy.typevartuples import find_unpack_in_list @@ -1809,7 +1814,7 @@ def infer_function_type_arguments_using_context( # valid results. erased_ctx = replace_meta_vars(ctx, ErasedType()) ret_type = callable.ret_type - if is_optional(ret_type) and is_optional(ctx): + if is_overlapping_none(ret_type) and is_overlapping_none(ctx): # If both the context and the return type are optional, unwrap the optional, # since in 99% cases this is what a user expects. 
In other words, we replace # Optional[T] <: Optional[int] diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 65d967577bea..55f2870cadb4 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -43,7 +43,7 @@ deserialize_type, get_proper_type, ) -from mypy.types_utils import is_optional +from mypy.types_utils import is_overlapping_none from mypy.typevars import fill_typevars from mypy.util import get_unique_redefinition_name @@ -141,7 +141,7 @@ def find_shallow_matching_overload_item(overload: Overloaded, call: CallExpr) -> break elif ( arg_none - and not is_optional(arg_type) + and not is_overlapping_none(arg_type) and not ( isinstance(arg_type, Instance) and arg_type.type.fullname == "builtins.object" diff --git a/mypy/suggestions.py b/mypy/suggestions.py index 8e1225f00a2f..268f3032fc9b 100644 --- a/mypy/suggestions.py +++ b/mypy/suggestions.py @@ -79,7 +79,7 @@ UnionType, get_proper_type, ) -from mypy.types_utils import is_optional, remove_optional +from mypy.types_utils import is_overlapping_none, remove_optional from mypy.util import split_target @@ -752,7 +752,7 @@ def score_type(self, t: Type, arg_pos: bool) -> int: return 20 if any(has_any_type(x) for x in t.items): return 15 - if not is_optional(t): + if not is_overlapping_none(t): return 10 if isinstance(t, CallableType) and (has_any_type(t) or is_tricky_callable(t)): return 10 @@ -868,7 +868,7 @@ def visit_typeddict_type(self, t: TypedDictType) -> str: return t.fallback.accept(self) def visit_union_type(self, t: UnionType) -> str: - if len(t.items) == 2 and is_optional(t): + if len(t.items) == 2 and is_overlapping_none(t): return f"Optional[{remove_optional(t).accept(self)}]" else: return super().visit_union_type(t) diff --git a/mypy/types_utils.py b/mypy/types_utils.py index 43bca05d6bf9..7f2e38ef3753 100644 --- a/mypy/types_utils.py +++ b/mypy/types_utils.py @@ -101,10 +101,10 @@ def is_generic_instance(tp: Type) -> bool: return isinstance(tp, Instance) and bool(tp.args) -def 
is_optional(t: Type) -> bool: +def is_overlapping_none(t: Type) -> bool: t = get_proper_type(t) - return isinstance(t, UnionType) and any( - isinstance(get_proper_type(e), NoneType) for e in t.items + return isinstance(t, NoneType) or ( + isinstance(t, UnionType) and any(isinstance(get_proper_type(e), NoneType) for e in t.items) ) diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index b763e0ff3b68..291f73a45230 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -1263,6 +1263,32 @@ def g() -> None: [builtins fixtures/dict.pyi] +[case testNarrowingOptionalEqualsNone] +from typing import Optional + +class A: ... + +val: Optional[A] + +if val == None: + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" +else: + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" +if val != None: + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" +else: + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" + +if val in (None,): + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" +else: + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" +if val not in (None,): + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" +else: + reveal_type(val) # N: Revealed type is "Union[__main__.A, None]" +[builtins fixtures/primitives.pyi] + [case testNarrowingWithTupleOfTypes] from typing import Tuple, Type diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi index b74252857d6f..c9b1e3f4e983 100644 --- a/test-data/unit/fixtures/primitives.pyi +++ b/test-data/unit/fixtures/primitives.pyi @@ -45,7 +45,8 @@ class memoryview(Sequence[int]): def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> int: pass -class tuple(Generic[T]): pass +class tuple(Generic[T]): + def __contains__(self, other: object) -> bool: pass 
class list(Sequence[T]): def __iter__(self) -> Iterator[T]: pass def __contains__(self, other: object) -> bool: pass From 0d708cb9c9d5291c1c988ef90a1b77307ed5315c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 4 Aug 2023 00:17:42 +0100 Subject: [PATCH 31/88] New type inference: complete transitive closure (#15754) This is a first follow-up for #15287 (I like how my PR titles sound like research paper titles, LOL) This PR completes the new type inference foundations by switching to a complete and well founded algorithm [1] for transitive closure (that replaces more ad hoc initial algorithm that covered 80% of cases and was good for experimenting with new inference scheme). In particular the algorithm in this PR covers two important edge cases (see tests). Some comments: * I don't intend to switch the default for `--new-type-inference`, I just want to see the effect of the switch on `mypy_primer`, I will switch back to false before merging * This flag is still not ready to be publicly announced, I am going to make another 2-3 PRs from the list in #15287 before making this public. * I am not adding yet the unit tests as discussed in previous PR. This PR is already quite big, and the next one (support for upper bounds and values) should be much smaller. I am going to add unit tests only for `transitive_closure()` which is the core of new logic. * While working on this I fixed couple bugs exposed in `TypeVarTuple` support: one is rare technical corner case, another one is serious, template and actual where swapped during constraint inference, effectively causing outer/return context to be completely ignored for instances. * It is better to review the PR with "ignore whitespace" option turned on (there is big chunk in solve.py that is just change of indentation). 
* There is one questionable design choice I am making in this PR, I am adding `extra_tvars` as an attribute of `Constraint` class, while it logically should not be attributed to any individual constraint, but rather to the full list of constrains. However, doing this properly would require changing the return type of `infer_constrains()` and all related functions, which would be a really big refactoring. [1] Definition 7.1 in https://inria.hal.science/inria-00073205/document --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/checker.py | 72 ++--- mypy/checkexpr.py | 40 ++- mypy/constraints.py | 63 +++-- mypy/expandtype.py | 5 + mypy/infer.py | 10 +- mypy/solve.py | 387 +++++++++++++------------- mypy/subtypes.py | 3 +- mypy/test/testconstraints.py | 3 - mypy/test/testsolve.py | 50 ++-- mypy/typeops.py | 6 +- mypy_self_check.ini | 1 + test-data/unit/check-generics.test | 20 +- test-data/unit/check-overloading.test | 15 + 13 files changed, 356 insertions(+), 319 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 0c27da8b5ac8..b786155079e5 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -734,8 +734,10 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # def foo(x: str) -> str: ... # # See Python 2's map function for a concrete example of this kind of overload. 
+ current_class = self.scope.active_class() + type_vars = current_class.defn.type_vars if current_class else [] with state.strict_optional_set(True): - if is_unsafe_overlapping_overload_signatures(sig1, sig2): + if is_unsafe_overlapping_overload_signatures(sig1, sig2, type_vars): self.msg.overloaded_signatures_overlap(i + 1, i + j + 2, item.func) if impl_type is not None: @@ -1702,7 +1704,9 @@ def is_unsafe_overlapping_op( first = forward_tweaked second = reverse_tweaked - return is_unsafe_overlapping_overload_signatures(first, second) + current_class = self.scope.active_class() + type_vars = current_class.defn.type_vars if current_class else [] + return is_unsafe_overlapping_overload_signatures(first, second, type_vars) def check_inplace_operator_method(self, defn: FuncBase) -> None: """Check an inplace operator method such as __iadd__. @@ -3918,11 +3922,12 @@ def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool: return True if len(t.args) == 1: arg = get_proper_type(t.args[0]) - # TODO: This is too permissive -- we only allow TypeVarType since - # they leak in cases like defaultdict(list) due to a bug. - # This can result in incorrect types being inferred, but only - # in rare cases. - if isinstance(arg, (TypeVarType, UninhabitedType, NoneType)): + if self.options.new_type_inference: + allowed = isinstance(arg, (UninhabitedType, NoneType)) + else: + # Allow leaked TypeVars for legacy inference logic. + allowed = isinstance(arg, (UninhabitedType, NoneType, TypeVarType)) + if allowed: return True return False @@ -7179,7 +7184,7 @@ def are_argument_counts_overlapping(t: CallableType, s: CallableType) -> bool: def is_unsafe_overlapping_overload_signatures( - signature: CallableType, other: CallableType + signature: CallableType, other: CallableType, class_type_vars: list[TypeVarLikeType] ) -> bool: """Check if two overloaded signatures are unsafely overlapping or partially overlapping. 
@@ -7198,8 +7203,8 @@ def is_unsafe_overlapping_overload_signatures( # This lets us identify cases where the two signatures use completely # incompatible types -- e.g. see the testOverloadingInferUnionReturnWithMixedTypevars # test case. - signature = detach_callable(signature) - other = detach_callable(other) + signature = detach_callable(signature, class_type_vars) + other = detach_callable(other, class_type_vars) # Note: We repeat this check twice in both directions due to a slight # asymmetry in 'is_callable_compatible'. When checking for partial overlaps, @@ -7230,7 +7235,7 @@ def is_unsafe_overlapping_overload_signatures( ) -def detach_callable(typ: CallableType) -> CallableType: +def detach_callable(typ: CallableType, class_type_vars: list[TypeVarLikeType]) -> CallableType: """Ensures that the callable's type variables are 'detached' and independent of the context. A callable normally keeps track of the type variables it uses within its 'variables' field. @@ -7240,42 +7245,17 @@ def detach_callable(typ: CallableType) -> CallableType: This function will traverse the callable and find all used type vars and add them to the variables field if it isn't already present. 
- The caller can then unify on all type variables whether or not the callable is originally - from a class or not.""" - type_list = typ.arg_types + [typ.ret_type] - - appear_map: dict[str, list[int]] = {} - for i, inner_type in enumerate(type_list): - typevars_available = get_type_vars(inner_type) - for var in typevars_available: - if var.fullname not in appear_map: - appear_map[var.fullname] = [] - appear_map[var.fullname].append(i) - - used_type_var_names = set() - for var_name, appearances in appear_map.items(): - used_type_var_names.add(var_name) - - all_type_vars = get_type_vars(typ) - new_variables = [] - for var in set(all_type_vars): - if var.fullname not in used_type_var_names: - continue - new_variables.append( - TypeVarType( - name=var.name, - fullname=var.fullname, - id=var.id, - values=var.values, - upper_bound=var.upper_bound, - default=var.default, - variance=var.variance, - ) - ) - out = typ.copy_modified( - variables=new_variables, arg_types=type_list[:-1], ret_type=type_list[-1] + The caller can then unify on all type variables whether the callable is originally from + the class or not.""" + if not class_type_vars: + # Fast path, nothing to update. 
+ return typ + seen_type_vars = set() + for t in typ.arg_types + [typ.ret_type]: + seen_type_vars |= set(get_type_vars(t)) + return typ.copy_modified( + variables=list(typ.variables) + [tv for tv in class_type_vars if tv in seen_type_vars] ) - return out def overload_can_never_match(signature: CallableType, other: CallableType) -> bool: diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 114cde8327e0..9e46d9ee39cb 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1857,7 +1857,7 @@ def infer_function_type_arguments_using_context( # expects_literal(identity(3)) # Should type-check if not is_generic_instance(ctx) and not is_literal_type_like(ctx): return callable.copy_modified() - args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx) + args = infer_type_arguments(callable.variables, ret_type, erased_ctx) # Only substitute non-Uninhabited and non-erased types. new_args: list[Type | None] = [] for arg in args: @@ -1906,7 +1906,7 @@ def infer_function_type_arguments( else: pass1_args.append(arg) - inferred_args = infer_function_type_arguments( + inferred_args, _ = infer_function_type_arguments( callee_type, pass1_args, arg_kinds, @@ -1948,7 +1948,7 @@ def infer_function_type_arguments( # variables while allowing for polymorphic solutions, i.e. for solutions # potentially involving free variables. # TODO: support the similar inference for return type context. - poly_inferred_args = infer_function_type_arguments( + poly_inferred_args, free_vars = infer_function_type_arguments( callee_type, arg_types, arg_kinds, @@ -1957,30 +1957,28 @@ def infer_function_type_arguments( strict=self.chk.in_checked_function(), allow_polymorphic=True, ) - for i, pa in enumerate(get_proper_types(poly_inferred_args)): - if isinstance(pa, (NoneType, UninhabitedType)) or has_erased_component(pa): - # Indicate that free variables should not be applied in the call below. 
- poly_inferred_args[i] = None poly_callee_type = self.apply_generic_arguments( callee_type, poly_inferred_args, context ) - yes_vars = poly_callee_type.variables - no_vars = {v for v in callee_type.variables if v not in poly_callee_type.variables} - if not set(get_type_vars(poly_callee_type)) & no_vars: - # Try applying inferred polymorphic type if possible, e.g. Callable[[T], T] can - # be interpreted as def [T] (T) -> T, but dict[T, T] cannot be expressed. - applied = apply_poly(poly_callee_type, yes_vars) - if applied is not None and poly_inferred_args != [UninhabitedType()] * len( - poly_inferred_args - ): - freeze_all_type_vars(applied) - return applied + # Try applying inferred polymorphic type if possible, e.g. Callable[[T], T] can + # be interpreted as def [T] (T) -> T, but dict[T, T] cannot be expressed. + applied = apply_poly(poly_callee_type, free_vars) + if applied is not None and all( + a is not None and not isinstance(get_proper_type(a), UninhabitedType) + for a in poly_inferred_args + ): + freeze_all_type_vars(applied) + return applied # If it didn't work, erase free variables as , to avoid confusing errors. 
+ unknown = UninhabitedType() + unknown.ambiguous = True inferred_args = [ - expand_type(a, {v.id: UninhabitedType() for v in callee_type.variables}) + expand_type( + a, {v.id: unknown for v in list(callee_type.variables) + free_vars} + ) if a is not None else None - for a in inferred_args + for a in poly_inferred_args ] else: # In dynamically typed functions use implicit 'Any' types for @@ -2019,7 +2017,7 @@ def infer_function_type_arguments_pass2( arg_types = self.infer_arg_types_in_context(callee_type, args, arg_kinds, formal_to_actual) - inferred_args = infer_function_type_arguments( + inferred_args, _ = infer_function_type_arguments( callee_type, arg_types, arg_kinds, diff --git a/mypy/constraints.py b/mypy/constraints.py index f9124630a706..299c6292a259 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -73,6 +73,10 @@ def __init__(self, type_var: TypeVarLikeType, op: int, target: Type) -> None: self.op = op self.target = target self.origin_type_var = type_var + # These are additional type variables that should be solved for together with type_var. + # TODO: A cleaner solution may be to modify the return type of infer_constraints() + # to include these instead, but this is a rather big refactoring. + self.extra_tvars: list[TypeVarLikeType] = [] def __repr__(self) -> str: op_str = "<:" @@ -168,7 +172,9 @@ def infer_constraints_for_callable( return constraints -def infer_constraints(template: Type, actual: Type, direction: int) -> list[Constraint]: +def infer_constraints( + template: Type, actual: Type, direction: int, skip_neg_op: bool = False +) -> list[Constraint]: """Infer type constraints. Match a template type, which may contain type variable references, @@ -187,7 +193,9 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons ((T, S), (X, Y)) --> T :> X and S :> Y (X[T], Any) --> T <: Any and T :> Any - The constraints are represented as Constraint objects. + The constraints are represented as Constraint objects. 
If skip_neg_op == True, + then skip adding reverse (polymorphic) constraints (since this is already a call + to infer such constraints). """ if any( get_proper_type(template) == get_proper_type(t) @@ -202,13 +210,15 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons # Return early on an empty branch. return [] type_state.inferring.append((template, actual)) - res = _infer_constraints(template, actual, direction) + res = _infer_constraints(template, actual, direction, skip_neg_op) type_state.inferring.pop() return res - return _infer_constraints(template, actual, direction) + return _infer_constraints(template, actual, direction, skip_neg_op) -def _infer_constraints(template: Type, actual: Type, direction: int) -> list[Constraint]: +def _infer_constraints( + template: Type, actual: Type, direction: int, skip_neg_op: bool +) -> list[Constraint]: orig_template = template template = get_proper_type(template) actual = get_proper_type(actual) @@ -284,7 +294,7 @@ def _infer_constraints(template: Type, actual: Type, direction: int) -> list[Con return [] # Remaining cases are handled by ConstraintBuilderVisitor. - return template.accept(ConstraintBuilderVisitor(actual, direction)) + return template.accept(ConstraintBuilderVisitor(actual, direction, skip_neg_op)) def infer_constraints_if_possible( @@ -510,10 +520,14 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]): # TODO: The value may be None. Is that actually correct? actual: ProperType - def __init__(self, actual: ProperType, direction: int) -> None: + def __init__(self, actual: ProperType, direction: int, skip_neg_op: bool) -> None: # Direction must be SUBTYPE_OF or SUPERTYPE_OF. self.actual = actual self.direction = direction + # Whether to skip polymorphic inference (involves inference in opposite direction) + # this is used to prevent infinite recursion when both template and actual are + # generic callables. 
+ self.skip_neg_op = skip_neg_op # Trivial leaf types @@ -648,13 +662,13 @@ def visit_instance(self, template: Instance) -> list[Constraint]: assert mapped.type.type_var_tuple_prefix is not None assert mapped.type.type_var_tuple_suffix is not None - unpack_constraints, mapped_args, instance_args = build_constraints_for_unpack( - mapped.args, - mapped.type.type_var_tuple_prefix, - mapped.type.type_var_tuple_suffix, + unpack_constraints, instance_args, mapped_args = build_constraints_for_unpack( instance.args, instance.type.type_var_tuple_prefix, instance.type.type_var_tuple_suffix, + mapped.args, + mapped.type.type_var_tuple_prefix, + mapped.type.type_var_tuple_suffix, self.direction, ) res.extend(unpack_constraints) @@ -879,6 +893,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # Note that non-normalized callables can be created in annotations # using e.g. callback protocols. template = template.with_unpacked_kwargs() + extra_tvars = False if isinstance(self.actual, CallableType): res: list[Constraint] = [] cactual = self.actual.with_unpacked_kwargs() @@ -890,6 +905,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: type_state.infer_polymorphic and cactual.variables and cactual.param_spec() is None + and not self.skip_neg_op # Technically, the correct inferred type for application of e.g. # Callable[..., T] -> Callable[..., T] (with literal ellipsis), to a generic # like U -> U, should be Callable[..., Any], but if U is a self-type, we can @@ -897,18 +913,15 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # depends on this old behaviour. and not any(tv.id.raw_id == 0 for tv in cactual.variables) ): - # If actual is generic, unify it with template. Note: this is - # not an ideal solution (which would be adding the generic variables - # to the constraint inference set), but it's a good first approximation, - # and this will prevent leaking these variables in the solutions. 
- # Note: this may infer constraints like T <: S or T <: List[S] - # that contain variables in the target. - unified = mypy.subtypes.unify_generic_callable( - cactual, template, ignore_return=True + # If the actual callable is generic, infer constraints in the opposite + # direction, and indicate to the solver there are extra type variables + # to solve for (see more details in mypy/solve.py). + res.extend( + infer_constraints( + cactual, template, neg_op(self.direction), skip_neg_op=True + ) ) - if unified is not None: - cactual = unified - res.extend(infer_constraints(cactual, template, neg_op(self.direction))) + extra_tvars = True # We can't infer constraints from arguments if the template is Callable[..., T] # (with literal '...'). @@ -978,6 +991,9 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: cactual_ret_type = cactual.type_guard res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction)) + if extra_tvars: + for c in res: + c.extra_tvars = list(cactual.variables) return res elif isinstance(self.actual, AnyType): param_spec = template.param_spec() @@ -1205,6 +1221,9 @@ def find_and_build_constraints_for_unpack( def build_constraints_for_unpack( + # TODO: this naming is misleading, these should be "actual", not "mapped" + # both template and actual can be mapped before, depending on direction. + # Also the convention is to put template related args first. mapped: tuple[Type, ...], mapped_prefix_len: int | None, mapped_suffix_len: int | None, diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 83d9bf4c8725..b599b49e4c12 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -272,6 +272,11 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: return repl def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: + # Sometimes solver may need to expand a type variable with (a copy of) itself + # (usually together with other TypeVars, but it is hard to filter out TypeVarTuples). 
+ repl = self.variables[t.id] + if isinstance(repl, TypeVarTupleType): + return repl raise NotImplementedError def visit_unpack_type(self, t: UnpackType) -> Type: diff --git a/mypy/infer.py b/mypy/infer.py index 66ca4169e2ff..f34087910e4b 100644 --- a/mypy/infer.py +++ b/mypy/infer.py @@ -12,7 +12,7 @@ ) from mypy.nodes import ArgKind from mypy.solve import solve_constraints -from mypy.types import CallableType, Instance, Type, TypeVarId +from mypy.types import CallableType, Instance, Type, TypeVarLikeType class ArgumentInferContext(NamedTuple): @@ -37,7 +37,7 @@ def infer_function_type_arguments( context: ArgumentInferContext, strict: bool = True, allow_polymorphic: bool = False, -) -> list[Type | None]: +) -> tuple[list[Type | None], list[TypeVarLikeType]]: """Infer the type arguments of a generic function. Return an array of lower bound types for the type variables -1 (at @@ -57,14 +57,14 @@ def infer_function_type_arguments( ) # Solve constraints. - type_vars = callee_type.type_var_ids() + type_vars = callee_type.variables return solve_constraints(type_vars, constraints, strict, allow_polymorphic) def infer_type_arguments( - type_var_ids: list[TypeVarId], template: Type, actual: Type, is_supertype: bool = False + type_vars: Sequence[TypeVarLikeType], template: Type, actual: Type, is_supertype: bool = False ) -> list[Type | None]: # Like infer_function_type_arguments, but only match a single type # against a generic type. 
constraints = infer_constraints(template, actual, SUPERTYPE_OF if is_supertype else SUBTYPE_OF) - return solve_constraints(type_var_ids, constraints) + return solve_constraints(type_vars, constraints)[0] diff --git a/mypy/solve.py b/mypy/solve.py index 6693d66f3479..02df90aff1e1 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import Iterable +from collections import defaultdict +from typing import Iterable, Sequence +from typing_extensions import TypeAlias as _TypeAlias -from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, neg_op +from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints, neg_op from mypy.expandtype import expand_type from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort from mypy.join import join_types @@ -17,6 +19,7 @@ Type, TypeOfAny, TypeVarId, + TypeVarLikeType, TypeVarType, UninhabitedType, UnionType, @@ -25,45 +28,72 @@ ) from mypy.typestate import type_state +Bounds: _TypeAlias = "dict[TypeVarId, set[Type]]" +Graph: _TypeAlias = "set[tuple[TypeVarId, TypeVarId]]" +Solutions: _TypeAlias = "dict[TypeVarId, Type | None]" + def solve_constraints( - vars: list[TypeVarId], + original_vars: Sequence[TypeVarLikeType], constraints: list[Constraint], strict: bool = True, allow_polymorphic: bool = False, -) -> list[Type | None]: +) -> tuple[list[Type | None], list[TypeVarLikeType]]: """Solve type constraints. - Return the best type(s) for type variables; each type can be None if the value of the variable - could not be solved. + Return the best type(s) for type variables; each type can be None if the value of + the variable could not be solved. If a variable has no constraints, if strict=True then arbitrarily - pick NoneType as the value of the type variable. If strict=False, - pick AnyType. + pick UninhabitedType as the value of the type variable. If strict=False, pick AnyType. 
+ If allow_polymorphic=True, then use the full algorithm that can potentially return + free type variables in solutions (these require special care when applying). Otherwise, + use a simplified algorithm that just solves each type variable individually if possible. """ + vars = [tv.id for tv in original_vars] if not vars: - return [] + return [], [] + + originals = {tv.id: tv for tv in original_vars} + extra_vars: list[TypeVarId] = [] + # Get additional type variables from generic actuals. + for c in constraints: + extra_vars.extend([v.id for v in c.extra_tvars if v.id not in vars + extra_vars]) + originals.update({v.id: v for v in c.extra_tvars if v.id not in originals}) if allow_polymorphic: # Constraints like T :> S and S <: T are semantically the same, but they are # represented differently. Normalize the constraint list w.r.t this equivalence. - constraints = normalize_constraints(constraints, vars) + constraints = normalize_constraints(constraints, vars + extra_vars) # Collect a list of constraints for each type variable. - cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars} + cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars + extra_vars} for con in constraints: - if con.type_var in vars: + if con.type_var in vars + extra_vars: cmap[con.type_var].append(con) if allow_polymorphic: - solutions = solve_non_linear(vars, constraints, cmap) + if constraints: + solutions, free_vars = solve_with_dependent( + vars + extra_vars, constraints, vars, originals + ) + else: + solutions = {} + free_vars = [] else: solutions = {} + free_vars = [] for tv, cs in cmap.items(): if not cs: continue lowers = [c.target for c in cs if c.op == SUPERTYPE_OF] uppers = [c.target for c in cs if c.op == SUBTYPE_OF] - solutions[tv] = solve_one(lowers, uppers, []) + solution = solve_one(lowers, uppers) + + # Do not leak type variables in non-polymorphic solutions. 
+ if solution is None or not get_vars( + solution, [tv for tv in extra_vars if tv not in vars] + ): + solutions[tv] = solution res: list[Type | None] = [] for v in vars: @@ -78,129 +108,128 @@ def solve_constraints( else: candidate = AnyType(TypeOfAny.special_form) res.append(candidate) - return res + return res, [originals[tv] for tv in free_vars] -def solve_non_linear( - vars: list[TypeVarId], constraints: list[Constraint], cmap: dict[TypeVarId, list[Constraint]] -) -> dict[TypeVarId, Type | None]: - """Solve set of constraints that may include non-linear ones, like T <: List[S]. +def solve_with_dependent( + vars: list[TypeVarId], + constraints: list[Constraint], + original_vars: list[TypeVarId], + originals: dict[TypeVarId, TypeVarLikeType], +) -> tuple[Solutions, list[TypeVarId]]: + """Solve set of constraints that may depend on each other, like T <: List[S]. The whole algorithm consists of five steps: - * Propagate via linear constraints to get all possible constraints for each variable + * Propagate via linear constraints and use secondary constraints to get transitive closure * Find dependencies between type variables, group them in SCCs, and sort topologically - * Check all SCC are intrinsically linear, we can't solve (express) T <: List[T] + * Check that all SCC are intrinsically linear, we can't solve (express) T <: List[T] * Variables in leaf SCCs that don't have constant bounds are free (choose one per SCC) - * Solve constraints iteratively starting from leafs, updating targets after each step. + * Solve constraints iteratively starting from leafs, updating bounds after each step. """ - extra_constraints = [] - for tvar in vars: - extra_constraints.extend(propagate_constraints_for(tvar, SUBTYPE_OF, cmap)) - extra_constraints.extend(propagate_constraints_for(tvar, SUPERTYPE_OF, cmap)) - constraints += remove_dups(extra_constraints) - - # Recompute constraint map after propagating. 
- cmap = {tv: [] for tv in vars} - for con in constraints: - if con.type_var in vars: - cmap[con.type_var].append(con) + graph, lowers, uppers = transitive_closure(vars, constraints) - dmap = compute_dependencies(cmap) + dmap = compute_dependencies(vars, graph, lowers, uppers) sccs = list(strongly_connected_components(set(vars), dmap)) - if all(check_linear(scc, cmap) for scc in sccs): - raw_batches = list(topsort(prepare_sccs(sccs, dmap))) - leafs = raw_batches[0] - free_vars = [] - for scc in leafs: - # If all constrain targets in this SCC are type variables within the - # same SCC then the only meaningful solution we can express, is that - # each variable is equal to a new free variable. For example if we - # have T <: S, S <: U, we deduce: T = S = U = . - if all( - isinstance(c.target, TypeVarType) and c.target.id in vars - for tv in scc - for c in cmap[tv] - ): - # For convenience with current type application machinery, we randomly - # choose one of the existing type variables in SCC and designate it as free - # instead of defining a new type variable as a common solution. - # TODO: be careful about upper bounds (or values) when introducing free vars. - free_vars.append(sorted(scc, key=lambda x: x.raw_id)[0]) - - # Flatten the SCCs that are independent, we can solve them together, - # since we don't need to update any targets in between. - batches = [] - for batch in raw_batches: - next_bc = [] - for scc in batch: - next_bc.extend(list(scc)) - batches.append(next_bc) - - solutions: dict[TypeVarId, Type | None] = {} - for flat_batch in batches: - solutions.update(solve_iteratively(flat_batch, cmap, free_vars)) - # We remove the solutions like T = T for free variables. This will indicate - # to the apply function, that they should not be touched. - # TODO: return list of free type variables explicitly, this logic is fragile - # (but if we do, we need to be careful everything works in incremental modes). 
- for tv in free_vars: - if tv in solutions: - del solutions[tv] - return solutions - return {} + if not all(check_linear(scc, lowers, uppers) for scc in sccs): + return {}, [] + raw_batches = list(topsort(prepare_sccs(sccs, dmap))) + + free_vars = [] + for scc in raw_batches[0]: + # If there are no bounds on this SCC, then the only meaningful solution we can + # express, is that each variable is equal to a new free variable. For example, + # if we have T <: S, S <: U, we deduce: T = S = U = . + if all(not lowers[tv] and not uppers[tv] for tv in scc): + # For convenience with current type application machinery, we use a stable + # choice that prefers the original type variables (not polymorphic ones) in SCC. + # TODO: be careful about upper bounds (or values) when introducing free vars. + free_vars.append(sorted(scc, key=lambda x: (x not in original_vars, x.raw_id))[0]) + + # Update lowers/uppers with free vars, so these can now be used + # as valid solutions. + for l, u in graph.copy(): + if l in free_vars: + lowers[u].add(originals[l]) + if u in free_vars: + uppers[l].add(originals[u]) + + # Flatten the SCCs that are independent, we can solve them together, + # since we don't need to update any targets in between. + batches = [] + for batch in raw_batches: + next_bc = [] + for scc in batch: + next_bc.extend(list(scc)) + batches.append(next_bc) + + solutions: dict[TypeVarId, Type | None] = {} + for flat_batch in batches: + res = solve_iteratively(flat_batch, graph, lowers, uppers) + solutions.update(res) + return solutions, free_vars def solve_iteratively( - batch: list[TypeVarId], cmap: dict[TypeVarId, list[Constraint]], free_vars: list[TypeVarId] -) -> dict[TypeVarId, Type | None]: - """Solve constraints sequentially, updating constraint targets after each step. - - We solve for type variables that appear in `batch`. If a constraint target is not constant - (i.e. 
constraint looks like T :> F[S, ...]), we substitute solutions found so far in - the target F[S, ...]. This way we can gradually solve for all variables in the batch taking - one solvable variable at a time (i.e. such a variable that has at least one constant bound). - - Importantly, variables in free_vars are considered constants, so for example if we have just - one initial constraint T <: List[S], we will have two SCCs {T} and {S}, then we first - designate S as free, and therefore T = List[S] is a valid solution for T. + batch: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds +) -> Solutions: + """Solve transitive closure sequentially, updating upper/lower bounds after each step. + + Transitive closure is represented as a linear graph plus lower/upper bounds for each + type variable, see transitive_closure() docstring for details. + + We solve for type variables that appear in `batch`. If a bound is not constant (i.e. it + looks like T :> F[S, ...]), we substitute solutions found so far in the target F[S, ...] + after solving the batch. + + Importantly, after solving each variable in a batch, we move it from linear graph to + upper/lower bounds, this way we can guarantee consistency of solutions (see comment below + for an example when this is important). """ solutions = {} - relevant_constraints = [] - for tv in batch: - relevant_constraints.extend(cmap.get(tv, [])) - lowers, uppers = transitive_closure(batch, relevant_constraints) s_batch = set(batch) - not_allowed_vars = [v for v in batch if v not in free_vars] while s_batch: - for tv in s_batch: - if any(not get_vars(l, not_allowed_vars) for l in lowers[tv]) or any( - not get_vars(u, not_allowed_vars) for u in uppers[tv] - ): + for tv in sorted(s_batch, key=lambda x: x.raw_id): + if lowers[tv] or uppers[tv]: solvable_tv = tv break else: break # Solve each solvable type variable separately. 
s_batch.remove(solvable_tv) - result = solve_one(lowers[solvable_tv], uppers[solvable_tv], not_allowed_vars) + result = solve_one(lowers[solvable_tv], uppers[solvable_tv]) solutions[solvable_tv] = result if result is None: - # TODO: support backtracking lower/upper bound choices + # TODO: support backtracking lower/upper bound choices and order within SCCs. # (will require switching this function from iterative to recursive). continue - # Update the (transitive) constraints if there is a solution. - subs = {solvable_tv: result} - lowers = {tv: {expand_type(l, subs) for l in lowers[tv]} for tv in lowers} - uppers = {tv: {expand_type(u, subs) for u in uppers[tv]} for tv in uppers} - for v in cmap: - for c in cmap[v]: - c.target = expand_type(c.target, subs) + + # Update the (transitive) bounds from graph if there is a solution. + # This is needed to guarantee solutions will never contradict the initial + # constraints. For example, consider {T <: S, T <: A, S :> B} with A :> B. + # If we would not update the uppers/lowers from graph, we would infer T = A, S = B + # which is not correct. + for l, u in graph.copy(): + if l == u: + continue + if l == solvable_tv: + lowers[u].add(result) + graph.remove((l, u)) + if u == solvable_tv: + uppers[l].add(result) + graph.remove((l, u)) + + # We can update uppers/lowers only once after solving the whole SCC, + # since uppers/lowers can't depend on type variables in the SCC + # (and we would reject such SCC as non-linear and therefore not solvable). 
+ subs = {tv: s for (tv, s) in solutions.items() if s is not None} + for tv in lowers: + lowers[tv] = {expand_type(lt, subs) for lt in lowers[tv]} + for tv in uppers: + uppers[tv] = {expand_type(ut, subs) for ut in uppers[tv]} return solutions -def solve_one( - lowers: Iterable[Type], uppers: Iterable[Type], not_allowed_vars: list[TypeVarId] -) -> Type | None: +def solve_one(lowers: Iterable[Type], uppers: Iterable[Type]) -> Type | None: """Solve constraints by finding by using meets of upper bounds, and joins of lower bounds.""" bottom: Type | None = None top: Type | None = None @@ -210,10 +239,6 @@ def solve_one( # bounds based on constraints. Note that we assume that the constraint # targets do not have constraint references. for target in lowers: - # There may be multiple steps needed to solve all vars within a - # (linear) SCC. We ignore targets pointing to not yet solved vars. - if get_vars(target, not_allowed_vars): - continue if bottom is None: bottom = target else: @@ -225,9 +250,6 @@ def solve_one( bottom = join_types(bottom, target) for target in uppers: - # Same as above. - if get_vars(target, not_allowed_vars): - continue if top is None: top = target else: @@ -262,6 +284,7 @@ def normalize_constraints( This includes two things currently: * Complement T :> S by S <: T * Remove strict duplicates + * Remove constrains for unrelated variables """ res = constraints.copy() for c in constraints: @@ -270,96 +293,81 @@ def normalize_constraints( return [c for c in remove_dups(constraints) if c.type_var in vars] -def propagate_constraints_for( - var: TypeVarId, direction: int, cmap: dict[TypeVarId, list[Constraint]] -) -> list[Constraint]: - """Propagate via linear constraints to get additional constraints for `var`. 
- - For example if we have constraints: - [T <: int, S <: T, S :> str] - we can add two more - [S <: int, T :> str] - """ - extra_constraints = [] - seen = set() - front = [var] - if cmap[var]: - var_def = cmap[var][0].origin_type_var - else: - return [] - while front: - tv = front.pop(0) - for c in cmap[tv]: - if ( - isinstance(c.target, TypeVarType) - and c.target.id not in seen - and c.target.id in cmap - and c.op == direction - ): - front.append(c.target.id) - seen.add(c.target.id) - elif c.op == direction: - new_c = Constraint(var_def, direction, c.target) - if new_c not in cmap[var]: - extra_constraints.append(new_c) - return extra_constraints - - def transitive_closure( tvars: list[TypeVarId], constraints: list[Constraint] -) -> tuple[dict[TypeVarId, set[Type]], dict[TypeVarId, set[Type]]]: +) -> tuple[Graph, Bounds, Bounds]: """Find transitive closure for given constraints on type variables. Transitive closure gives maximal set of lower/upper bounds for each type variable, such that we cannot deduce any further bounds by chaining other existing bounds. + The transitive closure is represented by: + * A set of lower and upper bounds for each type variable, where only constant and + non-linear terms are included in the bounds. + * A graph of linear constraints between type variables (represented as a set of pairs) + Such separation simplifies reasoning, and allows an efficient and simple incremental + transitive closure algorithm that we use here. + For example if we have initial constraints [T <: S, S <: U, U <: int], the transitive closure is given by: - * {} <: T <: {S, U, int} - * {T} <: S <: {U, int} - * {T, S} <: U <: {int} + * {} <: T <: {int} + * {} <: S <: {int} + * {} <: U <: {int} + * {T <: S, S <: U, T <: U} """ - # TODO: merge propagate_constraints_for() into this function. - # TODO: add secondary constraints here to make the algorithm complete. 
- uppers: dict[TypeVarId, set[Type]] = {tv: set() for tv in tvars} - lowers: dict[TypeVarId, set[Type]] = {tv: set() for tv in tvars} - graph: set[tuple[TypeVarId, TypeVarId]] = set() + uppers: Bounds = defaultdict(set) + lowers: Bounds = defaultdict(set) + graph: Graph = {(tv, tv) for tv in tvars} - # Prime the closure with the initial trivial values. - for c in constraints: - if isinstance(c.target, TypeVarType) and c.target.id in tvars: - if c.op == SUBTYPE_OF: - graph.add((c.type_var, c.target.id)) - else: - graph.add((c.target.id, c.type_var)) - if c.op == SUBTYPE_OF: - uppers[c.type_var].add(c.target) - else: - lowers[c.type_var].add(c.target) - - # At this stage we know that constant bounds have been propagated already, so we - # only need to propagate linear constraints. - for c in constraints: + remaining = set(constraints) + while remaining: + c = remaining.pop() if isinstance(c.target, TypeVarType) and c.target.id in tvars: if c.op == SUBTYPE_OF: lower, upper = c.type_var, c.target.id else: lower, upper = c.target.id, c.type_var - extras = { + if (lower, upper) in graph: + continue + graph |= { (l, u) for l in tvars for u in tvars if (l, lower) in graph and (upper, u) in graph } - graph |= extras for u in tvars: if (upper, u) in graph: lowers[u] |= lowers[lower] for l in tvars: if (l, lower) in graph: uppers[l] |= uppers[upper] - return lowers, uppers + for lt in lowers[lower]: + for ut in uppers[upper]: + # TODO: what if secondary constraints result in inference + # against polymorphic actual (also in below branches)? 
+ remaining |= set(infer_constraints(lt, ut, SUBTYPE_OF)) + remaining |= set(infer_constraints(ut, lt, SUPERTYPE_OF)) + elif c.op == SUBTYPE_OF: + if c.target in uppers[c.type_var]: + continue + for l in tvars: + if (l, c.type_var) in graph: + uppers[l].add(c.target) + for lt in lowers[c.type_var]: + remaining |= set(infer_constraints(lt, c.target, SUBTYPE_OF)) + remaining |= set(infer_constraints(c.target, lt, SUPERTYPE_OF)) + else: + assert c.op == SUPERTYPE_OF + if c.target in lowers[c.type_var]: + continue + for u in tvars: + if (c.type_var, u) in graph: + lowers[u].add(c.target) + for ut in uppers[c.type_var]: + remaining |= set(infer_constraints(ut, c.target, SUPERTYPE_OF)) + remaining |= set(infer_constraints(c.target, ut, SUBTYPE_OF)) + return graph, lowers, uppers def compute_dependencies( - cmap: dict[TypeVarId, list[Constraint]] + tvars: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds ) -> dict[TypeVarId, list[TypeVarId]]: """Compute dependencies between type variables induced by constraints. @@ -367,25 +375,30 @@ def compute_dependencies( we will need to solve for S first before we can solve for T. """ res = {} - vars = list(cmap.keys()) - for tv in cmap: + for tv in tvars: deps = set() - for c in cmap[tv]: - deps |= get_vars(c.target, vars) + for lt in lowers[tv]: + deps |= get_vars(lt, tvars) + for ut in uppers[tv]: + deps |= get_vars(ut, tvars) + for other in tvars: + if other == tv: + continue + if (tv, other) in graph or (other, tv) in graph: + deps.add(other) res[tv] = list(deps) return res -def check_linear(scc: set[TypeVarId], cmap: dict[TypeVarId, list[Constraint]]) -> bool: +def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool: """Check there are only linear constraints between type variables in SCC. Linear are constraints like T <: S (while T <: F[S] are non-linear). 
""" for tv in scc: - if any( - get_vars(c.target, list(scc)) and not isinstance(c.target, TypeVarType) - for c in cmap[tv] - ): + if any(get_vars(lt, list(scc)) for lt in lowers[tv]): + return False + if any(get_vars(ut, list(scc)) for ut in uppers[tv]): return False return True diff --git a/mypy/subtypes.py b/mypy/subtypes.py index a6dc071f92b0..5712d7375e50 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1708,8 +1708,7 @@ def unify_generic_callable( type.ret_type, target.ret_type, return_constraint_direction ) constraints.extend(c) - type_var_ids = [tvar.id for tvar in type.variables] - inferred_vars = mypy.solve.solve_constraints(type_var_ids, constraints) + inferred_vars, _ = mypy.solve.solve_constraints(type.variables, constraints) if None in inferred_vars: return None non_none_inferred_vars = cast(List[Type], inferred_vars) diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index b46f31327150..f40996145cba 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -1,7 +1,5 @@ from __future__ import annotations -import pytest - from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints from mypy.test.helpers import Suite from mypy.test.typefixture import TypeFixture @@ -22,7 +20,6 @@ def test_basic_type_variable(self) -> None: Constraint(type_var=fx.t, op=direction, target=fx.a) ] - @pytest.mark.xfail def test_basic_type_var_tuple_subtype(self) -> None: fx = self.fx assert infer_constraints( diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py index d6c585ef4aaa..5d67203dbbf5 100644 --- a/mypy/test/testsolve.py +++ b/mypy/test/testsolve.py @@ -6,7 +6,7 @@ from mypy.solve import solve_constraints from mypy.test.helpers import Suite, assert_equal from mypy.test.typefixture import TypeFixture -from mypy.types import Type, TypeVarId, TypeVarType +from mypy.types import Type, TypeVarLikeType, TypeVarType class SolveSuite(Suite): @@ -17,26 +17,24 @@ def test_empty_input(self) -> 
None: self.assert_solve([], [], []) def test_simple_supertype_constraints(self) -> None: + self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.a)], [(self.fx.a, self.fx.o)]) self.assert_solve( - [self.fx.t.id], [self.supc(self.fx.t, self.fx.a)], [(self.fx.a, self.fx.o)] - ) - self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.supc(self.fx.t, self.fx.b)], [(self.fx.a, self.fx.o)], ) def test_simple_subtype_constraints(self) -> None: - self.assert_solve([self.fx.t.id], [self.subc(self.fx.t, self.fx.a)], [self.fx.a]) + self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.a)], [self.fx.a]) self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.subc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [self.fx.b], ) def test_both_kinds_of_constraints(self) -> None: self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.a)], [(self.fx.b, self.fx.a)], ) @@ -44,21 +42,19 @@ def test_both_kinds_of_constraints(self) -> None: def test_unsatisfiable_constraints(self) -> None: # The constraints are impossible to satisfy. 
self.assert_solve( - [self.fx.t.id], - [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], - [None], + [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [None] ) def test_exactly_specified_result(self) -> None: self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.b)], [(self.fx.b, self.fx.b)], ) def test_multiple_variables(self) -> None: self.assert_solve( - [self.fx.t.id, self.fx.s.id], + [self.fx.t, self.fx.s], [ self.supc(self.fx.t, self.fx.b), self.supc(self.fx.s, self.fx.c), @@ -68,40 +64,38 @@ def test_multiple_variables(self) -> None: ) def test_no_constraints_for_var(self) -> None: - self.assert_solve([self.fx.t.id], [], [self.fx.uninhabited]) - self.assert_solve( - [self.fx.t.id, self.fx.s.id], [], [self.fx.uninhabited, self.fx.uninhabited] - ) + self.assert_solve([self.fx.t], [], [self.fx.uninhabited]) + self.assert_solve([self.fx.t, self.fx.s], [], [self.fx.uninhabited, self.fx.uninhabited]) self.assert_solve( - [self.fx.t.id, self.fx.s.id], + [self.fx.t, self.fx.s], [self.supc(self.fx.s, self.fx.a)], [self.fx.uninhabited, (self.fx.a, self.fx.o)], ) def test_simple_constraints_with_dynamic_type(self) -> None: self.assert_solve( - [self.fx.t.id], [self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)] + [self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)] ) self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)], ) self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.a)], [(self.fx.anyt, self.fx.anyt)], ) self.assert_solve( - [self.fx.t.id], [self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)] + [self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)] ) self.assert_solve( - [self.fx.t.id], + [self.fx.t], 
[self.subc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)], ) - # self.assert_solve([self.fx.t.id], + # self.assert_solve([self.fx.t], # [self.subc(self.fx.t, self.fx.anyt), # self.subc(self.fx.t, self.fx.a)], # [(self.fx.anyt, self.fx.anyt)]) @@ -111,20 +105,20 @@ def test_both_normal_and_any_types_in_results(self) -> None: # If one of the bounds is any, we promote the other bound to # any as well, since otherwise the type range does not make sense. self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)], ) self.assert_solve( - [self.fx.t.id], + [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.a)], [(self.fx.anyt, self.fx.anyt)], ) def assert_solve( self, - vars: list[TypeVarId], + vars: list[TypeVarLikeType], constraints: list[Constraint], results: list[None | Type | tuple[Type, Type]], ) -> None: @@ -134,7 +128,7 @@ def assert_solve( res.append(r[0]) else: res.append(r) - actual = solve_constraints(vars, constraints) + actual, _ = solve_constraints(vars, constraints) assert_equal(str(actual), str(res)) def supc(self, type_var: TypeVarType, bound: Type) -> Constraint: diff --git a/mypy/typeops.py b/mypy/typeops.py index 519d3de995f5..65ab4340403c 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -313,7 +313,9 @@ class B(A): pass original_type = get_proper_type(original_type) all_ids = func.type_var_ids() - typeargs = infer_type_arguments(all_ids, self_param_type, original_type, is_supertype=True) + typeargs = infer_type_arguments( + func.variables, self_param_type, original_type, is_supertype=True + ) if ( is_classmethod # TODO: why do we need the extra guards here? 
@@ -322,7 +324,7 @@ class B(A): pass ): # In case we call a classmethod through an instance x, fallback to type(x) typeargs = infer_type_arguments( - all_ids, self_param_type, TypeType(original_type), is_supertype=True + func.variables, self_param_type, TypeType(original_type), is_supertype=True ) ids = [tid for tid in all_ids if any(tid == t.id for t in get_type_vars(self_param_type))] diff --git a/mypy_self_check.ini b/mypy_self_check.ini index 7413e6407d4f..fcdbe641d6d6 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -8,6 +8,7 @@ always_false = MYPYC plugins = misc/proper_plugin.py python_version = 3.8 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/ +new_type_inference = True enable_error_code = ignore-without-code,redundant-expr show_error_code_links = True diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 34588bfceb3d..5c510a11b970 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -2911,9 +2911,23 @@ def test1(x: V) -> V: ... def test2(x: V, y: V) -> V: ... reveal_type(dec1(test1)) # N: Revealed type is "def () -> def [T] (T`1) -> T`1" -# TODO: support this situation -reveal_type(dec2(test2)) # N: Revealed type is "def (builtins.object) -> def (builtins.object) -> builtins.object" -[builtins fixtures/paramspec.pyi] +reveal_type(dec2(test2)) # N: Revealed type is "def [T] (T`3) -> def (T`3) -> T`3" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericCallableNewVariable] +# flags: --new-type-inference +from typing import TypeVar, Callable, List + +S = TypeVar('S') +T = TypeVar('T') +U = TypeVar('U') + +def dec(f: Callable[[S], T]) -> Callable[[S], T]: + ... +def test(x: List[U]) -> List[U]: + ... 
+reveal_type(dec(test)) # N: Revealed type is "def [U] (builtins.list[U`-1]) -> builtins.list[U`-1]" +[builtins fixtures/list.pyi] [case testInferenceAgainstGenericCallableGenericAlias] # flags: --new-type-inference diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 89e5aea210b4..50acd7d77c8c 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6554,3 +6554,18 @@ class Snafu(object): reveal_type(Snafu().snafu('123')) # N: Revealed type is "builtins.str" reveal_type(Snafu.snafu('123')) # N: Revealed type is "builtins.str" [builtins fixtures/staticmethod.pyi] + +[case testOverloadedWithInternalTypeVars] +# flags: --new-type-inference +import m + +[file m.pyi] +from typing import Callable, TypeVar, overload + +T = TypeVar("T") +S = TypeVar("S", bound=str) + +@overload +def foo(x: int = ...) -> Callable[[T], T]: ... +@overload +def foo(x: S = ...) -> Callable[[T], T]: ... From 5617cdd03d12ff73622c8d4b496979e0377b1675 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 4 Aug 2023 16:39:11 +0200 Subject: [PATCH 32/88] Update black pre-commit mirror link (#15815) The black pre-commit mirror is now hosted at: https://github.com/psf/black-pre-commit-mirror --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8ee89cbb912f..f2367f63bb3d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,7 +5,7 @@ repos: hooks: - id: trailing-whitespace - id: end-of-file-fixer - - repo: https://github.com/hauntsaninja/black-pre-commit-mirror + - repo: https://github.com/psf/black-pre-commit-mirror rev: 23.7.0 # must match test-requirements.txt hooks: - id: black From 2aaeda4b84a863004a6694a7d562462fbe531ece Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Wed, 9 Aug 2023 15:17:13 +0900 Subject: [PATCH 33/88] Reconsider constraints involving 
parameter specifications (#15272) - Fixes https://github.com/python/mypy/issues/15037 - Fixes https://github.com/python/mypy/issues/15065 - Fixes https://github.com/python/mypy/issues/15073 - Fixes https://github.com/python/mypy/issues/15388 - Fixes https://github.com/python/mypy/issues/15086 Yet another part of https://github.com/python/mypy/pull/14903 that's finally been extracted! --- mypy/constraints.py | 129 ++++++++++++++---- mypy/test/testconstraints.py | 62 +++++++++ mypy/test/typefixture.py | 42 ++++++ .../unit/check-parameter-specification.test | 32 ++++- 4 files changed, 241 insertions(+), 24 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 299c6292a259..9c55b56dd70e 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -82,15 +82,19 @@ def __repr__(self) -> str: op_str = "<:" if self.op == SUPERTYPE_OF: op_str = ":>" - return f"{self.type_var} {op_str} {self.target}" + return f"{self.origin_type_var} {op_str} {self.target}" def __hash__(self) -> int: - return hash((self.type_var, self.op, self.target)) + return hash((self.origin_type_var, self.op, self.target)) def __eq__(self, other: object) -> bool: if not isinstance(other, Constraint): return False - return (self.type_var, self.op, self.target) == (other.type_var, other.op, other.target) + return (self.origin_type_var, self.op, self.target) == ( + other.origin_type_var, + other.op, + other.target, + ) def infer_constraints_for_callable( @@ -698,25 +702,54 @@ def visit_instance(self, template: Instance) -> list[Constraint]: ) elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType): suffix = get_proper_type(instance_arg) + prefix = mapped_arg.prefix + length = len(prefix.arg_types) if isinstance(suffix, CallableType): - prefix = mapped_arg.prefix from_concat = bool(prefix.arg_types) or suffix.from_concatenate suffix = suffix.copy_modified(from_concatenate=from_concat) if isinstance(suffix, (Parameters, CallableType)): # no such thing as variance 
for ParamSpecs # TODO: is there a case I am missing? - # TODO: constraints between prefixes - prefix = mapped_arg.prefix - suffix = suffix.copy_modified( - suffix.arg_types[len(prefix.arg_types) :], - suffix.arg_kinds[len(prefix.arg_kinds) :], - suffix.arg_names[len(prefix.arg_names) :], + length = min(length, len(suffix.arg_types)) + + constrained_to = suffix.copy_modified( + suffix.arg_types[length:], + suffix.arg_kinds[length:], + suffix.arg_names[length:], + ) + constrained_from = mapped_arg.copy_modified( + prefix=prefix.copy_modified( + prefix.arg_types[length:], + prefix.arg_kinds[length:], + prefix.arg_names[length:], + ) ) - res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) + + res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to)) + res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to)) elif isinstance(suffix, ParamSpecType): - res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) + suffix_prefix = suffix.prefix + length = min(length, len(suffix_prefix.arg_types)) + + constrained = suffix.copy_modified( + prefix=suffix_prefix.copy_modified( + suffix_prefix.arg_types[length:], + suffix_prefix.arg_kinds[length:], + suffix_prefix.arg_names[length:], + ) + ) + constrained_from = mapped_arg.copy_modified( + prefix=prefix.copy_modified( + prefix.arg_types[length:], + prefix.arg_kinds[length:], + prefix.arg_names[length:], + ) + ) + + res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained)) + res.append(Constraint(constrained_from, SUBTYPE_OF, constrained)) else: # This case should have been handled above. 
assert not isinstance(tvar, TypeVarTupleType) @@ -768,26 +801,56 @@ def visit_instance(self, template: Instance) -> list[Constraint]: template_arg, ParamSpecType ): suffix = get_proper_type(mapped_arg) + prefix = template_arg.prefix + length = len(prefix.arg_types) if isinstance(suffix, CallableType): prefix = template_arg.prefix from_concat = bool(prefix.arg_types) or suffix.from_concatenate suffix = suffix.copy_modified(from_concatenate=from_concat) + # TODO: this is almost a copy-paste of code above: make this into a function if isinstance(suffix, (Parameters, CallableType)): # no such thing as variance for ParamSpecs # TODO: is there a case I am missing? - # TODO: constraints between prefixes - prefix = template_arg.prefix + length = min(length, len(suffix.arg_types)) - suffix = suffix.copy_modified( - suffix.arg_types[len(prefix.arg_types) :], - suffix.arg_kinds[len(prefix.arg_kinds) :], - suffix.arg_names[len(prefix.arg_names) :], + constrained_to = suffix.copy_modified( + suffix.arg_types[length:], + suffix.arg_kinds[length:], + suffix.arg_names[length:], ) - res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) + constrained_from = template_arg.copy_modified( + prefix=prefix.copy_modified( + prefix.arg_types[length:], + prefix.arg_kinds[length:], + prefix.arg_names[length:], + ) + ) + + res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to)) + res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to)) elif isinstance(suffix, ParamSpecType): - res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) + suffix_prefix = suffix.prefix + length = min(length, len(suffix_prefix.arg_types)) + + constrained = suffix.copy_modified( + prefix=suffix_prefix.copy_modified( + suffix_prefix.arg_types[length:], + suffix_prefix.arg_kinds[length:], + suffix_prefix.arg_names[length:], + ) + ) + constrained_from = template_arg.copy_modified( + prefix=prefix.copy_modified( + prefix.arg_types[length:], + prefix.arg_kinds[length:], + 
prefix.arg_names[length:], + ) + ) + + res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained)) + res.append(Constraint(constrained_from, SUBTYPE_OF, constrained)) else: # This case should have been handled above. assert not isinstance(tvar, TypeVarTupleType) @@ -954,9 +1017,19 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: prefix_len = len(prefix.arg_types) cactual_ps = cactual.param_spec() + cactual_prefix: Parameters | CallableType + if cactual_ps: + cactual_prefix = cactual_ps.prefix + else: + cactual_prefix = cactual + + max_prefix_len = len( + [k for k in cactual_prefix.arg_kinds if k in (ARG_POS, ARG_OPT)] + ) + prefix_len = min(prefix_len, max_prefix_len) + + # we could check the prefixes match here, but that should be caught elsewhere. if not cactual_ps: - max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)]) - prefix_len = min(prefix_len, max_prefix_len) res.append( Constraint( param_spec, @@ -970,7 +1043,17 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: ) ) else: - res.append(Constraint(param_spec, SUBTYPE_OF, cactual_ps)) + # earlier, cactual_prefix = cactual_ps.prefix. 
thus, this is guaranteed + assert isinstance(cactual_prefix, Parameters) + + constrained_by = cactual_ps.copy_modified( + prefix=cactual_prefix.copy_modified( + cactual_prefix.arg_types[prefix_len:], + cactual_prefix.arg_kinds[prefix_len:], + cactual_prefix.arg_names[prefix_len:], + ) + ) + res.append(Constraint(param_spec, SUBTYPE_OF, constrained_by)) # compare prefixes cactual_prefix = cactual.copy_modified( diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index f40996145cba..be1d435f9cca 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -156,3 +156,65 @@ def test_var_length_tuple_with_fixed_length_tuple(self) -> None: Instance(fx.std_tuplei, [fx.a]), SUPERTYPE_OF, ) + + def test_paramspec_constrained_with_concatenate(self) -> None: + # for legibility (and my own understanding), `Tester.normal()` is `Tester[P]` + # and `Tester.concatenate()` is `Tester[Concatenate[A, P]]` + # ... and 2nd arg to infer_constraints ends up on LHS of equality + fx = self.fx + + # I don't think we can parametrize... 
+ for direction in (SUPERTYPE_OF, SUBTYPE_OF): + print(f"direction is {direction}") + # equiv to: x: Tester[Q] = Tester.normal() + assert set( + infer_constraints(Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q]), direction) + ) == { + Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q), + Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q), + } + + # equiv to: x: Tester[Q] = Tester.concatenate() + assert set( + infer_constraints( + Instance(fx.gpsi, [fx.p_concatenate]), Instance(fx.gpsi, [fx.q]), direction + ) + ) == { + Constraint(type_var=fx.p_concatenate, op=SUPERTYPE_OF, target=fx.q), + Constraint(type_var=fx.p_concatenate, op=SUBTYPE_OF, target=fx.q), + } + + # equiv to: x: Tester[Concatenate[B, Q]] = Tester.normal() + assert set( + infer_constraints( + Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q_concatenate]), direction + ) + ) == { + Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q_concatenate), + Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q_concatenate), + } + + # equiv to: x: Tester[Concatenate[B, Q]] = Tester.concatenate() + assert set( + infer_constraints( + Instance(fx.gpsi, [fx.p_concatenate]), + Instance(fx.gpsi, [fx.q_concatenate]), + direction, + ) + ) == { + # this is correct as we assume other parts of mypy will warn that [B] != [A] + Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q), + Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q), + } + + # equiv to: x: Tester[Concatenate[A, Q]] = Tester.concatenate() + assert set( + infer_constraints( + Instance(fx.gpsi, [fx.p_concatenate]), + Instance(fx.gpsi, [fx.q_concatenate]), + direction, + ) + ) == { + Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q), + Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q), + } diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index bf1500a3cdec..df78eeb62956 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -5,6 +5,8 @@ from __future__ import annotations +from typing import Sequence 
+ from mypy.nodes import ( ARG_OPT, ARG_POS, @@ -26,6 +28,9 @@ Instance, LiteralType, NoneType, + Parameters, + ParamSpecFlavor, + ParamSpecType, Type, TypeAliasType, TypeOfAny, @@ -238,6 +243,31 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 ) + def make_parameter_specification( + name: str, id: int, concatenate: Sequence[Type] + ) -> ParamSpecType: + return ParamSpecType( + name, + name, + id, + ParamSpecFlavor.BARE, + self.o, + AnyType(TypeOfAny.from_omitted_generics), + prefix=Parameters( + concatenate, [ARG_POS for _ in concatenate], [None for _ in concatenate] + ), + ) + + self.p = make_parameter_specification("P", 1, []) + self.p_concatenate = make_parameter_specification("P", 1, [self.a]) + self.q = make_parameter_specification("Q", 2, []) + self.q_concatenate = make_parameter_specification("Q", 2, [self.b]) + self.q_concatenate_a = make_parameter_specification("Q", 2, [self.a]) + + self.gpsi = self.make_type_info( + "GPS", mro=[self.oi], typevars=["P"], paramspec_indexes={0} + ) + def _add_bool_dunder(self, type_info: TypeInfo) -> None: signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function) bool_func = FuncDef("__bool__", [], Block([])) @@ -299,6 +329,7 @@ def make_type_info( bases: list[Instance] | None = None, typevars: list[str] | None = None, typevar_tuple_index: int | None = None, + paramspec_indexes: set[int] | None = None, variances: list[int] | None = None, ) -> TypeInfo: """Make a TypeInfo suitable for use in unit tests.""" @@ -326,6 +357,17 @@ def make_type_info( AnyType(TypeOfAny.from_omitted_generics), ) ) + elif paramspec_indexes is not None and id - 1 in paramspec_indexes: + v.append( + ParamSpecType( + n, + n, + id, + ParamSpecFlavor.BARE, + self.o, + AnyType(TypeOfAny.from_omitted_generics), + ) + ) else: if variances: variance = variances[id - 1] diff --git 
a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 114fe1f8438a..f11b9aa599ed 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -776,7 +776,7 @@ _P = ParamSpec("_P") class Job(Generic[_P]): def __init__(self, target: Callable[_P, None]) -> None: - self.target = target + ... def func( action: Union[Job[int], Callable[[int], None]], @@ -1535,6 +1535,36 @@ def identity(func: Callable[P, None]) -> Callable[P, None]: ... def f(f: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ... [builtins fixtures/paramspec.pyi] +[case testComplicatedParamSpecReturnType] +# regression test for https://github.com/python/mypy/issues/15073 +from typing import TypeVar, Callable +from typing_extensions import ParamSpec, Concatenate + +R = TypeVar("R") +P = ParamSpec("P") + +def f( +) -> Callable[[Callable[Concatenate[Callable[P, R], P], R]], Callable[P, R]]: + def r(fn: Callable[Concatenate[Callable[P, R], P], R]) -> Callable[P, R]: ... + return r +[builtins fixtures/paramspec.pyi] + +[case testParamSpecToParamSpecAssignment] +# minimized from https://github.com/python/mypy/issues/15037 +# ~ the same as https://github.com/python/mypy/issues/15065 +from typing import Callable +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") + +def f(f: Callable[Concatenate[int, P], None]) -> Callable[P, None]: ... 
+ +x: Callable[ + [Callable[Concatenate[int, P], None]], + Callable[P, None], +] = f +[builtins fixtures/paramspec.pyi] + [case testParamSpecDecoratorAppliedToGeneric] # flags: --new-type-inference from typing import Callable, List, TypeVar From a7c48520560c3adf7176b91d16f4d0750ab8dfa9 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 9 Aug 2023 08:23:03 +0100 Subject: [PATCH 34/88] =?UTF-8?q?Revert=20"Reconsider=20constraints=20invo?= =?UTF-8?q?lving=20parameter=20specifications=20(#1=E2=80=A6=20(#15832)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …5272)" This reverts commit 2aaeda4b84a863004a6694a7d562462fbe531ece. (Explain how this PR changes mypy.) --- mypy/constraints.py | 129 ++++-------------- mypy/test/testconstraints.py | 62 --------- mypy/test/typefixture.py | 42 ------ .../unit/check-parameter-specification.test | 32 +---- 4 files changed, 24 insertions(+), 241 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 9c55b56dd70e..299c6292a259 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -82,19 +82,15 @@ def __repr__(self) -> str: op_str = "<:" if self.op == SUPERTYPE_OF: op_str = ":>" - return f"{self.origin_type_var} {op_str} {self.target}" + return f"{self.type_var} {op_str} {self.target}" def __hash__(self) -> int: - return hash((self.origin_type_var, self.op, self.target)) + return hash((self.type_var, self.op, self.target)) def __eq__(self, other: object) -> bool: if not isinstance(other, Constraint): return False - return (self.origin_type_var, self.op, self.target) == ( - other.origin_type_var, - other.op, - other.target, - ) + return (self.type_var, self.op, self.target) == (other.type_var, other.op, other.target) def infer_constraints_for_callable( @@ -702,54 +698,25 @@ def visit_instance(self, template: Instance) -> list[Constraint]: ) elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType): suffix = get_proper_type(instance_arg) - 
prefix = mapped_arg.prefix - length = len(prefix.arg_types) if isinstance(suffix, CallableType): + prefix = mapped_arg.prefix from_concat = bool(prefix.arg_types) or suffix.from_concatenate suffix = suffix.copy_modified(from_concatenate=from_concat) if isinstance(suffix, (Parameters, CallableType)): # no such thing as variance for ParamSpecs # TODO: is there a case I am missing? - length = min(length, len(suffix.arg_types)) - - constrained_to = suffix.copy_modified( - suffix.arg_types[length:], - suffix.arg_kinds[length:], - suffix.arg_names[length:], - ) - constrained_from = mapped_arg.copy_modified( - prefix=prefix.copy_modified( - prefix.arg_types[length:], - prefix.arg_kinds[length:], - prefix.arg_names[length:], - ) + # TODO: constraints between prefixes + prefix = mapped_arg.prefix + suffix = suffix.copy_modified( + suffix.arg_types[len(prefix.arg_types) :], + suffix.arg_kinds[len(prefix.arg_kinds) :], + suffix.arg_names[len(prefix.arg_names) :], ) - - res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to)) - res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to)) + res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) elif isinstance(suffix, ParamSpecType): - suffix_prefix = suffix.prefix - length = min(length, len(suffix_prefix.arg_types)) - - constrained = suffix.copy_modified( - prefix=suffix_prefix.copy_modified( - suffix_prefix.arg_types[length:], - suffix_prefix.arg_kinds[length:], - suffix_prefix.arg_names[length:], - ) - ) - constrained_from = mapped_arg.copy_modified( - prefix=prefix.copy_modified( - prefix.arg_types[length:], - prefix.arg_kinds[length:], - prefix.arg_names[length:], - ) - ) - - res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained)) - res.append(Constraint(constrained_from, SUBTYPE_OF, constrained)) + res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) else: # This case should have been handled above. 
assert not isinstance(tvar, TypeVarTupleType) @@ -801,56 +768,26 @@ def visit_instance(self, template: Instance) -> list[Constraint]: template_arg, ParamSpecType ): suffix = get_proper_type(mapped_arg) - prefix = template_arg.prefix - length = len(prefix.arg_types) if isinstance(suffix, CallableType): prefix = template_arg.prefix from_concat = bool(prefix.arg_types) or suffix.from_concatenate suffix = suffix.copy_modified(from_concatenate=from_concat) - # TODO: this is almost a copy-paste of code above: make this into a function if isinstance(suffix, (Parameters, CallableType)): # no such thing as variance for ParamSpecs # TODO: is there a case I am missing? - length = min(length, len(suffix.arg_types)) + # TODO: constraints between prefixes + prefix = template_arg.prefix - constrained_to = suffix.copy_modified( - suffix.arg_types[length:], - suffix.arg_kinds[length:], - suffix.arg_names[length:], + suffix = suffix.copy_modified( + suffix.arg_types[len(prefix.arg_types) :], + suffix.arg_kinds[len(prefix.arg_kinds) :], + suffix.arg_names[len(prefix.arg_names) :], ) - constrained_from = template_arg.copy_modified( - prefix=prefix.copy_modified( - prefix.arg_types[length:], - prefix.arg_kinds[length:], - prefix.arg_names[length:], - ) - ) - - res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained_to)) - res.append(Constraint(constrained_from, SUBTYPE_OF, constrained_to)) + res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) elif isinstance(suffix, ParamSpecType): - suffix_prefix = suffix.prefix - length = min(length, len(suffix_prefix.arg_types)) - - constrained = suffix.copy_modified( - prefix=suffix_prefix.copy_modified( - suffix_prefix.arg_types[length:], - suffix_prefix.arg_kinds[length:], - suffix_prefix.arg_names[length:], - ) - ) - constrained_from = template_arg.copy_modified( - prefix=prefix.copy_modified( - prefix.arg_types[length:], - prefix.arg_kinds[length:], - prefix.arg_names[length:], - ) - ) - - 
res.append(Constraint(constrained_from, SUPERTYPE_OF, constrained)) - res.append(Constraint(constrained_from, SUBTYPE_OF, constrained)) + res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) else: # This case should have been handled above. assert not isinstance(tvar, TypeVarTupleType) @@ -1017,19 +954,9 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: prefix_len = len(prefix.arg_types) cactual_ps = cactual.param_spec() - cactual_prefix: Parameters | CallableType - if cactual_ps: - cactual_prefix = cactual_ps.prefix - else: - cactual_prefix = cactual - - max_prefix_len = len( - [k for k in cactual_prefix.arg_kinds if k in (ARG_POS, ARG_OPT)] - ) - prefix_len = min(prefix_len, max_prefix_len) - - # we could check the prefixes match here, but that should be caught elsewhere. if not cactual_ps: + max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)]) + prefix_len = min(prefix_len, max_prefix_len) res.append( Constraint( param_spec, @@ -1043,17 +970,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: ) ) else: - # earlier, cactual_prefix = cactual_ps.prefix. 
thus, this is guaranteed - assert isinstance(cactual_prefix, Parameters) - - constrained_by = cactual_ps.copy_modified( - prefix=cactual_prefix.copy_modified( - cactual_prefix.arg_types[prefix_len:], - cactual_prefix.arg_kinds[prefix_len:], - cactual_prefix.arg_names[prefix_len:], - ) - ) - res.append(Constraint(param_spec, SUBTYPE_OF, constrained_by)) + res.append(Constraint(param_spec, SUBTYPE_OF, cactual_ps)) # compare prefixes cactual_prefix = cactual.copy_modified( diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index be1d435f9cca..f40996145cba 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -156,65 +156,3 @@ def test_var_length_tuple_with_fixed_length_tuple(self) -> None: Instance(fx.std_tuplei, [fx.a]), SUPERTYPE_OF, ) - - def test_paramspec_constrained_with_concatenate(self) -> None: - # for legibility (and my own understanding), `Tester.normal()` is `Tester[P]` - # and `Tester.concatenate()` is `Tester[Concatenate[A, P]]` - # ... and 2nd arg to infer_constraints ends up on LHS of equality - fx = self.fx - - # I don't think we can parametrize... 
- for direction in (SUPERTYPE_OF, SUBTYPE_OF): - print(f"direction is {direction}") - # equiv to: x: Tester[Q] = Tester.normal() - assert set( - infer_constraints(Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q]), direction) - ) == { - Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q), - Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q), - } - - # equiv to: x: Tester[Q] = Tester.concatenate() - assert set( - infer_constraints( - Instance(fx.gpsi, [fx.p_concatenate]), Instance(fx.gpsi, [fx.q]), direction - ) - ) == { - Constraint(type_var=fx.p_concatenate, op=SUPERTYPE_OF, target=fx.q), - Constraint(type_var=fx.p_concatenate, op=SUBTYPE_OF, target=fx.q), - } - - # equiv to: x: Tester[Concatenate[B, Q]] = Tester.normal() - assert set( - infer_constraints( - Instance(fx.gpsi, [fx.p]), Instance(fx.gpsi, [fx.q_concatenate]), direction - ) - ) == { - Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q_concatenate), - Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q_concatenate), - } - - # equiv to: x: Tester[Concatenate[B, Q]] = Tester.concatenate() - assert set( - infer_constraints( - Instance(fx.gpsi, [fx.p_concatenate]), - Instance(fx.gpsi, [fx.q_concatenate]), - direction, - ) - ) == { - # this is correct as we assume other parts of mypy will warn that [B] != [A] - Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q), - Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q), - } - - # equiv to: x: Tester[Concatenate[A, Q]] = Tester.concatenate() - assert set( - infer_constraints( - Instance(fx.gpsi, [fx.p_concatenate]), - Instance(fx.gpsi, [fx.q_concatenate]), - direction, - ) - ) == { - Constraint(type_var=fx.p, op=SUPERTYPE_OF, target=fx.q), - Constraint(type_var=fx.p, op=SUBTYPE_OF, target=fx.q), - } diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index df78eeb62956..bf1500a3cdec 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -5,8 +5,6 @@ from __future__ import annotations -from typing import Sequence 
- from mypy.nodes import ( ARG_OPT, ARG_POS, @@ -28,9 +26,6 @@ Instance, LiteralType, NoneType, - Parameters, - ParamSpecFlavor, - ParamSpecType, Type, TypeAliasType, TypeOfAny, @@ -243,31 +238,6 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 ) - def make_parameter_specification( - name: str, id: int, concatenate: Sequence[Type] - ) -> ParamSpecType: - return ParamSpecType( - name, - name, - id, - ParamSpecFlavor.BARE, - self.o, - AnyType(TypeOfAny.from_omitted_generics), - prefix=Parameters( - concatenate, [ARG_POS for _ in concatenate], [None for _ in concatenate] - ), - ) - - self.p = make_parameter_specification("P", 1, []) - self.p_concatenate = make_parameter_specification("P", 1, [self.a]) - self.q = make_parameter_specification("Q", 2, []) - self.q_concatenate = make_parameter_specification("Q", 2, [self.b]) - self.q_concatenate_a = make_parameter_specification("Q", 2, [self.a]) - - self.gpsi = self.make_type_info( - "GPS", mro=[self.oi], typevars=["P"], paramspec_indexes={0} - ) - def _add_bool_dunder(self, type_info: TypeInfo) -> None: signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function) bool_func = FuncDef("__bool__", [], Block([])) @@ -329,7 +299,6 @@ def make_type_info( bases: list[Instance] | None = None, typevars: list[str] | None = None, typevar_tuple_index: int | None = None, - paramspec_indexes: set[int] | None = None, variances: list[int] | None = None, ) -> TypeInfo: """Make a TypeInfo suitable for use in unit tests.""" @@ -357,17 +326,6 @@ def make_type_info( AnyType(TypeOfAny.from_omitted_generics), ) ) - elif paramspec_indexes is not None and id - 1 in paramspec_indexes: - v.append( - ParamSpecType( - n, - n, - id, - ParamSpecFlavor.BARE, - self.o, - AnyType(TypeOfAny.from_omitted_generics), - ) - ) else: if variances: variance = variances[id - 1] diff --git 
a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index f11b9aa599ed..114fe1f8438a 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -776,7 +776,7 @@ _P = ParamSpec("_P") class Job(Generic[_P]): def __init__(self, target: Callable[_P, None]) -> None: - ... + self.target = target def func( action: Union[Job[int], Callable[[int], None]], @@ -1535,36 +1535,6 @@ def identity(func: Callable[P, None]) -> Callable[P, None]: ... def f(f: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ... [builtins fixtures/paramspec.pyi] -[case testComplicatedParamSpecReturnType] -# regression test for https://github.com/python/mypy/issues/15073 -from typing import TypeVar, Callable -from typing_extensions import ParamSpec, Concatenate - -R = TypeVar("R") -P = ParamSpec("P") - -def f( -) -> Callable[[Callable[Concatenate[Callable[P, R], P], R]], Callable[P, R]]: - def r(fn: Callable[Concatenate[Callable[P, R], P], R]) -> Callable[P, R]: ... - return r -[builtins fixtures/paramspec.pyi] - -[case testParamSpecToParamSpecAssignment] -# minimized from https://github.com/python/mypy/issues/15037 -# ~ the same as https://github.com/python/mypy/issues/15065 -from typing import Callable -from typing_extensions import Concatenate, ParamSpec - -P = ParamSpec("P") - -def f(f: Callable[Concatenate[int, P], None]) -> Callable[P, None]: ... 
- -x: Callable[ - [Callable[Concatenate[int, P], None]], - Callable[P, None], -] = f -[builtins fixtures/paramspec.pyi] - [case testParamSpecDecoratorAppliedToGeneric] # flags: --new-type-inference from typing import Callable, List, TypeVar From 8c219539380208bf5b8d189aafd4dec10f941f98 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 9 Aug 2023 16:33:38 +0100 Subject: [PATCH 35/88] New type inference: add support for upper bounds and values (#15813) This is a third PR in series following https://github.com/python/mypy/pull/15287 and https://github.com/python/mypy/pull/15754. This one is quite simple: I just add basic support for polymorphic inference involving type variables with upper bounds and values. A complete support would be quite complicated, and it will be a corner case to already rare situation. Finally, it is written in a way that is easy to tune in the future. I also use this PR to add some unit tests for all three PRs so far, other two PRs only added integration tests (and I clean up existing unit tests as well). 
--- mypy/solve.py | 80 +++++++++-- mypy/test/testsolve.py | 205 +++++++++++++++++++++++++---- mypy/test/typefixture.py | 4 + test-data/unit/check-generics.test | 28 ++++ 4 files changed, 277 insertions(+), 40 deletions(-) diff --git a/mypy/solve.py b/mypy/solve.py index 02df90aff1e1..72b3d6f26618 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -10,11 +10,13 @@ from mypy.expandtype import expand_type from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort from mypy.join import join_types -from mypy.meet import meet_types +from mypy.meet import meet_type_list, meet_types from mypy.subtypes import is_subtype from mypy.typeops import get_type_vars from mypy.types import ( AnyType, + Instance, + NoneType, ProperType, Type, TypeOfAny, @@ -108,7 +110,7 @@ def solve_constraints( else: candidate = AnyType(TypeOfAny.special_form) res.append(candidate) - return res, [originals[tv] for tv in free_vars] + return res, free_vars def solve_with_dependent( @@ -116,7 +118,7 @@ def solve_with_dependent( constraints: list[Constraint], original_vars: list[TypeVarId], originals: dict[TypeVarId, TypeVarLikeType], -) -> tuple[Solutions, list[TypeVarId]]: +) -> tuple[Solutions, list[TypeVarLikeType]]: """Solve set of constraints that may depend on each other, like T <: List[S]. The whole algorithm consists of five steps: @@ -135,23 +137,24 @@ def solve_with_dependent( raw_batches = list(topsort(prepare_sccs(sccs, dmap))) free_vars = [] + free_solutions = {} for scc in raw_batches[0]: # If there are no bounds on this SCC, then the only meaningful solution we can # express, is that each variable is equal to a new free variable. For example, # if we have T <: S, S <: U, we deduce: T = S = U = . if all(not lowers[tv] and not uppers[tv] for tv in scc): - # For convenience with current type application machinery, we use a stable - # choice that prefers the original type variables (not polymorphic ones) in SCC. 
- # TODO: be careful about upper bounds (or values) when introducing free vars. - free_vars.append(sorted(scc, key=lambda x: (x not in original_vars, x.raw_id))[0]) + best_free = choose_free([originals[tv] for tv in scc], original_vars) + if best_free: + free_vars.append(best_free.id) + free_solutions[best_free.id] = best_free # Update lowers/uppers with free vars, so these can now be used # as valid solutions. - for l, u in graph.copy(): + for l, u in graph: if l in free_vars: - lowers[u].add(originals[l]) + lowers[u].add(free_solutions[l]) if u in free_vars: - uppers[l].add(originals[u]) + uppers[l].add(free_solutions[u]) # Flatten the SCCs that are independent, we can solve them together, # since we don't need to update any targets in between. @@ -166,7 +169,7 @@ def solve_with_dependent( for flat_batch in batches: res = solve_iteratively(flat_batch, graph, lowers, uppers) solutions.update(res) - return solutions, free_vars + return solutions, [free_solutions[tv] for tv in free_vars] def solve_iteratively( @@ -276,6 +279,61 @@ def solve_one(lowers: Iterable[Type], uppers: Iterable[Type]) -> Type | None: return candidate +def choose_free( + scc: list[TypeVarLikeType], original_vars: list[TypeVarId] +) -> TypeVarLikeType | None: + """Choose the best solution for an SCC containing only type variables. + + This is needed to preserve e.g. the upper bound in a situation like this: + def dec(f: Callable[[T], S]) -> Callable[[T], S]: ... + + @dec + def test(x: U) -> U: ... + + where U <: A. + """ + + if len(scc) == 1: + # Fast path, choice is trivial. + return scc[0] + + common_upper_bound = meet_type_list([t.upper_bound for t in scc]) + common_upper_bound_p = get_proper_type(common_upper_bound) + # We include None for when strict-optional is disabled. + if isinstance(common_upper_bound_p, (UninhabitedType, NoneType)): + # This will cause to infer , which is better than a free TypeVar + # that has an upper bound . 
+ return None + + values: list[Type] = [] + for tv in scc: + if isinstance(tv, TypeVarType) and tv.values: + if values: + # It is too tricky to support multiple TypeVars with values + # within the same SCC. + return None + values = tv.values.copy() + + if values and not is_trivial_bound(common_upper_bound_p): + # If there are both values and upper bound present, we give up, + # since type variables having both are not supported. + return None + + # For convenience with current type application machinery, we use a stable + # choice that prefers the original type variables (not polymorphic ones) in SCC. + best = sorted(scc, key=lambda x: (x.id not in original_vars, x.id.raw_id))[0] + if isinstance(best, TypeVarType): + return best.copy_modified(values=values, upper_bound=common_upper_bound) + if is_trivial_bound(common_upper_bound_p): + # TODO: support more cases for ParamSpecs/TypeVarTuples + return best + return None + + +def is_trivial_bound(tp: ProperType) -> bool: + return isinstance(tp, Instance) and tp.type.fullname == "builtins.object" + + def normalize_constraints( constraints: list[Constraint], vars: list[TypeVarId] ) -> list[Constraint]: diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py index 5d67203dbbf5..6566b03ef5e9 100644 --- a/mypy/test/testsolve.py +++ b/mypy/test/testsolve.py @@ -3,10 +3,10 @@ from __future__ import annotations from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint -from mypy.solve import solve_constraints +from mypy.solve import Bounds, Graph, solve_constraints, transitive_closure from mypy.test.helpers import Suite, assert_equal from mypy.test.typefixture import TypeFixture -from mypy.types import Type, TypeVarLikeType, TypeVarType +from mypy.types import Type, TypeVarId, TypeVarLikeType, TypeVarType class SolveSuite(Suite): @@ -17,11 +17,11 @@ def test_empty_input(self) -> None: self.assert_solve([], [], []) def test_simple_supertype_constraints(self) -> None: - self.assert_solve([self.fx.t], 
[self.supc(self.fx.t, self.fx.a)], [(self.fx.a, self.fx.o)]) + self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.a)], [self.fx.a]) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.supc(self.fx.t, self.fx.b)], - [(self.fx.a, self.fx.o)], + [self.fx.a], ) def test_simple_subtype_constraints(self) -> None: @@ -36,7 +36,7 @@ def test_both_kinds_of_constraints(self) -> None: self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.a)], - [(self.fx.b, self.fx.a)], + [self.fx.b], ) def test_unsatisfiable_constraints(self) -> None: @@ -49,7 +49,7 @@ def test_exactly_specified_result(self) -> None: self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.b)], - [(self.fx.b, self.fx.b)], + [self.fx.b], ) def test_multiple_variables(self) -> None: @@ -60,7 +60,7 @@ def test_multiple_variables(self) -> None: self.supc(self.fx.s, self.fx.c), self.subc(self.fx.t, self.fx.a), ], - [(self.fx.b, self.fx.a), (self.fx.c, self.fx.o)], + [self.fx.b, self.fx.c], ) def test_no_constraints_for_var(self) -> None: @@ -69,36 +69,32 @@ def test_no_constraints_for_var(self) -> None: self.assert_solve( [self.fx.t, self.fx.s], [self.supc(self.fx.s, self.fx.a)], - [self.fx.uninhabited, (self.fx.a, self.fx.o)], + [self.fx.uninhabited, self.fx.a], ) def test_simple_constraints_with_dynamic_type(self) -> None: - self.assert_solve( - [self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [(self.fx.anyt, self.fx.anyt)] - ) + self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [self.fx.anyt]) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.anyt)], - [(self.fx.anyt, self.fx.anyt)], + [self.fx.anyt], ) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.a)], - [(self.fx.anyt, self.fx.anyt)], + [self.fx.anyt], ) - self.assert_solve( - [self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], 
[(self.fx.anyt, self.fx.anyt)] - ) + self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], [self.fx.anyt]) self.assert_solve( [self.fx.t], [self.subc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.anyt)], - [(self.fx.anyt, self.fx.anyt)], + [self.fx.anyt], ) # self.assert_solve([self.fx.t], # [self.subc(self.fx.t, self.fx.anyt), # self.subc(self.fx.t, self.fx.a)], - # [(self.fx.anyt, self.fx.anyt)]) + # [self.fx.anyt]) # TODO: figure out what this should be after changes to meet(any, X) def test_both_normal_and_any_types_in_results(self) -> None: @@ -107,29 +103,180 @@ def test_both_normal_and_any_types_in_results(self) -> None: self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.anyt)], - [(self.fx.anyt, self.fx.anyt)], + [self.fx.anyt], ) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.a)], - [(self.fx.anyt, self.fx.anyt)], + [self.fx.anyt], + ) + + def test_poly_no_constraints(self) -> None: + self.assert_solve( + [self.fx.t, self.fx.u], + [], + [self.fx.uninhabited, self.fx.uninhabited], + allow_polymorphic=True, + ) + + def test_poly_trivial_free(self) -> None: + self.assert_solve( + [self.fx.t, self.fx.u], + [self.subc(self.fx.t, self.fx.a)], + [self.fx.a, self.fx.u], + [self.fx.u], + allow_polymorphic=True, + ) + + def test_poly_free_pair(self) -> None: + self.assert_solve( + [self.fx.t, self.fx.u], + [self.subc(self.fx.t, self.fx.u)], + [self.fx.t, self.fx.t], + [self.fx.t], + allow_polymorphic=True, + ) + + def test_poly_free_pair_with_bounds(self) -> None: + t_prime = self.fx.t.copy_modified(upper_bound=self.fx.b) + self.assert_solve( + [self.fx.t, self.fx.ub], + [self.subc(self.fx.t, self.fx.ub)], + [t_prime, t_prime], + [t_prime], + allow_polymorphic=True, + ) + + def test_poly_free_pair_with_bounds_uninhabited(self) -> None: + self.assert_solve( + [self.fx.ub, self.fx.uc], + [self.subc(self.fx.ub, self.fx.uc)], + [self.fx.uninhabited, 
self.fx.uninhabited], + [], + allow_polymorphic=True, + ) + + def test_poly_bounded_chain(self) -> None: + # B <: T <: U <: S <: A + self.assert_solve( + [self.fx.t, self.fx.u, self.fx.s], + [ + self.supc(self.fx.t, self.fx.b), + self.subc(self.fx.t, self.fx.u), + self.subc(self.fx.u, self.fx.s), + self.subc(self.fx.s, self.fx.a), + ], + [self.fx.b, self.fx.b, self.fx.b], + allow_polymorphic=True, + ) + + def test_poly_reverse_overlapping_chain(self) -> None: + # A :> T <: S :> B + self.assert_solve( + [self.fx.t, self.fx.s], + [ + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.t, self.fx.a), + self.supc(self.fx.s, self.fx.b), + ], + [self.fx.a, self.fx.a], + allow_polymorphic=True, + ) + + def test_poly_reverse_split_chain(self) -> None: + # B :> T <: S :> A + self.assert_solve( + [self.fx.t, self.fx.s], + [ + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.t, self.fx.b), + self.supc(self.fx.s, self.fx.a), + ], + [self.fx.b, self.fx.a], + allow_polymorphic=True, + ) + + def test_poly_unsolvable_chain(self) -> None: + # A <: T <: U <: S <: B + self.assert_solve( + [self.fx.t, self.fx.u, self.fx.s], + [ + self.supc(self.fx.t, self.fx.a), + self.subc(self.fx.t, self.fx.u), + self.subc(self.fx.u, self.fx.s), + self.subc(self.fx.s, self.fx.b), + ], + [None, None, None], + allow_polymorphic=True, + ) + + def test_simple_chain_closure(self) -> None: + self.assert_transitive_closure( + [self.fx.t.id, self.fx.s.id], + [ + self.supc(self.fx.t, self.fx.b), + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.s, self.fx.a), + ], + {(self.fx.t.id, self.fx.s.id)}, + {self.fx.t.id: {self.fx.b}, self.fx.s.id: {self.fx.b}}, + {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.a}}, + ) + + def test_reverse_chain_closure(self) -> None: + self.assert_transitive_closure( + [self.fx.t.id, self.fx.s.id], + [ + self.subc(self.fx.t, self.fx.s), + self.subc(self.fx.t, self.fx.a), + self.supc(self.fx.s, self.fx.b), + ], + {(self.fx.t.id, self.fx.s.id)}, + {self.fx.t.id: set(), 
self.fx.s.id: {self.fx.b}}, + {self.fx.t.id: {self.fx.a}, self.fx.s.id: set()}, + ) + + def test_secondary_constraint_closure(self) -> None: + self.assert_transitive_closure( + [self.fx.t.id, self.fx.s.id], + [self.supc(self.fx.s, self.fx.gt), self.subc(self.fx.s, self.fx.ga)], + set(), + {self.fx.t.id: set(), self.fx.s.id: {self.fx.gt}}, + {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.ga}}, ) def assert_solve( self, vars: list[TypeVarLikeType], constraints: list[Constraint], - results: list[None | Type | tuple[Type, Type]], + results: list[None | Type], + free_vars: list[TypeVarLikeType] | None = None, + allow_polymorphic: bool = False, + ) -> None: + if free_vars is None: + free_vars = [] + actual, actual_free = solve_constraints( + vars, constraints, allow_polymorphic=allow_polymorphic + ) + assert_equal(actual, results) + assert_equal(actual_free, free_vars) + + def assert_transitive_closure( + self, + vars: list[TypeVarId], + constraints: list[Constraint], + graph: Graph, + lowers: Bounds, + uppers: Bounds, ) -> None: - res: list[Type | None] = [] - for r in results: - if isinstance(r, tuple): - res.append(r[0]) - else: - res.append(r) - actual, _ = solve_constraints(vars, constraints) - assert_equal(str(actual), str(res)) + actual_graph, actual_lowers, actual_uppers = transitive_closure(vars, constraints) + # Add trivial elements. 
+ for v in vars: + graph.add((v, v)) + assert_equal(actual_graph, graph) + assert_equal(dict(actual_lowers), lowers) + assert_equal(dict(actual_uppers), uppers) def supc(self, type_var: TypeVarType, bound: Type) -> Constraint: return Constraint(type_var, SUPERTYPE_OF, bound) diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index bf1500a3cdec..81af765f8585 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -219,6 +219,10 @@ def make_type_var( self._add_bool_dunder(self.bool_type_info) self._add_bool_dunder(self.ai) + # TypeVars with non-trivial bounds + self.ub = make_type_var("UB", 5, [], self.b, variance) # UB`5 (type variable) + self.uc = make_type_var("UC", 6, [], self.c, variance) # UC`6 (type variable) + def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: return TypeVarTupleType( name, diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 5c510a11b970..d1842a74d634 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -3007,3 +3007,31 @@ class C: c: C reveal_type(c.test()) # N: Revealed type is "__main__.C" + +[case testInferenceAgainstGenericBoundsAndValues] +# flags: --new-type-inference +from typing import TypeVar, Callable, List + +class B: ... +class C(B): ... + +S = TypeVar('S') +T = TypeVar('T') +UB = TypeVar('UB', bound=B) +UC = TypeVar('UC', bound=C) +V = TypeVar('V', int, str) + +def dec1(f: Callable[[S], T]) -> Callable[[S], List[T]]: + ... +def dec2(f: Callable[[UC], T]) -> Callable[[UC], List[T]]: + ... +def id1(x: UB) -> UB: + ... +def id2(x: V) -> V: + ... 
+ +reveal_type(dec1(id1)) # N: Revealed type is "def [S <: __main__.B] (S`1) -> builtins.list[S`1]" +reveal_type(dec1(id2)) # N: Revealed type is "def [S in (builtins.int, builtins.str)] (S`3) -> builtins.list[S`3]" +reveal_type(dec2(id1)) # N: Revealed type is "def [UC <: __main__.C] (UC`5) -> builtins.list[UC`5]" +reveal_type(dec2(id2)) # N: Revealed type is "def () -> builtins.list[]" \ + # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[], ]" From 78339b97dc911c8c6841184eaddbbc30d0e406da Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 10 Aug 2023 01:50:20 -0700 Subject: [PATCH 36/88] Use error subcodes to differentiate import errors (#14740) Resolves #9789 Users could use `--disable-error-code=import-untyped` to only ignore errors about libraries not having stubs, but continue to get errors about e.g. typos in an import name. The error subcode mechanism is new from #14570. Note that users will now get a different error code depending on whether or not a package is installed, and may not know that they can use the parent error code to ignore the issue regardless. I think this is okay, in general type checking results can change if you run them in two different environments. 
Note also that with `--warn-unused-ignore` / `--strict` mypy will complain about not having the most specific error code --- mypy/build.py | 11 ++++++++++- mypy/errorcodes.py | 6 ++++++ mypy/errors.py | 8 ++++++-- test-data/unit/check-errorcodes.test | 14 +++++++------- 4 files changed, 29 insertions(+), 10 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 5a0a481ae1a2..eed5005d182e 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2780,7 +2780,16 @@ def module_not_found( else: daemon = manager.options.fine_grained_incremental msg, notes = reason.error_message_templates(daemon) - errors.report(line, 0, msg.format(module=target), code=codes.IMPORT) + if reason == ModuleNotFoundReason.NOT_FOUND: + code = codes.IMPORT_NOT_FOUND + elif ( + reason == ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS + or reason == ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED + ): + code = codes.IMPORT_UNTYPED + else: + code = codes.IMPORT + errors.report(line, 0, msg.format(module=target), code=code) top_level, second_level = get_top_two_prefixes(target) if second_level in legacy_bundled_packages or second_level in non_bundled_packages: top_level = second_level diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 717629ad1f11..e7d0c16f2d2d 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -107,6 +107,12 @@ def __hash__(self) -> int: IMPORT: Final = ErrorCode( "import", "Require that imported module can be found or has stubs", "General" ) +IMPORT_NOT_FOUND: Final = ErrorCode( + "import-not-found", "Require that imported module can be found", "General", sub_code_of=IMPORT +) +IMPORT_UNTYPED: Final = ErrorCode( + "import-untyped", "Require that imported module has stubs", "General", sub_code_of=IMPORT +) NO_REDEF: Final = ErrorCode("no-redef", "Check that each name is defined once", "General") FUNC_RETURNS_VALUE: Final = ErrorCode( "func-returns-value", "Check that called function returns a value in value context", "General" diff --git a/mypy/errors.py 
b/mypy/errors.py index 2badac3e3d6d..680b7f1d31ea 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -8,7 +8,7 @@ from typing_extensions import Literal, TypeAlias as _TypeAlias from mypy import errorcodes as codes -from mypy.errorcodes import IMPORT, ErrorCode +from mypy.errorcodes import IMPORT, IMPORT_NOT_FOUND, IMPORT_UNTYPED, ErrorCode from mypy.message_registry import ErrorMessage from mypy.options import Options from mypy.scope import Scope @@ -510,7 +510,11 @@ def add_error_info(self, info: ErrorInfo) -> None: if info.message in self.only_once_messages: return self.only_once_messages.add(info.message) - if self.seen_import_error and info.code is not IMPORT and self.has_many_errors(): + if ( + self.seen_import_error + and info.code not in (IMPORT, IMPORT_UNTYPED, IMPORT_NOT_FOUND) + and self.has_many_errors() + ): # Missing stubs can easily cause thousands of errors about # Any types, especially when upgrading to mypy 0.900, # which no longer bundles third-party library stubs. Avoid diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 1efbab7de322..796e1c1ea98e 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -183,7 +183,7 @@ from defusedxml import xyz # type: ignore[import] [case testErrorCodeBadIgnore] import nostub # type: ignore xyz # E: Invalid "type: ignore" comment [syntax] \ - # E: Cannot find implementation or library stub for module named "nostub" [import] \ + # E: Cannot find implementation or library stub for module named "nostub" [import-not-found] \ # N: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports import nostub # type: ignore[ # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo # E: Invalid "type: ignore" comment [syntax] @@ -211,7 +211,7 @@ def f(x, # type: int # type: ignore[ pass [out] main:2: error: Invalid "type: ignore" comment [syntax] -main:2: error: Cannot find implementation or library stub 
for module named "nostub" [import] +main:2: error: Cannot find implementation or library stub for module named "nostub" [import-not-found] main:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports main:3: error: Invalid "type: ignore" comment [syntax] main:4: error: Invalid "type: ignore" comment [syntax] @@ -522,12 +522,12 @@ if int() is str(): # E: Non-overlapping identity check (left operand type: "int [builtins fixtures/primitives.pyi] [case testErrorCodeMissingModule] -from defusedxml import xyz # E: Cannot find implementation or library stub for module named "defusedxml" [import] -from nonexistent import foobar # E: Cannot find implementation or library stub for module named "nonexistent" [import] -import nonexistent2 # E: Cannot find implementation or library stub for module named "nonexistent2" [import] -from nonexistent3 import * # E: Cannot find implementation or library stub for module named "nonexistent3" [import] +from defusedxml import xyz # E: Cannot find implementation or library stub for module named "defusedxml" [import-not-found] +from nonexistent import foobar # E: Cannot find implementation or library stub for module named "nonexistent" [import-not-found] +import nonexistent2 # E: Cannot find implementation or library stub for module named "nonexistent2" [import-not-found] +from nonexistent3 import * # E: Cannot find implementation or library stub for module named "nonexistent3" [import-not-found] from pkg import bad # E: Module "pkg" has no attribute "bad" [attr-defined] -from pkg.bad2 import bad3 # E: Cannot find implementation or library stub for module named "pkg.bad2" [import] \ +from pkg.bad2 import bad3 # E: Cannot find implementation or library stub for module named "pkg.bad2" [import-not-found] \ # N: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports [file pkg/__init__.py] From eab5b5083adf1b54ab1691f5ecc5a846863420de Mon Sep 17 00:00:00 2001 From: Shantanu 
<12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 10 Aug 2023 02:32:08 -0700 Subject: [PATCH 37/88] Document new import error codes (#15840) See https://github.com/python/mypy/pull/14740 My PR was pretty old and predates the nice check to ensure error codes are documented. --- docs/source/error_code_list.rst | 38 +++++++++++++++++++++++++++++---- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index f935e025e589..f7f702aa7fcb 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -648,8 +648,18 @@ the issue: .. _code-import: -Check that import target can be found [import] ----------------------------------------------- +Check for an issue with imports [import] +---------------------------------------- + +Mypy generates an error if it can't resolve an `import` statement. +This is a parent error code of `import-not-found` and `import-untyped`. + +See :ref:`ignore-missing-imports` for how to work around these errors. + +.. _code-import-not-found: + +Check that import target can be found [import-not-found] +-------------------------------------------------------- Mypy generates an error if it can't find the source code or a stub file for an imported module. @@ -658,11 +668,31 @@ Example: .. code-block:: python - # Error: Cannot find implementation or library stub for module named 'acme' [import] - import acme + # Error: Cannot find implementation or library stub for module named "m0dule_with_typo" [import-not-found] + import m0dule_with_typo See :ref:`ignore-missing-imports` for how to work around these errors. +.. _code-import-untyped: + +Check that import target has stubs [import-untyped] +-------------------------------------------------------- + +Mypy generates an error if it can find the source code for an imported module, +but that module does not provide type annotations (via :ref:`PEP 561 `). + +Example: + +.. 
code-block:: python + + # Error: Library stubs not installed for "bs4" [import-untyped] + import bs4 + # Error: Skipping analyzing "no_py_typed": module is installed, but missing library stubs or py.typed marker [import-untyped] + import no_py_typed + +In some cases, these errors can be fixed by installing an appropriate +stub package. See :ref:`ignore-missing-imports` for more details. + .. _code-no-redef: Check that each name is defined once [no-redef] From d0d63b4644a6bb99793b32548c5197cf7600544f Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Thu, 10 Aug 2023 13:03:39 +0300 Subject: [PATCH 38/88] The oldest CAPI version we support right now is 3.7 (#15839) Looks like `capi_version < 3.7` is not supported, so I changed the lowest version to be `3.7`. Based on the discord discussion. --- mypyc/codegen/emitclass.py | 8 ++------ mypyc/codegen/emitmodule.py | 5 ++--- mypyc/common.py | 5 ----- mypyc/test/testutil.py | 4 ++-- 4 files changed, 6 insertions(+), 16 deletions(-) diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 84d19d69d377..62e1b4b2dea1 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -18,7 +18,7 @@ generate_richcompare_wrapper, generate_set_del_item_wrapper, ) -from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX, use_fastcall +from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX from mypyc.ir.class_ir import ClassIR, VTableEntries from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FuncDecl, FuncIR from mypyc.ir.rtypes import RTuple, RType, object_rprimitive @@ -794,11 +794,7 @@ def generate_methods_table(cl: ClassIR, name: str, emitter: Emitter) -> None: continue emitter.emit_line(f'{{"{fn.name}",') emitter.emit_line(f" (PyCFunction){PREFIX}{fn.cname(emitter.names)},") - if use_fastcall(emitter.capi_version): - flags = ["METH_FASTCALL"] - else: - flags = ["METH_VARARGS"] - flags.append("METH_KEYWORDS") + flags = 
["METH_FASTCALL", "METH_KEYWORDS"] if fn.decl.kind == FUNC_STATICMETHOD: flags.append("METH_STATIC") elif fn.decl.kind == FUNC_CLASSMETHOD: diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index f360fabbe8f6..caf2058ea7c4 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -43,7 +43,6 @@ TOP_LEVEL_NAME, shared_lib_name, short_id_from_name, - use_fastcall, use_vectorcall, ) from mypyc.errors import Errors @@ -1107,8 +1106,8 @@ def is_fastcall_supported(fn: FuncIR, capi_version: tuple[int, int]) -> bool: # We can use vectorcalls (PEP 590) when supported return use_vectorcall(capi_version) # TODO: Support fastcall for __init__. - return use_fastcall(capi_version) and fn.name != "__init__" - return use_fastcall(capi_version) + return fn.name != "__init__" + return True def collect_literals(fn: FuncIR, literals: Literals) -> None: diff --git a/mypyc/common.py b/mypyc/common.py index 4615bf30d742..3d07f6c3d0d3 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -98,11 +98,6 @@ def short_name(name: str) -> str: return name -def use_fastcall(capi_version: tuple[int, int]) -> bool: - # We can use METH_FASTCALL for faster wrapper functions on Python 3.7+. - return capi_version >= (3, 7) - - def use_vectorcall(capi_version: tuple[int, int]) -> bool: # We can use vectorcalls to make calls on Python 3.8+ (PEP 590). return capi_version >= (3, 8) diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py index 796811a6363c..6446af3427af 100644 --- a/mypyc/test/testutil.py +++ b/mypyc/test/testutil.py @@ -102,7 +102,7 @@ def build_ir_for_single_file2( # By default generate IR compatible with the earliest supported Python C API. # If a test needs more recent API features, this should be overridden. 
- compiler_options = compiler_options or CompilerOptions(capi_version=(3, 5)) + compiler_options = compiler_options or CompilerOptions(capi_version=(3, 7)) options = Options() options.show_traceback = True options.hide_error_codes = True @@ -272,7 +272,7 @@ def infer_ir_build_options_from_test_name(name: str) -> CompilerOptions | None: return None if "_32bit" in name and not IS_32_BIT_PLATFORM: return None - options = CompilerOptions(strip_asserts="StripAssert" in name, capi_version=(3, 5)) + options = CompilerOptions(strip_asserts="StripAssert" in name, capi_version=(3, 7)) # A suffix like _python3.8 is used to set the target C API version. m = re.search(r"_python([3-9]+)_([0-9]+)(_|\b)", name) if m: From c7d2fa1525c9cbf0ab8859fd9ded526658677c28 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 10 Aug 2023 11:32:50 -0700 Subject: [PATCH 39/88] Fix over eager types-google-cloud-ndb suggestion (#15347) Fixes #15343 --- mypy/build.py | 30 ++++++++++++++++-------------- mypy/modulefinder.py | 9 ++------- mypy/stubinfo.py | 6 ++---- mypy/util.py | 11 ----------- test-data/unit/check-modules.test | 24 +++++++++++++----------- 5 files changed, 33 insertions(+), 47 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index eed5005d182e..525d5f436e7e 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -55,7 +55,6 @@ DecodeError, decode_python_encoding, get_mypy_comments, - get_top_two_prefixes, hash_digest, is_stub_package_file, is_sub_path, @@ -91,12 +90,7 @@ from mypy.plugins.default import DefaultPlugin from mypy.renaming import LimitedVariableRenameVisitor, VariableRenameVisitor from mypy.stats import dump_type_stats -from mypy.stubinfo import ( - is_legacy_bundled_package, - legacy_bundled_packages, - non_bundled_packages, - stub_package_name, -) +from mypy.stubinfo import legacy_bundled_packages, non_bundled_packages, stub_distribution_name from mypy.types import Type from mypy.typestate import reset_global_state, 
type_state from mypy.version import __version__ @@ -2665,14 +2659,18 @@ def find_module_and_diagnose( # search path or the module has not been installed. ignore_missing_imports = options.ignore_missing_imports - top_level, second_level = get_top_two_prefixes(id) + + id_components = id.split(".") # Don't honor a global (not per-module) ignore_missing_imports # setting for modules that used to have bundled stubs, as # otherwise updating mypy can silently result in new false # negatives. (Unless there are stubs but they are incomplete.) global_ignore_missing_imports = manager.options.ignore_missing_imports if ( - (is_legacy_bundled_package(top_level) or is_legacy_bundled_package(second_level)) + any( + ".".join(id_components[:i]) in legacy_bundled_packages + for i in range(len(id_components), 0, -1) + ) and global_ignore_missing_imports and not options.ignore_missing_imports_per_module and result is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED @@ -2790,15 +2788,19 @@ def module_not_found( else: code = codes.IMPORT errors.report(line, 0, msg.format(module=target), code=code) - top_level, second_level = get_top_two_prefixes(target) - if second_level in legacy_bundled_packages or second_level in non_bundled_packages: - top_level = second_level + + components = target.split(".") + for i in range(len(components), 0, -1): + module = ".".join(components[:i]) + if module in legacy_bundled_packages or module in non_bundled_packages: + break + for note in notes: if "{stub_dist}" in note: - note = note.format(stub_dist=stub_package_name(top_level)) + note = note.format(stub_dist=stub_distribution_name(module)) errors.report(line, 0, note, severity="note", only_once=True, code=codes.IMPORT) if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: - manager.missing_stub_packages.add(stub_package_name(top_level)) + manager.missing_stub_packages.add(stub_distribution_name(module)) errors.set_import_context(save_import_context) diff --git a/mypy/modulefinder.py 
b/mypy/modulefinder.py index c780015c639d..c36a382848bf 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -337,14 +337,9 @@ def _find_module_non_stub_helper( # If this is not a directory then we can't traverse further into it if not self.fscache.isdir(dir_path): break - if approved_stub_package_exists(components[0]): - if len(components) == 1 or ( - self.find_module(components[0]) - is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED - ): + for i in range(len(components), 0, -1): + if approved_stub_package_exists(".".join(components[:i])): return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED - if approved_stub_package_exists(".".join(components[:2])): - return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED if plausible_match: return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS else: diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index e6e549ad280f..0d76a6215238 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -9,15 +9,13 @@ def approved_stub_package_exists(prefix: str) -> bool: return is_legacy_bundled_package(prefix) or prefix in non_bundled_packages -def stub_package_name(prefix: str) -> str: +def stub_distribution_name(prefix: str) -> str: return legacy_bundled_packages.get(prefix) or non_bundled_packages[prefix] # Stubs for these third-party packages used to be shipped with mypy. # # Map package name to PyPI stub distribution name. -# -# Package name can have one or two components ('a' or 'a.b'). 
legacy_bundled_packages = { "aiofiles": "types-aiofiles", "bleach": "types-bleach", @@ -116,7 +114,7 @@ def stub_package_name(prefix: str) -> str: "flask_sqlalchemy": "types-Flask-SQLAlchemy", "fpdf": "types-fpdf2", "gdb": "types-gdb", - "google.cloud": "types-google-cloud-ndb", + "google.cloud.ndb": "types-google-cloud-ndb", "hdbcli": "types-hdbcli", "html5lib": "types-html5lib", "httplib2": "types-httplib2", diff --git a/mypy/util.py b/mypy/util.py index 268ba8f9de81..8a079c5256bc 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -308,17 +308,6 @@ def get_prefix(fullname: str) -> str: return fullname.rsplit(".", 1)[0] -def get_top_two_prefixes(fullname: str) -> tuple[str, str]: - """Return one and two component prefixes of a fully qualified name. - - Given 'a.b.c.d', return ('a', 'a.b'). - - If fullname has only one component, return (fullname, fullname). - """ - components = fullname.split(".", 3) - return components[0], ".".join(components[:2]) - - def correct_relative_import( cur_mod_id: str, relative: int, target: str, is_cur_package_init_file: bool ) -> tuple[str, bool]: diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index bdf860cba89d..3da5996ed274 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -3121,26 +3121,28 @@ import google.cloud from google.cloud import x [case testErrorFromGoogleCloud] -import google.cloud +import google.cloud # E: Cannot find implementation or library stub for module named "google.cloud" \ + # E: Cannot find implementation or library stub for module named "google" from google.cloud import x -import google.non_existent +import google.non_existent # E: Cannot find implementation or library stub for module named "google.non_existent" from google.non_existent import x -[out] -main:1: error: Library stubs not installed for "google.cloud" -main:1: note: Hint: "python3 -m pip install types-google-cloud-ndb" -main:1: note: (or run "mypy --install-types" to install 
all missing stub packages) -main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -main:1: error: Cannot find implementation or library stub for module named "google" -main:3: error: Cannot find implementation or library stub for module named "google.non_existent" + +import google.cloud.ndb # E: Library stubs not installed for "google.cloud.ndb" \ + # N: Hint: "python3 -m pip install types-google-cloud-ndb" \ + # N: (or run "mypy --install-types" to install all missing stub packages) \ + # N: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports +from google.cloud import ndb [case testMissingSubmoduleOfInstalledStubPackage] import bleach.xyz from bleach.abc import fgh [file bleach/__init__.pyi] [out] -main:1: error: Cannot find implementation or library stub for module named "bleach.xyz" +main:1: error: Library stubs not installed for "bleach.xyz" +main:1: note: Hint: "python3 -m pip install types-bleach" +main:1: note: (or run "mypy --install-types" to install all missing stub packages) main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -main:2: error: Cannot find implementation or library stub for module named "bleach.abc" +main:2: error: Library stubs not installed for "bleach.abc" [case testMissingSubmoduleOfInstalledStubPackageIgnored] # flags: --ignore-missing-imports From cfd01d9f7fdceb5eb8e367e8f1a6a1efb5ede38c Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 10 Aug 2023 13:49:27 -0700 Subject: [PATCH 40/88] Improve error code disabling documentation (#15841) Provide a concrete example of what file level comments would look like. Sort text into sections a little better. 
--- docs/source/error_codes.rst | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/docs/source/error_codes.rst b/docs/source/error_codes.rst index 65ae0e5816e8..a71168cadf30 100644 --- a/docs/source/error_codes.rst +++ b/docs/source/error_codes.rst @@ -43,11 +43,7 @@ Silencing errors based on error codes You can use a special comment ``# type: ignore[code, ...]`` to only ignore errors with a specific error code (or codes) on a particular line. This can be used even if you have not configured mypy to show -error codes. Currently it's only possible to disable arbitrary error -codes on individual lines using this comment. - -You can also use :option:`--disable-error-code ` -to disable specific error codes globally. +error codes. This example shows how to ignore an error about an imported name mypy thinks is undefined: @@ -58,17 +54,17 @@ thinks is undefined: # definition. from foolib import foo # type: ignore[attr-defined] - -Enabling specific error codes ------------------------------ +Enabling/disabling specific error codes globally +------------------------------------------------ There are command-line flags and config file settings for enabling certain optional error codes, such as :option:`--disallow-untyped-defs `, which enables the ``no-untyped-def`` error code. -You can use :option:`--enable-error-code ` to -enable specific error codes that don't have a dedicated command-line -flag or config file setting. +You can use :option:`--enable-error-code ` +and :option:`--disable-error-code ` +to enable or disable specific error codes that don't have a dedicated +command-line flag or config file setting. Per-module enabling/disabling error codes ----------------------------------------- @@ -107,8 +103,9 @@ still keep the other two error codes enabled. 
The overall logic is following: * Individual config sections *adjust* them per glob/module -* Inline ``# mypy: ...`` comments can further *adjust* them for a specific - module +* Inline ``# mypy: disable-error-code="..."`` comments can further + *adjust* them for a specific module. + For example: ``# mypy: disable-error-code="truthy-bool, ignore-without-code"`` So one can e.g. enable some code globally, disable it for all tests in the corresponding config section, and then re-enable it with an inline From 7f22aaa5783e25c2bbac81ad520d5b7702b39e4f Mon Sep 17 00:00:00 2001 From: Marcel Telka Date: Sat, 12 Aug 2023 09:09:32 +0200 Subject: [PATCH 41/88] Add tox.ini to sdist (#15853) Fixes #14142 --- MANIFEST.in | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 1c26ae16fc78..b77b762b4852 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -39,9 +39,10 @@ graft test-data include conftest.py include runtests.py include pytest.ini +include tox.ini include LICENSE mypyc/README.md -exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md tox.ini action.yml .editorconfig +exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md action.yml .editorconfig exclude .git-blame-ignore-revs .pre-commit-config.yaml global-exclude *.py[cod] From 89c6596f0285b2f4b4b4c93f8f5696cae7a4398e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 12 Aug 2023 00:10:26 -0700 Subject: [PATCH 42/88] Sync typeshed (#15792) Source commit: https://github.com/python/typeshed/commit/fe2ebd69af14d376825f21182d415223bd037485 Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Co-authored-by: hauntsaninja --- mypy/typeshed/stdlib/_collections_abc.pyi | 2 + mypy/typeshed/stdlib/_weakref.pyi | 3 + mypy/typeshed/stdlib/abc.pyi | 2 +- mypy/typeshed/stdlib/argparse.pyi | 10 +- mypy/typeshed/stdlib/array.pyi | 6 + mypy/typeshed/stdlib/asyncio/futures.pyi | 2 +- 
mypy/typeshed/stdlib/builtins.pyi | 34 +++++- mypy/typeshed/stdlib/collections/__init__.pyi | 7 +- mypy/typeshed/stdlib/contextvars.pyi | 14 ++- mypy/typeshed/stdlib/datetime.pyi | 6 + mypy/typeshed/stdlib/email/charset.pyi | 4 +- mypy/typeshed/stdlib/email/policy.pyi | 8 +- mypy/typeshed/stdlib/enum.pyi | 1 + mypy/typeshed/stdlib/ftplib.pyi | 2 +- mypy/typeshed/stdlib/http/client.pyi | 4 + mypy/typeshed/stdlib/importlib/machinery.pyi | 1 + .../stdlib/importlib/metadata/__init__.pyi | 3 + mypy/typeshed/stdlib/inspect.pyi | 2 + mypy/typeshed/stdlib/ipaddress.pyi | 13 ++- .../stdlib/multiprocessing/managers.pyi | 10 +- mypy/typeshed/stdlib/pdb.pyi | 3 + mypy/typeshed/stdlib/pydoc.pyi | 2 + mypy/typeshed/stdlib/re.pyi | 2 + mypy/typeshed/stdlib/shelve.pyi | 4 +- mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 13 ++- mypy/typeshed/stdlib/ssl.pyi | 1 + mypy/typeshed/stdlib/tkinter/ttk.pyi | 106 ++++++++++-------- mypy/typeshed/stdlib/traceback.pyi | 7 +- mypy/typeshed/stdlib/tracemalloc.pyi | 5 + mypy/typeshed/stdlib/types.pyi | 13 +++ mypy/typeshed/stdlib/typing.pyi | 11 +- mypy/typeshed/stdlib/typing_extensions.pyi | 90 ++++++++++++++- mypy/typeshed/stdlib/unittest/case.pyi | 3 + mypy/typeshed/stdlib/unittest/mock.pyi | 6 +- mypy/typeshed/stdlib/urllib/request.pyi | 8 +- mypy/typeshed/stdlib/uuid.pyi | 1 + mypy/typeshed/stdlib/weakref.pyi | 9 +- mypy/typeshed/stdlib/winreg.pyi | 1 + .../typeshed/stdlib/xml/etree/ElementTree.pyi | 1 + test-data/unit/pythoneval.test | 6 +- 40 files changed, 326 insertions(+), 100 deletions(-) diff --git a/mypy/typeshed/stdlib/_collections_abc.pyi b/mypy/typeshed/stdlib/_collections_abc.pyi index ba2f638d81c9..2b57f157a0e4 100644 --- a/mypy/typeshed/stdlib/_collections_abc.pyi +++ b/mypy/typeshed/stdlib/_collections_abc.pyi @@ -69,6 +69,7 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. 
@final class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented + def __eq__(self, __value: object) -> bool: ... if sys.version_info >= (3, 10): @property def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... @@ -81,6 +82,7 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented @final class dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]): # undocumented + def __eq__(self, __value: object) -> bool: ... if sys.version_info >= (3, 10): @property def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi index 2402d0bfe721..ce0f681248ab 100644 --- a/mypy/typeshed/stdlib/_weakref.pyi +++ b/mypy/typeshed/stdlib/_weakref.pyi @@ -11,17 +11,20 @@ _T = TypeVar("_T") @final class CallableProxyType(Generic[_C]): # "weakcallableproxy" + def __eq__(self, __value: object) -> bool: ... def __getattr__(self, attr: str) -> Any: ... __call__: _C @final class ProxyType(Generic[_T]): # "weakproxy" + def __eq__(self, __value: object) -> bool: ... def __getattr__(self, attr: str) -> Any: ... class ReferenceType(Generic[_T]): __callback__: Callable[[ReferenceType[_T]], Any] def __new__(cls, __o: _T, __callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ... def __call__(self) -> _T | None: ... + def __eq__(self, __value: object) -> bool: ... def __hash__(self) -> int: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index ec04d8f85d12..43893a298341 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -31,7 +31,7 @@ def abstractmethod(funcobj: _FuncT) -> _FuncT: ... class abstractclassmethod(classmethod[_T, _P, _R_co]): __isabstractmethod__: Literal[True] - def __init__(self, callable: Callable[Concatenate[_T, _P], _R_co]) -> None: ... 
+ def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... class abstractstaticmethod(staticmethod[_P, _R_co]): __isabstractmethod__: Literal[True] diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index e41048516dd9..b59dd56ab921 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -85,7 +85,7 @@ class _ActionsContainer: self, *name_or_flags: str, action: _ActionStr | type[Action] = ..., - nargs: int | _NArgsStr | _SUPPRESS_T = ..., + nargs: int | _NArgsStr | _SUPPRESS_T | None = None, const: Any = ..., default: Any = ..., type: Callable[[str], _T] | FileType = ..., @@ -171,7 +171,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): ) -> None: ... @overload - def parse_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... # type: ignore[misc] + def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... # type: ignore[misc] @overload def parse_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... @overload @@ -210,7 +210,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def format_usage(self) -> str: ... def format_help(self) -> str: ... @overload - def parse_known_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> tuple[Namespace, list[str]]: ... # type: ignore[misc] + def parse_known_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ... # type: ignore[misc] @overload def parse_known_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ... @overload @@ -219,13 +219,13 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... def error(self, message: str) -> NoReturn: ... 
@overload - def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... # type: ignore[misc] + def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... # type: ignore[misc] @overload def parse_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... @overload def parse_intermixed_args(self, *, namespace: _N) -> _N: ... @overload - def parse_known_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> tuple[Namespace, list[str]]: ... # type: ignore[misc] + def parse_known_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> tuple[Namespace, list[str]]: ... # type: ignore[misc] @overload def parse_known_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ... @overload diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi index 8b003503bc9b..b533f9240073 100644 --- a/mypy/typeshed/stdlib/array.pyi +++ b/mypy/typeshed/stdlib/array.pyi @@ -6,6 +6,9 @@ from collections.abc import Iterable from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y022 from typing_extensions import Literal, Self, SupportsIndex, TypeAlias +if sys.version_info >= (3, 12): + from types import GenericAlias + _IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] _FloatTypeCode: TypeAlias = Literal["f", "d"] _UnicodeTypeCode: TypeAlias = Literal["u"] @@ -70,6 +73,7 @@ class array(MutableSequence[_T], Generic[_T]): def __setitem__(self, __key: slice, __value: array[_T]) -> None: ... def __delitem__(self, __key: SupportsIndex | slice) -> None: ... def __add__(self, __value: array[_T]) -> array[_T]: ... + def __eq__(self, __value: object) -> bool: ... def __ge__(self, __value: array[_T]) -> bool: ... def __gt__(self, __value: array[_T]) -> bool: ... 
def __iadd__(self, __value: array[_T]) -> Self: ... # type: ignore[override] @@ -82,5 +86,7 @@ class array(MutableSequence[_T], Generic[_T]): def __deepcopy__(self, __unused: Any) -> array[_T]: ... def __buffer__(self, __flags: int) -> memoryview: ... def __release_buffer__(self, __buffer: memoryview) -> None: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... ArrayType = array diff --git a/mypy/typeshed/stdlib/asyncio/futures.pyi b/mypy/typeshed/stdlib/asyncio/futures.pyi index 79209f5ed4fb..af05425d02a2 100644 --- a/mypy/typeshed/stdlib/asyncio/futures.pyi +++ b/mypy/typeshed/stdlib/asyncio/futures.pyi @@ -31,7 +31,7 @@ def isfuture(obj: object) -> TypeGuard[Future[Any]]: ... class Future(Awaitable[_T], Iterable[_T]): _state: str @property - def _exception(self) -> BaseException: ... + def _exception(self) -> BaseException | None: ... _blocking: bool @property def _log_traceback(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index d6ca39049c77..66c644d09a4d 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -131,6 +131,9 @@ class staticmethod(Generic[_P, _R_co]): @property def __isabstractmethod__(self) -> bool: ... def __init__(self, __f: Callable[_P, _R_co]) -> None: ... + @overload + def __get__(self, __instance: None, __owner: type) -> Callable[_P, _R_co]: ... + @overload def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): __name__: str @@ -141,16 +144,19 @@ class staticmethod(Generic[_P, _R_co]): class classmethod(Generic[_T, _P, _R_co]): @property - def __func__(self) -> Callable[Concatenate[_T, _P], _R_co]: ... + def __func__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ... @property def __isabstractmethod__(self) -> bool: ... - def __init__(self, __f: Callable[Concatenate[_T, _P], _R_co]) -> None: ... 
+ def __init__(self, __f: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... + @overload def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[_P, _R_co]: ... + @overload + def __get__(self, __instance: None, __owner: type[_T]) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): __name__: str __qualname__: str @property - def __wrapped__(self) -> Callable[Concatenate[_T, _P], _R_co]: ... + def __wrapped__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ... class type: @property @@ -781,6 +787,8 @@ class memoryview(Sequence[int]): def __contains__(self, __x: object) -> bool: ... def __iter__(self) -> Iterator[int]: ... def __len__(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... @overload def __setitem__(self, __key: slice, __value: ReadableBuffer) -> None: ... @overload @@ -848,6 +856,7 @@ class slice: def __init__(self, __stop: Any) -> None: ... @overload def __init__(self, __start: Any, __stop: Any, __step: Any = ...) -> None: ... + def __eq__(self, __value: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ... @@ -864,6 +873,8 @@ class tuple(Sequence[_T_co], Generic[_T_co]): def __le__(self, __value: tuple[_T_co, ...]) -> bool: ... def __gt__(self, __value: tuple[_T_co, ...]) -> bool: ... def __ge__(self, __value: tuple[_T_co, ...]) -> bool: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... @overload def __add__(self, __value: tuple[_T_co, ...]) -> tuple[_T_co, ...]: ... @overload @@ -952,6 +963,7 @@ class list(MutableSequence[_T], Generic[_T]): def __ge__(self, __value: list[_T]) -> bool: ... def __lt__(self, __value: list[_T]) -> bool: ... def __le__(self, __value: list[_T]) -> bool: ... + def __eq__(self, __value: object) -> bool: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... 
@@ -991,19 +1003,24 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): @overload def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ... # Positional-only in dict, but not in MutableMapping - @overload + @overload # type: ignore[override] def get(self, __key: _KT) -> _VT | None: ... @overload - def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def get(self, __key: _KT, __default: _VT) -> _VT: ... + @overload + def get(self, __key: _KT, __default: _T) -> _VT | _T: ... @overload def pop(self, __key: _KT) -> _VT: ... @overload - def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def pop(self, __key: _KT, __default: _VT) -> _VT: ... + @overload + def pop(self, __key: _KT, __default: _T) -> _VT | _T: ... def __len__(self) -> int: ... def __getitem__(self, __key: _KT) -> _VT: ... def __setitem__(self, __key: _KT, __value: _VT) -> None: ... def __delitem__(self, __key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... + def __eq__(self, __value: object) -> bool: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_KT]: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -1058,6 +1075,7 @@ class set(MutableSet[_T], Generic[_T]): def __lt__(self, __value: AbstractSet[object]) -> bool: ... def __ge__(self, __value: AbstractSet[object]) -> bool: ... def __gt__(self, __value: AbstractSet[object]) -> bool: ... + def __eq__(self, __value: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -1086,6 +1104,8 @@ class frozenset(AbstractSet[_T_co], Generic[_T_co]): def __lt__(self, __value: AbstractSet[object]) -> bool: ... def __ge__(self, __value: AbstractSet[object]) -> bool: ... def __gt__(self, __value: AbstractSet[object]) -> bool: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... 
if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -1111,6 +1131,8 @@ class range(Sequence[int]): def count(self, __value: int) -> int: ... def index(self, __value: int) -> int: ... # type: ignore[override] def __len__(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... def __contains__(self, __key: object) -> bool: ... def __iter__(self) -> Iterator[int]: ... @overload diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 36d79101908d..8ceecd1f354e 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -153,6 +153,7 @@ class UserString(Sequence[UserString]): def __gt__(self, string: str | UserString) -> bool: ... def __ge__(self, string: str | UserString) -> bool: ... def __eq__(self, string: object) -> bool: ... + def __hash__(self) -> int: ... def __contains__(self, char: object) -> bool: ... def __len__(self) -> int: ... def __getitem__(self, index: SupportsIndex | slice) -> Self: ... @@ -257,6 +258,7 @@ class deque(MutableSequence[_T], Generic[_T]): def __le__(self, __value: deque[_T]) -> bool: ... def __gt__(self, __value: deque[_T]) -> bool: ... def __ge__(self, __value: deque[_T]) -> bool: ... + def __eq__(self, __value: object) -> bool: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -365,6 +367,7 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... + def __eq__(self, __value: object) -> bool: ... class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): default_factory: Callable[[], _VT] | None @@ -429,7 +432,9 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): @overload def pop(self, key: _KT) -> _VT: ... 
@overload - def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ... + def pop(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T) -> _VT | _T: ... def copy(self) -> Self: ... __copy__ = copy # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`. diff --git a/mypy/typeshed/stdlib/contextvars.pyi b/mypy/typeshed/stdlib/contextvars.pyi index ef6e2700e667..63b5f80aea6c 100644 --- a/mypy/typeshed/stdlib/contextvars.pyi +++ b/mypy/typeshed/stdlib/contextvars.pyi @@ -18,16 +18,21 @@ class ContextVar(Generic[_T]): def __init__(self, name: str) -> None: ... @overload def __init__(self, name: str, *, default: _T) -> None: ... + def __hash__(self) -> int: ... @property def name(self) -> str: ... @overload def get(self) -> _T: ... if sys.version_info >= (3, 8): @overload - def get(self, default: _D | _T) -> _D | _T: ... + def get(self, default: _T) -> _T: ... + @overload + def get(self, default: _D) -> _D | _T: ... else: @overload - def get(self, __default: _D | _T) -> _D | _T: ... + def get(self, __default: _T) -> _T: ... + @overload + def get(self, __default: _D) -> _D | _T: ... def set(self, __value: _T) -> Token[_T]: ... def reset(self, __token: Token[_T]) -> None: ... @@ -52,7 +57,9 @@ def copy_context() -> Context: ... class Context(Mapping[ContextVar[Any], Any]): def __init__(self) -> None: ... @overload - def get(self, __key: ContextVar[_T]) -> _T | None: ... + def get(self, __key: ContextVar[_T], __default: None = None) -> _T | None: ... # type: ignore[misc] # overlapping overloads + @overload + def get(self, __key: ContextVar[_T], __default: _T) -> _T: ... @overload def get(self, __key: ContextVar[_T], __default: _D) -> _T | _D: ... def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... @@ -60,3 +67,4 @@ class Context(Mapping[ContextVar[Any], Any]): def __getitem__(self, __key: ContextVar[_T]) -> _T: ... 
def __iter__(self) -> Iterator[ContextVar[Any]]: ... def __len__(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index 00d511915f20..36577c5b7e1b 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -36,6 +36,7 @@ class timezone(tzinfo): def utcoffset(self, __dt: datetime | None) -> timedelta: ... def dst(self, __dt: datetime | None) -> None: ... def __hash__(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... if sys.version_info >= (3, 11): UTC: timezone @@ -87,6 +88,7 @@ class date: def __lt__(self, __value: date) -> bool: ... def __ge__(self, __value: date) -> bool: ... def __gt__(self, __value: date) -> bool: ... + def __eq__(self, __value: object) -> bool: ... if sys.version_info >= (3, 8): def __add__(self, __value: timedelta) -> Self: ... def __radd__(self, __value: timedelta) -> Self: ... @@ -145,6 +147,7 @@ class time: def __lt__(self, __value: time) -> bool: ... def __ge__(self, __value: time) -> bool: ... def __gt__(self, __value: time) -> bool: ... + def __eq__(self, __value: object) -> bool: ... def __hash__(self) -> int: ... def isoformat(self, timespec: str = ...) -> str: ... @classmethod @@ -219,6 +222,7 @@ class timedelta: def __lt__(self, __value: timedelta) -> bool: ... def __ge__(self, __value: timedelta) -> bool: ... def __gt__(self, __value: timedelta) -> bool: ... + def __eq__(self, __value: object) -> bool: ... def __bool__(self) -> bool: ... def __hash__(self) -> int: ... @@ -310,6 +314,8 @@ class datetime(date): def __lt__(self, __value: datetime) -> bool: ... # type: ignore[override] def __ge__(self, __value: datetime) -> bool: ... # type: ignore[override] def __gt__(self, __value: datetime) -> bool: ... # type: ignore[override] + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... 
if sys.version_info >= (3, 8): @overload # type: ignore[override] def __sub__(self, __value: timedelta) -> Self: ... diff --git a/mypy/typeshed/stdlib/email/charset.pyi b/mypy/typeshed/stdlib/email/charset.pyi index d61950a26424..f8de016ab8bf 100644 --- a/mypy/typeshed/stdlib/email/charset.pyi +++ b/mypy/typeshed/stdlib/email/charset.pyi @@ -19,11 +19,11 @@ class Charset: def get_body_encoding(self) -> str | Callable[[Message], None]: ... def get_output_charset(self) -> str | None: ... def header_encode(self, string: str) -> str: ... - def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str]: ... + def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: ... @overload def body_encode(self, string: None) -> None: ... @overload - def body_encode(self, string: str) -> str: ... + def body_encode(self, string: str | bytes) -> str: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __value: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/email/policy.pyi b/mypy/typeshed/stdlib/email/policy.pyi index dc7f18489bfa..804044031fcd 100644 --- a/mypy/typeshed/stdlib/email/policy.pyi +++ b/mypy/typeshed/stdlib/email/policy.pyi @@ -53,7 +53,7 @@ compat32: Compat32 class EmailPolicy(Policy): utf8: bool refold_source: str - header_factory: Callable[[str, str], str] + header_factory: Callable[[str, Any], Any] content_manager: ContentManager def __init__( self, @@ -70,9 +70,9 @@ class EmailPolicy(Policy): content_manager: ContentManager = ..., ) -> None: ... def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... - def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... - def header_fetch_parse(self, name: str, value: str) -> str: ... - def fold(self, name: str, value: str) -> str: ... + def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: ... + def header_fetch_parse(self, name: str, value: str) -> Any: ... 
+ def fold(self, name: str, value: str) -> Any: ... def fold_binary(self, name: str, value: str) -> bytes: ... default: EmailPolicy diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index 96a96dbce10e..60cc27215fd0 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -190,6 +190,7 @@ class Enum(metaclass=EnumMeta): # and in practice using `object` here has the same effect as using `Any`. def __new__(cls, value: object) -> Self: ... def __dir__(self) -> list[str]: ... + def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi index f24d14fbf2b6..2d2ffa9aff03 100644 --- a/mypy/typeshed/stdlib/ftplib.pyi +++ b/mypy/typeshed/stdlib/ftplib.pyi @@ -87,7 +87,7 @@ class FTP: def makepasv(self) -> tuple[str, int]: ... def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ... # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. - def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int]: ... + def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: ... def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ... 
def retrbinary( self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index 41ece1b050b8..4b5ed3d8bda0 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -115,6 +115,10 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore[misc] # incomp chunk_left: int | None length: int | None will_close: bool + # url is set on instances of the class in urllib.request.AbstractHTTPHandler.do_open + # to match urllib.response.addinfourl's interface. + # It's not set in HTTPResponse.__init__ or any other method on the class + url: str def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... def peek(self, n: int = -1) -> bytes: ... def read(self, amt: int | None = None) -> bytes: ... diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi index 5aaefce87e3a..f5037da00d5f 100644 --- a/mypy/typeshed/stdlib/importlib/machinery.pyi +++ b/mypy/typeshed/stdlib/importlib/machinery.pyi @@ -148,3 +148,4 @@ class ExtensionFileLoader(importlib.abc.ExecutionLoader): def exec_module(self, module: types.ModuleType) -> None: ... def get_code(self, fullname: str) -> None: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi index 083453cd3c9a..0af33bc876c4 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -66,6 +66,9 @@ class EntryPoint(_EntryPointBase): extras: list[str] = ..., ) -> bool: ... # undocumented + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... 
+ if sys.version_info >= (3, 10): class EntryPoints(list[EntryPoint]): # use as list is deprecated since 3.10 # int argument is deprecated since 3.10 diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi index 9af4c39bae9e..601d23e786ac 100644 --- a/mypy/typeshed/stdlib/inspect.pyi +++ b/mypy/typeshed/stdlib/inspect.pyi @@ -354,6 +354,7 @@ class Signature: def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... if sys.version_info >= (3, 10): def get_annotations( @@ -413,6 +414,7 @@ class Parameter: annotation: Any = ..., ) -> Self: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... class BoundArguments: arguments: OrderedDict[str, Any] diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index fc42cf03e2bb..945e8bcbbdee 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -78,6 +78,7 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def __getitem__(self, n: int) -> _A: ... def __iter__(self) -> Iterator[_A]: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): def __ge__(self, other: Self) -> bool: ... @@ -148,7 +149,10 @@ class _BaseV4: class IPv4Address(_BaseV4, _BaseAddress): ... class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): ... -class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ... + +class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... class _BaseV6: @property @@ -169,11 +173,16 @@ class IPv6Address(_BaseV6, _BaseAddress): @property def scope_id(self) -> str | None: ... + def __hash__(self) -> int: ... 
+ def __eq__(self, other: object) -> bool: ... + class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]): @property def is_site_local(self) -> bool: ... -class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ... +class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... def v4_int_to_packed(address: int) -> bytes: ... def v6_int_to_packed(address: int) -> bytes: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi index 27a903fb9987..9cfc1ebbdd5e 100644 --- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -73,14 +73,18 @@ class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): def __delitem__(self, __key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def copy(self) -> dict[_KT, _VT]: ... - @overload + @overload # type: ignore[override] def get(self, __key: _KT) -> _VT | None: ... @overload - def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def get(self, __key: _KT, __default: _VT) -> _VT: ... + @overload + def get(self, __key: _KT, __default: _T) -> _VT | _T: ... @overload def pop(self, __key: _KT) -> _VT: ... @overload - def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def pop(self, __key: _KT, __default: _VT) -> _VT: ... + @overload + def pop(self, __key: _KT, __default: _T) -> _VT | _T: ... def keys(self) -> list[_KT]: ... # type: ignore[override] def items(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] def values(self) -> list[_VT]: ... # type: ignore[override] diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi index e0d69e7d30fa..4cc708d9d5fe 100644 --- a/mypy/typeshed/stdlib/pdb.pyi +++ b/mypy/typeshed/stdlib/pdb.pyi @@ -125,6 +125,9 @@ class Pdb(Bdb, Cmd): def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ... 
def message(self, msg: str) -> None: ... def error(self, msg: str) -> None: ... + if sys.version_info >= (3, 12): + def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ... + def _select_frame(self, number: int) -> None: ... def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... def _print_lines( diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi index c993af390bbb..7791c977aa8b 100644 --- a/mypy/typeshed/stdlib/pydoc.pyi +++ b/mypy/typeshed/stdlib/pydoc.pyi @@ -61,6 +61,7 @@ class Doc: def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... class HTMLRepr(Repr): + def __init__(self) -> None: ... def escape(self, text: str) -> str: ... def repr(self, object: object) -> str: ... def repr1(self, x: object, level: complex) -> str: ... @@ -148,6 +149,7 @@ class HTMLDoc(Doc): def filelink(self, url: str, path: str) -> str: ... class TextRepr(Repr): + def __init__(self) -> None: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: str, level: complex) -> str: ... def repr_str(self, x: str, level: complex) -> str: ... diff --git a/mypy/typeshed/stdlib/re.pyi b/mypy/typeshed/stdlib/re.pyi index 4e53141ade84..29ee8b66815e 100644 --- a/mypy/typeshed/stdlib/re.pyi +++ b/mypy/typeshed/stdlib/re.pyi @@ -175,6 +175,8 @@ class Pattern(Generic[AnyStr]): def subn(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> tuple[AnyStr, int]: ... def __copy__(self) -> Pattern[AnyStr]: ... def __deepcopy__(self, __memo: Any) -> Pattern[AnyStr]: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... 
diff --git a/mypy/typeshed/stdlib/shelve.pyi b/mypy/typeshed/stdlib/shelve.pyi index 82d0b03f4049..b162b3a85766 100644 --- a/mypy/typeshed/stdlib/shelve.pyi +++ b/mypy/typeshed/stdlib/shelve.pyi @@ -15,8 +15,10 @@ class Shelf(MutableMapping[str, _VT]): ) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... + @overload # type: ignore[override] + def get(self, key: str, default: None = None) -> _VT | None: ... # type: ignore[misc] # overlapping overloads @overload - def get(self, key: str) -> _VT | None: ... + def get(self, key: str, default: _VT) -> _VT: ... @overload def get(self, key: str, default: _T) -> _VT | _T: ... def __getitem__(self, key: str) -> _VT: ... diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index cff0f5e5ff1d..41f731e21e26 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -233,8 +233,9 @@ def connect( ) -> Connection: ... def enable_callback_tracebacks(__enable: bool) -> None: ... -# takes a pos-or-keyword argument because there is a C wrapper -def enable_shared_cache(enable: int) -> None: ... +if sys.version_info < (3, 12): + # takes a pos-or-keyword argument because there is a C wrapper + def enable_shared_cache(enable: int) -> None: ... if sys.version_info >= (3, 10): def register_adapter(__type: type[_T], __adapter: _Adapter[_T]) -> None: ... @@ -298,6 +299,11 @@ class Connection: isolation_level: str | None # one of '', 'DEFERRED', 'IMMEDIATE' or 'EXCLUSIVE' @property def total_changes(self) -> int: ... + if sys.version_info >= (3, 12): + @property + def autocommit(self) -> int: ... + @autocommit.setter + def autocommit(self, val: int) -> None: ... row_factory: Any text_factory: Any def __init__( @@ -375,6 +381,9 @@ class Connection: def getlimit(self, __category: int) -> int: ... def serialize(self, *, name: str = "main") -> bytes: ... 
def deserialize(self, __data: ReadableBuffer, *, name: str = "main") -> None: ... + if sys.version_info >= (3, 12): + def getconfig(self, __op: int) -> bool: ... + def setconfig(self, __op: int, __enable: bool = True) -> bool: ... def __call__(self, __sql: str) -> _Statement: ... def __enter__(self) -> Self: ... diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 20b8802bd7b9..446bbf8d1009 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -485,6 +485,7 @@ class SSLSession: def time(self) -> int: ... @property def timeout(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... class SSLErrorNumber(enum.IntEnum): SSL_ERROR_EOF: int diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi index d73566fc0917..bb416717a378 100644 --- a/mypy/typeshed/stdlib/tkinter/ttk.pyi +++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -953,8 +953,6 @@ class _TreeviewColumnDict(TypedDict): anchor: tkinter._Anchor id: str -_TreeviewColumnId: TypeAlias = int | str # manual page: "COLUMN IDENTIFIERS" - class Treeview(Widget, tkinter.XView, tkinter.YView): def __init__( self, @@ -963,7 +961,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): class_: str = ..., columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ..., cursor: tkinter._Cursor = ..., - displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., + displaycolumns: str | int | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., height: int = ..., name: str = ..., padding: _Padding = ..., @@ -985,7 +983,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): *, columns: str | list[str] | list[int] | list[str | int] | tuple[str | int, ...] = ..., cursor: tkinter._Cursor = ..., - displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., + displaycolumns: str | int | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] 
= ..., height: int = ..., padding: _Padding = ..., selectmode: Literal["extended", "browse", "none"] = ..., @@ -998,23 +996,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, item, column: _TreeviewColumnId | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] - def get_children(self, item: str | None = None) -> tuple[str, ...]: ... - def set_children(self, item: str, *newchildren: str) -> None: ... + def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] + def get_children(self, item: str | int | None = None) -> tuple[str, ...]: ... + def set_children(self, item: str | int, *newchildren: str | int) -> None: ... @overload - def column(self, column: _TreeviewColumnId, option: Literal["width", "minwidth"]) -> int: ... + def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: ... @overload - def column(self, column: _TreeviewColumnId, option: Literal["stretch"]) -> bool: ... # actually 0 or 1 + def column(self, column: str | int, option: Literal["stretch"]) -> bool: ... # actually 0 or 1 @overload - def column(self, column: _TreeviewColumnId, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... + def column(self, column: str | int, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... @overload - def column(self, column: _TreeviewColumnId, option: Literal["id"]) -> str: ... + def column(self, column: str | int, option: Literal["id"]) -> str: ... @overload - def column(self, column: _TreeviewColumnId, option: str) -> Any: ... + def column(self, column: str | int, option: str) -> Any: ... 
@overload def column( self, - column: _TreeviewColumnId, + column: str | int, option: None = None, *, width: int = ..., @@ -1023,29 +1021,29 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): anchor: tkinter._Anchor = ..., # id is read-only ) -> _TreeviewColumnDict | None: ... - def delete(self, *items: str) -> None: ... - def detach(self, *items: str) -> None: ... - def exists(self, item: str) -> bool: ... + def delete(self, *items: str | int) -> None: ... + def detach(self, *items: str | int) -> None: ... + def exists(self, item: str | int) -> bool: ... @overload # type: ignore[override] def focus(self, item: None = None) -> str: ... # can return empty string @overload - def focus(self, item: str) -> Literal[""]: ... + def focus(self, item: str | int) -> Literal[""]: ... @overload - def heading(self, column: _TreeviewColumnId, option: Literal["text"]) -> str: ... + def heading(self, column: str | int, option: Literal["text"]) -> str: ... @overload - def heading(self, column: _TreeviewColumnId, option: Literal["image"]) -> tuple[str] | str: ... + def heading(self, column: str | int, option: Literal["image"]) -> tuple[str] | str: ... @overload - def heading(self, column: _TreeviewColumnId, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... + def heading(self, column: str | int, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... @overload - def heading(self, column: _TreeviewColumnId, option: Literal["command"]) -> str: ... + def heading(self, column: str | int, option: Literal["command"]) -> str: ... @overload - def heading(self, column: _TreeviewColumnId, option: str) -> Any: ... + def heading(self, column: str | int, option: str) -> Any: ... @overload - def heading(self, column: _TreeviewColumnId, option: None = None) -> _TreeviewHeaderDict: ... # type: ignore[misc] + def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ... 
# type: ignore[misc] @overload def heading( self, - column: _TreeviewColumnId, + column: str | int, option: None = None, *, text: str = ..., @@ -1058,14 +1056,14 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def identify_column(self, x: int) -> str: ... def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ... def identify_element(self, x: int, y: int) -> str: ... # don't know what possible return values are - def index(self, item: str) -> int: ... + def index(self, item: str | int) -> int: ... def insert( self, parent: str, index: int | Literal["end"], - iid: str | None = None, + iid: str | int | None = None, *, - id: str = ..., # same as iid + id: str | int = ..., # same as iid text: str = ..., image: tkinter._ImageSpec = ..., values: list[Any] | tuple[Any, ...] = ..., @@ -1073,23 +1071,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): tags: str | list[str] | tuple[str, ...] = ..., ) -> str: ... @overload - def item(self, item: str, option: Literal["text"]) -> str: ... + def item(self, item: str | int, option: Literal["text"]) -> str: ... @overload - def item(self, item: str, option: Literal["image"]) -> tuple[str] | Literal[""]: ... + def item(self, item: str | int, option: Literal["image"]) -> tuple[str] | Literal[""]: ... @overload - def item(self, item: str, option: Literal["values"]) -> tuple[Any, ...] | Literal[""]: ... + def item(self, item: str | int, option: Literal["values"]) -> tuple[Any, ...] | Literal[""]: ... @overload - def item(self, item: str, option: Literal["open"]) -> bool: ... # actually 0 or 1 + def item(self, item: str | int, option: Literal["open"]) -> bool: ... # actually 0 or 1 @overload - def item(self, item: str, option: Literal["tags"]) -> tuple[str, ...] | Literal[""]: ... + def item(self, item: str | int, option: Literal["tags"]) -> tuple[str, ...] | Literal[""]: ... @overload - def item(self, item: str, option: str) -> Any: ... 
+ def item(self, item: str | int, option: str) -> Any: ... @overload - def item(self, item: str, option: None = None) -> _TreeviewItemDict: ... # type: ignore[misc] + def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ... # type: ignore[misc] @overload def item( self, - item: str, + item: str | int, option: None = None, *, text: str = ..., @@ -1098,27 +1096,39 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): open: bool = ..., tags: str | list[str] | tuple[str, ...] = ..., ) -> None: ... - def move(self, item: str, parent: str, index: int) -> None: ... + def move(self, item: str | int, parent: str, index: int) -> None: ... reattach = move - def next(self, item: str) -> str: ... # returning empty string means last item - def parent(self, item: str) -> str: ... - def prev(self, item: str) -> str: ... # returning empty string means first item - def see(self, item: str) -> None: ... + def next(self, item: str | int) -> str: ... # returning empty string means last item + def parent(self, item: str | int) -> str: ... + def prev(self, item: str | int) -> str: ... # returning empty string means first item + def see(self, item: str | int) -> None: ... if sys.version_info >= (3, 8): def selection(self) -> tuple[str, ...]: ... else: def selection(self, selop: Incomplete | None = ..., items: Incomplete | None = None) -> tuple[str, ...]: ... - def selection_set(self, items: str | list[str] | tuple[str, ...]) -> None: ... - def selection_add(self, items: str | list[str] | tuple[str, ...]) -> None: ... - def selection_remove(self, items: str | list[str] | tuple[str, ...]) -> None: ... - def selection_toggle(self, items: str | list[str] | tuple[str, ...]) -> None: ... @overload - def set(self, item: str, column: None = None, value: None = None) -> dict[str, Any]: ... + def selection_set(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ... + @overload + def selection_set(self, *items: str | int) -> None: ... 
+ @overload + def selection_add(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ... + @overload + def selection_add(self, *items: str | int) -> None: ... + @overload + def selection_remove(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ... + @overload + def selection_remove(self, *items: str | int) -> None: ... + @overload + def selection_toggle(self, __items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...]) -> None: ... + @overload + def selection_toggle(self, *items: str | int) -> None: ... + @overload + def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: ... @overload - def set(self, item: str, column: _TreeviewColumnId, value: None = None) -> Any: ... + def set(self, item: str | int, column: str | int, value: None = None) -> Any: ... @overload - def set(self, item: str, column: _TreeviewColumnId, value: Any) -> Literal[""]: ... + def set(self, item: str | int, column: str | int, value: Any) -> Literal[""]: ... # There's no tag_unbind() or 'add' argument for whatever reason. # Also, it's 'callback' instead of 'func' here. @overload @@ -1150,7 +1160,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ... @overload - def tag_has(self, tagname: str, item: str) -> bool: ... + def tag_has(self, tagname: str, item: str | int) -> bool: ... 
class LabeledScale(Frame): label: Incomplete diff --git a/mypy/typeshed/stdlib/traceback.pyi b/mypy/typeshed/stdlib/traceback.pyi index a6d6d3e168b3..47449dfe8143 100644 --- a/mypy/typeshed/stdlib/traceback.pyi +++ b/mypy/typeshed/stdlib/traceback.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import SupportsWrite +from _typeshed import SupportsWrite, Unused from collections.abc import Generator, Iterable, Iterator, Mapping from types import FrameType, TracebackType from typing import Any, overload @@ -84,7 +84,10 @@ def format_list(extracted_list: list[FrameSummary]) -> list[str]: ... def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = None) -> None: ... if sys.version_info >= (3, 10): - def format_exception_only(__exc: type[BaseException] | None, value: BaseException | None = ...) -> list[str]: ... + @overload + def format_exception_only(__exc: BaseException | None) -> list[str]: ... + @overload + def format_exception_only(__exc: Unused, value: BaseException | None) -> list[str]: ... else: def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... diff --git a/mypy/typeshed/stdlib/tracemalloc.pyi b/mypy/typeshed/stdlib/tracemalloc.pyi index 3dc8b8603fe5..6448a16ce11a 100644 --- a/mypy/typeshed/stdlib/tracemalloc.pyi +++ b/mypy/typeshed/stdlib/tracemalloc.pyi @@ -37,6 +37,7 @@ class Statistic: traceback: Traceback def __init__(self, traceback: Traceback, size: int, count: int) -> None: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... class StatisticDiff: count: int @@ -46,6 +47,7 @@ class StatisticDiff: traceback: Traceback def __init__(self, traceback: Traceback, size: int, size_diff: int, count: int, count_diff: int) -> None: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... _FrameTuple: TypeAlias = tuple[str, int] @@ -56,6 +58,7 @@ class Frame: def lineno(self) -> int: ... 
def __init__(self, frame: _FrameTuple) -> None: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... def __lt__(self, other: Frame) -> bool: ... if sys.version_info >= (3, 11): def __gt__(self, other: Frame) -> bool: ... @@ -80,6 +83,7 @@ class Trace: def traceback(self) -> Traceback: ... def __init__(self, trace: _TraceTuple) -> None: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... class Traceback(Sequence[Frame]): if sys.version_info >= (3, 9): @@ -97,6 +101,7 @@ class Traceback(Sequence[Frame]): def __contains__(self, frame: Frame) -> bool: ... # type: ignore[override] def __len__(self) -> int: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... def __lt__(self, other: Traceback) -> bool: ... if sys.version_info >= (3, 11): def __gt__(self, other: Traceback) -> bool: ... diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index e5468ce4ed3c..2f4bd1a88047 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -71,6 +71,7 @@ class _Cell: if sys.version_info >= (3, 8): def __init__(self, __contents: object = ...) -> None: ... + def __eq__(self, __value: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] cell_contents: Any @@ -113,6 +114,8 @@ LambdaType = FunctionType @final class CodeType: + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... @property def co_argcount(self) -> int: ... if sys.version_info >= (3, 8): @@ -326,6 +329,7 @@ class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): class SimpleNamespace: __hash__: ClassVar[None] # type: ignore[assignment] def __init__(self, **kwargs: Any) -> None: ... + def __eq__(self, __value: object) -> bool: ... def __getattribute__(self, __name: str) -> Any: ... def __setattr__(self, __name: str, __value: Any) -> None: ... def __delattr__(self, __name: str) -> None: ... 
@@ -442,6 +446,8 @@ class MethodType: def __qualname__(self) -> str: ... # inherited from the added function def __init__(self, __func: Callable[..., Any], __obj: object) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... @final class BuiltinFunctionType: @@ -452,6 +458,8 @@ class BuiltinFunctionType: @property def __qualname__(self) -> str: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... BuiltinMethodType = BuiltinFunctionType @@ -479,6 +487,7 @@ class MethodWrapperType: def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __eq__(self, __value: object) -> bool: ... def __ne__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... @final class MethodDescriptorType: @@ -603,6 +612,8 @@ if sys.version_info >= (3, 9): def __parameters__(self) -> tuple[Any, ...]: ... def __init__(self, origin: type, args: Any) -> None: ... def __getitem__(self, __typeargs: Any) -> GenericAlias: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... if sys.version_info >= (3, 11): @property def __unpacked__(self) -> bool: ... @@ -626,3 +637,5 @@ if sys.version_info >= (3, 10): def __args__(self) -> tuple[Any, ...]: ... def __or__(self, __value: Any) -> UnionType: ... def __ror__(self, __value: Any) -> UnionType: ... + def __eq__(self, __value: object) -> bool: ... + def __hash__(self) -> int: ... diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 7496a0920690..6a307368642f 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -226,12 +226,14 @@ if sys.version_info >= (3, 10): @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... + def __eq__(self, other: object) -> bool: ... 
@_final class ParamSpecKwargs: @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... + def __eq__(self, other: object) -> bool: ... @_final class ParamSpec: @@ -563,6 +565,7 @@ class AbstractSet(Collection[_T_co], Generic[_T_co]): def __or__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... def __sub__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... + def __eq__(self, other: object) -> bool: ... def isdisjoint(self, other: Iterable[Any]) -> bool: ... class MutableSet(AbstractSet[_T], Generic[_T]): @@ -647,7 +650,9 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @overload def pop(self, __key: _KT) -> _VT: ... @overload - def pop(self, __key: _KT, default: _VT | _T) -> _VT | _T: ... + def pop(self, __key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, __key: _KT, default: _T) -> _VT | _T: ... def popitem(self) -> tuple[_KT, _VT]: ... # This overload should be allowed only if the value type is compatible with None. # @@ -953,3 +958,7 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: ... def __ror__(self, left: Any) -> _SpecialForm: ... + +if sys.version_info >= (3, 13): + def is_protocol(__tp: type) -> bool: ... + def get_protocol_members(__tp: type) -> frozenset[str]: ... 
diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index 93087a45a108..efcc13e42047 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -4,26 +4,52 @@ import sys import typing from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Incomplete -from collections.abc import Iterable -from typing import ( # noqa: Y022,Y039 +from typing import ( # noqa: Y022,Y037,Y038,Y039 + IO as IO, TYPE_CHECKING as TYPE_CHECKING, + AbstractSet as AbstractSet, Any as Any, + AnyStr as AnyStr, AsyncContextManager as AsyncContextManager, AsyncGenerator as AsyncGenerator, AsyncIterable as AsyncIterable, AsyncIterator as AsyncIterator, Awaitable as Awaitable, - Callable, + BinaryIO as BinaryIO, + Callable as Callable, ChainMap as ChainMap, ClassVar as ClassVar, + Collection as Collection, + Container as Container, ContextManager as ContextManager, Coroutine as Coroutine, Counter as Counter, DefaultDict as DefaultDict, Deque as Deque, - Mapping, + Dict as Dict, + ForwardRef as ForwardRef, + FrozenSet as FrozenSet, + Generator as Generator, + Generic as Generic, + Hashable as Hashable, + ItemsView as ItemsView, + Iterable as Iterable, + Iterator as Iterator, + KeysView as KeysView, + List as List, + Mapping as Mapping, + MappingView as MappingView, + Match as Match, + MutableMapping as MutableMapping, + MutableSequence as MutableSequence, + MutableSet as MutableSet, NoReturn as NoReturn, - Sequence, + Optional as Optional, + Pattern as Pattern, + Reversible as Reversible, + Sequence as Sequence, + Set as Set, + Sized as Sized, SupportsAbs as SupportsAbs, SupportsBytes as SupportsBytes, SupportsComplex as SupportsComplex, @@ -31,8 +57,15 @@ from typing import ( # noqa: Y022,Y039 SupportsInt as SupportsInt, SupportsRound as SupportsRound, Text as Text, + TextIO as TextIO, + Tuple as Tuple, Type as Type, + Union as Union, + ValuesView as 
ValuesView, _Alias, + cast as cast, + no_type_check as no_type_check, + no_type_check_decorator as no_type_check_decorator, overload as overload, type_check_only, ) @@ -109,11 +142,50 @@ __all__ = [ "get_original_bases", "get_overloads", "get_type_hints", + "AbstractSet", + "AnyStr", + "BinaryIO", + "Callable", + "Collection", + "Container", + "Dict", + "ForwardRef", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "IO", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "Match", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Optional", + "Pattern", + "Reversible", + "Sequence", + "Set", + "Sized", + "TextIO", + "Tuple", + "Union", + "ValuesView", + "cast", + "get_protocol_members", + "is_protocol", + "no_type_check", + "no_type_check_decorator", ] _T = typing.TypeVar("_T") _F = typing.TypeVar("_F", bound=Callable[..., Any]) -_TC = typing.TypeVar("_TC", bound=Type[object]) +_TC = typing.TypeVar("_TC", bound=type[object]) # unfortunately we have to duplicate this class definition from typing.pyi or we break pytype class _SpecialForm: @@ -403,3 +475,9 @@ else: # Not actually a Protocol at runtime; see # https://github.com/python/typeshed/issues/10224 for why we're defining it this way def __buffer__(self, __flags: int) -> memoryview: ... + +if sys.version_info >= (3, 13): + from typing import get_protocol_members as get_protocol_members, is_protocol as is_protocol +else: + def is_protocol(__tp: type) -> bool: ... + def get_protocol_members(__tp: type) -> frozenset[str]: ... diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 45c39e3f3010..1f58f266ee89 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -86,6 +86,7 @@ class TestCase: _testMethodDoc: str def __init__(self, methodName: str = "runTest") -> None: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... 
def setUp(self) -> None: ... def tearDown(self) -> None: ... @classmethod @@ -304,6 +305,8 @@ class FunctionTestCase(TestCase): description: str | None = None, ) -> None: ... def runTest(self) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... class _AssertRaisesContext(Generic[_E]): exception: _E diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index db1cc7d9bfc9..66120197b269 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -389,7 +389,11 @@ if sys.version_info >= (3, 8): class AsyncMagicMixin(MagicMixin): def __init__(self, *args: Any, **kw: Any) -> None: ... - class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ... + class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): + # Improving the `reset_mock` signature. + # It is defined on `AsyncMockMixin` with `*args, **kwargs`, which is not ideal. + # But, `NonCallableMock` super-class has the better version. + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... class MagicProxy: name: str diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 8f99c5837871..079c9755528c 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -173,7 +173,7 @@ class HTTPPasswordMgr: def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... def is_suburi(self, base: str, test: str) -> bool: ... # undocumented - def reduce_uri(self, uri: str, default_port: bool = True) -> str: ... # undocumented + def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: ... 
# undocumented class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... @@ -184,7 +184,7 @@ class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = False ) -> None: ... def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = False) -> None: ... - def is_authenticated(self, authuri: str) -> bool: ... + def is_authenticated(self, authuri: str) -> bool | None: ... class AbstractBasicAuthHandler: rx: ClassVar[Pattern[str]] # undocumented @@ -212,7 +212,7 @@ class AbstractDigestAuthHandler: def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: HTTPMessage) -> None: ... def retry_http_digest_auth(self, req: Request, auth: str) -> _UrlopenRet | None: ... def get_cnonce(self, nonce: str) -> str: ... - def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... + def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str | None: ... def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ... def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... @@ -269,7 +269,7 @@ class ftpwrapper: # undocumented def file_close(self) -> None: ... def init(self) -> None: ... def real_close(self) -> None: ... - def retrfile(self, file: str, type: str) -> tuple[addclosehook, int]: ... + def retrfile(self, file: str, type: str) -> tuple[addclosehook, int | None]: ... class FTPHandler(BaseHandler): def ftp_open(self, req: Request) -> addinfourl: ... 
diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi index fd87646531a6..e1ea424f9680 100644 --- a/mypy/typeshed/stdlib/uuid.pyi +++ b/mypy/typeshed/stdlib/uuid.pyi @@ -63,6 +63,7 @@ class UUID: def __le__(self, other: UUID) -> bool: ... def __gt__(self, other: UUID) -> bool: ... def __ge__(self, other: UUID) -> bool: ... + def __hash__(self) -> builtins.int: ... if sys.version_info >= (3, 9): def getnode() -> int: ... diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index 13f48fe85a8d..ecb98d4269d5 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -45,6 +45,7 @@ class WeakMethod(ref[_CallableT], Generic[_CallableT]): def __call__(self) -> _CallableT | None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... class WeakValueDictionary(MutableMapping[_KT, _VT]): @overload @@ -74,7 +75,9 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): @overload def pop(self, key: _KT) -> _VT: ... @overload - def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + def pop(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T) -> _VT | _T: ... if sys.version_info >= (3, 9): def __or__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... @@ -117,7 +120,9 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): @overload def pop(self, key: _KT) -> _VT: ... @overload - def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + def pop(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _T) -> _VT | _T: ... if sys.version_info >= (3, 9): def __or__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... 
def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... diff --git a/mypy/typeshed/stdlib/winreg.pyi b/mypy/typeshed/stdlib/winreg.pyi index 70ea6a1ced11..337bd9706050 100644 --- a/mypy/typeshed/stdlib/winreg.pyi +++ b/mypy/typeshed/stdlib/winreg.pyi @@ -98,3 +98,4 @@ if sys.platform == "win32": ) -> bool | None: ... def Close(self) -> None: ... def Detach(self) -> int: ... + def __hash__(self) -> int: ... diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi index 2cf8dbbe7025..d8ff2f5b6090 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -142,6 +142,7 @@ class QName: def __gt__(self, other: QName | str) -> bool: ... def __ge__(self, other: QName | str) -> bool: ... def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... class ElementTree: def __init__(self, element: Element | None = None, file: _FileRead | None = None) -> None: ... 
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 754cb21c3ff8..58dfb172cf76 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1482,14 +1482,12 @@ frozenset({1}) == [1] # Error {1: 2}.keys() == frozenset({1}) {1: 2}.items() == {(1, 2)} -{1: 2}.keys() == {'no'} # Error +{1: 2}.keys() == {'no'} # OK {1: 2}.values() == {2} # Error -{1: 2}.keys() == [1] # Error +{1: 2}.keys() == [1] # OK [out] _testStrictEqualityAllowlist.py:5: error: Non-overlapping equality check (left operand type: "FrozenSet[int]", right operand type: "List[int]") -_testStrictEqualityAllowlist.py:11: error: Non-overlapping equality check (left operand type: "dict_keys[int, int]", right operand type: "Set[str]") _testStrictEqualityAllowlist.py:12: error: Non-overlapping equality check (left operand type: "dict_values[int, int]", right operand type: "Set[int]") -_testStrictEqualityAllowlist.py:13: error: Non-overlapping equality check (left operand type: "dict_keys[int, int]", right operand type: "List[int]") [case testUnreachableWithStdlibContextManagers] # mypy: warn-unreachable, strict-optional From 0e4521aa2d0b643b67777b4136bc27d97e622c56 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Sat, 12 Aug 2023 04:36:03 -0400 Subject: [PATCH 43/88] Fix inheriting from generic @frozen attrs class (#15700) Fixes #15658. --- mypy/plugins/attrs.py | 2 +- test-data/unit/check-plugin-attrs.test | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 0f748cc140e8..d444c18852dd 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -803,7 +803,7 @@ def _make_frozen(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) else: # This variable belongs to a super class so create new Var so we # can modify it. 
- var = Var(attribute.name, ctx.cls.info[attribute.name].type) + var = Var(attribute.name, attribute.init_type) var.info = ctx.cls.info var._fullname = f"{ctx.cls.info.fullname}.{var.name}" ctx.cls.info.names[var.name] = SymbolTableNode(MDEF, var) diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test index 7a7bcb65fe98..3534d206c060 100644 --- a/test-data/unit/check-plugin-attrs.test +++ b/test-data/unit/check-plugin-attrs.test @@ -2253,3 +2253,27 @@ c = attrs.assoc(c, name=42) # E: Argument "name" to "assoc" of "C" has incompat [builtins fixtures/plugin_attrs.pyi] [typing fixtures/typing-medium.pyi] + +[case testFrozenInheritFromGeneric] +from typing import Generic, TypeVar +from attrs import field, frozen + +T = TypeVar('T') + +def f(s: str) -> int: + ... + +@frozen +class A(Generic[T]): + x: T + y: int = field(converter=f) + +@frozen +class B(A[int]): + pass + +b = B(42, 'spam') +reveal_type(b.x) # N: Revealed type is "builtins.int" +reveal_type(b.y) # N: Revealed type is "builtins.int" + +[builtins fixtures/plugin_attrs.pyi] From 742b5c68cae5b33c7b53768e874d9bab4344567e Mon Sep 17 00:00:00 2001 From: Jannic Warken Date: Sat, 12 Aug 2023 10:56:09 +0200 Subject: [PATCH 44/88] Support __bool__ with Literal in --warn-unreachable (#15645) This adds support for `Literal` as return type of `__bool__` in the reachability analysis. 
Fixes https://github.com/python/mypy/issues/7008 --- mypy/typeops.py | 12 +++----- test-data/unit/check-unreachable-code.test | 32 ++++++++++++++++++++++ 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/mypy/typeops.py b/mypy/typeops.py index 65ab4340403c..4233cc1b2b33 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -602,10 +602,8 @@ def true_only(t: Type) -> ProperType: else: ret_type = _get_type_special_method_bool_ret_type(t) - if ret_type and ret_type.can_be_false and not ret_type.can_be_true: - new_t = copy_type(t) - new_t.can_be_true = False - return new_t + if ret_type and not ret_type.can_be_true: + return UninhabitedType(line=t.line, column=t.column) new_t = copy_type(t) new_t.can_be_false = False @@ -637,10 +635,8 @@ def false_only(t: Type) -> ProperType: else: ret_type = _get_type_special_method_bool_ret_type(t) - if ret_type and ret_type.can_be_true and not ret_type.can_be_false: - new_t = copy_type(t) - new_t.can_be_false = False - return new_t + if ret_type and not ret_type.can_be_false: + return UninhabitedType(line=t.line) new_t = copy_type(t) new_t.can_be_true = False diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 7a6c2cbfd1c7..20b5dea9fc87 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -1379,6 +1379,38 @@ def f() -> None: x = 1 # E: Statement is unreachable [builtins fixtures/dict.pyi] +[case testUnreachableLiteralFrom__bool__] +# flags: --warn-unreachable +from typing_extensions import Literal + +class Truth: + def __bool__(self) -> Literal[True]: ... + +class Lie: + def __bool__(self) -> Literal[False]: ... + +class Maybe: + def __bool__(self) -> Literal[True | False]: ... + +t = Truth() +if t: + x = 1 +else: + x = 2 # E: Statement is unreachable + +if Lie(): + x = 3 # E: Statement is unreachable + +if Maybe(): + x = 4 + + +def foo() -> bool: ... 
+ +y = Truth() or foo() # E: Right operand of "or" is never evaluated +z = Lie() and foo() # E: Right operand of "and" is never evaluated +[builtins fixtures/dict.pyi] + [case testUnreachableModuleBody1] # flags: --warn-unreachable from typing import NoReturn From 9dbb1232c0f2158f72d099d35d85252696d308ff Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sat, 12 Aug 2023 11:05:08 +0200 Subject: [PATCH 45/88] Apply TypeVar defaults to functions (PEP 696) (#15387) Use TypeVar defaults to resolve fallback return type of functions. **Note**: Defaults for TypeVarTuples don't yet work, probably a result of the limited support for `Unpack` / `TypeVarTuple`. Ref: #14851 --- mypy/applytype.py | 5 +++- test-data/unit/check-typevar-defaults.test | 35 ++++++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 55a51d4adbb6..f8be63362a6b 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -18,6 +18,7 @@ TypeVarLikeType, TypeVarTupleType, TypeVarType, + UninhabitedType, UnpackType, get_proper_type, ) @@ -32,13 +33,15 @@ def get_target_type( context: Context, skip_unsatisfied: bool, ) -> Type | None: + p_type = get_proper_type(type) + if isinstance(p_type, UninhabitedType) and tvar.has_default(): + return tvar.default if isinstance(tvar, ParamSpecType): return type if isinstance(tvar, TypeVarTupleType): return type assert isinstance(tvar, TypeVarType) values = tvar.values - p_type = get_proper_type(type) if values: if isinstance(p_type, AnyType): return type diff --git a/test-data/unit/check-typevar-defaults.test b/test-data/unit/check-typevar-defaults.test index 514186aa7518..36ec125eb1a4 100644 --- a/test-data/unit/check-typevar-defaults.test +++ b/test-data/unit/check-typevar-defaults.test @@ -81,3 +81,38 @@ T2 = TypeVar("T2", bound=List[str], default=List[int]) # E: TypeVar default mus T3 = TypeVar("T3", int, str, default=bytes) # E: TypeVar default must be one 
of the constraint types T4 = TypeVar("T4", int, str, default=Union[int, str]) # E: TypeVar default must be one of the constraint types T5 = TypeVar("T5", float, str, default=int) # E: TypeVar default must be one of the constraint types + +[case testTypeVarDefaultsFunctions] +from typing import TypeVar, ParamSpec, List, Union, Callable, Tuple +from typing_extensions import TypeVarTuple, Unpack + +T1 = TypeVar("T1", default=str) +T2 = TypeVar("T2", bound=str, default=str) +T3 = TypeVar("T3", bytes, str, default=str) +P1 = ParamSpec("P1", default=[int, str]) +Ts1 = TypeVarTuple("Ts1", default=Unpack[Tuple[int, str]]) + +def callback1(x: str) -> None: ... + +def func_a1(x: Union[int, T1]) -> T1: ... +reveal_type(func_a1(2)) # N: Revealed type is "builtins.str" +reveal_type(func_a1(2.1)) # N: Revealed type is "builtins.float" + +def func_a2(x: Union[int, T1]) -> List[T1]: ... +reveal_type(func_a2(2)) # N: Revealed type is "builtins.list[builtins.str]" +reveal_type(func_a2(2.1)) # N: Revealed type is "builtins.list[builtins.float]" + +def func_a3(x: Union[int, T2]) -> T2: ... +reveal_type(func_a3(2)) # N: Revealed type is "builtins.str" + +def func_a4(x: Union[int, T3]) -> T3: ... +reveal_type(func_a4(2)) # N: Revealed type is "builtins.str" + +def func_b1(x: Union[int, Callable[P1, None]]) -> Callable[P1, None]: ... +reveal_type(func_b1(callback1)) # N: Revealed type is "def (x: builtins.str)" +reveal_type(func_b1(2)) # N: Revealed type is "def (builtins.int, builtins.str)" + +def func_c1(x: Union[int, Callable[[Unpack[Ts1]], None]]) -> Tuple[Unpack[Ts1]]: ... 
+# reveal_type(func_c1(callback1)) # Revealed type is "builtins.tuple[str]" # TODO +# reveal_type(func_c1(2)) # Revealed type is "builtins.tuple[builtins.int, builtins.str]" # TODO +[builtins fixtures/tuple.pyi] From 3631528796cbf2a5a825d6a5fb2010853a0a46bd Mon Sep 17 00:00:00 2001 From: Gregory Santosa <94944372+gregorysantosa@users.noreply.github.com> Date: Sat, 12 Aug 2023 02:10:27 -0700 Subject: [PATCH 46/88] 'await' in non-async function is a blocking error (#15384) Fixes, https://github.com/python/mypy/issues/15339 --- mypy/errorcodes.py | 4 +++- mypy/semanal.py | 7 ++++++- test-data/unit/check-async-await.test | 2 +- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index e7d0c16f2d2d..3594458fa362 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -152,7 +152,9 @@ def __hash__(self) -> int: TOP_LEVEL_AWAIT: Final = ErrorCode( "top-level-await", "Warn about top level await expressions", "General" ) - +AWAIT_NOT_ASYNC: Final = ErrorCode( + "await-not-async", 'Warn about "await" outside coroutine ("async def")', "General" +) # These error codes aren't enabled by default. NO_UNTYPED_DEF: Final[ErrorCode] = ErrorCode( "no-untyped-def", "Check that every function has an annotation", "General" diff --git a/mypy/semanal.py b/mypy/semanal.py index 5b1aea4239f5..e21fc9f1c23f 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -5455,7 +5455,12 @@ def visit_await_expr(self, expr: AwaitExpr) -> None: # support top level awaits. 
self.fail('"await" outside function', expr, serious=True, code=codes.TOP_LEVEL_AWAIT) elif not self.function_stack[-1].is_coroutine: - self.fail('"await" outside coroutine ("async def")', expr, serious=True, blocker=True) + self.fail( + '"await" outside coroutine ("async def")', + expr, + serious=True, + code=codes.AWAIT_NOT_ASYNC, + ) expr.expr.accept(self) # diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index af6c31624b96..653025a0bb24 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -974,7 +974,7 @@ crasher = [await foo(x) for x in [1, 2, 3]] # E: "await" outside function [top def bad() -> None: # These are always critical / syntax issues: - y = [await foo(x) for x in [1, 2, 3]] # E: "await" outside coroutine ("async def") + y = [await foo(x) for x in [1, 2, 3]] # E: "await" outside coroutine ("async def") [await-not-async] async def good() -> None: y = [await foo(x) for x in [1, 2, 3]] # OK [builtins fixtures/async_await.pyi] From 041a8af015881655a88f5aff5351576a1ef9010b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 12 Aug 2023 21:15:48 +0100 Subject: [PATCH 47/88] [pre-commit.ci] pre-commit autoupdate (#15796) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.280 → v0.0.281](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.280...v0.0.281) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f2367f63bb3d..8650a2868cd6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: hooks: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.280 # must match 
test-requirements.txt + rev: v0.0.281 # must match test-requirements.txt hooks: - id: ruff args: [--exit-non-zero-on-fix] From 9787a26f97fd6f216260aac89aa2253ed655195b Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 12 Aug 2023 14:00:59 -0700 Subject: [PATCH 48/88] Document await-not-async error code (#15858) --- docs/source/error_code_list.rst | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index f7f702aa7fcb..157f90249af8 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -1027,9 +1027,20 @@ example: top = await f() # Error: "await" outside function [top-level-await] +.. _code-await-not-async: + +Warn about await expressions used outside of coroutines [await-not-async] +------------------------------------------------------------------------- + +``await`` must be used inside a coroutine. + +.. code-block:: python + + async def f() -> None: + ... + def g() -> None: - # This is a blocker error and cannot be silenced. - await f() # Error: "await" outside coroutine ("async def") + await f() # Error: "await" outside coroutine ("async def") [await-not-async] .. _code-assert-type: From 117b9147d975c51e27dbea9ab415bc0b3bf4ac69 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 13 Aug 2023 21:19:55 +0100 Subject: [PATCH 49/88] Add option to selectively disable --disallow-untyped-calls (#15845) Fixes #10757 It is surprisingly one of the most upvoted issues. Also it looks quite easy to implement, so why not. Note I also try to improve docs for per-module logic for `disallow_untyped_calls`, as there is currently some confusion. 
--------- Co-authored-by: Ivan Levkivskyi --- docs/source/command_line.rst | 28 +++++++++++++++++ docs/source/config_file.rst | 33 +++++++++++++++++++- mypy/checkexpr.py | 23 +++++++++----- mypy/config_parser.py | 18 +++++++++++ mypy/main.py | 17 +++++++++- mypy/options.py | 4 +++ test-data/unit/check-flags.test | 55 +++++++++++++++++++++++++++++++++ 7 files changed, 169 insertions(+), 9 deletions(-) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index d9de5cd8f9bd..727d500e2d4d 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -350,6 +350,34 @@ definitions or calls. This flag reports an error whenever a function with type annotations calls a function defined without annotations. +.. option:: --untyped-calls-exclude + + This flag allows to selectively disable :option:`--disallow-untyped-calls` + for functions and methods defined in specific packages, modules, or classes. + Note that each exclude entry acts as a prefix. For example (assuming there + are no type annotations for ``third_party_lib`` available): + + .. code-block:: python + + # mypy --disallow-untyped-calls + # --untyped-calls-exclude=third_party_lib.module_a + # --untyped-calls-exclude=foo.A + from third_party_lib.module_a import some_func + from third_party_lib.module_b import other_func + import foo + + some_func() # OK, function comes from module `third_party_lib.module_a` + other_func() # E: Call to untyped function "other_func" in typed context + + foo.A().meth() # OK, method was defined in class `foo.A` + foo.B().meth() # E: Call to untyped function "meth" in typed context + + # file foo.py + class A: + def meth(self): pass + class B: + def meth(self): pass + .. 
option:: --disallow-untyped-defs This flag reports an error whenever it encounters a function definition diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst index 9e79ff99937b..c0798bbf03f1 100644 --- a/docs/source/config_file.rst +++ b/docs/source/config_file.rst @@ -490,7 +490,38 @@ section of the command line docs. :default: False Disallows calling functions without type annotations from functions with type - annotations. + annotations. Note that when used in per-module options, it enables/disables + this check **inside** the module(s) specified, not for functions that come + from that module(s), for example config like this: + + .. code-block:: ini + + [mypy] + disallow_untyped_calls = True + + [mypy-some.library.*] + disallow_untyped_calls = False + + will disable this check inside ``some.library``, not for your code that + imports ``some.library``. If you want to selectively disable this check for + all your code that imports ``some.library`` you should instead use + :confval:`untyped_calls_exclude`, for example: + + .. code-block:: ini + + [mypy] + disallow_untyped_calls = True + untyped_calls_exclude = some.library + +.. confval:: untyped_calls_exclude + + :type: comma-separated list of strings + + Selectively excludes functions and methods defined in specific packages, + modules, and classes from action of :confval:`disallow_untyped_calls`. + This also applies to all submodules of packages (i.e. everything inside + a given prefix). Note, this option does not support per-file configuration, + the exclusions list is defined globally for all your code. .. 
confval:: disallow_untyped_defs diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 9e46d9ee39cb..6df64b32493c 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -529,13 +529,6 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> callee_type = get_proper_type( self.accept(e.callee, type_context, always_allow_any=True, is_callee=True) ) - if ( - self.chk.options.disallow_untyped_calls - and self.chk.in_checked_function() - and isinstance(callee_type, CallableType) - and callee_type.implicit - ): - self.msg.untyped_function_call(callee_type, e) # Figure out the full name of the callee for plugin lookup. object_type = None @@ -561,6 +554,22 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> ): member = e.callee.name object_type = self.chk.lookup_type(e.callee.expr) + + if ( + self.chk.options.disallow_untyped_calls + and self.chk.in_checked_function() + and isinstance(callee_type, CallableType) + and callee_type.implicit + ): + if fullname is None and member is not None: + assert object_type is not None + fullname = self.method_fullname(object_type, member) + if not fullname or not any( + fullname == p or fullname.startswith(f"{p}.") + for p in self.chk.options.untyped_calls_exclude + ): + self.msg.untyped_function_call(callee_type, e) + ret_type = self.check_call_expr_with_callee_type( callee_type, e, fullname, object_type, member ) diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 47b0bc3acabc..a84f3594a0d2 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -81,6 +81,20 @@ def validate_codes(codes: list[str]) -> list[str]: return codes +def validate_package_allow_list(allow_list: list[str]) -> list[str]: + for p in allow_list: + msg = f"Invalid allow list entry: {p}" + if "*" in p: + raise argparse.ArgumentTypeError( + f"{msg} (entries are already prefixes so must not contain *)" + ) + if "\\" in p or "/" in p: + raise argparse.ArgumentTypeError( + f"{msg} 
(entries must be packages like foo.bar not directories or files)" + ) + return allow_list + + def expand_path(path: str) -> str: """Expand the user home directory and any environment variables contained within the provided path. @@ -164,6 +178,9 @@ def split_commas(value: str) -> list[str]: "plugins": lambda s: [p.strip() for p in split_commas(s)], "always_true": lambda s: [p.strip() for p in split_commas(s)], "always_false": lambda s: [p.strip() for p in split_commas(s)], + "untyped_calls_exclude": lambda s: validate_package_allow_list( + [p.strip() for p in split_commas(s)] + ), "enable_incomplete_feature": lambda s: [p.strip() for p in split_commas(s)], "disable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), "enable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), @@ -187,6 +204,7 @@ def split_commas(value: str) -> list[str]: "plugins": try_split, "always_true": try_split, "always_false": try_split, + "untyped_calls_exclude": lambda s: validate_package_allow_list(try_split(s)), "enable_incomplete_feature": try_split, "disable_error_code": lambda s: validate_codes(try_split(s)), "enable_error_code": lambda s: validate_codes(try_split(s)), diff --git a/mypy/main.py b/mypy/main.py index 6173fd6fc1a8..30f6cfe97455 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -11,7 +11,12 @@ from typing import IO, Any, Final, NoReturn, Sequence, TextIO from mypy import build, defaults, state, util -from mypy.config_parser import get_config_module_names, parse_config_file, parse_version +from mypy.config_parser import ( + get_config_module_names, + parse_config_file, + parse_version, + validate_package_allow_list, +) from mypy.errorcodes import error_codes from mypy.errors import CompileError from mypy.find_sources import InvalidSourceList, create_source_list @@ -675,6 +680,14 @@ def add_invertible_flag( " from functions with type annotations", group=untyped_group, ) + untyped_group.add_argument( + "--untyped-calls-exclude", 
+ metavar="MODULE", + action="append", + default=[], + help="Disable --disallow-untyped-calls for functions/methods coming" + " from specific package, module, or class", + ) add_invertible_flag( "--disallow-untyped-defs", default=False, @@ -1307,6 +1320,8 @@ def set_strict_flags() -> None: % ", ".join(sorted(overlap)) ) + validate_package_allow_list(options.untyped_calls_exclude) + # Process `--enable-error-code` and `--disable-error-code` flags disabled_codes = set(options.disable_error_code) enabled_codes = set(options.enable_error_code) diff --git a/mypy/options.py b/mypy/options.py index 75343acd38bb..9b2e88335b24 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -136,6 +136,10 @@ def __init__(self) -> None: # Disallow calling untyped functions from typed ones self.disallow_untyped_calls = False + # Always allow untyped calls for function coming from modules/packages + # in this list (each item effectively acts as a prefix match) + self.untyped_calls_exclude: list[str] = [] + # Disallow defining untyped (or incompletely typed) functions self.disallow_untyped_defs = False diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index e21157eae991..96f78d81dd16 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -2077,6 +2077,61 @@ y = 1 f(reveal_type(y)) # E: Call to untyped function "f" in typed context \ # N: Revealed type is "builtins.int" +[case testDisallowUntypedCallsAllowListFlags] +# flags: --disallow-untyped-calls --untyped-calls-exclude=foo --untyped-calls-exclude=bar.A +from foo import test_foo +from bar import A, B +from baz import test_baz +from foobar import bad + +test_foo(42) # OK +test_baz(42) # E: Call to untyped function "test_baz" in typed context +bad(42) # E: Call to untyped function "bad" in typed context + +a: A +b: B +a.meth() # OK +b.meth() # E: Call to untyped function "meth" in typed context +[file foo.py] +def test_foo(x): pass +[file foobar.py] +def bad(x): pass +[file 
bar.py] +class A: + def meth(self): pass +class B: + def meth(self): pass +[file baz.py] +def test_baz(x): pass + +[case testDisallowUntypedCallsAllowListConfig] +# flags: --config-file tmp/mypy.ini +from foo import test_foo +from bar import A, B +from baz import test_baz + +test_foo(42) # OK +test_baz(42) # E: Call to untyped function "test_baz" in typed context + +a: A +b: B +a.meth() # OK +b.meth() # E: Call to untyped function "meth" in typed context +[file foo.py] +def test_foo(x): pass +[file bar.py] +class A: + def meth(self): pass +class B: + def meth(self): pass +[file baz.py] +def test_baz(x): pass + +[file mypy.ini] +\[mypy] +disallow_untyped_calls = True +untyped_calls_exclude = foo, bar.A + [case testPerModuleErrorCodes] # flags: --config-file tmp/mypy.ini import tests.foo From 98881d2cbf6b5a410b5eec2971edc80146422bac Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 13 Aug 2023 13:20:23 -0700 Subject: [PATCH 50/88] Add regression test for expand type -> simplified union crash (#15864) See #13431 Authored by ilevkivskyi --- test-data/unit/check-callable.test | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/test-data/unit/check-callable.test b/test-data/unit/check-callable.test index 7d25eb271f53..07c42de74bb3 100644 --- a/test-data/unit/check-callable.test +++ b/test-data/unit/check-callable.test @@ -587,3 +587,14 @@ class C(B): def f(self, x: int) -> C: ... class B: ... [builtins fixtures/classmethod.pyi] + +[case testCallableUnionCallback] +from typing import Union, Callable, TypeVar + +TA = TypeVar("TA", bound="A") +class A: + def __call__(self: TA, other: Union[Callable, TA]) -> TA: ... 
+a: A +a() # E: Missing positional argument "other" in call to "__call__" of "A" +a(a) +a(lambda: None) From edbfdaa802fd6d951026545b0eddcba5494fbb0b Mon Sep 17 00:00:00 2001 From: chylek <1331917+chylek@users.noreply.github.com> Date: Sun, 13 Aug 2023 22:46:58 +0200 Subject: [PATCH 51/88] Add option to include docstrings with stubgen (#13284) ### Description Closes #11965. Add a --include-docstrings flag to stubgen. This was suggested in #11965 along with a use case. When using this flag, the .pyi files will include docstrings for Python classes and functions and for C extension functions. The flag is optional and does not change the default stubgen behaviour. When using the flag, the resulting function stubs that contain docstring will no longer be one-liners, but functions without a docstring still retain the default one-liner style. Example input: ```python class A: """class docstring""" def func(): """func docstring""" ... def nodoc(): ... ``` output: ```python class A: """class docstring""" def func() -> None: """func docstring""" ... def nodoc() -> None: ... ``` ## Test Plan Tests `testIncludeDocstrings` and `testIgnoreDocstrings` were added to `test-data/unit/stubgen.test` to ensure the code works as intended. All other tests passed as well. C extension docstrings are tested using an updated bash script `misc/test_stubgenc.sh` with test data in `test-data/pybind11_mypy_demo/stubgen-include-docs` in same fashion as in an already existing test. 
--------- Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- docs/source/stubgen.rst | 5 + misc/test-stubgenc.sh | 30 +++-- mypy/fastparse.py | 4 + mypy/nodes.py | 4 + mypy/options.py | 6 + mypy/stubgen.py | 42 ++++++- mypy/stubgenc.py | 29 +++-- mypy/util.py | 17 +++ test-data/pybind11_mypy_demo/src/main.cpp | 4 +- .../pybind11_mypy_demo/__init__.pyi | 0 .../pybind11_mypy_demo/basics.pyi | 112 ++++++++++++++++++ test-data/unit/stubgen.test | 79 ++++++++++++ 12 files changed, 311 insertions(+), 21 deletions(-) create mode 100644 test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi create mode 100644 test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi diff --git a/docs/source/stubgen.rst b/docs/source/stubgen.rst index f06c9c066bb7..2de0743572e7 100644 --- a/docs/source/stubgen.rst +++ b/docs/source/stubgen.rst @@ -163,6 +163,11 @@ Additional flags Instead, only export imported names that are not referenced in the module that contains the import. +.. option:: --include-docstrings + + Include docstrings in stubs. This will add docstrings to Python function and + classes stubs and to C extension function stubs. + .. option:: --search-path PATH Specify module search directories, separated by colons (only used if diff --git a/misc/test-stubgenc.sh b/misc/test-stubgenc.sh index 7da135f0bf16..7713e1b04e43 100755 --- a/misc/test-stubgenc.sh +++ b/misc/test-stubgenc.sh @@ -3,17 +3,33 @@ set -e set -x -cd "$(dirname $0)/.." +cd "$(dirname "$0")/.." # Install dependencies, demo project and mypy python -m pip install -r test-requirements.txt python -m pip install ./test-data/pybind11_mypy_demo python -m pip install . 
-# Remove expected stubs and generate new inplace -STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/stubgen -rm -rf $STUBGEN_OUTPUT_FOLDER/* -stubgen -p pybind11_mypy_demo -o $STUBGEN_OUTPUT_FOLDER +EXIT=0 -# Compare generated stubs to expected ones -git diff --exit-code $STUBGEN_OUTPUT_FOLDER +# performs the stubgenc test +# first argument is the test result folder +# everything else is passed to stubgen as its arguments +function stubgenc_test() { + # Remove expected stubs and generate new inplace + STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/$1 + rm -rf "${STUBGEN_OUTPUT_FOLDER:?}/*" + stubgen -o "$STUBGEN_OUTPUT_FOLDER" "${@:2}" + + # Compare generated stubs to expected ones + if ! git diff --exit-code "$STUBGEN_OUTPUT_FOLDER"; + then + EXIT=$? + fi +} + +# create stubs without docstrings +stubgenc_test stubgen -p pybind11_mypy_demo +# create stubs with docstrings +stubgenc_test stubgen-include-docs -p pybind11_mypy_demo --include-docstrings +exit $EXIT diff --git a/mypy/fastparse.py b/mypy/fastparse.py index f7a98e9b2b8f..3a26cfe7d6ff 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1008,6 +1008,8 @@ def do_func_def( # FuncDef overrides set_line -- can't use self.set_line func_def.set_line(lineno, n.col_offset, end_line, end_column) retval = func_def + if self.options.include_docstrings: + func_def.docstring = ast3.get_docstring(n, clean=False) self.class_and_function_stack.pop() return retval @@ -1121,6 +1123,8 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: cdef.line = n.lineno cdef.deco_line = n.decorator_list[0].lineno if n.decorator_list else None + if self.options.include_docstrings: + cdef.docstring = ast3.get_docstring(n, clean=False) cdef.column = n.col_offset cdef.end_line = getattr(n, "end_lineno", None) cdef.end_column = getattr(n, "end_col_offset", None) diff --git a/mypy/nodes.py b/mypy/nodes.py index ebd222f4f253..452a4f643255 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -751,6 +751,7 @@ class 
FuncDef(FuncItem, SymbolNode, Statement): "is_mypy_only", # Present only when a function is decorated with @typing.datasclass_transform or similar "dataclass_transform_spec", + "docstring", ) __match_args__ = ("name", "arguments", "type", "body") @@ -779,6 +780,7 @@ def __init__( # Definitions that appear in if TYPE_CHECKING are marked with this flag. self.is_mypy_only = False self.dataclass_transform_spec: DataclassTransformSpec | None = None + self.docstring: str | None = None @property def name(self) -> str: @@ -1081,6 +1083,7 @@ class ClassDef(Statement): "analyzed", "has_incompatible_baseclass", "deco_line", + "docstring", "removed_statements", ) @@ -1127,6 +1130,7 @@ def __init__( self.has_incompatible_baseclass = False # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: int | None = None + self.docstring: str | None = None self.removed_statements = [] @property diff --git a/mypy/options.py b/mypy/options.py index 9b2e88335b24..5e451c0aa0a3 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -283,6 +283,12 @@ def __init__(self) -> None: # mypy. (Like mypyc.) self.preserve_asts = False + # If True, function and class docstrings will be extracted and retained. + # This isn't exposed as a command line option + # because it is intended for software integrating with + # mypy. (Like stubgen.) + self.include_docstrings = False + # Paths of user plugins self.plugins: list[str] = [] diff --git a/mypy/stubgen.py b/mypy/stubgen.py index a77ee738d56f..b6fc3e8b7377 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -243,6 +243,7 @@ def __init__( verbose: bool, quiet: bool, export_less: bool, + include_docstrings: bool, ) -> None: # See parse_options for descriptions of the flags. 
self.pyversion = pyversion @@ -261,6 +262,7 @@ def __init__( self.verbose = verbose self.quiet = quiet self.export_less = export_less + self.include_docstrings = include_docstrings class StubSource: @@ -624,6 +626,7 @@ def __init__( include_private: bool = False, analyzed: bool = False, export_less: bool = False, + include_docstrings: bool = False, ) -> None: # Best known value of __all__. self._all_ = _all_ @@ -638,6 +641,7 @@ def __init__( self._state = EMPTY self._toplevel_names: list[str] = [] self._include_private = include_private + self._include_docstrings = include_docstrings self._current_class: ClassDef | None = None self.import_tracker = ImportTracker() # Was the tree semantically analysed before? @@ -809,7 +813,13 @@ def visit_func_def(self, o: FuncDef) -> None: retfield = " -> " + retname self.add(", ".join(args)) - self.add(f"){retfield}: ...\n") + self.add(f"){retfield}:") + if self._include_docstrings and o.docstring: + docstring = mypy.util.quote_docstring(o.docstring) + self.add(f"\n{self._indent} {docstring}\n") + else: + self.add(" ...\n") + self._state = FUNC def is_none_expr(self, expr: Expression) -> bool: @@ -910,8 +920,11 @@ def visit_class_def(self, o: ClassDef) -> None: if base_types: self.add(f"({', '.join(base_types)})") self.add(":\n") - n = len(self._output) self._indent += " " + if self._include_docstrings and o.docstring: + docstring = mypy.util.quote_docstring(o.docstring) + self.add(f"{self._indent}{docstring}\n") + n = len(self._output) self._vars.append([]) super().visit_class_def(o) self._indent = self._indent[:-4] @@ -920,7 +933,8 @@ def visit_class_def(self, o: ClassDef) -> None: if len(self._output) == n: if self._state == EMPTY_CLASS and sep is not None: self._output[sep] = "" - self._output[-1] = self._output[-1][:-1] + " ...\n" + if not (self._include_docstrings and o.docstring): + self._output[-1] = self._output[-1][:-1] + " ...\n" self._state = EMPTY_CLASS else: self._state = CLASS @@ -1710,6 +1724,7 @@ def 
mypy_options(stubgen_options: Options) -> MypyOptions: options.show_traceback = True options.transform_source = remove_misplaced_type_comments options.preserve_asts = True + options.include_docstrings = stubgen_options.include_docstrings # Override cache_dir if provided in the environment environ_cache_dir = os.getenv("MYPY_CACHE_DIR", "") @@ -1773,6 +1788,7 @@ def generate_stub_from_ast( parse_only: bool = False, include_private: bool = False, export_less: bool = False, + include_docstrings: bool = False, ) -> None: """Use analysed (or just parsed) AST to generate type stub for single file. @@ -1784,6 +1800,7 @@ def generate_stub_from_ast( include_private=include_private, analyzed=not parse_only, export_less=export_less, + include_docstrings=include_docstrings, ) assert mod.ast is not None, "This function must be used only with analyzed modules" mod.ast.accept(gen) @@ -1845,7 +1862,12 @@ def generate_stubs(options: Options) -> None: files.append(target) with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): generate_stub_from_ast( - mod, target, options.parse_only, options.include_private, options.export_less + mod, + target, + options.parse_only, + options.include_private, + options.export_less, + include_docstrings=options.include_docstrings, ) # Separately analyse C modules using different logic. 
@@ -1859,7 +1881,11 @@ def generate_stubs(options: Options) -> None: files.append(target) with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): generate_stub_for_c_module( - mod.module, target, known_modules=all_modules, sig_generators=sig_generators + mod.module, + target, + known_modules=all_modules, + sig_generators=sig_generators, + include_docstrings=options.include_docstrings, ) num_modules = len(py_modules) + len(c_modules) if not options.quiet and num_modules > 0: @@ -1913,6 +1939,11 @@ def parse_options(args: list[str]) -> Options: action="store_true", help="don't implicitly export all names imported from other modules in the same package", ) + parser.add_argument( + "--include-docstrings", + action="store_true", + help="include existing docstrings with the stubs", + ) parser.add_argument("-v", "--verbose", action="store_true", help="show more verbose messages") parser.add_argument("-q", "--quiet", action="store_true", help="show fewer messages") parser.add_argument( @@ -1993,6 +2024,7 @@ def parse_options(args: list[str]) -> Options: verbose=ns.verbose, quiet=ns.quiet, export_less=ns.export_less, + include_docstrings=ns.include_docstrings, ) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 8aa1fb3d2c0a..31487f9d0dcf 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -14,6 +14,7 @@ from types import ModuleType from typing import Any, Final, Iterable, Mapping +import mypy.util from mypy.moduleinspect import is_c_module from mypy.stubdoc import ( ArgSig, @@ -169,6 +170,7 @@ def generate_stub_for_c_module( target: str, known_modules: list[str], sig_generators: Iterable[SignatureGenerator], + include_docstrings: bool = False, ) -> None: """Generate stub for C module. 
@@ -201,6 +203,7 @@ def generate_stub_for_c_module( known_modules=known_modules, imports=imports, sig_generators=sig_generators, + include_docstrings=include_docstrings, ) done.add(name) types: list[str] = [] @@ -216,6 +219,7 @@ def generate_stub_for_c_module( known_modules=known_modules, imports=imports, sig_generators=sig_generators, + include_docstrings=include_docstrings, ) done.add(name) variables = [] @@ -319,15 +323,17 @@ def generate_c_function_stub( self_var: str | None = None, cls: type | None = None, class_name: str | None = None, + include_docstrings: bool = False, ) -> None: """Generate stub for a single function or method. - The result (always a single line) will be appended to 'output'. + The result will be appended to 'output'. If necessary, any required names will be added to 'imports'. The 'class_name' is used to find signature of __init__ or __new__ in 'class_sigs'. """ inferred: list[FunctionSig] | None = None + docstr: str | None = None if class_name: # method: assert cls is not None, "cls should be provided for methods" @@ -379,13 +385,19 @@ def generate_c_function_stub( # a sig generator indicates @classmethod by specifying the cls arg if class_name and signature.args and signature.args[0].name == "cls": output.append("@classmethod") - output.append( - "def {function}({args}) -> {ret}: ...".format( - function=name, - args=", ".join(args), - ret=strip_or_import(signature.ret_type, module, known_modules, imports), - ) + output_signature = "def {function}({args}) -> {ret}:".format( + function=name, + args=", ".join(args), + ret=strip_or_import(signature.ret_type, module, known_modules, imports), ) + if include_docstrings and docstr: + docstr_quoted = mypy.util.quote_docstring(docstr.strip()) + docstr_indented = "\n ".join(docstr_quoted.split("\n")) + output.append(output_signature) + output.extend(f" {docstr_indented}".split("\n")) + else: + output_signature += " ..." 
+ output.append(output_signature) def strip_or_import( @@ -493,6 +505,7 @@ def generate_c_type_stub( known_modules: list[str], imports: list[str], sig_generators: Iterable[SignatureGenerator], + include_docstrings: bool = False, ) -> None: """Generate stub for a single class using runtime introspection. @@ -535,6 +548,7 @@ def generate_c_type_stub( cls=obj, class_name=class_name, sig_generators=sig_generators, + include_docstrings=include_docstrings, ) elif is_c_property(raw_value): generate_c_property_stub( @@ -557,6 +571,7 @@ def generate_c_type_stub( imports=imports, known_modules=known_modules, sig_generators=sig_generators, + include_docstrings=include_docstrings, ) else: attrs.append((attr, value)) diff --git a/mypy/util.py b/mypy/util.py index 8a079c5256bc..d0f2f8c6cc36 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -809,3 +809,20 @@ def plural_s(s: int | Sized) -> str: return "s" else: return "" + + +def quote_docstring(docstr: str) -> str: + """Returns docstring correctly encapsulated in a single or double quoted form.""" + # Uses repr to get hint on the correct quotes and escape everything properly. + # Creating multiline string for prettier output. + docstr_repr = "\n".join(re.split(r"(?<=[^\\])\\n", repr(docstr))) + + if docstr_repr.startswith("'"): + # Enforce double quotes when it's safe to do so. + # That is when double quotes are not in the string + # or when it doesn't end with a single quote. 
+ if '"' not in docstr_repr[1:-1] and docstr_repr[-2] != "'": + return f'"""{docstr_repr[1:-1]}"""' + return f"''{docstr_repr}''" + else: + return f'""{docstr_repr}""' diff --git a/test-data/pybind11_mypy_demo/src/main.cpp b/test-data/pybind11_mypy_demo/src/main.cpp index ff0f93bf7017..00e5b2f4e871 100644 --- a/test-data/pybind11_mypy_demo/src/main.cpp +++ b/test-data/pybind11_mypy_demo/src/main.cpp @@ -119,8 +119,8 @@ void bind_basics(py::module& basics) { using namespace basics; // Functions - basics.def("answer", &answer); - basics.def("sum", &sum); + basics.def("answer", &answer, "answer docstring, with end quote\""); // tests explicit docstrings + basics.def("sum", &sum, "multiline docstring test, edge case quotes \"\"\"'''"); basics.def("midpoint", &midpoint, py::arg("left"), py::arg("right")); basics.def("weighted_midpoint", weighted_midpoint, py::arg("left"), py::arg("right"), py::arg("alpha")=0.5); diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi new file mode 100644 index 000000000000..676d7f6d3f15 --- /dev/null +++ b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi @@ -0,0 +1,112 @@ +from typing import ClassVar + +from typing import overload +PI: float + +class Point: + class AngleUnit: + __members__: ClassVar[dict] = ... # read-only + __entries: ClassVar[dict] = ... + degree: ClassVar[Point.AngleUnit] = ... + radian: ClassVar[Point.AngleUnit] = ... 
+ def __init__(self, value: int) -> None: + """__init__(self: pybind11_mypy_demo.basics.Point.AngleUnit, value: int) -> None""" + def __eq__(self, other: object) -> bool: + """__eq__(self: object, other: object) -> bool""" + def __getstate__(self) -> int: + """__getstate__(self: object) -> int""" + def __hash__(self) -> int: + """__hash__(self: object) -> int""" + def __index__(self) -> int: + """__index__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int""" + def __int__(self) -> int: + """__int__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int""" + def __ne__(self, other: object) -> bool: + """__ne__(self: object, other: object) -> bool""" + def __setstate__(self, state: int) -> None: + """__setstate__(self: pybind11_mypy_demo.basics.Point.AngleUnit, state: int) -> None""" + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + + class LengthUnit: + __members__: ClassVar[dict] = ... # read-only + __entries: ClassVar[dict] = ... + inch: ClassVar[Point.LengthUnit] = ... + mm: ClassVar[Point.LengthUnit] = ... + pixel: ClassVar[Point.LengthUnit] = ... + def __init__(self, value: int) -> None: + """__init__(self: pybind11_mypy_demo.basics.Point.LengthUnit, value: int) -> None""" + def __eq__(self, other: object) -> bool: + """__eq__(self: object, other: object) -> bool""" + def __getstate__(self) -> int: + """__getstate__(self: object) -> int""" + def __hash__(self) -> int: + """__hash__(self: object) -> int""" + def __index__(self) -> int: + """__index__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int""" + def __int__(self) -> int: + """__int__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int""" + def __ne__(self, other: object) -> bool: + """__ne__(self: object, other: object) -> bool""" + def __setstate__(self, state: int) -> None: + """__setstate__(self: pybind11_mypy_demo.basics.Point.LengthUnit, state: int) -> None""" + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... 
+ angle_unit: ClassVar[Point.AngleUnit] = ... + length_unit: ClassVar[Point.LengthUnit] = ... + x_axis: ClassVar[Point] = ... # read-only + y_axis: ClassVar[Point] = ... # read-only + origin: ClassVar[Point] = ... + x: float + y: float + @overload + def __init__(self) -> None: + """__init__(*args, **kwargs) + Overloaded function. + + 1. __init__(self: pybind11_mypy_demo.basics.Point) -> None + + 2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" + @overload + def __init__(self, x: float, y: float) -> None: + """__init__(*args, **kwargs) + Overloaded function. + + 1. __init__(self: pybind11_mypy_demo.basics.Point) -> None + + 2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" + @overload + def distance_to(self, x: float, y: float) -> float: + """distance_to(*args, **kwargs) + Overloaded function. + + 1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float + + 2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" + @overload + def distance_to(self, other: Point) -> float: + """distance_to(*args, **kwargs) + Overloaded function. + + 1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float + + 2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" + @property + def length(self) -> float: ... + +def answer() -> int: + '''answer() -> int + + answer docstring, with end quote"''' +def midpoint(left: float, right: float) -> float: + """midpoint(left: float, right: float) -> float""" +def sum(arg0: int, arg1: int) -> int: + '''sum(arg0: int, arg1: int) -> int + + multiline docstring test, edge case quotes """\'\'\'''' +def weighted_midpoint(left: float, right: float, alpha: float = ...) 
-> float: + """weighted_midpoint(left: float, right: float, alpha: float = 0.5) -> float""" diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index f6b71a994153..774a17b76161 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -3183,6 +3183,85 @@ def f2(): def f1(): ... def f2(): ... +[case testIncludeDocstrings] +# flags: --include-docstrings +class A: + """class docstring + + a multiline docstring""" + def func(): + """func docstring + don't forget to indent""" + ... + def nodoc(): + ... +class B: + def quoteA(): + '''func docstring with quotes"""\\n + and an end quote\'''' + ... + def quoteB(): + '''func docstring with quotes""" + \'\'\' + and an end quote\\"''' + ... + def quoteC(): + """func docstring with end quote\\\"""" + ... + def quoteD(): + r'''raw with quotes\"''' + ... +[out] +class A: + """class docstring + + a multiline docstring""" + def func() -> None: + """func docstring + don't forget to indent""" + def nodoc() -> None: ... + +class B: + def quoteA() -> None: + '''func docstring with quotes"""\\n + and an end quote\'''' + def quoteB() -> None: + '''func docstring with quotes""" + \'\'\' + and an end quote\\"''' + def quoteC() -> None: + '''func docstring with end quote\\"''' + def quoteD() -> None: + '''raw with quotes\\"''' + +[case testIgnoreDocstrings] +class A: + """class docstring + + a multiline docstring""" + def func(): + """func docstring + + don't forget to indent""" + def nodoc(): + ... + +class B: + def func(): + """func docstring""" + ... + def nodoc(): + ... + +[out] +class A: + def func() -> None: ... + def nodoc() -> None: ... + +class B: + def func() -> None: ... + def nodoc() -> None: ... + [case testKnownMagicMethodsReturnTypes] class Some: def __len__(self): ... 
From 11a94be6f408d5bce391a1ec3931ce7197ca207b Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Mon, 14 Aug 2023 13:58:52 +0100 Subject: [PATCH 52/88] Add regression test for fixed bug involving bytes formatting (#15867) Adds a regression test for #12665, which is a strange bug that was fixed somewhat by accident --- test-data/unit/check-formatting.test | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/test-data/unit/check-formatting.test b/test-data/unit/check-formatting.test index 7d23c2e199f1..75651124b76f 100644 --- a/test-data/unit/check-formatting.test +++ b/test-data/unit/check-formatting.test @@ -484,6 +484,23 @@ class D(bytes): '{}'.format(D()) [builtins fixtures/primitives.pyi] +[case testNoSpuriousFormattingErrorsDuringFailedOverlodMatch] +from typing import overload, Callable + +@overload +def sub(pattern: str, repl: Callable[[str], str]) -> str: ... +@overload +def sub(pattern: bytes, repl: Callable[[bytes], bytes]) -> bytes: ... +def sub(pattern: object, repl: object) -> object: + pass + +def better_snakecase(text: str) -> str: + # Mypy used to emit a spurious error here + # warning about interpolating bytes into an f-string: + text = sub(r"([A-Z])([A-Z]+)([A-Z](?:[^A-Z]|$))", lambda match: f"{match}") + return text +[builtins fixtures/primitives.pyi] + [case testFormatCallFinal] from typing_extensions import Final From a1fcad5bd6a6f71fac6f1a2f235302b2172ddd7d Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Mon, 14 Aug 2023 21:54:09 +0100 Subject: [PATCH 53/88] Add missing type annotations to the `primitives.pyi` fixture (#15871) This fixes some weird test failures I was seeing locally when trying to run just the tests in `check-enum.test` (invoked via `pytest mypy/test/testcheck.py::TypeCheckSuite::check-enum.test`) --- test-data/unit/fixtures/primitives.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi index 
c9b1e3f4e983..63128a8ae03d 100644 --- a/test-data/unit/fixtures/primitives.pyi +++ b/test-data/unit/fixtures/primitives.pyi @@ -12,7 +12,7 @@ class object: def __ne__(self, other: object) -> bool: pass class type: - def __init__(self, x) -> None: pass + def __init__(self, x: object) -> None: pass class int: # Note: this is a simplification of the actual signature @@ -30,7 +30,7 @@ class str(Sequence[str]): def __iter__(self) -> Iterator[str]: pass def __contains__(self, other: object) -> bool: pass def __getitem__(self, item: int) -> str: pass - def format(self, *args, **kwargs) -> str: pass + def format(self, *args: object, **kwargs: object) -> str: pass class bytes(Sequence[int]): def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass From 854a9f8f82a6dae085d3514897961871fe7005b1 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 14 Aug 2023 22:05:56 +0100 Subject: [PATCH 54/88] Allow None vs TypeVar overlap for overloads (#15846) Fixes #8881 This is technically unsafe, and I remember we explicitly discussed this a while ago, but related use cases turn out to be more common than I expected (judging by how popular the issue is). Also the fix is really simple. --------- Co-authored-by: Ivan Levkivskyi Co-authored-by: Alex Waygood --- mypy/checker.py | 24 ++++++-- mypy/checkexpr.py | 86 +++++++++++++++++++++------ mypy/subtypes.py | 15 ++++- test-data/unit/check-overloading.test | 39 ++++++++++-- 4 files changed, 135 insertions(+), 29 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index b786155079e5..3bd9c494a890 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -7216,22 +7216,32 @@ def is_unsafe_overlapping_overload_signatures( # # This discrepancy is unfortunately difficult to get rid of, so we repeat the # checks twice in both directions for now. + # + # Note that we ignore possible overlap between type variables and None. 
This + # is technically unsafe, but unsafety is tiny and this prevents some common + # use cases like: + # @overload + # def foo(x: None) -> None: .. + # @overload + # def foo(x: T) -> Foo[T]: ... return is_callable_compatible( signature, other, - is_compat=is_overlapping_types_no_promote_no_uninhabited, + is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none, is_compat_return=lambda l, r: not is_subtype_no_promote(l, r), ignore_return=False, check_args_covariantly=True, allow_partial_overlap=True, + no_unify_none=True, ) or is_callable_compatible( other, signature, - is_compat=is_overlapping_types_no_promote_no_uninhabited, + is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none, is_compat_return=lambda l, r: not is_subtype_no_promote(r, l), ignore_return=False, check_args_covariantly=False, allow_partial_overlap=True, + no_unify_none=True, ) @@ -7717,12 +7727,18 @@ def is_subtype_no_promote(left: Type, right: Type) -> bool: return is_subtype(left, right, ignore_promotions=True) -def is_overlapping_types_no_promote_no_uninhabited(left: Type, right: Type) -> bool: +def is_overlapping_types_no_promote_no_uninhabited_no_none(left: Type, right: Type) -> bool: # For the purpose of unsafe overload checks we consider list[] and list[int] # non-overlapping. This is consistent with how we treat list[int] and list[str] as # non-overlapping, despite [] belongs to both. Also this will prevent false positives # for failed type inference during unification. - return is_overlapping_types(left, right, ignore_promotions=True, ignore_uninhabited=True) + return is_overlapping_types( + left, + right, + ignore_promotions=True, + ignore_uninhabited=True, + prohibit_none_typevar_overlap=True, + ) def is_private(node_name: str) -> bool: diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 6df64b32493c..d00bbb288f3e 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2409,6 +2409,11 @@ def check_overload_call( # typevar. 
See https://github.com/python/mypy/issues/4063 for related discussion. erased_targets: list[CallableType] | None = None unioned_result: tuple[Type, Type] | None = None + + # Determine whether we need to encourage union math. This should be generally safe, + # as union math infers better results in the vast majority of cases, but it is very + # computationally intensive. + none_type_var_overlap = self.possible_none_type_var_overlap(arg_types, plausible_targets) union_interrupted = False # did we try all union combinations? if any(self.real_union(arg) for arg in arg_types): try: @@ -2421,6 +2426,7 @@ def check_overload_call( arg_names, callable_name, object_type, + none_type_var_overlap, context, ) except TooManyUnions: @@ -2453,8 +2459,10 @@ def check_overload_call( # If any of checks succeed, stop early. if inferred_result is not None and unioned_result is not None: # Both unioned and direct checks succeeded, choose the more precise type. - if is_subtype(inferred_result[0], unioned_result[0]) and not isinstance( - get_proper_type(inferred_result[0]), AnyType + if ( + is_subtype(inferred_result[0], unioned_result[0]) + and not isinstance(get_proper_type(inferred_result[0]), AnyType) + and not none_type_var_overlap ): return inferred_result return unioned_result @@ -2504,7 +2512,8 @@ def check_overload_call( callable_name=callable_name, object_type=object_type, ) - if union_interrupted: + # Do not show the extra error if the union math was forced. + if union_interrupted and not none_type_var_overlap: self.chk.fail(message_registry.TOO_MANY_UNION_COMBINATIONS, context) return result @@ -2659,6 +2668,44 @@ def overload_erased_call_targets( matches.append(typ) return matches + def possible_none_type_var_overlap( + self, arg_types: list[Type], plausible_targets: list[CallableType] + ) -> bool: + """Heuristic to determine whether we need to try forcing union math. 
+ + This is needed to avoid greedy type variable match in situations like this: + @overload + def foo(x: None) -> None: ... + @overload + def foo(x: T) -> list[T]: ... + + x: int | None + foo(x) + we want this call to infer list[int] | None, not list[int | None]. + """ + if not plausible_targets or not arg_types: + return False + has_optional_arg = False + for arg_type in get_proper_types(arg_types): + if not isinstance(arg_type, UnionType): + continue + for item in get_proper_types(arg_type.items): + if isinstance(item, NoneType): + has_optional_arg = True + break + if not has_optional_arg: + return False + + min_prefix = min(len(c.arg_types) for c in plausible_targets) + for i in range(min_prefix): + if any( + isinstance(get_proper_type(c.arg_types[i]), NoneType) for c in plausible_targets + ) and any( + isinstance(get_proper_type(c.arg_types[i]), TypeVarType) for c in plausible_targets + ): + return True + return False + def union_overload_result( self, plausible_targets: list[CallableType], @@ -2668,6 +2715,7 @@ def union_overload_result( arg_names: Sequence[str | None] | None, callable_name: str | None, object_type: Type | None, + none_type_var_overlap: bool, context: Context, level: int = 0, ) -> list[tuple[Type, Type]] | None: @@ -2707,20 +2755,23 @@ def union_overload_result( # Step 3: Try a direct match before splitting to avoid unnecessary union splits # and save performance. - with self.type_overrides_set(args, arg_types): - direct = self.infer_overload_return_type( - plausible_targets, - args, - arg_types, - arg_kinds, - arg_names, - callable_name, - object_type, - context, - ) - if direct is not None and not isinstance(get_proper_type(direct[0]), (UnionType, AnyType)): - # We only return non-unions soon, to avoid greedy match. 
- return [direct] + if not none_type_var_overlap: + with self.type_overrides_set(args, arg_types): + direct = self.infer_overload_return_type( + plausible_targets, + args, + arg_types, + arg_kinds, + arg_names, + callable_name, + object_type, + context, + ) + if direct is not None and not isinstance( + get_proper_type(direct[0]), (UnionType, AnyType) + ): + # We only return non-unions soon, to avoid greedy match. + return [direct] # Step 4: Split the first remaining union type in arguments into items and # try to match each item individually (recursive). @@ -2738,6 +2789,7 @@ def union_overload_result( arg_names, callable_name, object_type, + none_type_var_overlap, context, level + 1, ) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 5712d7375e50..da92f7398d4e 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1299,6 +1299,7 @@ def is_callable_compatible( check_args_covariantly: bool = False, allow_partial_overlap: bool = False, strict_concatenate: bool = False, + no_unify_none: bool = False, ) -> bool: """Is the left compatible with the right, using the provided compatibility check? @@ -1415,7 +1416,9 @@ def g(x: int) -> int: ... # (below) treats type variables on the two sides as independent. if left.variables: # Apply generic type variables away in left via type inference. - unified = unify_generic_callable(left, right, ignore_return=ignore_return) + unified = unify_generic_callable( + left, right, ignore_return=ignore_return, no_unify_none=no_unify_none + ) if unified is None: return False left = unified @@ -1427,7 +1430,9 @@ def g(x: int) -> int: ... # So, we repeat the above checks in the opposite direction. This also # lets us preserve the 'symmetry' property of allow_partial_overlap. 
if allow_partial_overlap and right.variables: - unified = unify_generic_callable(right, left, ignore_return=ignore_return) + unified = unify_generic_callable( + right, left, ignore_return=ignore_return, no_unify_none=no_unify_none + ) if unified is not None: right = unified @@ -1687,6 +1692,8 @@ def unify_generic_callable( target: NormalizedCallableType, ignore_return: bool, return_constraint_direction: int | None = None, + *, + no_unify_none: bool = False, ) -> NormalizedCallableType | None: """Try to unify a generic callable type with another callable type. @@ -1708,6 +1715,10 @@ def unify_generic_callable( type.ret_type, target.ret_type, return_constraint_direction ) constraints.extend(c) + if no_unify_none: + constraints = [ + c for c in constraints if not isinstance(get_proper_type(c.target), NoneType) + ] inferred_vars, _ = mypy.solve.solve_constraints(type.variables, constraints) if None in inferred_vars: return None diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 50acd7d77c8c..4910dfe05d31 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -2185,36 +2185,63 @@ def bar2(*x: int) -> int: ... [builtins fixtures/tuple.pyi] [case testOverloadDetectsPossibleMatchesWithGenerics] -from typing import overload, TypeVar, Generic +# flags: --strict-optional +from typing import overload, TypeVar, Generic, Optional, List T = TypeVar('T') +# The examples below are unsafe, but it is a quite common pattern +# so we ignore the possibility of type variables taking value `None` +# for the purpose of overload overlap checks. @overload -def foo(x: None, y: None) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def foo(x: None, y: None) -> str: ... @overload def foo(x: T, y: T) -> int: ... def foo(x): ... 
+oi: Optional[int] +reveal_type(foo(None, None)) # N: Revealed type is "builtins.str" +reveal_type(foo(None, 42)) # N: Revealed type is "builtins.int" +reveal_type(foo(42, 42)) # N: Revealed type is "builtins.int" +reveal_type(foo(oi, None)) # N: Revealed type is "Union[builtins.int, builtins.str]" +reveal_type(foo(oi, 42)) # N: Revealed type is "builtins.int" +reveal_type(foo(oi, oi)) # N: Revealed type is "Union[builtins.int, builtins.str]" + +@overload +def foo_list(x: None) -> None: ... +@overload +def foo_list(x: T) -> List[T]: ... +def foo_list(x): ... + +reveal_type(foo_list(oi)) # N: Revealed type is "Union[builtins.list[builtins.int], None]" + # What if 'T' is 'object'? @overload -def bar(x: None, y: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def bar(x: None, y: int) -> str: ... @overload def bar(x: T, y: T) -> int: ... def bar(x, y): ... class Wrapper(Generic[T]): @overload - def foo(self, x: None, y: None) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def foo(self, x: None, y: None) -> str: ... @overload def foo(self, x: T, y: None) -> int: ... def foo(self, x): ... @overload - def bar(self, x: None, y: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def bar(self, x: None, y: int) -> str: ... @overload def bar(self, x: T, y: T) -> int: ... def bar(self, x, y): ... +@overload +def baz(x: str, y: str) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +@overload +def baz(x: T, y: T) -> int: ... +def baz(x): ... +[builtins fixtures/tuple.pyi] + [case testOverloadFlagsPossibleMatches] from wrapper import * [file wrapper.pyi] @@ -3996,7 +4023,7 @@ T = TypeVar('T') class FakeAttribute(Generic[T]): @overload - def dummy(self, instance: None, owner: Type[T]) -> 'FakeAttribute[T]': ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types + def dummy(self, instance: None, owner: Type[T]) -> 'FakeAttribute[T]': ... @overload def dummy(self, instance: T, owner: Type[T]) -> int: ... def dummy(self, instance: Optional[T], owner: Type[T]) -> Union['FakeAttribute[T]', int]: ... From b49be105d2940e3a0607f5ec76f519931b0d0a08 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 18:09:10 -0700 Subject: [PATCH 55/88] Sync typeshed (#15873) Source commit: https://github.com/python/typeshed/commit/74aac1aa891cbb568b124c955010b19d40f9fda7 --- mypy/typeshed/stdlib/asyncio/base_events.pyi | 47 ++++++++++++++++++- mypy/typeshed/stdlib/asyncio/constants.pyi | 2 + mypy/typeshed/stdlib/asyncio/events.pyi | 2 + mypy/typeshed/stdlib/asyncio/streams.pyi | 11 ++++- mypy/typeshed/stdlib/asyncio/tasks.pyi | 23 ++++++++- mypy/typeshed/stdlib/enum.pyi | 10 ++-- .../stdlib/importlib/metadata/__init__.pyi | 1 + mypy/typeshed/stdlib/logging/__init__.pyi | 18 ++++++- mypy/typeshed/stdlib/socket.pyi | 1 + mypy/typeshed/stdlib/sre_parse.pyi | 24 ++++++++-- mypy/typeshed/stdlib/ssl.pyi | 41 +++++++++++----- mypy/typeshed/stdlib/turtle.pyi | 7 +++ mypy/typeshed/stdlib/typing.pyi | 10 +++- mypy/typeshed/stdlib/typing_extensions.pyi | 10 +++- 14 files changed, 175 insertions(+), 32 deletions(-) diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index 9924f728f6ea..e2b55da8c718 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -107,7 +107,48 @@ class BaseEventLoop(AbstractEventLoop): flags: int = 0, ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... 
- if sys.version_info >= (3, 11): + if sys.version_info >= (3, 12): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + all_errors: bool = False, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + all_errors: bool = False, + ) -> tuple[Transport, _ProtocolT]: ... + elif sys.version_info >= (3, 11): @overload async def create_connection( self, @@ -426,5 +467,7 @@ class BaseEventLoop(AbstractEventLoop): # Debug flag management. def get_debug(self) -> bool: ... def set_debug(self, enabled: bool) -> None: ... - if sys.version_info >= (3, 9): + if sys.version_info >= (3, 12): + async def shutdown_default_executor(self, timeout: float | None = None) -> None: ... + elif sys.version_info >= (3, 9): async def shutdown_default_executor(self) -> None: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/constants.pyi b/mypy/typeshed/stdlib/asyncio/constants.pyi index af209fa9ee62..60d8529209c2 100644 --- a/mypy/typeshed/stdlib/asyncio/constants.pyi +++ b/mypy/typeshed/stdlib/asyncio/constants.pyi @@ -11,6 +11,8 @@ if sys.version_info >= (3, 11): SSL_SHUTDOWN_TIMEOUT: float FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256] FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512] +if sys.version_info >= (3, 12): + THREAD_JOIN_TIMEOUT: Literal[300] class _SendfileMode(enum.Enum): UNSUPPORTED: int diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index b1b0fcfa5fd7..cde63b279b0d 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -76,6 +76,8 @@ class Handle: def cancel(self) -> None: ... def _run(self) -> None: ... def cancelled(self) -> bool: ... + if sys.version_info >= (3, 12): + def get_context(self) -> Context: ... class TimerHandle(Handle): def __init__( diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi index f30c57305d93..804be1ca5065 100644 --- a/mypy/typeshed/stdlib/asyncio/streams.pyi +++ b/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -148,7 +148,16 @@ class StreamWriter: async def wait_closed(self) -> None: ... def get_extra_info(self, name: str, default: Any = None) -> Any: ... async def drain(self) -> None: ... - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 12): + async def start_tls( + self, + sslcontext: ssl.SSLContext, + *, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + elif sys.version_info >= (3, 11): async def start_tls( self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None ) -> None: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index d8c101f281fc..5ea30d3791de 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -285,7 +285,26 @@ else: # since the only reason why `asyncio.Future` is invariant is the `set_result()` method, # and `asyncio.Task.set_result()` always raises. class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: ignore[reportGeneralTypeIssues] - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 12): + def __init__( + self, + coro: _TaskCompatibleCoro[_T_co], + *, + loop: AbstractEventLoop = ..., + name: str | None, + context: Context | None = None, + eager_start: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + coro: _TaskCompatibleCoro[_T_co], + *, + loop: AbstractEventLoop = ..., + name: str | None, + context: Context | None = None, + ) -> None: ... + elif sys.version_info >= (3, 8): def __init__( self, coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop = ..., name: str | None = ... ) -> None: ... @@ -295,6 +314,8 @@ class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: def get_coro(self) -> _TaskCompatibleCoro[_T_co]: ... def get_name(self) -> str: ... def set_name(self, __value: object) -> None: ... + if sys.version_info >= (3, 12): + def get_context(self) -> Context: ... def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... 
diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index 60cc27215fd0..a8ba7bf157c2 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -2,7 +2,6 @@ import _typeshed import sys import types from _typeshed import SupportsKeysAndGetItem, Unused -from abc import ABCMeta from builtins import property as _builtins_property from collections.abc import Callable, Iterable, Iterator, Mapping from typing import Any, Generic, TypeVar, overload @@ -76,12 +75,8 @@ class _EnumDict(dict[str, Any]): @overload def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ... -# Note: EnumMeta actually subclasses type directly, not ABCMeta. -# This is a temporary workaround to allow multiple creation of enums with builtins -# such as str as mixins, which due to the handling of ABCs of builtin types, cause -# spurious inconsistent metaclass structure. See #1595. # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself -class EnumMeta(ABCMeta): +class EnumMeta(type): if sys.version_info >= (3, 11): def __new__( metacls: type[_typeshed.Self], @@ -193,6 +188,9 @@ class Enum(metaclass=EnumMeta): def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... + if sys.version_info >= (3, 12): + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... if sys.version_info >= (3, 11): class ReprEnum(Enum): ... diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi index 0af33bc876c4..0f8a6f56cf88 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -180,6 +180,7 @@ class MetadataPathFinder(DistributionFinder): def invalidate_caches(cls) -> None: ... class PathDistribution(Distribution): + _path: Path def __init__(self, path: Path) -> None: ... 
def read_text(self, filename: StrPath) -> str: ... def locate_file(self, path: StrPath) -> PathLike[str]: ... diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi index 6ebd305aacb8..db797d4180ea 100644 --- a/mypy/typeshed/stdlib/logging/__init__.pyi +++ b/mypy/typeshed/stdlib/logging/__init__.pyi @@ -60,6 +60,8 @@ __all__ = [ if sys.version_info >= (3, 11): __all__ += ["getLevelNamesMapping"] +if sys.version_info >= (3, 12): + __all__ += ["getHandlerByName", "getHandlerNames"] _SysExcInfoType: TypeAlias = tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None] _ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException @@ -83,7 +85,10 @@ class Filterer: filters: list[_FilterType] def addFilter(self, filter: _FilterType) -> None: ... def removeFilter(self, filter: _FilterType) -> None: ... - def filter(self, record: LogRecord) -> bool: ... + if sys.version_info >= (3, 12): + def filter(self, record: LogRecord) -> bool | LogRecord: ... + else: + def filter(self, record: LogRecord) -> bool: ... class Manager: # undocumented root: RootLogger @@ -111,6 +116,8 @@ class Logger(Filterer): def isEnabledFor(self, level: int) -> bool: ... def getEffectiveLevel(self) -> int: ... def getChild(self, suffix: str) -> Self: ... # see python/typing#980 + if sys.version_info >= (3, 12): + def getChildren(self) -> set[Logger]: ... if sys.version_info >= (3, 8): def debug( self, @@ -324,6 +331,10 @@ class Handler(Filterer): def format(self, record: LogRecord) -> str: ... def emit(self, record: LogRecord) -> None: ... +if sys.version_info >= (3, 12): + def getHandlerByName(name: str) -> Handler | None: ... + def getHandlerNames() -> frozenset[str]: ... + class Formatter: converter: Callable[[float | None], struct_time] _fmt: str | None # undocumented @@ -370,7 +381,10 @@ class Filter: name: str # undocumented nlen: int # undocumented def __init__(self, name: str = "") -> None: ... 
- def filter(self, record: LogRecord) -> bool: ... + if sys.version_info >= (3, 12): + def filter(self, record: LogRecord) -> bool | LogRecord: ... + else: + def filter(self, record: LogRecord) -> bool: ... class LogRecord: # args can be set to None by logging.handlers.QueueHandler diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index 5dd92ec8e116..da06ce2c2b06 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -201,6 +201,7 @@ if sys.platform != "win32" and sys.platform != "darwin": TCP_LINGER2 as TCP_LINGER2, TCP_QUICKACK as TCP_QUICKACK, TCP_SYNCNT as TCP_SYNCNT, + TCP_USER_TIMEOUT as TCP_USER_TIMEOUT, TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP, ) if sys.platform != "win32": diff --git a/mypy/typeshed/stdlib/sre_parse.pyi b/mypy/typeshed/stdlib/sre_parse.pyi index 56f10bb41d57..8ef65223dc34 100644 --- a/mypy/typeshed/stdlib/sre_parse.pyi +++ b/mypy/typeshed/stdlib/sre_parse.pyi @@ -87,25 +87,39 @@ class Tokenizer: def seek(self, index: int) -> None: ... def error(self, msg: str, offset: int = 0) -> _Error: ... - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 12): + def checkgroupname(self, name: str, offset: int) -> None: ... + elif sys.version_info >= (3, 11): def checkgroupname(self, name: str, offset: int, nested: int) -> None: ... def fix_flags(src: str | bytes, flags: int) -> int: ... _TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]] _TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]] -if sys.version_info >= (3, 8): - def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ... + +if sys.version_info >= (3, 12): + @overload + def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ... 
+ +elif sys.version_info >= (3, 8): @overload def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ... @overload def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ... else: - def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ... @overload def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... @overload def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ... -def expand_template(template: _TemplateType, match: Match[Any]) -> str: ... +if sys.version_info >= (3, 8): + def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ... + +else: + def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ... + +if sys.version_info < (3, 12): + def expand_template(template: _TemplateType, match: Match[Any]) -> str: ... diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 446bbf8d1009..dd7285196ed9 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -44,18 +44,20 @@ class SSLCertVerificationError(SSLError, ValueError): CertificateError = SSLCertVerificationError -def wrap_socket( - sock: socket.socket, - keyfile: StrOrBytesPath | None = None, - certfile: StrOrBytesPath | None = None, - server_side: bool = False, - cert_reqs: int = ..., - ssl_version: int = ..., - ca_certs: str | None = None, - do_handshake_on_connect: bool = True, - suppress_ragged_eofs: bool = True, - ciphers: str | None = None, -) -> SSLSocket: ... +if sys.version_info < (3, 12): + def wrap_socket( + sock: socket.socket, + keyfile: StrOrBytesPath | None = None, + certfile: StrOrBytesPath | None = None, + server_side: bool = False, + cert_reqs: int = ..., + ssl_version: int = ..., + ca_certs: str | None = None, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + ciphers: str | None = None, + ) -> SSLSocket: ... 
+ def create_default_context( purpose: Purpose = ..., *, @@ -95,7 +97,10 @@ else: _create_default_https_context: Callable[..., SSLContext] def RAND_bytes(__n: int) -> bytes: ... -def RAND_pseudo_bytes(__n: int) -> tuple[bytes, bool]: ... + +if sys.version_info < (3, 12): + def RAND_pseudo_bytes(__n: int) -> tuple[bytes, bool]: ... + def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... def RAND_add(__string: str | ReadableBuffer, __entropy: float) -> None: ... @@ -198,6 +203,11 @@ class Options(enum.IntFlag): OP_ENABLE_MIDDLEBOX_COMPAT: int if sys.platform == "linux": OP_IGNORE_UNEXPECTED_EOF: int + if sys.version_info >= (3, 12): + OP_LEGACY_SERVER_CONNECT: int + if sys.version_info >= (3, 12) and sys.platform != "linux": + OP_ENABLE_KTLS: int + OP_IGNORE_UNEXPECTED_EOF: int OP_ALL: Options OP_NO_SSLv2: Options @@ -216,6 +226,11 @@ if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: Options if sys.platform == "linux": OP_IGNORE_UNEXPECTED_EOF: Options +if sys.version_info >= (3, 12): + OP_LEGACY_SERVER_CONNECT: Options +if sys.version_info >= (3, 12) and sys.platform != "linux": + OP_ENABLE_KTLS: Options + OP_IGNORE_UNEXPECTED_EOF: Options HAS_NEVER_CHECK_COMMON_NAME: bool HAS_SSLv2: bool diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi index 5df3e4b90cb5..80ea40879dee 100644 --- a/mypy/typeshed/stdlib/turtle.pyi +++ b/mypy/typeshed/stdlib/turtle.pyi @@ -1,3 +1,4 @@ +import sys from collections.abc import Callable, Sequence from tkinter import Canvas, Frame, Misc, PhotoImage, Scrollbar from typing import Any, ClassVar, overload @@ -249,6 +250,9 @@ class TNavigator: def reset(self) -> None: ... def degrees(self, fullcircle: float = 360.0) -> None: ... def radians(self) -> None: ... + if sys.version_info >= (3, 12): + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + def forward(self, distance: float) -> None: ... def back(self, distance: float) -> None: ... 
def right(self, angle: float) -> None: ... @@ -321,6 +325,9 @@ class TPen: def color(self, r: float, g: float, b: float) -> None: ... @overload def color(self, color1: _Color, color2: _Color) -> None: ... + if sys.version_info >= (3, 12): + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + def showturtle(self) -> None: ... def hideturtle(self) -> None: ... def isvisible(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 6a307368642f..a9bffdf5214f 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -900,8 +900,16 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... if sys.version_info >= (3, 9): + @overload def __or__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... - def __ior__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... + @overload + def __or__(self, __value: dict[str, Any]) -> dict[str, object]: ... + @overload + def __ror__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... + @overload + def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ... + # supposedly incompatible definitions of __or__ and __ior__ + def __ior__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... # type: ignore[misc] @_final class ForwardRef: diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index efcc13e42047..9320dc50b6bb 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -233,8 +233,16 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def values(self) -> dict_values[str, object]: ... def __delitem__(self, k: Never) -> None: ... if sys.version_info >= (3, 9): + @overload def __or__(self, __value: Self) -> Self: ... 
- def __ior__(self, __value: Self) -> Self: ... + @overload + def __or__(self, __value: dict[str, Any]) -> dict[str, object]: ... + @overload + def __ror__(self, __value: Self) -> Self: ... + @overload + def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ... + # supposedly incompatible definitions of `__ior__` and `__or__`: + def __ior__(self, __value: Self) -> Self: ... # type: ignore[misc] # TypedDict is a (non-subscriptable) special form. TypedDict: object From fda7a460485cb856c595d4d0593a0ec6c0fe03e9 Mon Sep 17 00:00:00 2001 From: Albert Tugushev Date: Tue, 15 Aug 2023 17:40:37 +0200 Subject: [PATCH 56/88] Fix all the missing references found within the docs (#15875) Fixes #13196. Enable the nit-picky mode on sphinx-build in tox, as this will facilitate the detection of potential issues related to missing references. --- docs/source/error_code_list.rst | 2 +- docs/source/more_types.rst | 18 +++++++++--------- docs/source/runtime_troubles.rst | 4 ++-- tox.ini | 2 +- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 157f90249af8..1f75ac54d525 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -835,7 +835,7 @@ ellipsis ``...``, a docstring, and a ``raise NotImplementedError`` statement. Check the target of NewType [valid-newtype] ------------------------------------------- -The target of a :py:func:`NewType ` definition must be a class type. It can't +The target of a :py:class:`~typing.NewType` definition must be a class type. It can't be a union type, ``Any``, or various other special types. 
You can also get this error if the target has been imported from a diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index 542ff1c57c71..4e6e9204fdca 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -2,7 +2,7 @@ More types ========== This section introduces a few additional kinds of types, including :py:data:`~typing.NoReturn`, -:py:func:`NewType `, and types for async code. It also discusses +:py:class:`~typing.NewType`, and types for async code. It also discusses how to give functions more precise types using overloads. All of these are only situationally useful, so feel free to skip this section and come back when you have a need for some of them. @@ -11,7 +11,7 @@ Here's a quick summary of what's covered here: * :py:data:`~typing.NoReturn` lets you tell mypy that a function never returns normally. -* :py:func:`NewType ` lets you define a variant of a type that is treated as a +* :py:class:`~typing.NewType` lets you define a variant of a type that is treated as a separate type by mypy but is identical to the original type at runtime. For example, you can have ``UserId`` as a variant of ``int`` that is just an ``int`` at runtime. @@ -75,7 +75,7 @@ certain values from base class instances. Example: ... However, this approach introduces some runtime overhead. To avoid this, the typing -module provides a helper object :py:func:`NewType ` that creates simple unique types with +module provides a helper object :py:class:`~typing.NewType` that creates simple unique types with almost zero runtime overhead. Mypy will treat the statement ``Derived = NewType('Derived', Base)`` as being roughly equivalent to the following definition: @@ -113,12 +113,12 @@ implicitly casting from ``UserId`` where ``int`` is expected. Examples: num: int = UserId(5) + 1 -:py:func:`NewType ` accepts exactly two arguments. The first argument must be a string literal +:py:class:`~typing.NewType` accepts exactly two arguments. 
The first argument must be a string literal containing the name of the new type and must equal the name of the variable to which the new type is assigned. The second argument must be a properly subclassable class, i.e., not a type construct like :py:data:`~typing.Union`, etc. -The callable returned by :py:func:`NewType ` accepts only one argument; this is equivalent to +The callable returned by :py:class:`~typing.NewType` accepts only one argument; this is equivalent to supporting only one constructor accepting an instance of the base class (see above). Example: @@ -139,12 +139,12 @@ Example: tcp_packet = TcpPacketId(127, 0) # Fails in type checker and at runtime You cannot use :py:func:`isinstance` or :py:func:`issubclass` on the object returned by -:py:func:`~typing.NewType`, nor can you subclass an object returned by :py:func:`~typing.NewType`. +:py:class:`~typing.NewType`, nor can you subclass an object returned by :py:class:`~typing.NewType`. .. note:: - Unlike type aliases, :py:func:`NewType ` will create an entirely new and - unique type when used. The intended purpose of :py:func:`NewType ` is to help you + Unlike type aliases, :py:class:`~typing.NewType` will create an entirely new and + unique type when used. The intended purpose of :py:class:`~typing.NewType` is to help you detect cases where you accidentally mixed together the old base type and the new derived type. @@ -160,7 +160,7 @@ You cannot use :py:func:`isinstance` or :py:func:`issubclass` on the object retu name_by_id(3) # ints and UserId are synonymous - But a similar example using :py:func:`NewType ` will not typecheck: + But a similar example using :py:class:`~typing.NewType` will not typecheck: .. code-block:: python diff --git a/docs/source/runtime_troubles.rst b/docs/source/runtime_troubles.rst index 909215a774a9..66ab7b3a84c7 100644 --- a/docs/source/runtime_troubles.rst +++ b/docs/source/runtime_troubles.rst @@ -86,7 +86,7 @@ required to be valid Python syntax. 
For more details, see :pep:`563`. * :ref:`type aliases `; * :ref:`type narrowing `; - * type definitions (see :py:class:`~typing.TypeVar`, :py:func:`~typing.NewType`, :py:class:`~typing.NamedTuple`); + * type definitions (see :py:class:`~typing.TypeVar`, :py:class:`~typing.NewType`, :py:class:`~typing.NamedTuple`); * base classes. .. code-block:: python @@ -263,7 +263,7 @@ If your subclass is also generic, you can use the following: reveal_type(task_queue.get()) # Reveals str In Python 3.9, we can just inherit directly from ``Queue[str]`` or ``Queue[T]`` -since its :py:class:`queue.Queue` implements :py:meth:`__class_getitem__`, so +since its :py:class:`queue.Queue` implements :py:meth:`~object.__class_getitem__`, so the class object can be subscripted at runtime without issue. Using types defined in stubs but not at runtime diff --git a/tox.ini b/tox.ini index 5a728e27fec4..a809c4d2c570 100644 --- a/tox.ini +++ b/tox.ini @@ -37,7 +37,7 @@ passenv = VERIFY_MYPY_ERROR_CODES deps = -rdocs/requirements-docs.txt commands = - sphinx-build -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs} + sphinx-build -n -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs} python -c 'import pathlib; print("documentation available under file://\{0\}".format(pathlib.Path(r"{toxworkdir}") / "docs_out" / "index.html"))' [testenv:lint] From 14418bc3d2c38b9ea776da6029e9d9dc6650b7ea Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 15 Aug 2023 20:31:26 +0100 Subject: [PATCH 57/88] Polymorphic inference: support for parameter specifications and lambdas (#15837) This is a third follow-up for https://github.com/python/mypy/pull/15287 (likely there will be just one more PR, for `TypeVarTuple`s, and few less important items I mentioned in the original PR I will leave for more distant future). After all this PR turned out to be larger than I wanted. 
The problem is that `Concatenate` support for `ParamSpec` was quite broken, and this caused many of my tests fail. So I decided to include some major cleanup in this PR (I tried splitting it into a separate PR but it turned out to be tricky). After all, if one ignores added tests, it is almost net zero line count. The main problems that I encountered are: * First, valid substitutions for a `ParamSpecType` were: another `ParamSpecType`, `Parameters`, and `CallableType` (and also `AnyType` and `UninhabitedType` but those seem to be handled trivially). Having `CallableType` in this list caused various missed cases, bogus `get_proper_type()`s, and was generally counter-intuitive. * Second (and probably bigger) issue is that it is possible to represent `Concatenate` in two different forms: as a prefix for `ParamSpecType` (used mostly for instances), and as separate argument types (used mostly for callables). The problem is that some parts of the code were implicitly relying on it being in one or the other form, while some other code uncontrollably switched between the two. I propose to fix this by introducing some simplifications and rules (some of which I enforce by asserts): * Only valid non-trivial substitutions (and consequently upper/lower bound in constraints) for `ParamSpecType` are `ParamSpecType` and `Parameters`. * When `ParamSpecType` appears in a callable it must have an empty `prefix`. * `Parameters` cannot contain other `Parameters` (and ideally also `ParamSpecType`s) among argument types. * For inference we bring `Concatenate` to common representation (because both callables and instances may appear in the same expression). Using the `ParamSpecType` representation with `prefix` looks significantly simpler (especially in solver). Apart from this actual implementation of polymorphic inference is simple/straightforward, I just handle the additional `ParamSpecType` cases (in addition to `TypeVarType`) for inference, for solver, and for application. 
I also enabled polymorphic inference for lambda expressions, since they are handled by similar code paths. Some minor comments: * I fixed couple minor bugs uncovered by this PR (see e.g. test case for accidental `TypeVar` id clash). * I switch few tests to `--new-type-inference` because there error messages are slightly different, and so it is easier for me to test global flip to `True` locally. * I may tweak some of the "ground rules" if `mypy_primer` output will be particularly bad. --------- Co-authored-by: Ivan Levkivskyi --- mypy/applytype.py | 11 +- mypy/checker.py | 13 +- mypy/checkexpr.py | 123 ++++++++-- mypy/constraints.py | 148 ++++++----- mypy/expandtype.py | 102 +++----- mypy/join.py | 10 +- mypy/meet.py | 5 +- mypy/solve.py | 38 +-- mypy/subtypes.py | 14 +- mypy/test/testtypes.py | 2 +- mypy/type_visitor.py | 2 +- mypy/typeanal.py | 22 +- mypy/typeops.py | 19 +- mypy/types.py | 45 ++-- test-data/unit/check-functions.test | 8 +- test-data/unit/check-generics.test | 230 +++++++++++++++++- test-data/unit/check-inference-context.test | 3 +- test-data/unit/check-inference.test | 10 +- test-data/unit/check-overloading.test | 21 +- .../unit/check-parameter-specification.test | 47 +++- 20 files changed, 639 insertions(+), 234 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index f8be63362a6b..6abe7f0022f8 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -9,7 +9,6 @@ AnyType, CallableType, Instance, - Parameters, ParamSpecType, PartialType, TupleType, @@ -112,9 +111,13 @@ def apply_generic_arguments( if param_spec is not None: nt = id_to_type.get(param_spec.id) if nt is not None: - nt = get_proper_type(nt) - if isinstance(nt, (CallableType, Parameters)): - callable = callable.expand_param_spec(nt) + # ParamSpec expansion is special-cased, so we need to always expand callable + # as a whole, not expanding arguments individually. 
+ callable = expand_type(callable, id_to_type) + assert isinstance(callable, CallableType) + return callable.copy_modified( + variables=[tv for tv in tvars if tv.id not in id_to_type] + ) # Apply arguments to argument types. var_arg = callable.var_arg() diff --git a/mypy/checker.py b/mypy/checker.py index 3bd9c494a890..5d97a0dec713 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4280,12 +4280,14 @@ def check_return_stmt(self, s: ReturnStmt) -> None: return_type = self.return_types[-1] return_type = get_proper_type(return_type) + is_lambda = isinstance(self.scope.top_function(), LambdaExpr) if isinstance(return_type, UninhabitedType): - self.fail(message_registry.NO_RETURN_EXPECTED, s) - return + # Avoid extra error messages for failed inference in lambdas + if not is_lambda or not return_type.ambiguous: + self.fail(message_registry.NO_RETURN_EXPECTED, s) + return if s.expr: - is_lambda = isinstance(self.scope.top_function(), LambdaExpr) declared_none_return = isinstance(return_type, NoneType) declared_any_return = isinstance(return_type, AnyType) @@ -7376,6 +7378,11 @@ def visit_erased_type(self, t: ErasedType) -> bool: # This can happen inside a lambda. return True + def visit_type_var(self, t: TypeVarType) -> bool: + # This is needed to prevent leaking into partial types during + # multi-step type inference. 
+ return t.id.is_meta_var() + class SetNothingToAny(TypeTranslator): """Replace all ambiguous types with Any (to avoid spurious extra errors).""" diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index d00bbb288f3e..68ea7c30ed6f 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -17,7 +17,12 @@ from mypy.checkstrformat import StringFormatterChecker from mypy.erasetype import erase_type, remove_instance_last_known_values, replace_meta_vars from mypy.errors import ErrorWatcher, report_internal_error -from mypy.expandtype import expand_type, expand_type_by_instance, freshen_function_type_vars +from mypy.expandtype import ( + expand_type, + expand_type_by_instance, + freshen_all_functions_type_vars, + freshen_function_type_vars, +) from mypy.infer import ArgumentInferContext, infer_function_type_arguments, infer_type_arguments from mypy.literals import literal from mypy.maptype import map_instance_to_supertype @@ -122,6 +127,7 @@ false_only, fixup_partial_type, function_type, + get_all_type_vars, get_type_vars, is_literal_type_like, make_simplified_union, @@ -145,6 +151,7 @@ LiteralValue, NoneType, Overloaded, + Parameters, ParamSpecFlavor, ParamSpecType, PartialType, @@ -167,6 +174,7 @@ get_proper_types, has_recursive_types, is_named_instance, + remove_dups, split_with_prefix_and_suffix, ) from mypy.types_utils import ( @@ -1579,6 +1587,16 @@ def check_callable_call( lambda i: self.accept(args[i]), ) + # This is tricky: return type may contain its own type variables, like in + # def [S] (S) -> def [T] (T) -> tuple[S, T], so we need to update their ids + # to avoid possible id clashes if this call itself appears in a generic + # function body. 
+ ret_type = get_proper_type(callee.ret_type) + if isinstance(ret_type, CallableType) and ret_type.variables: + fresh_ret_type = freshen_all_functions_type_vars(callee.ret_type) + freeze_all_type_vars(fresh_ret_type) + callee = callee.copy_modified(ret_type=fresh_ret_type) + if callee.is_generic(): need_refresh = any( isinstance(v, (ParamSpecType, TypeVarTupleType)) for v in callee.variables @@ -1597,7 +1615,7 @@ def check_callable_call( lambda i: self.accept(args[i]), ) callee = self.infer_function_type_arguments( - callee, args, arg_kinds, formal_to_actual, context + callee, args, arg_kinds, arg_names, formal_to_actual, need_refresh, context ) if need_refresh: formal_to_actual = map_actuals_to_formals( @@ -1864,6 +1882,8 @@ def infer_function_type_arguments_using_context( # def identity(x: T) -> T: return x # # expects_literal(identity(3)) # Should type-check + # TODO: we may want to add similar exception if all arguments are lambdas, since + # in this case external context is almost everything we have. if not is_generic_instance(ctx) and not is_literal_type_like(ctx): return callable.copy_modified() args = infer_type_arguments(callable.variables, ret_type, erased_ctx) @@ -1885,7 +1905,9 @@ def infer_function_type_arguments( callee_type: CallableType, args: list[Expression], arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, formal_to_actual: list[list[int]], + need_refresh: bool, context: Context, ) -> CallableType: """Infer the type arguments for a generic callee type. @@ -1927,7 +1949,14 @@ def infer_function_type_arguments( if 2 in arg_pass_nums: # Second pass of type inference. 
(callee_type, inferred_args) = self.infer_function_type_arguments_pass2( - callee_type, args, arg_kinds, formal_to_actual, inferred_args, context + callee_type, + args, + arg_kinds, + arg_names, + formal_to_actual, + inferred_args, + need_refresh, + context, ) if ( @@ -1953,6 +1982,17 @@ def infer_function_type_arguments( or set(get_type_vars(a)) & set(callee_type.variables) for a in inferred_args ): + if need_refresh: + # Technically we need to refresh formal_to_actual after *each* inference pass, + # since each pass can expand ParamSpec or TypeVarTuple. Although such situations + # are very rare, not doing this can cause crashes. + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee_type.arg_kinds, + callee_type.arg_names, + lambda a: self.accept(args[a]), + ) # If the regular two-phase inference didn't work, try inferring type # variables while allowing for polymorphic solutions, i.e. for solutions # potentially involving free variables. @@ -2000,8 +2040,10 @@ def infer_function_type_arguments_pass2( callee_type: CallableType, args: list[Expression], arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, formal_to_actual: list[list[int]], old_inferred_args: Sequence[Type | None], + need_refresh: bool, context: Context, ) -> tuple[CallableType, list[Type | None]]: """Perform second pass of generic function type argument inference. 
@@ -2023,6 +2065,14 @@ def infer_function_type_arguments_pass2( if isinstance(arg, (NoneType, UninhabitedType)) or has_erased_component(arg): inferred_args[i] = None callee_type = self.apply_generic_arguments(callee_type, inferred_args, context) + if need_refresh: + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee_type.arg_kinds, + callee_type.arg_names, + lambda a: self.accept(args[a]), + ) arg_types = self.infer_arg_types_in_context(callee_type, args, arg_kinds, formal_to_actual) @@ -4735,8 +4785,22 @@ def infer_lambda_type_using_context( # they must be considered as indeterminate. We use ErasedType since it # does not affect type inference results (it is for purposes like this # only). - callable_ctx = get_proper_type(replace_meta_vars(ctx, ErasedType())) - assert isinstance(callable_ctx, CallableType) + if self.chk.options.new_type_inference: + # With new type inference we can preserve argument types even if they + # are generic, since new inference algorithm can handle constraints + # like S <: T (we still erase return type since it's ultimately unknown). + extra_vars = [] + for arg in ctx.arg_types: + meta_vars = [tv for tv in get_all_type_vars(arg) if tv.id.is_meta_var()] + extra_vars.extend([tv for tv in meta_vars if tv not in extra_vars]) + callable_ctx = ctx.copy_modified( + ret_type=replace_meta_vars(ctx.ret_type, ErasedType()), + variables=list(ctx.variables) + extra_vars, + ) + else: + erased_ctx = replace_meta_vars(ctx, ErasedType()) + assert isinstance(erased_ctx, ProperType) and isinstance(erased_ctx, CallableType) + callable_ctx = erased_ctx # The callable_ctx may have a fallback of builtins.type if the context # is a constructor -- but this fallback doesn't make sense for lambdas. 
@@ -5693,18 +5757,28 @@ def __init__(self, poly_tvars: Sequence[TypeVarLikeType]) -> None: self.bound_tvars: set[TypeVarLikeType] = set() self.seen_aliases: set[TypeInfo] = set() - def visit_callable_type(self, t: CallableType) -> Type: - found_vars = set() + def collect_vars(self, t: CallableType | Parameters) -> list[TypeVarLikeType]: + found_vars = [] for arg in t.arg_types: - found_vars |= set(get_type_vars(arg)) & self.poly_tvars + for tv in get_all_type_vars(arg): + if isinstance(tv, ParamSpecType): + normalized: TypeVarLikeType = tv.copy_modified( + flavor=ParamSpecFlavor.BARE, prefix=Parameters([], [], []) + ) + else: + normalized = tv + if normalized in self.poly_tvars and normalized not in self.bound_tvars: + found_vars.append(normalized) + return remove_dups(found_vars) - found_vars -= self.bound_tvars - self.bound_tvars |= found_vars + def visit_callable_type(self, t: CallableType) -> Type: + found_vars = self.collect_vars(t) + self.bound_tvars |= set(found_vars) result = super().visit_callable_type(t) - self.bound_tvars -= found_vars + self.bound_tvars -= set(found_vars) assert isinstance(result, ProperType) and isinstance(result, CallableType) - result.variables = list(result.variables) + list(found_vars) + result.variables = list(result.variables) + found_vars return result def visit_type_var(self, t: TypeVarType) -> Type: @@ -5713,8 +5787,9 @@ def visit_type_var(self, t: TypeVarType) -> Type: return super().visit_type_var(t) def visit_param_spec(self, t: ParamSpecType) -> Type: - # TODO: Support polymorphic apply for ParamSpec. - raise PolyTranslationError() + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_param_spec(t) def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: # TODO: Support polymorphic apply for TypeVarTuple. 
@@ -5730,6 +5805,26 @@ def visit_type_alias_type(self, t: TypeAliasType) -> Type: raise PolyTranslationError() def visit_instance(self, t: Instance) -> Type: + if t.type.has_param_spec_type: + # We need this special-casing to preserve the possibility to store a + # generic function in an instance type. Things like + # forall T . Foo[[x: T], T] + # are not really expressible in current type system, but this looks like + # a useful feature, so let's keep it. + param_spec_index = next( + i for (i, tv) in enumerate(t.type.defn.type_vars) if isinstance(tv, ParamSpecType) + ) + p = get_proper_type(t.args[param_spec_index]) + if isinstance(p, Parameters): + found_vars = self.collect_vars(p) + self.bound_tvars |= set(found_vars) + new_args = [a.accept(self) for a in t.args] + self.bound_tvars -= set(found_vars) + + repl = new_args[param_spec_index] + assert isinstance(repl, ProperType) and isinstance(repl, Parameters) + repl.variables = list(repl.variables) + list(found_vars) + return t.copy_modified(args=new_args) # There is the same problem with callback protocols as with aliases # (callback protocols are essentially more flexible aliases to callables). # Note: consider supporting bindings in instances, e.g. LRUCache[[x: T], T]. 
diff --git a/mypy/constraints.py b/mypy/constraints.py index 299c6292a259..04c3378ce16b 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -9,7 +9,7 @@ from mypy.argmap import ArgTypeExpander from mypy.erasetype import erase_typevars from mypy.maptype import map_instance_to_supertype -from mypy.nodes import ARG_OPT, ARG_POS, CONTRAVARIANT, COVARIANT, ArgKind +from mypy.nodes import ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, CONTRAVARIANT, COVARIANT, ArgKind from mypy.types import ( TUPLE_LIKE_INSTANCE_NAMES, AnyType, @@ -40,7 +40,6 @@ UninhabitedType, UnionType, UnpackType, - callable_with_ellipsis, get_proper_type, has_recursive_types, has_type_vars, @@ -166,6 +165,8 @@ def infer_constraints_for_callable( actual_type = mapper.expand_actual_type( actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i] ) + # TODO: if callee has ParamSpec, we need to collect all actuals that map to star + # args and create single constraint between P and resulting Parameters instead. c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) constraints.extend(c) @@ -577,10 +578,21 @@ def visit_unpack_type(self, template: UnpackType) -> list[Constraint]: raise RuntimeError("Mypy bug: unpack should be handled at a higher level.") def visit_parameters(self, template: Parameters) -> list[Constraint]: - # constraining Any against C[P] turns into infer_against_any([P], Any) - # ... which seems like the only case this can happen. Better to fail loudly. + # Constraining Any against C[P] turns into infer_against_any([P], Any) + # ... which seems like the only case this can happen. Better to fail loudly otherwise. if isinstance(self.actual, AnyType): return self.infer_against_any(template.arg_types, self.actual) + if type_state.infer_polymorphic and isinstance(self.actual, Parameters): + # For polymorphic inference we need to be able to infer secondary constraints + # in situations like [x: T] <: P <: [x: int]. 
+ res = [] + if len(template.arg_types) == len(self.actual.arg_types): + for tt, at in zip(template.arg_types, self.actual.arg_types): + # This avoids bogus constraints like T <: P.args + if isinstance(at, ParamSpecType): + continue + res.extend(infer_constraints(tt, at, self.direction)) + return res raise RuntimeError("Parameters cannot be constrained to") # Non-leaf types @@ -686,7 +698,6 @@ def visit_instance(self, template: Instance) -> list[Constraint]: # N.B: We use zip instead of indexing because the lengths might have # mismatches during daemon reprocessing. for tvar, mapped_arg, instance_arg in zip(tvars, mapped_args, instance_args): - # TODO(PEP612): More ParamSpec work (or is Parameters the only thing accepted) if isinstance(tvar, TypeVarType): # The constraints for generic type parameters depend on variance. # Include constraints from both directions if invariant. @@ -697,25 +708,26 @@ def visit_instance(self, template: Instance) -> list[Constraint]: infer_constraints(mapped_arg, instance_arg, neg_op(self.direction)) ) elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType): - suffix = get_proper_type(instance_arg) - - if isinstance(suffix, CallableType): - prefix = mapped_arg.prefix - from_concat = bool(prefix.arg_types) or suffix.from_concatenate - suffix = suffix.copy_modified(from_concatenate=from_concat) - - if isinstance(suffix, (Parameters, CallableType)): - # no such thing as variance for ParamSpecs - # TODO: is there a case I am missing? 
+ prefix = mapped_arg.prefix + if isinstance(instance_arg, Parameters): + # No such thing as variance for ParamSpecs, consider them invariant # TODO: constraints between prefixes - prefix = mapped_arg.prefix - suffix = suffix.copy_modified( - suffix.arg_types[len(prefix.arg_types) :], - suffix.arg_kinds[len(prefix.arg_kinds) :], - suffix.arg_names[len(prefix.arg_names) :], + suffix: Type = instance_arg.copy_modified( + instance_arg.arg_types[len(prefix.arg_types) :], + instance_arg.arg_kinds[len(prefix.arg_kinds) :], + instance_arg.arg_names[len(prefix.arg_names) :], ) + res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix)) res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) - elif isinstance(suffix, ParamSpecType): + elif isinstance(instance_arg, ParamSpecType): + suffix = instance_arg.copy_modified( + prefix=Parameters( + instance_arg.prefix.arg_types[len(prefix.arg_types) :], + instance_arg.prefix.arg_kinds[len(prefix.arg_kinds) :], + instance_arg.prefix.arg_names[len(prefix.arg_names) :], + ) + ) + res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix)) res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) else: # This case should have been handled above. @@ -767,26 +779,26 @@ def visit_instance(self, template: Instance) -> list[Constraint]: elif isinstance(tvar, ParamSpecType) and isinstance( template_arg, ParamSpecType ): - suffix = get_proper_type(mapped_arg) - - if isinstance(suffix, CallableType): - prefix = template_arg.prefix - from_concat = bool(prefix.arg_types) or suffix.from_concatenate - suffix = suffix.copy_modified(from_concatenate=from_concat) - - if isinstance(suffix, (Parameters, CallableType)): - # no such thing as variance for ParamSpecs - # TODO: is there a case I am missing? 
+ prefix = template_arg.prefix + if isinstance(mapped_arg, Parameters): + # No such thing as variance for ParamSpecs, consider them invariant # TODO: constraints between prefixes - prefix = template_arg.prefix - - suffix = suffix.copy_modified( - suffix.arg_types[len(prefix.arg_types) :], - suffix.arg_kinds[len(prefix.arg_kinds) :], - suffix.arg_names[len(prefix.arg_names) :], + suffix = mapped_arg.copy_modified( + mapped_arg.arg_types[len(prefix.arg_types) :], + mapped_arg.arg_kinds[len(prefix.arg_kinds) :], + mapped_arg.arg_names[len(prefix.arg_names) :], ) + res.append(Constraint(template_arg, SUBTYPE_OF, suffix)) res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) - elif isinstance(suffix, ParamSpecType): + elif isinstance(mapped_arg, ParamSpecType): + suffix = mapped_arg.copy_modified( + prefix=Parameters( + mapped_arg.prefix.arg_types[len(prefix.arg_types) :], + mapped_arg.prefix.arg_kinds[len(prefix.arg_kinds) :], + mapped_arg.prefix.arg_names[len(prefix.arg_names) :], + ) + ) + res.append(Constraint(template_arg, SUBTYPE_OF, suffix)) res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) else: # This case should have been handled above. @@ -848,7 +860,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: elif isinstance(actual, TupleType) and self.direction == SUPERTYPE_OF: return infer_constraints(template, mypy.typeops.tuple_fallback(actual), self.direction) elif isinstance(actual, TypeVarType): - if not actual.values: + if not actual.values and not actual.id.is_meta_var(): return infer_constraints(template, actual.upper_bound, self.direction) return [] elif isinstance(actual, ParamSpecType): @@ -892,6 +904,8 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # Normalize callables before matching against each other. # Note that non-normalized callables can be created in annotations # using e.g. callback protocols. + # TODO: check that callables match? 
Ideally we should not infer constraints + # callables that can never be subtypes of one another in given direction. template = template.with_unpacked_kwargs() extra_tvars = False if isinstance(self.actual, CallableType): @@ -899,12 +913,10 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: cactual = self.actual.with_unpacked_kwargs() param_spec = template.param_spec() if param_spec is None: - # FIX verify argument counts # TODO: Erase template variables if it is generic? if ( type_state.infer_polymorphic and cactual.variables - and cactual.param_spec() is None and not self.skip_neg_op # Technically, the correct inferred type for application of e.g. # Callable[..., T] -> Callable[..., T] (with literal ellipsis), to a generic @@ -926,7 +938,8 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # We can't infer constraints from arguments if the template is Callable[..., T] # (with literal '...'). if not template.is_ellipsis_args: - if find_unpack_in_list(template.arg_types) is not None: + unpack_present = find_unpack_in_list(template.arg_types) + if unpack_present is not None: ( unpack_constraints, cactual_args_t, @@ -941,47 +954,70 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: else: template_args = template.arg_types cactual_args = cactual.arg_types - # The lengths should match, but don't crash (it will error elsewhere). + # TODO: use some more principled "formal to actual" logic + # instead of this lock-step loop over argument types. This identical + # logic should be used in 5 places: in Parameters vs Parameters + # inference, in Instance vs Instance inference for prefixes (two + # branches), and in Callable vs Callable inference (two branches). for t, a in zip(template_args, cactual_args): + # This avoids bogus constraints like T <: P.args + if isinstance(a, ParamSpecType): + # TODO: can we infer something useful for *T vs P? 
+ continue # Negate direction due to function argument type contravariance. res.extend(infer_constraints(t, a, neg_op(self.direction))) else: - # sometimes, it appears we try to get constraints between two paramspec callables? - - # TODO: Direction - # TODO: check the prefixes match prefix = param_spec.prefix prefix_len = len(prefix.arg_types) cactual_ps = cactual.param_spec() + if type_state.infer_polymorphic and cactual.variables and not self.skip_neg_op: + # Similar logic to the branch above. + res.extend( + infer_constraints( + cactual, template, neg_op(self.direction), skip_neg_op=True + ) + ) + extra_tvars = True + if not cactual_ps: max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)]) prefix_len = min(prefix_len, max_prefix_len) res.append( Constraint( param_spec, - SUBTYPE_OF, - cactual.copy_modified( + neg_op(self.direction), + Parameters( arg_types=cactual.arg_types[prefix_len:], arg_kinds=cactual.arg_kinds[prefix_len:], arg_names=cactual.arg_names[prefix_len:], - ret_type=UninhabitedType(), + variables=cactual.variables + if not type_state.infer_polymorphic + else [], ), ) ) else: - res.append(Constraint(param_spec, SUBTYPE_OF, cactual_ps)) + if len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types): + cactual_ps = cactual_ps.copy_modified( + prefix=Parameters( + arg_types=cactual_ps.prefix.arg_types[prefix_len:], + arg_kinds=cactual_ps.prefix.arg_kinds[prefix_len:], + arg_names=cactual_ps.prefix.arg_names[prefix_len:], + ) + ) + res.append(Constraint(param_spec, neg_op(self.direction), cactual_ps)) - # compare prefixes + # Compare prefixes as well cactual_prefix = cactual.copy_modified( arg_types=cactual.arg_types[:prefix_len], arg_kinds=cactual.arg_kinds[:prefix_len], arg_names=cactual.arg_names[:prefix_len], ) - # TODO: see above "FIX" comments for param_spec is None case - # TODO: this assumes positional arguments for t, a in zip(prefix.arg_types, cactual_prefix.arg_types): + if isinstance(a, ParamSpecType): + 
continue res.extend(infer_constraints(t, a, neg_op(self.direction))) template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type @@ -993,7 +1029,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction)) if extra_tvars: for c in res: - c.extra_tvars = list(cactual.variables) + c.extra_tvars += cactual.variables return res elif isinstance(self.actual, AnyType): param_spec = template.param_spec() @@ -1006,7 +1042,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: Constraint( param_spec, SUBTYPE_OF, - callable_with_ellipsis(any_type, any_type, template.fallback), + Parameters([any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None]), ) ] res.extend(infer_constraints(template.ret_type, any_type, self.direction)) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index b599b49e4c12..0e98ed048197 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -231,44 +231,27 @@ def visit_type_var(self, t: TypeVarType) -> Type: return repl def visit_param_spec(self, t: ParamSpecType) -> Type: - # set prefix to something empty so we don't duplicate it - repl = get_proper_type( - self.variables.get(t.id, t.copy_modified(prefix=Parameters([], [], []))) - ) - if isinstance(repl, Instance): - # TODO: what does prefix mean in this case? - # TODO: why does this case even happen? Instances aren't plural. 
- return repl - elif isinstance(repl, (ParamSpecType, Parameters, CallableType)): - if isinstance(repl, ParamSpecType): - return repl.copy_modified( - flavor=t.flavor, - prefix=t.prefix.copy_modified( - arg_types=t.prefix.arg_types + repl.prefix.arg_types, - arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, - arg_names=t.prefix.arg_names + repl.prefix.arg_names, - ), - ) - else: - # if the paramspec is *P.args or **P.kwargs: - if t.flavor != ParamSpecFlavor.BARE: - assert isinstance(repl, CallableType), "Should not be able to get here." - # Is this always the right thing to do? - param_spec = repl.param_spec() - if param_spec: - return param_spec.with_flavor(t.flavor) - else: - return repl - else: - return Parameters( - t.prefix.arg_types + repl.arg_types, - t.prefix.arg_kinds + repl.arg_kinds, - t.prefix.arg_names + repl.arg_names, - variables=[*t.prefix.variables, *repl.variables], - ) - + # Set prefix to something empty, so we don't duplicate it below. + repl = self.variables.get(t.id, t.copy_modified(prefix=Parameters([], [], []))) + if isinstance(repl, ParamSpecType): + return repl.copy_modified( + flavor=t.flavor, + prefix=t.prefix.copy_modified( + arg_types=self.expand_types(t.prefix.arg_types + repl.prefix.arg_types), + arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, + arg_names=t.prefix.arg_names + repl.prefix.arg_names, + ), + ) + elif isinstance(repl, Parameters): + assert t.flavor == ParamSpecFlavor.BARE + return Parameters( + self.expand_types(t.prefix.arg_types + repl.arg_types), + t.prefix.arg_kinds + repl.arg_kinds, + t.prefix.arg_names + repl.arg_names, + variables=[*t.prefix.variables, *repl.variables], + ) else: - # TODO: should this branch be removed? 
better not to fail silently + # TODO: replace this with "assert False" return repl def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: @@ -387,7 +370,7 @@ def interpolate_args_for_unpack( def visit_callable_type(self, t: CallableType) -> CallableType: param_spec = t.param_spec() if param_spec is not None: - repl = get_proper_type(self.variables.get(param_spec.id)) + repl = self.variables.get(param_spec.id) # If a ParamSpec in a callable type is substituted with a # callable type, we can't use normal substitution logic, # since ParamSpec is actually split into two components @@ -395,35 +378,30 @@ def visit_callable_type(self, t: CallableType) -> CallableType: # must expand both of them with all the argument types, # kinds and names in the replacement. The return type in # the replacement is ignored. - if isinstance(repl, (CallableType, Parameters)): - # Substitute *args: P.args, **kwargs: P.kwargs - prefix = param_spec.prefix - # we need to expand the types in the prefix, so might as well - # not get them in the first place - t = t.expand_param_spec(repl, no_prefix=True) + if isinstance(repl, Parameters): + # We need to expand both the types in the prefix and the ParamSpec itself + t = t.expand_param_spec(repl) return t.copy_modified( - arg_types=self.expand_types(prefix.arg_types) + t.arg_types, - arg_kinds=prefix.arg_kinds + t.arg_kinds, - arg_names=prefix.arg_names + t.arg_names, + arg_types=self.expand_types(t.arg_types), + arg_kinds=t.arg_kinds, + arg_names=t.arg_names, ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) - # TODO: Conceptually, the "len(t.arg_types) == 2" should not be here. However, this - # errors without it. Either figure out how to eliminate this or place an - # explanation for why this is necessary. - elif isinstance(repl, ParamSpecType) and len(t.arg_types) == 2: - # We're substituting one paramspec for another; this can mean that the prefix - # changes. (e.g. 
sub Concatenate[int, P] for Q) + elif isinstance(repl, ParamSpecType): + # We're substituting one ParamSpec for another; this can mean that the prefix + # changes, e.g. substitute Concatenate[int, P] in place of Q. prefix = repl.prefix - old_prefix = param_spec.prefix - - # Check assumptions. I'm not sure what order to place new prefix vs old prefix: - assert not old_prefix.arg_types or not prefix.arg_types - - t = t.copy_modified( - arg_types=prefix.arg_types + old_prefix.arg_types + t.arg_types, - arg_kinds=prefix.arg_kinds + old_prefix.arg_kinds + t.arg_kinds, - arg_names=prefix.arg_names + old_prefix.arg_names + t.arg_names, + clean_repl = repl.copy_modified(prefix=Parameters([], [], [])) + return t.copy_modified( + arg_types=self.expand_types(t.arg_types[:-2] + prefix.arg_types) + + [ + clean_repl.with_flavor(ParamSpecFlavor.ARGS), + clean_repl.with_flavor(ParamSpecFlavor.KWARGS), + ], + arg_kinds=t.arg_kinds[:-2] + prefix.arg_kinds + t.arg_kinds[-2:], + arg_names=t.arg_names[:-2] + prefix.arg_names + t.arg_names[-2:], + ret_type=t.ret_type.accept(self), ) var_arg = t.var_arg() diff --git a/mypy/join.py b/mypy/join.py index f4af59f4e50b..806c644a680c 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -315,8 +315,14 @@ def visit_unpack_type(self, t: UnpackType) -> UnpackType: raise NotImplementedError def visit_parameters(self, t: Parameters) -> ProperType: - if self.s == t: - return t + if isinstance(self.s, Parameters): + if len(t.arg_types) != len(self.s.arg_types): + return self.default(self.s) + return t.copy_modified( + # Note that since during constraint inference we already treat whole ParamSpec as + # contravariant, we should join individual items, not meet them like for Callables + arg_types=[join_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)] + ) else: return self.default(self.s) diff --git a/mypy/meet.py b/mypy/meet.py index 29c4d3663503..e3a22a226575 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -701,11 +701,12 @@ def 
visit_unpack_type(self, t: UnpackType) -> ProperType: raise NotImplementedError def visit_parameters(self, t: Parameters) -> ProperType: - # TODO: is this the right variance? - if isinstance(self.s, (Parameters, CallableType)): + if isinstance(self.s, Parameters): if len(t.arg_types) != len(self.s.arg_types): return self.default(self.s) return t.copy_modified( + # Note that since during constraint inference we already treat whole ParamSpec as + # contravariant, we should meet individual items, not join them like for Callables arg_types=[meet_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)] ) else: diff --git a/mypy/solve.py b/mypy/solve.py index 72b3d6f26618..4b2b899c2a8d 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -6,17 +6,18 @@ from typing import Iterable, Sequence from typing_extensions import TypeAlias as _TypeAlias -from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints, neg_op +from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints from mypy.expandtype import expand_type from mypy.graph_utils import prepare_sccs, strongly_connected_components, topsort from mypy.join import join_types from mypy.meet import meet_type_list, meet_types from mypy.subtypes import is_subtype -from mypy.typeops import get_type_vars +from mypy.typeops import get_all_type_vars from mypy.types import ( AnyType, Instance, NoneType, + ParamSpecType, ProperType, Type, TypeOfAny, @@ -26,7 +27,6 @@ UninhabitedType, UnionType, get_proper_type, - remove_dups, ) from mypy.typestate import type_state @@ -62,10 +62,6 @@ def solve_constraints( for c in constraints: extra_vars.extend([v.id for v in c.extra_tvars if v.id not in vars + extra_vars]) originals.update({v.id: v for v in c.extra_tvars if v.id not in originals}) - if allow_polymorphic: - # Constraints like T :> S and S <: T are semantically the same, but they are - # represented differently. Normalize the constraint list w.r.t this equivalence. 
- constraints = normalize_constraints(constraints, vars + extra_vars) # Collect a list of constraints for each type variable. cmap: dict[TypeVarId, list[Constraint]] = {tv: [] for tv in vars + extra_vars} @@ -334,23 +330,6 @@ def is_trivial_bound(tp: ProperType) -> bool: return isinstance(tp, Instance) and tp.type.fullname == "builtins.object" -def normalize_constraints( - constraints: list[Constraint], vars: list[TypeVarId] -) -> list[Constraint]: - """Normalize list of constraints (to simplify life for the non-linear solver). - - This includes two things currently: - * Complement T :> S by S <: T - * Remove strict duplicates - * Remove constrains for unrelated variables - """ - res = constraints.copy() - for c in constraints: - if isinstance(c.target, TypeVarType): - res.append(Constraint(c.target, neg_op(c.op), c.origin_type_var)) - return [c for c in remove_dups(constraints) if c.type_var in vars] - - def transitive_closure( tvars: list[TypeVarId], constraints: list[Constraint] ) -> tuple[Graph, Bounds, Bounds]: @@ -380,7 +359,14 @@ def transitive_closure( remaining = set(constraints) while remaining: c = remaining.pop() - if isinstance(c.target, TypeVarType) and c.target.id in tvars: + # Note that ParamSpec constraint P <: Q may be considered linear only if Q has no prefix, + # for cases like P <: Concatenate[T, Q] we should consider this non-linear and put {P} and + # {T, Q} into separate SCCs. 
+ if ( + isinstance(c.target, TypeVarType) + or isinstance(c.target, ParamSpecType) + and not c.target.prefix.arg_types + ) and c.target.id in tvars: if c.op == SUBTYPE_OF: lower, upper = c.type_var, c.target.id else: @@ -463,4 +449,4 @@ def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool: def get_vars(target: Type, vars: list[TypeVarId]) -> set[TypeVarId]: """Find type variables for which we are solving in a target type.""" - return {tv.id for tv in get_type_vars(target)} & set(vars) + return {tv.id for tv in get_all_type_vars(target)} & set(vars) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index da92f7398d4e..60fccc7e357c 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1705,11 +1705,15 @@ def unify_generic_callable( return_constraint_direction = mypy.constraints.SUBTYPE_OF constraints: list[mypy.constraints.Constraint] = [] - for arg_type, target_arg_type in zip(type.arg_types, target.arg_types): - c = mypy.constraints.infer_constraints( - arg_type, target_arg_type, mypy.constraints.SUPERTYPE_OF - ) - constraints.extend(c) + # There is some special logic for inference in callables, so better use them + # as wholes instead of picking separate arguments. + cs = mypy.constraints.infer_constraints( + type.copy_modified(ret_type=UninhabitedType()), + target.copy_modified(ret_type=UninhabitedType()), + mypy.constraints.SUBTYPE_OF, + skip_neg_op=True, + ) + constraints.extend(cs) if not ignore_return: c = mypy.constraints.infer_constraints( type.ret_type, target.ret_type, return_constraint_direction diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 59457dfa5d3b..56ac86058ce4 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -1464,7 +1464,7 @@ def make_call(*items: tuple[str, str | None]) -> CallExpr: class TestExpandTypeLimitGetProperType(TestCase): # WARNING: do not increase this number unless absolutely necessary, # and you understand what you are doing. 
- ALLOWED_GET_PROPER_TYPES = 8 + ALLOWED_GET_PROPER_TYPES = 6 @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy") def test_count_get_proper_type(self) -> None: diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index cbfa43a77b81..1860a43eb14f 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -348,7 +348,7 @@ def visit_type_var(self, t: TypeVarType) -> T: return self.query_types([t.upper_bound, t.default] + t.values) def visit_param_spec(self, t: ParamSpecType) -> T: - return self.query_types([t.upper_bound, t.default]) + return self.query_types([t.upper_bound, t.default, t.prefix]) def visit_type_var_tuple(self, t: TypeVarTupleType) -> T: return self.query_types([t.upper_bound, t.default]) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index d894e2cc8c51..8ac73cdf8aac 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -1244,9 +1244,23 @@ def analyze_callable_type(self, t: UnboundType) -> Type: ) else: # Callable[P, RET] (where P is ParamSpec) - maybe_ret = self.analyze_callable_args_for_paramspec( - callable_args, ret_type, fallback - ) or self.analyze_callable_args_for_concatenate(callable_args, ret_type, fallback) + with self.tvar_scope_frame(): + # Temporarily bind ParamSpecs to allow code like this: + # my_fun: Callable[Q, Foo[Q]] + # We usually do this later in visit_callable_type(), but the analysis + # below happens at very early stage. + variables = [] + for name, tvar_expr in self.find_type_var_likes(callable_args): + variables.append(self.tvar_scope.bind_new(name, tvar_expr)) + maybe_ret = self.analyze_callable_args_for_paramspec( + callable_args, ret_type, fallback + ) or self.analyze_callable_args_for_concatenate( + callable_args, ret_type, fallback + ) + if maybe_ret: + maybe_ret = maybe_ret.copy_modified( + ret_type=ret_type.accept(self), variables=variables + ) if maybe_ret is None: # Callable[?, RET] (where ? 
is something invalid) self.fail( @@ -1532,6 +1546,7 @@ def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = Fa if analyzed.prefix.arg_types: self.fail("Invalid location for Concatenate", t, code=codes.VALID_TYPE) self.note("You can use Concatenate as the first argument to Callable", t) + analyzed = AnyType(TypeOfAny.from_error) else: self.fail( f'Invalid location for ParamSpec "{analyzed.name}"', t, code=codes.VALID_TYPE @@ -1541,6 +1556,7 @@ def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = Fa "'Callable[{}, int]'".format(analyzed.name), t, ) + analyzed = AnyType(TypeOfAny.from_error) return analyzed def anal_var_def(self, var_def: TypeVarLikeType) -> TypeVarLikeType: diff --git a/mypy/typeops.py b/mypy/typeops.py index 4233cc1b2b33..d746ea701fde 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -948,22 +948,31 @@ def coerce_to_literal(typ: Type) -> Type: def get_type_vars(tp: Type) -> list[TypeVarType]: - return tp.accept(TypeVarExtractor()) + return cast("list[TypeVarType]", tp.accept(TypeVarExtractor())) -class TypeVarExtractor(TypeQuery[List[TypeVarType]]): - def __init__(self) -> None: +def get_all_type_vars(tp: Type) -> list[TypeVarLikeType]: + # TODO: should we always use this function instead of get_type_vars() above? 
+ return tp.accept(TypeVarExtractor(include_all=True)) + + +class TypeVarExtractor(TypeQuery[List[TypeVarLikeType]]): + def __init__(self, include_all: bool = False) -> None: super().__init__(self._merge) + self.include_all = include_all - def _merge(self, iter: Iterable[list[TypeVarType]]) -> list[TypeVarType]: + def _merge(self, iter: Iterable[list[TypeVarLikeType]]) -> list[TypeVarLikeType]: out = [] for item in iter: out.extend(item) return out - def visit_type_var(self, t: TypeVarType) -> list[TypeVarType]: + def visit_type_var(self, t: TypeVarType) -> list[TypeVarLikeType]: return [t] + def visit_param_spec(self, t: ParamSpecType) -> list[TypeVarLikeType]: + return [t] if self.include_all else [] + def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool: """Does this type have a custom special method such as __format__() or __eq__()? diff --git a/mypy/types.py b/mypy/types.py index d13cff00c06d..359ca713616b 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1577,6 +1577,7 @@ def __init__( self.arg_kinds = arg_kinds self.arg_names = list(arg_names) assert len(arg_types) == len(arg_kinds) == len(arg_names) + assert not any(isinstance(t, Parameters) for t in arg_types) self.min_args = arg_kinds.count(ARG_POS) self.is_ellipsis_args = is_ellipsis_args self.variables = variables or [] @@ -1788,6 +1789,11 @@ def __init__( ) -> None: super().__init__(line, column) assert len(arg_types) == len(arg_kinds) == len(arg_names) + for t, k in zip(arg_types, arg_kinds): + if isinstance(t, ParamSpecType): + assert not t.prefix.arg_types + # TODO: should we assert that only ARG_STAR contain ParamSpecType? + # See testParamSpecJoin, that relies on passing e.g `P.args` as plain argument. 
if variables is None: variables = [] self.arg_types = list(arg_types) @@ -2033,36 +2039,21 @@ def param_spec(self) -> ParamSpecType | None: if not isinstance(arg_type, ParamSpecType): return None - # sometimes paramspectypes are analyzed in from mysterious places, - # e.g. def f(prefix..., *args: P.args, **kwargs: P.kwargs) -> ...: ... - prefix = arg_type.prefix - if not prefix.arg_types: - # TODO: confirm that all arg kinds are positional - prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2]) - + # Prepend prefix for def f(prefix..., *args: P.args, **kwargs: P.kwargs) -> ... + # TODO: confirm that all arg kinds are positional + prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2]) return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix) - def expand_param_spec( - self, c: CallableType | Parameters, no_prefix: bool = False - ) -> CallableType: + def expand_param_spec(self, c: Parameters) -> CallableType: + # TODO: try deleting variables from Parameters after new type inference is default. 
variables = c.variables - - if no_prefix: - return self.copy_modified( - arg_types=c.arg_types, - arg_kinds=c.arg_kinds, - arg_names=c.arg_names, - is_ellipsis_args=c.is_ellipsis_args, - variables=[*variables, *self.variables], - ) - else: - return self.copy_modified( - arg_types=self.arg_types[:-2] + c.arg_types, - arg_kinds=self.arg_kinds[:-2] + c.arg_kinds, - arg_names=self.arg_names[:-2] + c.arg_names, - is_ellipsis_args=c.is_ellipsis_args, - variables=[*variables, *self.variables], - ) + return self.copy_modified( + arg_types=self.arg_types[:-2] + c.arg_types, + arg_kinds=self.arg_kinds[:-2] + c.arg_kinds, + arg_names=self.arg_names[:-2] + c.arg_names, + is_ellipsis_args=c.is_ellipsis_args, + variables=[*variables, *self.variables], + ) def with_unpacked_kwargs(self) -> NormalizedCallableType: if not self.unpack_kwargs: diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index a8722d8190b9..f49541420cc0 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2330,7 +2330,7 @@ T = TypeVar('T') def deco() -> Callable[[T], T]: pass reveal_type(deco) # N: Revealed type is "def () -> def [T] (T`-1) -> T`-1" f = deco() -reveal_type(f) # N: Revealed type is "def [T] (T`-1) -> T`-1" +reveal_type(f) # N: Revealed type is "def [T] (T`1) -> T`1" i = f(3) reveal_type(i) # N: Revealed type is "builtins.int" @@ -2343,7 +2343,7 @@ U = TypeVar('U') def deco(x: U) -> Callable[[T, U], T]: pass reveal_type(deco) # N: Revealed type is "def [U] (x: U`-1) -> def [T] (T`-2, U`-1) -> T`-2" f = deco("foo") -reveal_type(f) # N: Revealed type is "def [T] (T`-2, builtins.str) -> T`-2" +reveal_type(f) # N: Revealed type is "def [T] (T`1, builtins.str) -> T`1" i = f(3, "eggs") reveal_type(i) # N: Revealed type is "builtins.int" @@ -2354,9 +2354,9 @@ T = TypeVar('T') R = TypeVar('R') def deco() -> Callable[[T], Callable[[T, R], R]]: pass f = deco() -reveal_type(f) # N: Revealed type is "def [T] (T`-1) -> def [R] 
(T`-1, R`-2) -> R`-2" +reveal_type(f) # N: Revealed type is "def [T] (T`2) -> def [R] (T`2, R`1) -> R`1" g = f(3) -reveal_type(g) # N: Revealed type is "def [R] (builtins.int, R`-2) -> R`-2" +reveal_type(g) # N: Revealed type is "def [R] (builtins.int, R`3) -> R`3" s = g(4, "foo") reveal_type(s) # N: Revealed type is "builtins.str" diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index d1842a74d634..8c7c4e035961 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -2713,6 +2713,7 @@ reveal_type(func(1)) # N: Revealed type is "builtins.int" [builtins fixtures/tuple.pyi] [case testGenericLambdaGenericMethodNoCrash] +# flags: --new-type-inference from typing import TypeVar, Union, Callable, Generic S = TypeVar("S") @@ -2723,7 +2724,7 @@ def f(x: Callable[[G[T]], int]) -> T: ... class G(Generic[T]): def g(self, x: S) -> Union[S, T]: ... -f(lambda x: x.g(0)) # E: Cannot infer type argument 1 of "f" +f(lambda x: x.g(0)) # E: Incompatible return value type (got "Union[int, T]", expected "int") [case testDictStarInference] class B: ... @@ -3035,3 +3036,230 @@ reveal_type(dec1(id2)) # N: Revealed type is "def [S in (builtins.int, builtins reveal_type(dec2(id1)) # N: Revealed type is "def [UC <: __main__.C] (UC`5) -> builtins.list[UC`5]" reveal_type(dec2(id2)) # N: Revealed type is "def () -> builtins.list[]" \ # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[], ]" + +[case testInferenceAgainstGenericLambdas] +# flags: --new-type-inference +from typing import TypeVar, Callable, List + +S = TypeVar('S') +T = TypeVar('T') + +def dec1(f: Callable[[T], T]) -> Callable[[T], List[T]]: + ... +def dec2(f: Callable[[S], T]) -> Callable[[S], List[T]]: + ... +def dec3(f: Callable[[List[S]], T]) -> Callable[[S], T]: + def g(x: S) -> T: + return f([x]) + return g +def dec4(f: Callable[[S], List[T]]) -> Callable[[S], T]: + ... 
+def dec5(f: Callable[[int], T]) -> Callable[[int], List[T]]: + def g(x: int) -> List[T]: + return [f(x)] * x + return g + +reveal_type(dec1(lambda x: x)) # N: Revealed type is "def [T] (T`3) -> builtins.list[T`3]" +reveal_type(dec2(lambda x: x)) # N: Revealed type is "def [S] (S`4) -> builtins.list[S`4]" +reveal_type(dec3(lambda x: x[0])) # N: Revealed type is "def [S] (S`6) -> S`6" +reveal_type(dec4(lambda x: [x])) # N: Revealed type is "def [S] (S`8) -> S`8" +reveal_type(dec1(lambda x: 1)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" +reveal_type(dec5(lambda x: x)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" +reveal_type(dec3(lambda x: x)) # N: Revealed type is "def [S] (S`15) -> builtins.list[S`15]" +dec4(lambda x: x) # E: Incompatible return value type (got "S", expected "List[object]") +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecBasicInList] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple +from typing_extensions import ParamSpec + +T = TypeVar('T') +P = ParamSpec('P') +U = TypeVar('U') +V = TypeVar('V') + +def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ... +def id(x: U) -> U: ... +def either(x: U, y: U) -> U: ... +def pair(x: U, y: V) -> Tuple[U, V]: ... +reveal_type(dec(id)) # N: Revealed type is "def [T] (x: T`2) -> builtins.list[T`2]" +reveal_type(dec(either)) # N: Revealed type is "def [T] (x: T`4, y: T`4) -> builtins.list[T`4]" +reveal_type(dec(pair)) # N: Revealed type is "def [U, V] (x: U`-1, y: V`-2) -> builtins.list[Tuple[U`-1, V`-2]]" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecBasicDeList] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple +from typing_extensions import ParamSpec + +T = TypeVar('T') +P = ParamSpec('P') +U = TypeVar('U') +V = TypeVar('V') + +def dec(f: Callable[P, List[T]]) -> Callable[P, T]: ... +def id(x: U) -> U: ... 
+def either(x: U, y: U) -> U: ... +reveal_type(dec(id)) # N: Revealed type is "def [T] (x: builtins.list[T`2]) -> T`2" +reveal_type(dec(either)) # N: Revealed type is "def [T] (x: builtins.list[T`4], y: builtins.list[T`4]) -> T`4" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecPopOff] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple +from typing_extensions import ParamSpec, Concatenate + +T = TypeVar('T') +S = TypeVar('S') +P = ParamSpec('P') +U = TypeVar('U') +V = TypeVar('V') + +def dec(f: Callable[Concatenate[T, P], S]) -> Callable[P, Callable[[T], S]]: ... +def id(x: U) -> U: ... +def either(x: U, y: U) -> U: ... +def pair(x: U, y: V) -> Tuple[U, V]: ... +reveal_type(dec(id)) # N: Revealed type is "def () -> def [T] (T`1) -> T`1" +reveal_type(dec(either)) # N: Revealed type is "def [T] (y: T`4) -> def (T`4) -> T`4" +reveal_type(dec(pair)) # N: Revealed type is "def [V] (y: V`-2) -> def [T] (T`7) -> Tuple[T`7, V`-2]" +reveal_type(dec(dec)) # N: Revealed type is "def () -> def [T, P, S] (def (T`-1, *P.args, **P.kwargs) -> S`-3) -> def (*P.args, **P.kwargs) -> def (T`-1) -> S`-3" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecPopOn] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple +from typing_extensions import ParamSpec, Concatenate + +T = TypeVar('T') +S = TypeVar('S') +P = ParamSpec('P') +U = TypeVar('U') +V = TypeVar('V') + +def dec(f: Callable[P, Callable[[T], S]]) -> Callable[Concatenate[T, P], S]: ... +def id() -> Callable[[U], U]: ... +def either(x: U) -> Callable[[U], U]: ... +def pair(x: U) -> Callable[[V], Tuple[V, U]]: ... 
+reveal_type(dec(id)) # N: Revealed type is "def [T] (T`2) -> T`2" +reveal_type(dec(either)) # N: Revealed type is "def [T] (T`5, x: T`5) -> T`5" +reveal_type(dec(pair)) # N: Revealed type is "def [T, U] (T`8, x: U`-1) -> Tuple[T`8, U`-1]" +# This is counter-intuitive but looks correct, dec matches itself only if P is empty +reveal_type(dec(dec)) # N: Revealed type is "def [T, S] (T`11, f: def () -> def (T`11) -> S`12) -> S`12" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecVsParamSpec] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple, Generic +from typing_extensions import ParamSpec, Concatenate + +T = TypeVar('T') +P = ParamSpec('P') +Q = ParamSpec('Q') + +class Foo(Generic[P]): ... +class Bar(Generic[P, T]): ... + +def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ... +def f(*args: Q.args, **kwargs: Q.kwargs) -> Foo[Q]: ... +reveal_type(dec(f)) # N: Revealed type is "def [P] (*P.args, **P.kwargs) -> builtins.list[__main__.Foo[P`1]]" +g: Callable[Concatenate[int, Q], Foo[Q]] +reveal_type(dec(g)) # N: Revealed type is "def [Q] (builtins.int, *Q.args, **Q.kwargs) -> builtins.list[__main__.Foo[Q`-1]]" +h: Callable[Concatenate[T, Q], Bar[Q, T]] +reveal_type(dec(h)) # N: Revealed type is "def [T, Q] (T`-1, *Q.args, **Q.kwargs) -> builtins.list[__main__.Bar[Q`-2, T`-1]]" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecVsParamSpecConcatenate] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple, Generic +from typing_extensions import ParamSpec, Concatenate + +T = TypeVar('T') +P = ParamSpec('P') +Q = ParamSpec('Q') + +class Foo(Generic[P]): ... +class Bar(Generic[P, T]): ... + +def dec(f: Callable[P, int]) -> Callable[P, Foo[P]]: ... 
+h: Callable[Concatenate[T, Q], int] +g: Callable[Concatenate[T, Q], int] +h = g +reveal_type(dec(h)) # N: Revealed type is "def [T, Q] (T`-1, *Q.args, **Q.kwargs) -> __main__.Foo[[T`-1, **Q`-2]]" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecSecondary] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple, Generic +from typing_extensions import ParamSpec, Concatenate + +T = TypeVar('T') +P = ParamSpec('P') +Q = ParamSpec('Q') + +class Foo(Generic[P]): ... + +def dec(f: Callable[P, Foo[P]]) -> Callable[P, Foo[P]]: ... +g: Callable[[T], Foo[[int]]] +reveal_type(dec(g)) # N: Revealed type is "def (builtins.int) -> __main__.Foo[[builtins.int]]" +h: Callable[Q, Foo[[int]]] +reveal_type(dec(g)) # N: Revealed type is "def (builtins.int) -> __main__.Foo[[builtins.int]]" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericParamSpecSecondOrder] +# flags: --new-type-inference +from typing import TypeVar, Callable +from typing_extensions import ParamSpec, Concatenate + +T = TypeVar('T') +S = TypeVar('S') +P = ParamSpec('P') +Q = ParamSpec('Q') +U = TypeVar('U') +W = ParamSpec('W') + +def transform( + dec: Callable[[Callable[P, T]], Callable[Q, S]] +) -> Callable[[Callable[Concatenate[int, P], T]], Callable[Concatenate[int, Q], S]]: ... + +def dec(f: Callable[W, U]) -> Callable[W, U]: ... +def dec2(f: Callable[Concatenate[str, W], U]) -> Callable[Concatenate[bytes, W], U]: ... 
+reveal_type(transform(dec)) # N: Revealed type is "def [P, T] (def (builtins.int, *P.args, **P.kwargs) -> T`2) -> def (builtins.int, *P.args, **P.kwargs) -> T`2" +reveal_type(transform(dec2)) # N: Revealed type is "def [W, T] (def (builtins.int, builtins.str, *W.args, **W.kwargs) -> T`6) -> def (builtins.int, builtins.bytes, *W.args, **W.kwargs) -> T`6" +[builtins fixtures/tuple.pyi] + +[case testNoAccidentalVariableClashInNestedGeneric] +# flags: --new-type-inference +from typing import TypeVar, Callable, Generic, Tuple + +T = TypeVar('T') +S = TypeVar('S') +U = TypeVar('U') + +def pipe(x: T, f1: Callable[[T], S], f2: Callable[[S], U]) -> U: ... +def and_then(a: T) -> Callable[[S], Tuple[S, T]]: ... + +def apply(a: S, b: T) -> None: + v1 = and_then(b) + v2: Callable[[Tuple[S, T]], None] + return pipe(a, v1, v2) +[builtins fixtures/tuple.pyi] + +[case testInferenceAgainstGenericParamSpecSpuriousBoundsNotUsed] +# flags: --new-type-inference +from typing import TypeVar, Callable, Generic +from typing_extensions import ParamSpec, Concatenate + +Q = ParamSpec("Q") +class Foo(Generic[Q]): ... + +T1 = TypeVar("T1", bound=Foo[...]) +T2 = TypeVar("T2", bound=Foo[...]) +P = ParamSpec("P") +def pop_off(fn: Callable[Concatenate[T1, P], T2]) -> Callable[P, Callable[[T1], T2]]: + ... + +@pop_off +def test(command: Foo[Q]) -> Foo[Q]: ... 
+reveal_type(test) # N: Revealed type is "def () -> def [Q] (__main__.Foo[Q`-1]) -> __main__.Foo[Q`-1]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index ba36c1548532..5f25b007dd47 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -693,6 +693,7 @@ f(lambda: None) g(lambda: None) [case testIsinstanceInInferredLambda] +# flags: --new-type-inference from typing import TypeVar, Callable, Optional T = TypeVar('T') S = TypeVar('S') @@ -700,7 +701,7 @@ class A: pass class B(A): pass class C(A): pass def f(func: Callable[[T], S], *z: T, r: Optional[S] = None) -> S: pass -f(lambda x: 0 if isinstance(x, B) else 1) # E: Cannot infer type argument 1 of "f" +reveal_type(f(lambda x: 0 if isinstance(x, B) else 1)) # N: Revealed type is "builtins.int" f(lambda x: 0 if isinstance(x, B) else 1, A())() # E: "int" not callable f(lambda x: x if isinstance(x, B) else B(), A(), r=B())() # E: "B" not callable f( diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index e0f29a19ec1d..9ee30b4df859 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1375,19 +1375,19 @@ class B: pass [builtins fixtures/list.pyi] [case testUninferableLambda] +# flags: --new-type-inference from typing import TypeVar, Callable X = TypeVar('X') def f(x: Callable[[X], X]) -> X: pass -y = f(lambda x: x) # E: Cannot infer type argument 1 of "f" +y = f(lambda x: x) # E: Need type annotation for "y" [case testUninferableLambdaWithTypeError] +# flags: --new-type-inference from typing import TypeVar, Callable X = TypeVar('X') def f(x: Callable[[X], X], y: str) -> X: pass -y = f(lambda x: x, 1) # Fail -[out] -main:4: error: Cannot infer type argument 1 of "f" -main:4: error: Argument 2 to "f" has incompatible type "int"; expected "str" +y = f(lambda x: x, 1) # E: Need type annotation for "y" \ + # 
E: Argument 2 to "f" has incompatible type "int"; expected "str" [case testInferLambdaNone] # flags: --no-strict-optional diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 4910dfe05d31..e59b12d47980 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6483,7 +6483,7 @@ P = ParamSpec("P") R = TypeVar("R") @overload -def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ... @overload def func(x: Callable[P, R]) -> Callable[Concatenate[str, P], R]: ... def func(x: Callable[..., R]) -> Callable[..., R]: ... @@ -6501,7 +6501,7 @@ eggs = lambda: 'eggs' reveal_type(func(eggs)) # N: Revealed type is "def (builtins.str) -> builtins.str" spam: Callable[..., str] = lambda x, y: 'baz' -reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> builtins.str" +reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> Any" [builtins fixtures/paramspec.pyi] @@ -6596,3 +6596,20 @@ S = TypeVar("S", bound=str) def foo(x: int = ...) -> Callable[[T], T]: ... @overload def foo(x: S = ...) -> Callable[[T], T]: ... + +[case testOverloadGenericStarArgOverlap] +from typing import Any, Callable, TypeVar, overload, Union, Tuple, List + +F = TypeVar("F", bound=Callable[..., Any]) +S = TypeVar("S", bound=int) + +def id(f: F) -> F: ... + +@overload +def struct(*cols: S) -> int: ... +@overload +def struct(__cols: Union[List[S], Tuple[S, ...]]) -> int: ... 
+@id +def struct(*cols: Union[S, Union[List[S], Tuple[S, ...]]]) -> int: + pass +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 114fe1f8438a..f523cb005a2c 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1029,7 +1029,7 @@ j = Job(generic_f) reveal_type(j) # N: Revealed type is "__main__.Job[[x: _T`-1]]" jf = j.into_callable() -reveal_type(jf) # N: Revealed type is "def [_T] (x: _T`-1)" +reveal_type(jf) # N: Revealed type is "def [_T] (x: _T`2)" reveal_type(jf(1)) # N: Revealed type is "None" [builtins fixtures/paramspec.pyi] @@ -1048,10 +1048,10 @@ class Job(Generic[_P, _T]): def generic_f(x: _T) -> _T: ... j = Job(generic_f) -reveal_type(j) # N: Revealed type is "__main__.Job[[x: _T`-1], _T`-1]" +reveal_type(j) # N: Revealed type is "__main__.Job[[x: _T`2], _T`2]" jf = j.into_callable() -reveal_type(jf) # N: Revealed type is "def [_T] (x: _T`-1) -> _T`-1" +reveal_type(jf) # N: Revealed type is "def [_T] (x: _T`3) -> _T`3" reveal_type(jf(1)) # N: Revealed type is "builtins.int" [builtins fixtures/paramspec.pyi] @@ -1307,7 +1307,7 @@ reveal_type(bar(C(fn=foo, x=1))) # N: Revealed type is "__main__.C[[x: builtins [builtins fixtures/paramspec.pyi] [case testParamSpecClassConstructor] -from typing import ParamSpec, Callable +from typing import ParamSpec, Callable, TypeVar P = ParamSpec("P") @@ -1315,7 +1315,10 @@ class SomeClass: def __init__(self, a: str) -> None: pass -def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> None: +def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> Callable[P, SomeClass]: + pass + +def func_regular(t: Callable[[T], SomeClass], val: Callable[[T], SomeClass]) -> Callable[[T], SomeClass]: pass def constructor(a: str) -> SomeClass: @@ -1324,9 +1327,13 @@ def constructor(a: str) -> SomeClass: def wrong_constructor(a: bool) -> SomeClass: 
return SomeClass("a") +def wrong_name_constructor(b: bool) -> SomeClass: + return SomeClass("a") + func(SomeClass, constructor) -func(SomeClass, wrong_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" \ - # E: Argument 2 to "func" has incompatible type "Callable[[bool], SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" +reveal_type(func(SomeClass, wrong_constructor)) # N: Revealed type is "def (a: ) -> __main__.SomeClass" +reveal_type(func_regular(SomeClass, wrong_constructor)) # N: Revealed type is "def () -> __main__.SomeClass" +func(SomeClass, wrong_name_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[], SomeClass]" [builtins fixtures/paramspec.pyi] [case testParamSpecInTypeAliasBasic] @@ -1466,8 +1473,7 @@ reveal_type(gs) # N: Revealed type is "builtins.list[def (builtins.int, builtin T = TypeVar("T") class C(Generic[T]): ... -C[Callable[P, int]]() # E: The first argument to Callable must be a list of types, parameter specification, or "..." \ - # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas +C[Callable[P, int]]() [builtins fixtures/paramspec.pyi] [case testConcatDeferralNoCrash] @@ -1547,5 +1553,26 @@ U = TypeVar("U") def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ... def test(x: U) -> U: ... reveal_type(dec) # N: Revealed type is "def [P, T] (f: def (*P.args, **P.kwargs) -> T`-2) -> def (*P.args, **P.kwargs) -> builtins.list[T`-2]" -reveal_type(dec(test)) # N: Revealed type is "def [U] (x: U`-1) -> builtins.list[U`-1]" +reveal_type(dec(test)) # N: Revealed type is "def [T] (x: T`2) -> builtins.list[T`2]" + +class A: ... +TA = TypeVar("TA", bound=A) + +def test_with_bound(x: TA) -> TA: ... 
+reveal_type(dec(test_with_bound)) # N: Revealed type is "def [T <: __main__.A] (x: T`4) -> builtins.list[T`4]" +dec(test_with_bound)(0) # E: Value of type variable "T" of function cannot be "int" +dec(test_with_bound)(A()) # OK +[builtins fixtures/paramspec.pyi] + +[case testParamSpecNestedApplyNoCrash] +from typing import Callable, TypeVar +from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + +def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ... +def test() -> None: ... +# TODO: avoid this error, although it may be non-trivial. +apply(apply, test) # E: Argument 2 to "apply" has incompatible type "Callable[[], None]"; expected "Callable[P, T]" [builtins fixtures/paramspec.pyi] From 76c16a484b8b4fa6ae10abf4e96a286315ed7093 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Wed, 16 Aug 2023 13:15:42 +0100 Subject: [PATCH 58/88] [dmypy] special case stdout and stderr in show_stats too (#15881) When running dmypy, the communication between client and server is via JSON. The JSON contains the keys "out" and "err" for the actual result of "check" command, and "stdout" and "stderr" for the any other stdout and stderr text. show_stats is shown when running with --verbose. It's meant to show other keys in the JSON response, like python version or time taken. It already had some special casing to only show 1 line of text for "out" and "err". Let's add "stdout" and "stderr" to the special casing as well. Also, let's show the remaining number of characters as well. Finally, added a comment in code about stdout, stderr, out, err and how we shouldn't confuse them. (I did) Some more cleanup is needed in this area of the codebase, but will be a separate PR. show_stats outputs something like this: ``` err : out : analytics/scripts/presto/report_query_lo ... 
100 more characters platform : linux python_version : 3_9 roundtrip_time : 31.996 status : 2 stderr : \nLOG: Mypy Version: 1.6.0+de ... 50186630 more characters stdout : ``` --- mypy/dmypy/client.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py index 0e9120608509..c3a2308d1b44 100644 --- a/mypy/dmypy/client.py +++ b/mypy/dmypy/client.py @@ -562,6 +562,7 @@ def check_output( sys.stdout.write(out) sys.stdout.flush() sys.stderr.write(err) + sys.stderr.flush() if verbose: show_stats(response) if junit_xml: @@ -588,13 +589,14 @@ def check_output( def show_stats(response: Mapping[str, object]) -> None: for key, value in sorted(response.items()): - if key not in ("out", "err"): - print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value)) - else: + if key in ("out", "err", "stdout", "stderr"): + # Special case text output to display just 40 characters of text value = repr(value)[1:-1] if len(value) > 50: - value = value[:40] + " ..." + value = f"{value[:40]} ... {len(value)-40} more characters" print("%-24s: %s" % (key, value)) + continue + print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value)) @action(hang_parser) @@ -668,6 +670,8 @@ def request( # TODO: Other errors, e.g. ValueError, UnicodeError else: # Display debugging output written to stdout/stderr in the server process for convenience. + # This should not be confused with "out" and "err" fields in the response. + # Those fields hold the output of the "check" command, and are handled in check_output(). 
stdout = response.get("stdout") if stdout: sys.stdout.write(stdout) From b3d09374dac20c8e775e4380a6b44a56d7b22699 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 17 Aug 2023 15:35:22 +0100 Subject: [PATCH 59/88] Fix subtyping between ParamSpecs (#15892) Fixes https://github.com/python/mypy/issues/14169 Fixes https://github.com/python/mypy/issues/14168 Two sings here: * Actually check prefix when we should * `strict_concatenate` check should be off by default (IIUC it is not mandated by the PEP) --- mypy/expandtype.py | 3 +- mypy/messages.py | 18 +++-- mypy/subtypes.py | 17 +++-- test-data/unit/check-overloading.test | 2 +- .../unit/check-parameter-specification.test | 70 +++++++++++++++++++ 5 files changed, 94 insertions(+), 16 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 0e98ed048197..01d9c4463174 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -383,8 +383,6 @@ def visit_callable_type(self, t: CallableType) -> CallableType: t = t.expand_param_spec(repl) return t.copy_modified( arg_types=self.expand_types(t.arg_types), - arg_kinds=t.arg_kinds, - arg_names=t.arg_names, ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) @@ -402,6 +400,7 @@ def visit_callable_type(self, t: CallableType) -> CallableType: arg_kinds=t.arg_kinds[:-2] + prefix.arg_kinds + t.arg_kinds[-2:], arg_names=t.arg_names[:-2] + prefix.arg_names + t.arg_names[-2:], ret_type=t.ret_type.accept(self), + from_concatenate=t.from_concatenate or bool(repl.prefix.arg_types), ) var_arg = t.var_arg() diff --git a/mypy/messages.py b/mypy/messages.py index c9bf26f8952e..aab30ee29108 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2116,9 +2116,11 @@ def report_protocol_problems( return # Report member type conflicts - conflict_types = get_conflict_protocol_types(subtype, supertype, class_obj=class_obj) + conflict_types = get_conflict_protocol_types( + subtype, supertype, class_obj=class_obj, 
options=self.options + ) if conflict_types and ( - not is_subtype(subtype, erase_type(supertype)) + not is_subtype(subtype, erase_type(supertype), options=self.options) or not subtype.type.defn.type_vars or not supertype.type.defn.type_vars ): @@ -2780,7 +2782,11 @@ def [T <: int] f(self, x: int, y: T) -> None slash = True # If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list - if isinstance(tp.definition, FuncDef) and hasattr(tp.definition, "arguments"): + if ( + isinstance(tp.definition, FuncDef) + and hasattr(tp.definition, "arguments") + and not tp.from_concatenate + ): definition_arg_names = [arg.variable.name for arg in tp.definition.arguments] if ( len(definition_arg_names) > len(tp.arg_names) @@ -2857,7 +2863,7 @@ def get_missing_protocol_members(left: Instance, right: Instance, skip: list[str def get_conflict_protocol_types( - left: Instance, right: Instance, class_obj: bool = False + left: Instance, right: Instance, class_obj: bool = False, options: Options | None = None ) -> list[tuple[str, Type, Type]]: """Find members that are defined in 'left' but have incompatible types. Return them as a list of ('member', 'got', 'expected'). 
@@ -2872,9 +2878,9 @@ def get_conflict_protocol_types( subtype = mypy.typeops.get_protocol_member(left, member, class_obj) if not subtype: continue - is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True) + is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True, options=options) if IS_SETTABLE in get_member_flags(member, right): - is_compat = is_compat and is_subtype(supertype, subtype) + is_compat = is_compat and is_subtype(supertype, subtype, options=options) if not is_compat: conflicts.append((member, subtype, supertype)) return conflicts diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 60fccc7e357c..11847858c62c 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -600,7 +600,7 @@ def check_mixed( type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right) return nominal if right.type.is_protocol and is_protocol_implementation( - left, right, proper_subtype=self.proper_subtype + left, right, proper_subtype=self.proper_subtype, options=self.options ): return True # We record negative cache entry here, and not in the protocol check like we do for @@ -647,7 +647,7 @@ def visit_param_spec(self, left: ParamSpecType) -> bool: and right.id == left.id and right.flavor == left.flavor ): - return True + return self._is_subtype(left.prefix, right.prefix) if isinstance(right, Parameters) and are_trivial_parameters(right): return True return self._is_subtype(left.upper_bound, self.right) @@ -696,7 +696,7 @@ def visit_callable_type(self, left: CallableType) -> bool: ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, strict_concatenate=(self.options.extra_checks or self.options.strict_concatenate) if self.options - else True, + else False, ) elif isinstance(right, Overloaded): return all(self._is_subtype(left, item) for item in right.items) @@ -863,7 +863,7 @@ def visit_overloaded(self, left: Overloaded) -> bool: strict_concat = ( (self.options.extra_checks or self.options.strict_concatenate) if 
self.options - else True + else False ) if left_index not in matched_overloads and ( is_callable_compatible( @@ -1003,6 +1003,7 @@ def is_protocol_implementation( proper_subtype: bool = False, class_obj: bool = False, skip: list[str] | None = None, + options: Options | None = None, ) -> bool: """Check whether 'left' implements the protocol 'right'. @@ -1068,7 +1069,9 @@ def f(self) -> A: ... # Nominal check currently ignores arg names # NOTE: If we ever change this, be sure to also change the call to # SubtypeVisitor.build_subtype_kind(...) down below. - is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=ignore_names) + is_compat = is_subtype( + subtype, supertype, ignore_pos_arg_names=ignore_names, options=options + ) else: is_compat = is_proper_subtype(subtype, supertype) if not is_compat: @@ -1080,7 +1083,7 @@ def f(self) -> A: ... superflags = get_member_flags(member, right) if IS_SETTABLE in superflags: # Check opposite direction for settable attributes. - if not is_subtype(supertype, subtype): + if not is_subtype(supertype, subtype, options=options): return False if not class_obj: if IS_SETTABLE not in superflags: @@ -1479,7 +1482,7 @@ def are_parameters_compatible( ignore_pos_arg_names: bool = False, check_args_covariantly: bool = False, allow_partial_overlap: bool = False, - strict_concatenate_check: bool = True, + strict_concatenate_check: bool = False, ) -> bool: """Helper function for is_callable_compatible, used for Parameter compatibility""" if right.is_ellipsis_args: diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index e59b12d47980..4a4c19b4a0e9 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6483,7 +6483,7 @@ P = ParamSpec("P") R = TypeVar("R") @overload -def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ... +def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def func(x: Callable[P, R]) -> Callable[Concatenate[str, P], R]: ... def func(x: Callable[..., R]) -> Callable[..., R]: ... diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index f523cb005a2c..b06944389623 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1576,3 +1576,73 @@ def test() -> None: ... # TODO: avoid this error, although it may be non-trivial. apply(apply, test) # E: Argument 2 to "apply" has incompatible type "Callable[[], None]"; expected "Callable[P, T]" [builtins fixtures/paramspec.pyi] + +[case testParamSpecPrefixSubtypingGenericInvalid] +from typing import Generic +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") + +class A(Generic[P]): + def foo(self, *args: P.args, **kwargs: P.kwargs): + ... + +def bar(b: A[P]) -> A[Concatenate[int, P]]: + return b # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]") +[builtins fixtures/paramspec.pyi] + +[case testParamSpecPrefixSubtypingProtocolInvalid] +from typing import Protocol +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") + +class A(Protocol[P]): + def foo(self, *args: P.args, **kwargs: P.kwargs): + ... + +def bar(b: A[P]) -> A[Concatenate[int, P]]: + return b # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]") +[builtins fixtures/paramspec.pyi] + +[case testParamSpecPrefixSubtypingValidNonStrict] +from typing import Protocol +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") + +class A(Protocol[P]): + def foo(self, a: int, *args: P.args, **kwargs: P.kwargs): + ... + +class B(Protocol[P]): + def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs): + ... 
+ +def bar(b: B[P]) -> A[Concatenate[int, P]]: + return b +[builtins fixtures/paramspec.pyi] + +[case testParamSpecPrefixSubtypingInvalidStrict] +# flags: --extra-checks +from typing import Protocol +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") + +class A(Protocol[P]): + def foo(self, a: int, *args: P.args, **kwargs: P.kwargs): + ... + +class B(Protocol[P]): + def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs): + ... + +def bar(b: B[P]) -> A[Concatenate[int, P]]: + return b # E: Incompatible return value type (got "B[P]", expected "A[[int, **P]]") \ + # N: Following member(s) of "B[P]" have conflicts: \ + # N: Expected: \ + # N: def foo(self, a: int, int, /, *args: P.args, **kwargs: P.kwargs) -> Any \ + # N: Got: \ + # N: def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs) -> Any +[builtins fixtures/paramspec.pyi] From fa84534b9a9c6bdfc2a155d2e916da0c308402b9 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 18 Aug 2023 14:24:41 +0100 Subject: [PATCH 60/88] Basic support for decorated overloads (#15898) Fixes https://github.com/python/mypy/issues/15737 Fixes https://github.com/python/mypy/issues/12844 Fixes https://github.com/python/mypy/issues/12716 My goal was to fix the `ParamSpec` issues, but it turns out decorated overloads were not supported at all. Namely: * Decorators on overload items were ignored, caller would see original undecorated item types * Overload item overlap checks were performed for original types, while arguably we should use decorated types * Overload items completeness w.r.t. to implementation was checked with decorated implementation, and undecorated items Here I add basic support using same logic as for regular decorated functions: initially set type to `None` and defer callers until definition is type-checked. Note this results in few more `Cannot determine type` in case of other errors, but I think it is fine. 
Note I also add special-casing for "inline" applications of generic functions to overload arguments. This use case was mentioned a few times alongside overloads. The general fix would be tricky, and my special-casing should cover typical use cases. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/checker.py | 91 ++++++++++++------- mypy/checkexpr.py | 73 +++++++++++++-- mypy/checkmember.py | 12 ++- mypy/semanal.py | 11 ++- test-data/unit/check-generics.test | 4 +- test-data/unit/check-newsemanal.test | 3 +- test-data/unit/check-overloading.test | 27 ++++++ .../unit/check-parameter-specification.test | 28 ++++++ test-data/unit/lib-stub/functools.pyi | 2 +- 9 files changed, 206 insertions(+), 45 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 5d97a0dec713..7625bf28a88c 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -636,13 +636,30 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: self.visit_decorator(defn.items[0]) for fdef in defn.items: assert isinstance(fdef, Decorator) - self.check_func_item(fdef.func, name=fdef.func.name, allow_empty=True) + if defn.is_property: + self.check_func_item(fdef.func, name=fdef.func.name, allow_empty=True) + else: + # Perform full check for real overloads to infer type of all decorated + # overload variants. 
+ self.visit_decorator_inner(fdef, allow_empty=True) if fdef.func.abstract_status in (IS_ABSTRACT, IMPLICITLY_ABSTRACT): num_abstract += 1 if num_abstract not in (0, len(defn.items)): self.fail(message_registry.INCONSISTENT_ABSTRACT_OVERLOAD, defn) if defn.impl: defn.impl.accept(self) + if not defn.is_property: + self.check_overlapping_overloads(defn) + if defn.type is None: + item_types = [] + for item in defn.items: + assert isinstance(item, Decorator) + item_type = self.extract_callable_type(item.var.type, item) + if item_type is not None: + item_types.append(item_type) + if item_types: + defn.type = Overloaded(item_types) + # Check override validity after we analyzed current definition. if defn.info: found_method_base_classes = self.check_method_override(defn) if ( @@ -653,10 +670,35 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: self.msg.no_overridable_method(defn.name, defn) self.check_explicit_override_decorator(defn, found_method_base_classes, defn.impl) self.check_inplace_operator_method(defn) - if not defn.is_property: - self.check_overlapping_overloads(defn) return None + def extract_callable_type(self, inner_type: Type | None, ctx: Context) -> CallableType | None: + """Get type as seen by an overload item caller.""" + inner_type = get_proper_type(inner_type) + outer_type: CallableType | None = None + if inner_type is not None and not isinstance(inner_type, AnyType): + if isinstance(inner_type, CallableType): + outer_type = inner_type + elif isinstance(inner_type, Instance): + inner_call = get_proper_type( + analyze_member_access( + name="__call__", + typ=inner_type, + context=ctx, + is_lvalue=False, + is_super=False, + is_operator=True, + msg=self.msg, + original_type=inner_type, + chk=self, + ) + ) + if isinstance(inner_call, CallableType): + outer_type = inner_call + if outer_type is None: + self.msg.not_callable(inner_type, ctx) + return outer_type + def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # 
At this point we should have set the impl already, and all remaining # items are decorators @@ -680,40 +722,20 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # This can happen if we've got an overload with a different # decorator or if the implementation is untyped -- we gave up on the types. - inner_type = get_proper_type(inner_type) - if inner_type is not None and not isinstance(inner_type, AnyType): - if isinstance(inner_type, CallableType): - impl_type = inner_type - elif isinstance(inner_type, Instance): - inner_call = get_proper_type( - analyze_member_access( - name="__call__", - typ=inner_type, - context=defn.impl, - is_lvalue=False, - is_super=False, - is_operator=True, - msg=self.msg, - original_type=inner_type, - chk=self, - ) - ) - if isinstance(inner_call, CallableType): - impl_type = inner_call - if impl_type is None: - self.msg.not_callable(inner_type, defn.impl) + impl_type = self.extract_callable_type(inner_type, defn.impl) is_descriptor_get = defn.info and defn.name == "__get__" for i, item in enumerate(defn.items): - # TODO overloads involving decorators assert isinstance(item, Decorator) - sig1 = self.function_type(item.func) - assert isinstance(sig1, CallableType) + sig1 = self.extract_callable_type(item.var.type, item) + if sig1 is None: + continue for j, item2 in enumerate(defn.items[i + 1 :]): assert isinstance(item2, Decorator) - sig2 = self.function_type(item2.func) - assert isinstance(sig2, CallableType) + sig2 = self.extract_callable_type(item2.var.type, item2) + if sig2 is None: + continue if not are_argument_counts_overlapping(sig1, sig2): continue @@ -4751,17 +4773,20 @@ def visit_decorator(self, e: Decorator) -> None: e.var.type = AnyType(TypeOfAny.special_form) e.var.is_ready = True return + self.visit_decorator_inner(e) + def visit_decorator_inner(self, e: Decorator, allow_empty: bool = False) -> None: if self.recurse_into_functions: with self.tscope.function_scope(e.func): - self.check_func_item(e.func, 
name=e.func.name) + self.check_func_item(e.func, name=e.func.name, allow_empty=allow_empty) # Process decorators from the inside out to determine decorated signature, which # may be different from the declared signature. sig: Type = self.function_type(e.func) for d in reversed(e.decorators): if refers_to_fullname(d, OVERLOAD_NAMES): - self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, e) + if not allow_empty: + self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, e) continue dec = self.expr_checker.accept(d) temp = self.temp_node(sig, context=e) @@ -4788,6 +4813,8 @@ def visit_decorator(self, e: Decorator) -> None: self.msg.fail("Too many arguments for property", e) self.check_incompatible_property_override(e) # For overloaded functions we already checked override for overload as a whole. + if allow_empty: + return if e.func.info and not e.func.is_dynamic() and not e.is_overload: found_method_base_classes = self.check_method_override(e) if ( diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 68ea7c30ed6f..797473f7f58f 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -353,12 +353,13 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: elif isinstance(node, FuncDef): # Reference to a global function. result = function_type(node, self.named_type("builtins.function")) - elif isinstance(node, OverloadedFuncDef) and node.type is not None: - # node.type is None when there are multiple definitions of a function - # and it's decorated by something that is not typing.overload - # TODO: use a dummy Overloaded instead of AnyType in this case - # like we do in mypy.types.function_type()? - result = node.type + elif isinstance(node, OverloadedFuncDef): + if node.type is None: + if self.chk.in_checked_function() and node.items: + self.chk.handle_cannot_determine_type(node.name, e) + result = AnyType(TypeOfAny.from_error) + else: + result = node.type elif isinstance(node, TypeInfo): # Reference to a type object. 
if node.typeddict_type: @@ -1337,6 +1338,55 @@ def transform_callee_type( return callee + def is_generic_decorator_overload_call( + self, callee_type: CallableType, args: list[Expression] + ) -> Overloaded | None: + """Check if this looks like an application of a generic function to overload argument.""" + assert callee_type.variables + if len(callee_type.arg_types) != 1 or len(args) != 1: + # TODO: can we handle more general cases? + return None + if not isinstance(get_proper_type(callee_type.arg_types[0]), CallableType): + return None + if not isinstance(get_proper_type(callee_type.ret_type), CallableType): + return None + with self.chk.local_type_map(): + with self.msg.filter_errors(): + arg_type = get_proper_type(self.accept(args[0], type_context=None)) + if isinstance(arg_type, Overloaded): + return arg_type + return None + + def handle_decorator_overload_call( + self, callee_type: CallableType, overloaded: Overloaded, ctx: Context + ) -> tuple[Type, Type] | None: + """Type-check application of a generic callable to an overload. + + We check call on each individual overload item, and then combine results into a new + overload. This function should be only used if callee_type takes and returns a Callable. + """ + result = [] + inferred_args = [] + for item in overloaded.items: + arg = TempNode(typ=item) + with self.msg.filter_errors() as err: + item_result, inferred_arg = self.check_call(callee_type, [arg], [ARG_POS], ctx) + if err.has_new_errors(): + # This overload doesn't match. + continue + p_item_result = get_proper_type(item_result) + if not isinstance(p_item_result, CallableType): + continue + p_inferred_arg = get_proper_type(inferred_arg) + if not isinstance(p_inferred_arg, CallableType): + continue + inferred_args.append(p_inferred_arg) + result.append(p_item_result) + if not result or not inferred_args: + # None of the overload matched (or overload was initially malformed). 
+ return None + return Overloaded(result), Overloaded(inferred_args) + def check_call_expr_with_callee_type( self, callee_type: Type, @@ -1451,6 +1501,17 @@ def check_call( callee = get_proper_type(callee) if isinstance(callee, CallableType): + if callee.variables: + overloaded = self.is_generic_decorator_overload_call(callee, args) + if overloaded is not None: + # Special casing for inline application of generic callables to overloads. + # Supporting general case would be tricky, but this should cover 95% of cases. + overloaded_result = self.handle_decorator_overload_call( + callee, overloaded, context + ) + if overloaded_result is not None: + return overloaded_result + return self.check_callable_call( callee, args, diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 343dfe3de243..2b0717f181a9 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -317,7 +317,17 @@ def analyze_instance_member_access( return analyze_var(name, first_item.var, typ, info, mx) if mx.is_lvalue: mx.msg.cant_assign_to_method(mx.context) - signature = function_type(method, mx.named_type("builtins.function")) + if not isinstance(method, OverloadedFuncDef): + signature = function_type(method, mx.named_type("builtins.function")) + else: + if method.type is None: + # Overloads may be not ready if they are decorated. Handle this in same + # manner as we would handle a regular decorated function: defer if possible. 
+ if not mx.no_deferral and method.items: + mx.not_ready_callback(method.name, mx.context) + return AnyType(TypeOfAny.special_form) + assert isinstance(method.type, Overloaded) + signature = method.type signature = freshen_all_functions_type_vars(signature) if not method.is_static: if name != "__call__": diff --git a/mypy/semanal.py b/mypy/semanal.py index e21fc9f1c23f..9d968d1da781 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1153,7 +1153,16 @@ def analyze_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: elif not non_overload_indexes: self.handle_missing_overload_implementation(defn) - if types: + if types and not any( + # If some overload items are decorated with other decorators, then + # the overload type will be determined during type checking. + isinstance(it, Decorator) and len(it.decorators) > 1 + for it in defn.items + ): + # TODO: should we enforce decorated overloads consistency somehow? + # Some existing code uses both styles: + # * Put decorator only on implementation, use "effective" types in overloads + # * Put decorator everywhere, use "bare" types in overloads. 
defn.type = Overloaded(types) defn.type.line = defn.line diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 8c7c4e035961..1fac42b492a8 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -3062,10 +3062,10 @@ def dec5(f: Callable[[int], T]) -> Callable[[int], List[T]]: reveal_type(dec1(lambda x: x)) # N: Revealed type is "def [T] (T`3) -> builtins.list[T`3]" reveal_type(dec2(lambda x: x)) # N: Revealed type is "def [S] (S`4) -> builtins.list[S`4]" reveal_type(dec3(lambda x: x[0])) # N: Revealed type is "def [S] (S`6) -> S`6" -reveal_type(dec4(lambda x: [x])) # N: Revealed type is "def [S] (S`8) -> S`8" +reveal_type(dec4(lambda x: [x])) # N: Revealed type is "def [S] (S`9) -> S`9" reveal_type(dec1(lambda x: 1)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" reveal_type(dec5(lambda x: x)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" -reveal_type(dec3(lambda x: x)) # N: Revealed type is "def [S] (S`15) -> builtins.list[S`15]" +reveal_type(dec3(lambda x: x)) # N: Revealed type is "def [S] (S`16) -> builtins.list[S`16]" dec4(lambda x: x) # E: Incompatible return value type (got "S", expected "List[object]") [builtins fixtures/list.pyi] diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index 8300957ee511..ff8d346e74a1 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -3207,8 +3207,7 @@ class User: self.first_name = value def __init__(self, name: str) -> None: - self.name = name # E: Cannot assign to a method \ - # E: Incompatible types in assignment (expression has type "str", variable has type "Callable[..., Any]") + self.name = name # E: Cannot assign to a method [case testNewAnalyzerMemberNameMatchesTypedDict] from typing import Union, Any diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 
4a4c19b4a0e9..b778dc50b376 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6613,3 +6613,30 @@ def struct(__cols: Union[List[S], Tuple[S, ...]]) -> int: ... def struct(*cols: Union[S, Union[List[S], Tuple[S, ...]]]) -> int: pass [builtins fixtures/tuple.pyi] + +[case testRegularGenericDecoratorOverload] +from typing import Callable, overload, TypeVar, List + +S = TypeVar("S") +T = TypeVar("T") +def transform(func: Callable[[S], List[T]]) -> Callable[[S], T]: ... + +@overload +def foo(x: int) -> List[float]: ... +@overload +def foo(x: str) -> List[str]: ... +def foo(x): ... + +reveal_type(transform(foo)) # N: Revealed type is "Overload(def (builtins.int) -> builtins.float, def (builtins.str) -> builtins.str)" + +@transform +@overload +def bar(x: int) -> List[float]: ... +@transform +@overload +def bar(x: str) -> List[str]: ... +@transform +def bar(x): ... + +reveal_type(bar) # N: Revealed type is "Overload(def (builtins.int) -> builtins.float, def (builtins.str) -> builtins.str)" +[builtins fixtures/paramspec.pyi] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index b06944389623..3a8ecdf81c7d 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1646,3 +1646,31 @@ def bar(b: B[P]) -> A[Concatenate[int, P]]: # N: Got: \ # N: def foo(self, a: int, b: int, *args: P.args, **kwargs: P.kwargs) -> Any [builtins fixtures/paramspec.pyi] + +[case testParamSpecDecoratorOverload] +from typing import Callable, overload, TypeVar, List +from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") +def transform(func: Callable[P, List[T]]) -> Callable[P, T]: ... + +@overload +def foo(x: int) -> List[float]: ... +@overload +def foo(x: str) -> List[str]: ... +def foo(x): ... 
+ +reveal_type(transform(foo)) # N: Revealed type is "Overload(def (x: builtins.int) -> builtins.float, def (x: builtins.str) -> builtins.str)" + +@transform +@overload +def bar(x: int) -> List[float]: ... +@transform +@overload +def bar(x: str) -> List[str]: ... +@transform +def bar(x): ... + +reveal_type(bar) # N: Revealed type is "Overload(def (x: builtins.int) -> builtins.float, def (x: builtins.str) -> builtins.str)" +[builtins fixtures/paramspec.pyi] diff --git a/test-data/unit/lib-stub/functools.pyi b/test-data/unit/lib-stub/functools.pyi index 9e62a14c2f34..e665b2bad0c2 100644 --- a/test-data/unit/lib-stub/functools.pyi +++ b/test-data/unit/lib-stub/functools.pyi @@ -1,4 +1,4 @@ -from typing import Generic, TypeVar, Callable, Any, Mapping +from typing import Generic, TypeVar, Callable, Any, Mapping, overload _T = TypeVar("_T") From b02ddf1db45f6cd1b3a4cf0f40e768b36f5636a7 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 18 Aug 2023 16:18:06 +0100 Subject: [PATCH 61/88] Polymorphic inference: basic support for variadic types (#15879) This is the fifth PR in the series started by #15287, and the last one for the foreseeable future. This completes polymorphic inference sufficiently for extensive experimentation, and enabling polymorphic fallback by default. Remaining items for which I am going to open follow-up issues: * Enable `--new-type-inference` by default (should be done before everything else in this list). * Use polymorphic inference during unification. * Use polymorphic inference as the primary and only mechanism, rather than a fallback if basic inference fails in some way. * Move `apply_poly()` logic from `checkexpr.py` to `applytype.py` (this one depends on everything above). * Experiment with backtracking in the new solver. * Experiment with universal quantification for types other than `Callable` (btw we already have a hacky support for capturing a generic function in an instance with `ParamSpec`). Now some comments on the PR proper. 
First of all I decided to do some clean-up of `TypeVarTuple` support, but added only strictly necessary parts of the cleanup here. Everything else will be in follow up PR(s). The polymorphic inference/solver/application is practically trivial here, so here is my view on how I see large-scale structure of `TypeVarTuple` implementation: * There should be no special-casing in `applytype.py`, so I deleted everything from there (as I did for `ParamSpec`) and complemented `visit_callable_type()` in `expandtype.py`. Basically, `applytype.py` should have three simple steps: validate substitutions (upper bounds, values, argument kinds etc.); call `expand_type()`; update callable type variables (currently we just reduce the number, but in future we may also add variables there, see TODO that I added). * The only valid positions for a variadic item (a.k.a. `UnpackType`) are inside `Instance`s, `TupleType`s, and `CallableType`s. I like how there is an agreement that for callables there should never be a prefix, and instead prefix should be represented with regular positional arguments. I think that ideally we should enforce this with an `assert` in `CallableType` constructor (similar to how I did this for `ParamSpec`). * Completing `expand_type()` should be a priority (since it describes basic semantics of `TypeVarLikeType`s). I think I made good progress in this direction. IIUC the only valid substitution for `*Ts` are `TupleType.items`, `*tuple[X, ...]`, `Any`, and ``, so it was not hard. * I propose to only allow `TupleType` (mostly for `semanal.py`, see item below), plain `TypeVarTupleType`, and a homogeneous `tuple` instances inside `UnpackType`. Supporting unions of those is not specified by the PEP and support will likely be quite tricky to implement. Also I propose to even eagerly expand type aliases to tuples (since there is no point in supporting recursive types like `A = Tuple[int, *A]`). 
* I propose to forcefully flatten nested `TupleType`s, there should be no things like `Tuple[X1, *Tuple[X2, *Ts, Y2], Y1]` etc after semantic analysis. (Similarly to how we always flatten `Parameters` for `ParamSpec`, and how we flatten nested unions in `UnionType` _constructor_). Currently we do the flattening/normalization of tuples in `expand_type()` etc. * I suspect `build_constraints_for_unpack()` may be broken, at least when it was used for tuples and callables it did something wrong in few cases I tested (and there are other symptoms I mentioned in a TODO). I therefore re-implemented logic for callables/tuples using a separate dedicated helper. I will investigate more later. As I mentioned above I only implemented strictly minimal amount of the above plan to make my tests pass, but still wanted to write this out to see if there are any objections (or maybe I don't understand something). If there are no objections to this plan, I will continue it in separate PR(s). Btw, I like how with this plan we will have clear logical parallels between `TypeVarTuple` implementation and (recently updated) `ParamSpec` implementation. 
--------- Co-authored-by: Ivan Levkivskyi --- mypy/applytype.py | 64 ++------ mypy/checkexpr.py | 24 ++- mypy/constraints.py | 192 +++++++++++++++++++----- mypy/expandtype.py | 135 ++++++++--------- mypy/solve.py | 37 +++-- mypy/typeops.py | 3 + mypy/types.py | 7 +- mypy/typevartuples.py | 19 --- test-data/unit/check-generics.test | 144 +++++++++++++++++- test-data/unit/check-typevar-tuple.test | 24 +-- 10 files changed, 440 insertions(+), 209 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 6abe7f0022f8..884be287e33d 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -3,15 +3,13 @@ from typing import Callable, Sequence import mypy.subtypes -from mypy.expandtype import expand_type, expand_unpack_with_variables -from mypy.nodes import ARG_STAR, Context +from mypy.expandtype import expand_type +from mypy.nodes import Context from mypy.types import ( AnyType, CallableType, - Instance, ParamSpecType, PartialType, - TupleType, Type, TypeVarId, TypeVarLikeType, @@ -21,7 +19,6 @@ UnpackType, get_proper_type, ) -from mypy.typevartuples import find_unpack_in_list, replace_starargs def get_target_type( @@ -107,6 +104,8 @@ def apply_generic_arguments( if target_type is not None: id_to_type[tvar.id] = target_type + # TODO: validate arg_kinds/arg_names for ParamSpec and TypeVarTuple replacements, + # not just type variable bounds above. param_spec = callable.param_spec() if param_spec is not None: nt = id_to_type.get(param_spec.id) @@ -122,55 +121,9 @@ def apply_generic_arguments( # Apply arguments to argument types. 
var_arg = callable.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): - star_index = callable.arg_kinds.index(ARG_STAR) - callable = callable.copy_modified( - arg_types=( - [expand_type(at, id_to_type) for at in callable.arg_types[:star_index]] - + [callable.arg_types[star_index]] - + [expand_type(at, id_to_type) for at in callable.arg_types[star_index + 1 :]] - ) - ) - - unpacked_type = get_proper_type(var_arg.typ.type) - if isinstance(unpacked_type, TupleType): - # Assuming for now that because we convert prefixes to positional arguments, - # the first argument is always an unpack. - expanded_tuple = expand_type(unpacked_type, id_to_type) - if isinstance(expanded_tuple, TupleType): - # TODO: handle the case where the tuple has an unpack. This will - # hit an assert below. - expanded_unpack = find_unpack_in_list(expanded_tuple.items) - if expanded_unpack is not None: - callable = callable.copy_modified( - arg_types=( - callable.arg_types[:star_index] - + [expanded_tuple] - + callable.arg_types[star_index + 1 :] - ) - ) - else: - callable = replace_starargs(callable, expanded_tuple.items) - else: - # TODO: handle the case for if we get a variable length tuple. 
- assert False, f"mypy bug: unimplemented case, {expanded_tuple}" - elif isinstance(unpacked_type, TypeVarTupleType): - expanded_tvt = expand_unpack_with_variables(var_arg.typ, id_to_type) - if isinstance(expanded_tvt, list): - for t in expanded_tvt: - assert not isinstance(t, UnpackType) - callable = replace_starargs(callable, expanded_tvt) - else: - assert isinstance(expanded_tvt, Instance) - assert expanded_tvt.type.fullname == "builtins.tuple" - callable = callable.copy_modified( - arg_types=( - callable.arg_types[:star_index] - + [expanded_tvt.args[0]] - + callable.arg_types[star_index + 1 :] - ) - ) - else: - assert False, "mypy bug: unhandled case applying unpack" + callable = expand_type(callable, id_to_type) + assert isinstance(callable, CallableType) + return callable.copy_modified(variables=[tv for tv in tvars if tv.id not in id_to_type]) else: callable = callable.copy_modified( arg_types=[expand_type(at, id_to_type) for at in callable.arg_types] @@ -183,6 +136,9 @@ def apply_generic_arguments( type_guard = None # The callable may retain some type vars if only some were applied. + # TODO: move apply_poly() logic from checkexpr.py here when new inference + # becomes universally used (i.e. in all passes + in unification). + # With this new logic we can actually *add* some new free variables. remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type] return callable.copy_modified( diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 797473f7f58f..420cfd990820 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2373,11 +2373,15 @@ def check_argument_types( ] actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) - assert isinstance(orig_callee_arg_type, TupleType) - assert orig_callee_arg_type.items - callee_arg_types = orig_callee_arg_type.items + # TODO: can we really assert this? What if formal is just plain Unpack[Ts]? 
+ assert isinstance(orig_callee_arg_type, UnpackType) + assert isinstance(orig_callee_arg_type.type, ProperType) and isinstance( + orig_callee_arg_type.type, TupleType + ) + assert orig_callee_arg_type.type.items + callee_arg_types = orig_callee_arg_type.type.items callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * ( - len(orig_callee_arg_type.items) - 1 + len(orig_callee_arg_type.type.items) - 1 ) expanded_tuple = True @@ -5853,8 +5857,9 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: return super().visit_param_spec(t) def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: - # TODO: Support polymorphic apply for TypeVarTuple. - raise PolyTranslationError() + if t in self.poly_tvars and t not in self.bound_tvars: + raise PolyTranslationError() + return super().visit_type_var_tuple(t) def visit_type_alias_type(self, t: TypeAliasType) -> Type: if not t.args: @@ -5888,7 +5893,6 @@ def visit_instance(self, t: Instance) -> Type: return t.copy_modified(args=new_args) # There is the same problem with callback protocols as with aliases # (callback protocols are essentially more flexible aliases to callables). - # Note: consider supporting bindings in instances, e.g. LRUCache[[x: T], T]. 
if t.args and t.type.is_protocol and t.type.protocol_members == ["__call__"]: if t.type in self.seen_aliases: raise PolyTranslationError() @@ -5923,6 +5927,12 @@ def __init__(self) -> None: def visit_type_var(self, t: TypeVarType) -> bool: return True + def visit_param_spec(self, t: ParamSpecType) -> bool: + return True + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return True + def has_erased_component(t: Type | None) -> bool: return t is not None and t.accept(HasErasedComponentsQuery()) diff --git a/mypy/constraints.py b/mypy/constraints.py index 04c3378ce16b..26504ed06b3e 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -9,7 +9,16 @@ from mypy.argmap import ArgTypeExpander from mypy.erasetype import erase_typevars from mypy.maptype import map_instance_to_supertype -from mypy.nodes import ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, CONTRAVARIANT, COVARIANT, ArgKind +from mypy.nodes import ( + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + CONTRAVARIANT, + COVARIANT, + ArgKind, + TypeInfo, +) from mypy.types import ( TUPLE_LIKE_INSTANCE_NAMES, AnyType, @@ -70,6 +79,8 @@ class Constraint: def __init__(self, type_var: TypeVarLikeType, op: int, target: Type) -> None: self.type_var = type_var.id self.op = op + # TODO: should we add "assert not isinstance(target, UnpackType)"? + # UnpackType is a synthetic type, and is never valid as a constraint target. self.target = target self.origin_type_var = type_var # These are additional type variables that should be solved for together with type_var. 
@@ -940,17 +951,20 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: if not template.is_ellipsis_args: unpack_present = find_unpack_in_list(template.arg_types) if unpack_present is not None: - ( - unpack_constraints, - cactual_args_t, - template_args_t, - ) = find_and_build_constraints_for_unpack( - tuple(cactual.arg_types), tuple(template.arg_types), self.direction + # We need to re-normalize args to the form they appear in tuples, + # for callables we always pack the suffix inside another tuple. + unpack = template.arg_types[unpack_present] + assert isinstance(unpack, UnpackType) + tuple_type = get_tuple_fallback_from_unpack(unpack) + template_types = repack_callable_args(template, tuple_type) + actual_types = repack_callable_args(cactual, tuple_type) + # Now we can use the same general helper as for tuple types. + unpack_constraints = build_constraints_for_simple_unpack( + template_types, actual_types, neg_op(self.direction) ) - template_args = list(template_args_t) - cactual_args = list(cactual_args_t) + template_args = [] + cactual_args = [] res.extend(unpack_constraints) - assert len(template_args) == len(cactual_args) else: template_args = template.arg_types cactual_args = cactual.arg_types @@ -961,7 +975,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # branches), and in Callable vs Callable inference (two branches). for t, a in zip(template_args, cactual_args): # This avoids bogus constraints like T <: P.args - if isinstance(a, ParamSpecType): + if isinstance(a, (ParamSpecType, UnpackType)): # TODO: can we infer something useful for *T vs P? continue # Negate direction due to function argument type contravariance. 
@@ -1093,13 +1107,11 @@ def visit_tuple_type(self, template: TupleType) -> list[Constraint]: return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)] else: assert isinstance(actual, TupleType) - ( - unpack_constraints, - actual_items, - template_items, - ) = find_and_build_constraints_for_unpack( - tuple(actual.items), tuple(template.items), self.direction + unpack_constraints = build_constraints_for_simple_unpack( + template.items, actual.items, self.direction ) + actual_items: tuple[Type, ...] = () + template_items: tuple[Type, ...] = () res.extend(unpack_constraints) elif isinstance(actual, TupleType): actual_items = tuple(actual.items) @@ -1232,28 +1244,132 @@ def find_matching_overload_items( return res -def find_and_build_constraints_for_unpack( - mapped: tuple[Type, ...], template: tuple[Type, ...], direction: int -) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: - mapped_prefix_len = find_unpack_in_list(mapped) - if mapped_prefix_len is not None: - mapped_suffix_len: int | None = len(mapped) - mapped_prefix_len - 1 +def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo | None: + """Get builtins.tuple type from available types to construct homogeneous tuples.""" + tp = get_proper_type(unpack.type) + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + return tp.type + if isinstance(tp, TypeVarTupleType): + return tp.tuple_fallback.type + if isinstance(tp, TupleType): + for base in tp.partial_fallback.type.mro: + if base.fullname == "builtins.tuple": + return base + return None + + +def repack_callable_args(callable: CallableType, tuple_type: TypeInfo | None) -> list[Type]: + """Present callable with star unpack in a normalized form. + + Since positional arguments cannot follow star argument, they are packed in a suffix, + while prefix is represented as individual positional args. 
We want to put all in a single + list with unpack in the middle, and prefix/suffix on the sides (as they would appear + in e.g. a TupleType). + """ + if ARG_STAR not in callable.arg_kinds: + return callable.arg_types + star_index = callable.arg_kinds.index(ARG_STAR) + arg_types = callable.arg_types[:star_index] + star_type = callable.arg_types[star_index] + suffix_types = [] + if not isinstance(star_type, UnpackType): + if tuple_type is not None: + # Re-normalize *args: X -> *args: *tuple[X, ...] + star_type = UnpackType(Instance(tuple_type, [star_type])) + else: + # This is unfortunate, something like tuple[Any, ...] would be better. + star_type = UnpackType(AnyType(TypeOfAny.from_error)) else: - mapped_suffix_len = None + tp = get_proper_type(star_type.type) + if isinstance(tp, TupleType): + assert isinstance(tp.items[0], UnpackType) + star_type = tp.items[0] + suffix_types = tp.items[1:] + return arg_types + [star_type] + suffix_types - template_prefix_len = find_unpack_in_list(template) - assert template_prefix_len is not None - template_suffix_len = len(template) - template_prefix_len - 1 - return build_constraints_for_unpack( - mapped, - mapped_prefix_len, - mapped_suffix_len, - template, - template_prefix_len, - template_suffix_len, - direction, +def build_constraints_for_simple_unpack( + template_args: list[Type], actual_args: list[Type], direction: int +) -> list[Constraint]: + """Infer constraints between two lists of types with variadic items. + + This function is only supposed to be called when a variadic item is present in templates. + If there is no variadic item the actuals, we simply use split_with_prefix_and_suffix() + and infer prefix <: prefix, suffix <: suffix, variadic <: middle. 
If there is a variadic + item in the actuals we need to be more careful, only common prefix/suffix can generate + constraints, also we can only infer constraints for variadic template item, if template + prefix/suffix are shorter that actual ones, otherwise there may be partial overlap + between variadic items, for example if template prefix is longer: + + templates: T1, T2, Ts, Ts, Ts, ... + actuals: A1, As, As, As, ... + + Note: this function can only be called for builtin variadic constructors: Tuple and Callable, + for Instances variance depends on position, and a much more complex function + build_constraints_for_unpack() should be used. + """ + template_unpack = find_unpack_in_list(template_args) + assert template_unpack is not None + template_prefix = template_unpack + template_suffix = len(template_args) - template_prefix - 1 + + t_unpack = None + res = [] + + actual_unpack = find_unpack_in_list(actual_args) + if actual_unpack is None: + t_unpack = template_args[template_unpack] + if template_prefix + template_suffix > len(actual_args): + # These can't be subtypes of each-other, return fast. + assert isinstance(t_unpack, UnpackType) + if isinstance(t_unpack.type, TypeVarTupleType): + # Set TypeVarTuple to empty to improve error messages. + return [ + Constraint( + t_unpack.type, direction, TupleType([], t_unpack.type.tuple_fallback) + ) + ] + else: + return [] + common_prefix = template_prefix + common_suffix = template_suffix + else: + actual_prefix = actual_unpack + actual_suffix = len(actual_args) - actual_prefix - 1 + common_prefix = min(template_prefix, actual_prefix) + common_suffix = min(template_suffix, actual_suffix) + if actual_prefix >= template_prefix and actual_suffix >= template_suffix: + # This is the only case where we can guarantee there will be no partial overlap. + t_unpack = template_args[template_unpack] + + # Handle constraints from prefixes/suffixes first. 
+ start, middle, end = split_with_prefix_and_suffix( + tuple(actual_args), common_prefix, common_suffix ) + for t, a in zip(template_args[:common_prefix], start): + res.extend(infer_constraints(t, a, direction)) + if common_suffix: + for t, a in zip(template_args[-common_suffix:], end): + res.extend(infer_constraints(t, a, direction)) + + if t_unpack is not None: + # Add constraint(s) for variadic item when possible. + assert isinstance(t_unpack, UnpackType) + tp = get_proper_type(t_unpack.type) + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + # Homogeneous case *tuple[T, ...] <: [X, Y, Z, ...]. + for a in middle: + # TODO: should we use union instead of join here? + if not isinstance(a, UnpackType): + res.extend(infer_constraints(tp.args[0], a, direction)) + else: + a_tp = get_proper_type(a.type) + # This is the case *tuple[T, ...] <: *tuple[A, ...]. + if isinstance(a_tp, Instance) and a_tp.type.fullname == "builtins.tuple": + res.extend(infer_constraints(tp.args[0], a_tp.args[0], direction)) + elif isinstance(tp, TypeVarTupleType): + res.append(Constraint(tp, direction, TupleType(list(middle), tp.tuple_fallback))) + return res def build_constraints_for_unpack( @@ -1268,6 +1384,10 @@ def build_constraints_for_unpack( template_suffix_len: int, direction: int, ) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + # TODO: this function looks broken: + # a) it should take into account variances, but it doesn't + # b) it looks like both call sites always pass identical values to args (2, 3) and (5, 6) + # because after map_instance_to_supertype() both template and actual have same TypeInfo. 
if mapped_prefix_len is None: mapped_prefix_len = template_prefix_len if mapped_suffix_len is None: @@ -1314,4 +1434,4 @@ def build_constraints_for_unpack( if len(template_unpack.items) == len(mapped_middle): for template_arg, item in zip(template_unpack.items, mapped_middle): res.extend(infer_constraints(template_arg, item, direction)) - return (res, mapped_prefix + mapped_suffix, template_prefix + template_suffix) + return res, mapped_prefix + mapped_suffix, template_prefix + template_suffix diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 01d9c4463174..6f69e09936db 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -257,7 +257,7 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: # Sometimes solver may need to expand a type variable with (a copy of) itself # (usually together with other TypeVars, but it is hard to filter out TypeVarTuples). - repl = self.variables[t.id] + repl = self.variables.get(t.id, t) if isinstance(repl, TypeVarTupleType): return repl raise NotImplementedError @@ -269,45 +269,54 @@ def visit_unpack_type(self, t: UnpackType) -> Type: # Relevant sections that can call unpack should call expand_unpack() # instead. # However, if the item is a variadic tuple, we can simply carry it over. + # In particular, if we expand A[*tuple[T, ...]] with substitutions {T: str}, # it is hard to assert this without getting proper type. 
return UnpackType(t.type.accept(self)) - def expand_unpack(self, t: UnpackType) -> list[Type] | Instance | AnyType | None: - return expand_unpack_with_variables(t, self.variables) + def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType: + assert isinstance(t.type, TypeVarTupleType) + repl = get_proper_type(self.variables.get(t.type.id, t.type)) + if isinstance(repl, TupleType): + return repl.items + elif ( + isinstance(repl, Instance) + and repl.type.fullname == "builtins.tuple" + or isinstance(repl, TypeVarTupleType) + ): + return [UnpackType(typ=repl)] + elif isinstance(repl, (AnyType, UninhabitedType)): + # tuple[Any, ...] for Any would be better, but we don't have + # the type info to construct that type here. + return repl + else: + raise RuntimeError(f"Invalid type replacement to expand: {repl}") def visit_parameters(self, t: Parameters) -> Type: return t.copy_modified(arg_types=self.expand_types(t.arg_types)) + # TODO: can we simplify this method? It is too long. def interpolate_args_for_unpack( self, t: CallableType, var_arg: UnpackType ) -> tuple[list[str | None], list[ArgKind], list[Type]]: star_index = t.arg_kinds.index(ARG_STAR) - # We have something like Unpack[Tuple[X1, X2, Unpack[Ts], Y1, Y2]] var_arg_type = get_proper_type(var_arg.type) + # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]] if isinstance(var_arg_type, TupleType): expanded_tuple = var_arg_type.accept(self) - # TODO: handle the case that expanded_tuple is a variable length tuple. 
assert isinstance(expanded_tuple, ProperType) and isinstance(expanded_tuple, TupleType) expanded_items = expanded_tuple.items else: + # We have plain Unpack[Ts] expanded_items_res = self.expand_unpack(var_arg) if isinstance(expanded_items_res, list): expanded_items = expanded_items_res - elif ( - isinstance(expanded_items_res, Instance) - and expanded_items_res.type.fullname == "builtins.tuple" - ): - # TODO: We shouldnt't simply treat this as a *arg because of suffix handling - # (there cannot be positional args after a *arg) + else: + # We got Any or arg_types = ( - t.arg_types[:star_index] - + [expanded_items_res.args[0]] - + t.arg_types[star_index + 1 :] + t.arg_types[:star_index] + [expanded_items_res] + t.arg_types[star_index + 1 :] ) - return (t.arg_names, t.arg_kinds, arg_types) - else: - return (t.arg_names, t.arg_kinds, t.arg_types) + return t.arg_names, t.arg_kinds, arg_types expanded_unpack_index = find_unpack_in_list(expanded_items) # This is the case where we just have Unpack[Tuple[X1, X2, X3]] @@ -337,13 +346,14 @@ def interpolate_args_for_unpack( expanded_unpack = expanded_items[expanded_unpack_index] assert isinstance(expanded_unpack, UnpackType) - # Extract the typevartuple so we can get a tuple fallback from it. + # Extract the TypeVarTuple, so we can get a tuple fallback from it. expanded_unpacked_tvt = expanded_unpack.type if isinstance(expanded_unpacked_tvt, TypeVarTupleType): fallback = expanded_unpacked_tvt.tuple_fallback else: # This can happen when tuple[Any, ...] is used to "patch" a variadic - # generic type without type arguments provided. + # generic type without type arguments provided, or when substitution is + # homogeneous tuple. 
assert isinstance(expanded_unpacked_tvt, ProperType) assert isinstance(expanded_unpacked_tvt, Instance) assert expanded_unpacked_tvt.type.fullname == "builtins.tuple" @@ -354,18 +364,31 @@ def interpolate_args_for_unpack( arg_kinds = ( t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:] ) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded_items[:prefix_len] - # Constructing the Unpack containing the tuple without the prefix. - + [ - UnpackType(TupleType(expanded_items[prefix_len:], fallback)) - if len(expanded_items) - prefix_len > 1 - else expanded_items[0] - ] - + self.expand_types(t.arg_types[star_index + 1 :]) - ) - return (arg_names, arg_kinds, arg_types) + if ( + len(expanded_items) == 1 + and isinstance(expanded_unpack.type, ProperType) + and isinstance(expanded_unpack.type, Instance) + ): + assert expanded_unpack.type.type.fullname == "builtins.tuple" + # Normalize *args: *tuple[X, ...] -> *args: X + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + [expanded_unpack.type.args[0]] + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + else: + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items[:prefix_len] + # Constructing the Unpack containing the tuple without the prefix. + + [ + UnpackType(TupleType(expanded_items[prefix_len:], fallback)) + if len(expanded_items) - prefix_len > 1 + else expanded_items[prefix_len] + ] + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + return arg_names, arg_kinds, arg_types def visit_callable_type(self, t: CallableType) -> CallableType: param_spec = t.param_spec() @@ -430,7 +453,7 @@ def visit_overloaded(self, t: Overloaded) -> Type: def expand_types_with_unpack( self, typs: Sequence[Type] - ) -> list[Type] | AnyType | UninhabitedType | Instance: + ) -> list[Type] | AnyType | UninhabitedType: """Expands a list of types that has an unpack. 
In corner cases, this can return a type rather than a list, in which case this @@ -444,15 +467,8 @@ def expand_types_with_unpack( for item in typs: if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): unpacked_items = self.expand_unpack(item) - if unpacked_items is None: - # TODO: better error, something like tuple of unknown? - return UninhabitedType() - elif isinstance(unpacked_items, Instance): - if len(typs) == 1: - return unpacked_items - else: - assert False, "Invalid unpack of variable length tuple" - elif isinstance(unpacked_items, AnyType): + if isinstance(unpacked_items, (AnyType, UninhabitedType)): + # TODO: better error for , something like tuple of unknown? return unpacked_items else: items.extend(unpacked_items) @@ -464,6 +480,14 @@ def expand_types_with_unpack( def visit_tuple_type(self, t: TupleType) -> Type: items = self.expand_types_with_unpack(t.items) if isinstance(items, list): + if len(items) == 1: + # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...] + item = items[0] + if isinstance(item, UnpackType): + assert isinstance(item.type, ProperType) + if isinstance(item.type, Instance): + assert item.type.type.fullname == "builtins.tuple" + return item.type fallback = t.partial_fallback.accept(self) assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) return t.copy_modified(items=items, fallback=fallback) @@ -509,6 +533,7 @@ def visit_type_alias_type(self, t: TypeAliasType) -> Type: # alias itself), so we just expand the arguments. args = self.expand_types_with_unpack(t.args) if isinstance(args, list): + # TODO: normalize if target is Tuple, and args are [*tuple[X, ...]]? 
return t.copy_modified(args=args) else: return args @@ -520,34 +545,6 @@ def expand_types(self, types: Iterable[Type]) -> list[Type]: return a -def expand_unpack_with_variables( - t: UnpackType, variables: Mapping[TypeVarId, Type] -) -> list[Type] | Instance | AnyType | None: - """May return either a list of types to unpack to, any, or a single - variable length tuple. The latter may not be valid in all contexts. - """ - if isinstance(t.type, TypeVarTupleType): - repl = get_proper_type(variables.get(t.type.id, t)) - if isinstance(repl, TupleType): - return repl.items - elif isinstance(repl, Instance) and repl.type.fullname == "builtins.tuple": - return repl - elif isinstance(repl, AnyType): - # tuple[Any, ...] would be better, but we don't have - # the type info to construct that type here. - return repl - elif isinstance(repl, TypeVarTupleType): - return [UnpackType(typ=repl)] - elif isinstance(repl, UnpackType): - return [repl] - elif isinstance(repl, UninhabitedType): - return None - else: - raise NotImplementedError(f"Invalid type replacement to expand: {repl}") - else: - raise NotImplementedError(f"Invalid type to expand: {t.type}") - - @overload def expand_self_type(var: Var, typ: ProperType, replacement: ProperType) -> ProperType: ... 
diff --git a/mypy/solve.py b/mypy/solve.py index 4b2b899c2a8d..5945d97ed85a 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections import defaultdict -from typing import Iterable, Sequence +from typing import Iterable, Sequence, Tuple from typing_extensions import TypeAlias as _TypeAlias from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints @@ -19,13 +19,16 @@ NoneType, ParamSpecType, ProperType, + TupleType, Type, TypeOfAny, TypeVarId, TypeVarLikeType, + TypeVarTupleType, TypeVarType, UninhabitedType, UnionType, + UnpackType, get_proper_type, ) from mypy.typestate import type_state @@ -330,6 +333,23 @@ def is_trivial_bound(tp: ProperType) -> bool: return isinstance(tp, Instance) and tp.type.fullname == "builtins.object" +def find_linear(c: Constraint) -> Tuple[bool, TypeVarId | None]: + """Find out if this constraint represent a linear relationship, return target id if yes.""" + if isinstance(c.origin_type_var, TypeVarType): + if isinstance(c.target, TypeVarType): + return True, c.target.id + if isinstance(c.origin_type_var, ParamSpecType): + if isinstance(c.target, ParamSpecType) and not c.target.prefix.arg_types: + return True, c.target.id + if isinstance(c.origin_type_var, TypeVarTupleType): + target = get_proper_type(c.target) + if isinstance(target, TupleType) and len(target.items) == 1: + item = target.items[0] + if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): + return True, item.type.id + return False, None + + def transitive_closure( tvars: list[TypeVarId], constraints: list[Constraint] ) -> tuple[Graph, Bounds, Bounds]: @@ -361,16 +381,15 @@ def transitive_closure( c = remaining.pop() # Note that ParamSpec constraint P <: Q may be considered linear only if Q has no prefix, # for cases like P <: Concatenate[T, Q] we should consider this non-linear and put {P} and - # {T, Q} into separate SCCs. 
- if ( - isinstance(c.target, TypeVarType) - or isinstance(c.target, ParamSpecType) - and not c.target.prefix.arg_types - ) and c.target.id in tvars: + # {T, Q} into separate SCCs. Similarly, Ts <: Tuple[*Us] considered linear, while + # Ts <: Tuple[*Us, U] is non-linear. + is_linear, target_id = find_linear(c) + if is_linear and target_id in tvars: + assert target_id is not None if c.op == SUBTYPE_OF: - lower, upper = c.type_var, c.target.id + lower, upper = c.type_var, target_id else: - lower, upper = c.target.id, c.type_var + lower, upper = target_id, c.type_var if (lower, upper) in graph: continue graph |= { diff --git a/mypy/typeops.py b/mypy/typeops.py index d746ea701fde..22dbd9e9f42e 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -973,6 +973,9 @@ def visit_type_var(self, t: TypeVarType) -> list[TypeVarLikeType]: def visit_param_spec(self, t: ParamSpecType) -> list[TypeVarLikeType]: return [t] if self.include_all else [] + def visit_type_var_tuple(self, t: TypeVarTupleType) -> list[TypeVarLikeType]: + return [t] if self.include_all else [] + def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool: """Does this type have a custom special method such as __format__() or __eq__()? diff --git a/mypy/types.py b/mypy/types.py index 359ca713616b..d4e2fc7cb63c 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1045,7 +1045,8 @@ class UnpackType(ProperType): or unpacking * syntax. The inner type should be either a TypeVarTuple, a constant size - tuple, or a variable length tuple, or a union of one of those. + tuple, or a variable length tuple. Type aliases to these are not allowed, + except during semantic analysis. """ __slots__ = ["type"] @@ -2260,6 +2261,10 @@ def __init__( ) -> None: super().__init__(line, column) self.partial_fallback = fallback + # TODO: flatten/normalize unpack items (very similar to unions) here. + # Probably also for instances, type aliases, callables, and Unpack itself. 
For example, + # tuple[*tuple[X, ...], ...] -> tuple[X, ...] and Tuple[*tuple[X, ...]] -> tuple[X, ...]. + # Currently normalization happens in expand_type() et al., which is sub-optimal. self.items = items self.implicit = implicit diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index ac5f4e43c3bf..29c800140eec 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -4,9 +4,7 @@ from typing import Sequence -from mypy.nodes import ARG_POS, ARG_STAR from mypy.types import ( - CallableType, Instance, ProperType, Type, @@ -179,20 +177,3 @@ def extract_unpack(types: Sequence[Type]) -> ProperType | None: if isinstance(types[0], UnpackType): return get_proper_type(types[0].type) return None - - -def replace_starargs(callable: CallableType, types: list[Type]) -> CallableType: - star_index = callable.arg_kinds.index(ARG_STAR) - arg_kinds = ( - callable.arg_kinds[:star_index] - + [ARG_POS] * len(types) - + callable.arg_kinds[star_index + 1 :] - ) - arg_names = ( - callable.arg_names[:star_index] - + [None] * len(types) - + callable.arg_names[star_index + 1 :] - ) - arg_types = callable.arg_types[:star_index] + types + callable.arg_types[star_index + 1 :] - - return callable.copy_modified(arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds) diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 1fac42b492a8..95a7bdd2b2cd 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -3144,7 +3144,7 @@ def pair(x: U) -> Callable[[V], Tuple[V, U]]: ... 
reveal_type(dec(id)) # N: Revealed type is "def [T] (T`2) -> T`2" reveal_type(dec(either)) # N: Revealed type is "def [T] (T`5, x: T`5) -> T`5" reveal_type(dec(pair)) # N: Revealed type is "def [T, U] (T`8, x: U`-1) -> Tuple[T`8, U`-1]" -# This is counter-intuitive but looks correct, dec matches itself only if P is empty +# This is counter-intuitive but looks correct, dec matches itself only if P can be empty reveal_type(dec(dec)) # N: Revealed type is "def [T, S] (T`11, f: def () -> def (T`11) -> S`12) -> S`12" [builtins fixtures/list.pyi] @@ -3179,7 +3179,6 @@ P = ParamSpec('P') Q = ParamSpec('Q') class Foo(Generic[P]): ... -class Bar(Generic[P, T]): ... def dec(f: Callable[P, int]) -> Callable[P, Foo[P]]: ... h: Callable[Concatenate[T, Q], int] @@ -3263,3 +3262,144 @@ def pop_off(fn: Callable[Concatenate[T1, P], T2]) -> Callable[P, Callable[[T1], def test(command: Foo[Q]) -> Foo[Q]: ... reveal_type(test) # N: Revealed type is "def () -> def [Q] (__main__.Foo[Q`-1]) -> __main__.Foo[Q`-1]" [builtins fixtures/tuple.pyi] + +[case testInferenceAgainstGenericVariadicBasicInList] +# flags: --new-type-inference +from typing import Tuple, TypeVar, List, Callable +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") +def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], List[T]]: ... + +U = TypeVar("U") +V = TypeVar("V") +def id(x: U) -> U: ... +def either(x: U, y: U) -> U: ... +def pair(x: U, y: V) -> Tuple[U, V]: ... 
+ +reveal_type(dec(id)) # N: Revealed type is "def [T] (T`2) -> builtins.list[T`2]" +reveal_type(dec(either)) # N: Revealed type is "def [T] (T`4, T`4) -> builtins.list[T`4]" +reveal_type(dec(pair)) # N: Revealed type is "def [U, V] (U`-1, V`-2) -> builtins.list[Tuple[U`-1, V`-2]]" +[builtins fixtures/tuple.pyi] + +[case testInferenceAgainstGenericVariadicBasicDeList] +# flags: --new-type-inference +from typing import Tuple, TypeVar, List, Callable +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") +def dec(f: Callable[[Unpack[Ts]], List[T]]) -> Callable[[Unpack[Ts]], T]: ... + +U = TypeVar("U") +V = TypeVar("V") +def id(x: U) -> U: ... +def either(x: U, y: U) -> U: ... + +reveal_type(dec(id)) # N: Revealed type is "def [T] (builtins.list[T`2]) -> T`2" +reveal_type(dec(either)) # N: Revealed type is "def [T] (builtins.list[T`4], builtins.list[T`4]) -> T`4" +[builtins fixtures/tuple.pyi] + +[case testInferenceAgainstGenericVariadicPopOff] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +def dec(f: Callable[[T, Unpack[Ts]], S]) -> Callable[[Unpack[Ts]], Callable[[T], S]]: ... + +U = TypeVar("U") +V = TypeVar("V") +def id(x: U) -> U: ... +def either(x: U, y: U) -> U: ... +def pair(x: U, y: V) -> Tuple[U, V]: ... 
+ +reveal_type(dec(id)) # N: Revealed type is "def () -> def [T] (T`1) -> T`1" +reveal_type(dec(either)) # N: Revealed type is "def [T] (T`4) -> def (T`4) -> T`4" +reveal_type(dec(pair)) # N: Revealed type is "def [V] (V`-2) -> def [T] (T`7) -> Tuple[T`7, V`-2]" +reveal_type(dec(dec)) # N: Revealed type is "def () -> def [T, Ts, S] (def (T`-1, *Unpack[Ts`-2]) -> S`-3) -> def (*Unpack[Ts`-2]) -> def (T`-1) -> S`-3" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericVariadicPopOn] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Tuple +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +def dec(f: Callable[[Unpack[Ts]], Callable[[T], S]]) -> Callable[[T, Unpack[Ts]], S]: ... + +U = TypeVar("U") +V = TypeVar("V") +def id() -> Callable[[U], U]: ... +def either(x: U) -> Callable[[U], U]: ... +def pair(x: U) -> Callable[[V], Tuple[V, U]]: ... + +reveal_type(dec(id)) # N: Revealed type is "def [T] (T`2) -> T`2" +reveal_type(dec(either)) # N: Revealed type is "def [T] (T`5, T`5) -> T`5" +reveal_type(dec(pair)) # N: Revealed type is "def [T, U] (T`8, U`-1) -> Tuple[T`8, U`-1]" +# This is counter-intuitive but looks correct, dec matches itself only if Ts is empty +reveal_type(dec(dec)) # N: Revealed type is "def [T, S] (T`11, def () -> def (T`11) -> S`12) -> S`12" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericVariadicVsVariadic] +# flags: --new-type-inference +from typing import TypeVar, Callable, List, Generic +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +Us = TypeVarTuple("Us") + +class Foo(Generic[Unpack[Ts]]): ... +class Bar(Generic[Unpack[Ts], T]): ... + +def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], List[T]]: ... +# TODO: do not crash on Foo[Us] (with missing Unpack), instead give an error. +def f(*args: Unpack[Us]) -> Foo[Unpack[Us]]: ... 
+reveal_type(dec(f)) # N: Revealed type is "def [Ts] (*Unpack[Ts`1]) -> builtins.list[__main__.Foo[Unpack[Ts`1]]]" +g: Callable[[Unpack[Us]], Foo[Unpack[Us]]] +reveal_type(dec(g)) # N: Revealed type is "def [Ts] (*Unpack[Ts`3]) -> builtins.list[__main__.Foo[Unpack[Ts`3]]]" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericVariadicVsVariadicConcatenate] +# flags: --new-type-inference +from typing import TypeVar, Callable, Generic +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +Us = TypeVarTuple("Us") + +class Foo(Generic[Unpack[Ts]]): ... + +def dec(f: Callable[[Unpack[Ts]], int]) -> Callable[[Unpack[Ts]], Foo[Unpack[Ts]]]: ... +h: Callable[[T, Unpack[Us]], int] +g: Callable[[T, Unpack[Us]], int] +h = g +reveal_type(dec(h)) # N: Revealed type is "def [T, Us] (T`-1, *Unpack[Us`-2]) -> __main__.Foo[T`-1, Unpack[Us`-2]]" +[builtins fixtures/list.pyi] + +[case testInferenceAgainstGenericVariadicSecondary] +# flags: --new-type-inference +from typing import TypeVar, Callable, Generic +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") +Us = TypeVarTuple("Us") + +class Foo(Generic[Unpack[Ts]]): ... + +def dec(f: Callable[[Unpack[Ts]], Foo[Unpack[Ts]]]) -> Callable[[Unpack[Ts]], Foo[Unpack[Ts]]]: ... +g: Callable[[T], Foo[int]] +reveal_type(dec(g)) # N: Revealed type is "def (builtins.int) -> __main__.Foo[builtins.int]" +h: Callable[[Unpack[Us]], Foo[int]] +reveal_type(dec(g)) # N: Revealed type is "def (builtins.int) -> __main__.Foo[builtins.int]" +[builtins fixtures/list.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index e822cea9304f..b28b2ead45e7 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -479,18 +479,18 @@ vargs: Tuple[int, ...] vargs_str: Tuple[str, ...] 
call(target=func, args=(0, 'foo')) -call(target=func, args=('bar', 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[object, str], None]" -call(target=func, args=(True, 'foo', 0)) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" -call(target=func, args=(0, 0, 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" -call(target=func, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func, args=('bar', 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[str, str], None]" +call(target=func, args=(True, 'foo', 0)) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[bool, str, int], None]" +call(target=func, args=(0, 0, 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, int, str], None]" +call(target=func, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(int)], None]" # NOTE: This behavior may be a bit contentious, it is maybe inconsistent with our handling of # PEP646 but consistent with our handling of callable constraints. 
call(target=func2, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, int], None]"; expected "Callable[[VarArg(int)], None]" call(target=func3, args=vargs) call(target=func3, args=(0,1)) -call(target=func3, args=(0,'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" -call(target=func3, args=vargs_str) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func3, args=(0,'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[int, str], None]" +call(target=func3, args=vargs_str) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(str)], None]" [builtins fixtures/tuple.pyi] [case testTypeVarTuplePep646CallableWithPrefixSuffix] @@ -561,11 +561,11 @@ class A: vargs: Tuple[int, ...] vargs_str: Tuple[str, ...] 
-call(A().func) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(A().func) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[], None]" call(A().func, 0, 'foo') -call(A().func, 0, 'foo', 0) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" -call(A().func, 0) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" -call(A().func, 0, 1) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, object], None]" +call(A().func, 0, 'foo', 0) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, str, int], None]" +call(A().func, 0) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int], None]" +call(A().func, 0, 1) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, int], None]" call(A().func2, 0, 0) call(A().func3, 0, 1, 2) call(A().func3) @@ -689,7 +689,7 @@ reveal_type(x) # N: Revealed type is "builtins.list[Tuple[Any, Unpack[builtins. B = Callable[[T, Unpack[Ts]], int] y: B -reveal_type(y) # N: Revealed type is "def (Any, *Unpack[builtins.tuple[Any, ...]]) -> builtins.int" +reveal_type(y) # N: Revealed type is "def (Any, *Any) -> builtins.int" C = G[T, Unpack[Ts], T] z: C @@ -711,7 +711,7 @@ reveal_type(x) # N: Revealed type is "builtins.list[Tuple[Any, Unpack[builtins. 
B = Callable[[T, S, Unpack[Ts]], int] y: B[int] # E: Bad number of arguments for type alias, expected: at least 2, given: 1 -reveal_type(y) # N: Revealed type is "def (Any, Any, *Unpack[builtins.tuple[Any, ...]]) -> builtins.int" +reveal_type(y) # N: Revealed type is "def (Any, Any, *Any) -> builtins.int" C = G[T, Unpack[Ts], S] z: C[int] # E: Bad number of arguments for type alias, expected: at least 2, given: 1 From 1db3eb3c991b82c2b0f9b7bd0a2547e140dd402a Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 19 Aug 2023 14:49:39 +0100 Subject: [PATCH 62/88] Allow Ellipsis in Concatenate; cleanup ParamSpec literals (#15905) Fixes https://github.com/python/mypy/issues/14761 Fixes https://github.com/python/mypy/issues/15318 Fixes https://github.com/python/mypy/issues/14656 Fixes https://github.com/python/mypy/issues/13518 I noticed there is a bunch of inconsistencies in `semanal`/`typeanal` for ParamSpecs, so I decided do a small cleanup. Using this opportunity I also allow `Concatenate[int, ...]` (with literal Ellipsis), and reduce verbosity of some errors. cc @A5rocks --- mypy/semanal.py | 14 ++-- mypy/typeanal.py | 54 ++++++++++---- test-data/unit/check-literal.test | 3 +- .../unit/check-parameter-specification.test | 71 ++++++++++++++++++- test-data/unit/check-typevar-defaults.test | 6 +- test-data/unit/semanal-errors.test | 8 +-- 6 files changed, 123 insertions(+), 33 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 9d968d1da781..ef66c9276664 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -5285,20 +5285,18 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: else: items = [index] - # whether param spec literals be allowed here - # TODO: should this be computed once and passed in? - # or is there a better way to do this? + # TODO: this needs a clean-up. 
+ # Probably always allow Parameters literals, and validate in semanal_typeargs.py base = expr.base if isinstance(base, RefExpr) and isinstance(base.node, TypeAlias): alias = base.node - target = get_proper_type(alias.target) - if isinstance(target, Instance): - has_param_spec = target.type.has_param_spec_type - num_args = len(target.type.type_vars) + if any(isinstance(t, ParamSpecType) for t in alias.alias_tvars): + has_param_spec = True + num_args = len(alias.alias_tvars) else: has_param_spec = False num_args = -1 - elif isinstance(base, NameExpr) and isinstance(base.node, TypeInfo): + elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo): has_param_spec = base.node.has_param_spec_type num_args = len(base.node.type_vars) else: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 8ac73cdf8aac..b15b5c7654ba 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -226,6 +226,8 @@ def __init__( self.allow_required = allow_required # Are we in a context where ParamSpec literals are allowed? self.allow_param_spec_literals = allow_param_spec_literals + # Are we in context where literal "..." specifically is allowed? + self.allow_ellipsis = False # Should we report an error whenever we encounter a RawExpressionType outside # of a Literal context: e.g. whenever we encounter an invalid type? Normally, # we want to report an error, but the caller may want to do more specialized @@ -461,9 +463,9 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type: self.api.fail("Concatenate needs type arguments", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) - # last argument has to be ParamSpec - ps = self.anal_type(t.args[-1], allow_param_spec=True) - if not isinstance(ps, ParamSpecType): + # Last argument has to be ParamSpec or Ellipsis. 
+ ps = self.anal_type(t.args[-1], allow_param_spec=True, allow_ellipsis=True) + if not isinstance(ps, (ParamSpecType, Parameters)): if isinstance(ps, UnboundType) and self.allow_unbound_tvars: sym = self.lookup_qualified(ps.name, t) if sym is not None and isinstance(sym.node, ParamSpecExpr): @@ -477,11 +479,11 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type: # TODO: this may not work well with aliases, if those worked. # Those should be special-cased. - elif ps.prefix.arg_types: + elif isinstance(ps, ParamSpecType) and ps.prefix.arg_types: self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE) args = self.anal_array(t.args[:-1]) - pre = ps.prefix + pre = ps.prefix if isinstance(ps, ParamSpecType) else ps # mypy can't infer this :( names: list[str | None] = [None] * len(args) @@ -489,7 +491,7 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type: pre = Parameters( args + pre.arg_types, [ARG_POS] * len(args) + pre.arg_kinds, names + pre.arg_names ) - return ps.copy_modified(prefix=pre) + return ps.copy_modified(prefix=pre) if isinstance(ps, ParamSpecType) else pre def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Type | None: """Bind special type that is recognized through magic name such as 'typing.Any'. 
@@ -880,7 +882,7 @@ def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_type_list(self, t: TypeList) -> Type: - # paramspec literal (Z[[int, str, Whatever]]) + # Parameters literal (Z[[int, str, Whatever]]) if self.allow_param_spec_literals: params = self.analyze_callable_args(t) if params: @@ -893,7 +895,8 @@ def visit_type_list(self, t: TypeList) -> Type: self.fail( 'Bracketed expression "[...]" is not valid as a type', t, code=codes.VALID_TYPE ) - self.note('Did you mean "List[...]"?', t) + if len(t.items) == 1: + self.note('Did you mean "List[...]"?', t) return AnyType(TypeOfAny.from_error) def visit_callable_argument(self, t: CallableArgument) -> Type: @@ -1106,7 +1109,7 @@ def visit_partial_type(self, t: PartialType) -> Type: assert False, "Internal error: Unexpected partial type" def visit_ellipsis_type(self, t: EllipsisType) -> Type: - if self.allow_param_spec_literals: + if self.allow_ellipsis or self.allow_param_spec_literals: any_type = AnyType(TypeOfAny.explicit) return Parameters( [any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None], is_ellipsis_args=True @@ -1174,7 +1177,7 @@ def analyze_callable_args_for_paramspec( def analyze_callable_args_for_concatenate( self, callable_args: Type, ret_type: Type, fallback: Instance - ) -> CallableType | None: + ) -> CallableType | AnyType | None: """Construct a 'Callable[C, RET]', where C is Concatenate[..., P], returning None if we cannot. 
""" @@ -1189,7 +1192,7 @@ def analyze_callable_args_for_concatenate( return None tvar_def = self.anal_type(callable_args, allow_param_spec=True) - if not isinstance(tvar_def, ParamSpecType): + if not isinstance(tvar_def, (ParamSpecType, Parameters)): if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType): sym = self.lookup_qualified(tvar_def.name, callable_args) if sym is not None and isinstance(sym.node, ParamSpecExpr): @@ -1198,7 +1201,18 @@ def analyze_callable_args_for_concatenate( return callable_with_ellipsis( AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback ) - return None + # Error was already given, so prevent further errors. + return AnyType(TypeOfAny.from_error) + if isinstance(tvar_def, Parameters): + # This comes from Concatenate[int, ...] + return CallableType( + arg_types=tvar_def.arg_types, + arg_names=tvar_def.arg_names, + arg_kinds=tvar_def.arg_kinds, + ret_type=ret_type, + fallback=fallback, + from_concatenate=True, + ) # ick, CallableType should take ParamSpecType prefix = tvar_def.prefix @@ -1257,7 +1271,7 @@ def analyze_callable_type(self, t: UnboundType) -> Type: ) or self.analyze_callable_args_for_concatenate( callable_args, ret_type, fallback ) - if maybe_ret: + if isinstance(maybe_ret, CallableType): maybe_ret = maybe_ret.copy_modified( ret_type=ret_type.accept(self), variables=variables ) @@ -1274,6 +1288,8 @@ def analyze_callable_type(self, t: UnboundType) -> Type: t, ) return AnyType(TypeOfAny.from_error) + elif isinstance(maybe_ret, AnyType): + return maybe_ret ret = maybe_ret else: if self.options.disallow_any_generics: @@ -1527,17 +1543,27 @@ def anal_array( self.allow_param_spec_literals = old_allow_param_spec_literals return self.check_unpacks_in_list(res) - def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = False) -> Type: + def anal_type( + self, + t: Type, + nested: bool = True, + *, + allow_param_spec: bool = False, + allow_ellipsis: bool = False, + ) -> Type: if nested: 
self.nesting_level += 1 old_allow_required = self.allow_required self.allow_required = False + old_allow_ellipsis = self.allow_ellipsis + self.allow_ellipsis = allow_ellipsis try: analyzed = t.accept(self) finally: if nested: self.nesting_level -= 1 self.allow_required = old_allow_required + self.allow_ellipsis = old_allow_ellipsis if ( not allow_param_spec and isinstance(analyzed, ParamSpecType) diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index 4498b2ddc9cf..ecd4fc0a1f00 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -611,8 +611,7 @@ from typing_extensions import Literal a: (1, 2, 3) # E: Syntax error in type annotation \ # N: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) b: Literal[[1, 2, 3]] # E: Parameter 1 of Literal[...] is invalid -c: [1, 2, 3] # E: Bracketed expression "[...]" is not valid as a type \ - # N: Did you mean "List[...]"? +c: [1, 2, 3] # E: Bracketed expression "[...]" is not valid as a type [builtins fixtures/tuple.pyi] [out] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 3a8ecdf81c7d..dee8a971f925 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -38,6 +38,74 @@ def foo6(x: Callable[[P], int]) -> None: ... # E: Invalid location for ParamSpe # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' [builtins fixtures/paramspec.pyi] +[case testParamSpecImports] +import lib +from lib import Base + +class C(Base[[int]]): + def test(self, x: int): ... + +class D(lib.Base[[int]]): + def test(self, x: int): ... + +class E(lib.Base[...]): ... 
+reveal_type(E().test) # N: Revealed type is "def (*Any, **Any)" + +[file lib.py] +from typing import Generic +from typing_extensions import ParamSpec + +P = ParamSpec("P") +class Base(Generic[P]): + def test(self, *args: P.args, **kwargs: P.kwargs) -> None: + ... +[builtins fixtures/paramspec.pyi] + +[case testParamSpecEllipsisInAliases] +from typing import Any, Callable, Generic, TypeVar +from typing_extensions import ParamSpec + +P = ParamSpec('P') +R = TypeVar('R') +Alias = Callable[P, R] + +class B(Generic[P]): ... +Other = B[P] + +T = TypeVar('T', bound=Alias[..., Any]) +Alias[..., Any] # E: Type application is only supported for generic classes +B[...] +Other[...] +[builtins fixtures/paramspec.pyi] + +[case testParamSpecEllipsisInConcatenate] +from typing import Any, Callable, Generic, TypeVar +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec('P') +R = TypeVar('R') +Alias = Callable[P, R] + +IntFun = Callable[Concatenate[int, ...], None] +f: IntFun +reveal_type(f) # N: Revealed type is "def (builtins.int, *Any, **Any)" + +g: Callable[Concatenate[int, ...], None] +reveal_type(g) # N: Revealed type is "def (builtins.int, *Any, **Any)" + +class B(Generic[P]): + def test(self, *args: P.args, **kwargs: P.kwargs) -> None: + ... + +x: B[Concatenate[int, ...]] +reveal_type(x.test) # N: Revealed type is "def (builtins.int, *Any, **Any)" + +Bad = Callable[Concatenate[int, [int, str]], None] # E: The last parameter to Concatenate needs to be a ParamSpec \ + # E: Bracketed expression "[...]" is not valid as a type +def bad(fn: Callable[Concatenate[P, int], None]): # E: The last parameter to Concatenate needs to be a ParamSpec + ... 
+[builtins fixtures/paramspec.pyi] + [case testParamSpecContextManagerLike] from typing import Callable, List, Iterator, TypeVar from typing_extensions import ParamSpec @@ -1431,8 +1499,7 @@ from typing import ParamSpec, Generic, List, TypeVar, Callable P = ParamSpec("P") T = TypeVar("T") A = List[T] -def f(x: A[[int, str]]) -> None: ... # E: Bracketed expression "[...]" is not valid as a type \ - # N: Did you mean "List[...]"? +def f(x: A[[int, str]]) -> None: ... # E: Bracketed expression "[...]" is not valid as a type def g(x: A[P]) -> None: ... # E: Invalid location for ParamSpec "P" \ # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' diff --git a/test-data/unit/check-typevar-defaults.test b/test-data/unit/check-typevar-defaults.test index 36ec125eb1a4..9015d353fa08 100644 --- a/test-data/unit/check-typevar-defaults.test +++ b/test-data/unit/check-typevar-defaults.test @@ -59,9 +59,9 @@ from typing import TypeVar, ParamSpec, Tuple from typing_extensions import TypeVarTuple, Unpack T1 = TypeVar("T1", default=2) # E: TypeVar "default" must be a type -T2 = TypeVar("T2", default=[int, str]) # E: Bracketed expression "[...]" is not valid as a type \ - # N: Did you mean "List[...]"? \ - # E: TypeVar "default" must be a type +T2 = TypeVar("T2", default=[int]) # E: Bracketed expression "[...]" is not valid as a type \ + # N: Did you mean "List[...]"? 
\ + # E: TypeVar "default" must be a type P1 = ParamSpec("P1", default=int) # E: The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec P2 = ParamSpec("P2", default=2) # E: The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index a098dd8791d4..09d4da54bff3 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -810,8 +810,8 @@ class C(Generic[t]): pass cast(str + str, None) # E: Cast target is not a type cast(C[str][str], None) # E: Cast target is not a type cast(C[str + str], None) # E: Cast target is not a type -cast([int, str], None) # E: Bracketed expression "[...]" is not valid as a type \ - # N: Did you mean "List[...]"? +cast([int], None) # E: Bracketed expression "[...]" is not valid as a type \ + # N: Did you mean "List[...]"? [out] [case testInvalidCastTargetType] @@ -859,8 +859,8 @@ Any(arg=str) # E: Any(...) is no longer supported. Use cast(Any, ...) instead [case testTypeListAsType] -def f(x:[int, str]) -> None: # E: Bracketed expression "[...]" is not valid as a type \ - # N: Did you mean "List[...]"? +def f(x: [int]) -> None: # E: Bracketed expression "[...]" is not valid as a type \ + # N: Did you mean "List[...]"? pass [out] From d7d502e5e63092322d57c607e2affac6cb5234b8 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 19 Aug 2023 14:49:55 +0100 Subject: [PATCH 63/88] Support self-types containing ParamSpec (#15903) Fixes https://github.com/python/mypy/issues/14968 Fixes https://github.com/python/mypy/issues/13911 The fix is simple, as I predicted on Discord, we simply should use `get_all_type_vars()` instead of `get_type_vars()` (that specifically returns only `TypeVarType`). I also use this opportunity to tidy-up code in `bind_self()`, it should be now more readable, and much faster (especially when compiled with mypyc). 
cc @A5rocks --------- Co-authored-by: Alex Waygood --- mypy/typeops.py | 34 ++++++++++-------------- test-data/unit/check-selftype.test | 42 ++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 20 deletions(-) diff --git a/mypy/typeops.py b/mypy/typeops.py index 22dbd9e9f42e..e01aad950573 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -303,7 +303,7 @@ class B(A): pass return cast(F, func) self_param_type = get_proper_type(func.arg_types[0]) - variables: Sequence[TypeVarLikeType] = [] + variables: Sequence[TypeVarLikeType] if func.variables and supported_self_type(self_param_type): from mypy.infer import infer_type_arguments @@ -312,46 +312,40 @@ class B(A): pass original_type = erase_to_bound(self_param_type) original_type = get_proper_type(original_type) - all_ids = func.type_var_ids() + # Find which of method type variables appear in the type of "self". + self_ids = {tv.id for tv in get_all_type_vars(self_param_type)} + self_vars = [tv for tv in func.variables if tv.id in self_ids] + + # Solve for these type arguments using the actual class or instance type. typeargs = infer_type_arguments( - func.variables, self_param_type, original_type, is_supertype=True + self_vars, self_param_type, original_type, is_supertype=True ) if ( is_classmethod - # TODO: why do we need the extra guards here? and any(isinstance(get_proper_type(t), UninhabitedType) for t in typeargs) and isinstance(original_type, (Instance, TypeVarType, TupleType)) ): - # In case we call a classmethod through an instance x, fallback to type(x) + # In case we call a classmethod through an instance x, fallback to type(x). typeargs = infer_type_arguments( - func.variables, self_param_type, TypeType(original_type), is_supertype=True + self_vars, self_param_type, TypeType(original_type), is_supertype=True ) - ids = [tid for tid in all_ids if any(tid == t.id for t in get_type_vars(self_param_type))] - - # Technically, some constrains might be unsolvable, make them . 
+ # Update the method signature with the solutions found. + # Technically, some constraints might be unsolvable, make them . to_apply = [t if t is not None else UninhabitedType() for t in typeargs] - - def expand(target: Type) -> Type: - return expand_type(target, {id: to_apply[all_ids.index(id)] for id in ids}) - - arg_types = [expand(x) for x in func.arg_types[1:]] - ret_type = expand(func.ret_type) - variables = [v for v in func.variables if v.id not in ids] + func = expand_type(func, {tv.id: arg for tv, arg in zip(self_vars, to_apply)}) + variables = [v for v in func.variables if v not in self_vars] else: - arg_types = func.arg_types[1:] - ret_type = func.ret_type variables = func.variables original_type = get_proper_type(original_type) if isinstance(original_type, CallableType) and original_type.is_type_obj(): original_type = TypeType.make_normalized(original_type.ret_type) res = func.copy_modified( - arg_types=arg_types, + arg_types=func.arg_types[1:], arg_kinds=func.arg_kinds[1:], arg_names=func.arg_names[1:], variables=variables, - ret_type=ret_type, bound_args=[original_type], ) return cast(F, res) diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index d366e7c33799..77d2d519214a 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1973,3 +1973,45 @@ class B(A): reveal_type(self.x.extra) # N: Revealed type is "builtins.int" reveal_type(self.xs[0].extra) # N: Revealed type is "builtins.int" [builtins fixtures/list.pyi] + +[case testSelfTypesWithParamSpecExtract] +from typing import Any, Callable, Generic, TypeVar +from typing_extensions import ParamSpec + +P = ParamSpec("P") +F = TypeVar("F", bound=Callable[..., Any]) +class Example(Generic[F]): + def __init__(self, fn: F) -> None: + ... + def __call__(self: Example[Callable[P, Any]], *args: P.args, **kwargs: P.kwargs) -> None: + ... + +def test_fn(a: int, b: str) -> None: + ... 
+ +example = Example(test_fn) +example() # E: Missing positional arguments "a", "b" in call to "__call__" of "Example" +example(1, "b") # OK +[builtins fixtures/list.pyi] + +[case testSelfTypesWithParamSpecInfer] +from typing import TypeVar, Protocol, Type, Callable +from typing_extensions import ParamSpec + +R = TypeVar("R", covariant=True) +P = ParamSpec("P") +class AsyncP(Protocol[P]): + def meth(self, *args: P.args, **kwargs: P.kwargs) -> None: + ... + +class Async: + @classmethod + def async_func(cls: Type[AsyncP[P]]) -> Callable[P, int]: + ... + +class Add(Async): + def meth(self, x: int, y: int) -> None: ... + +reveal_type(Add.async_func()) # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int" +reveal_type(Add().async_func()) # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int" +[builtins fixtures/classmethod.pyi] From 5af76714fa2c526007e045f9c834781f60660e6e Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 19 Aug 2023 11:41:26 -0700 Subject: [PATCH 64/88] Improve match narrowing and reachability analysis (#15882) Fixes #12534, fixes #15878 --- mypy/checker.py | 17 ++++++++++- test-data/unit/check-python310.test | 45 ++++++++++++++++++++++++++++- 2 files changed, 60 insertions(+), 2 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 7625bf28a88c..87dff91758f5 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4973,7 +4973,7 @@ def visit_match_stmt(self, s: MatchStmt) -> None: self.push_type_map(pattern_map) self.push_type_map(pattern_type.captures) if g is not None: - with self.binder.frame_context(can_skip=True, fall_through=3): + with self.binder.frame_context(can_skip=False, fall_through=3): gt = get_proper_type(self.expr_checker.accept(g)) if isinstance(gt, DeletedType): @@ -4982,6 +4982,21 @@ def visit_match_stmt(self, s: MatchStmt) -> None: guard_map, guard_else_map = self.find_isinstance_check(g) else_map = 
or_conditional_maps(else_map, guard_else_map) + # If the guard narrowed the subject, copy the narrowed types over + if isinstance(p, AsPattern): + case_target = p.pattern or p.name + if isinstance(case_target, NameExpr): + for type_map in (guard_map, else_map): + if not type_map: + continue + for expr in list(type_map): + if not ( + isinstance(expr, NameExpr) + and expr.fullname == case_target.fullname + ): + continue + type_map[s.subject] = type_map[expr] + self.push_type_map(guard_map) self.accept(b) else: diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 75293ce9d193..0fe6a3d5a5cc 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1372,7 +1372,7 @@ match m: reveal_type(m) # N: Revealed type is "__main__.Medal" [case testMatchNarrowUsingPatternGuardSpecialCase] -def f(x: int | str) -> int: # E: Missing return statement +def f(x: int | str) -> int: match x: case x if isinstance(x, str): return 0 @@ -1973,3 +1973,46 @@ def f2(x: T) -> None: case DataFrame(): # type: ignore[misc] pass [builtins fixtures/primitives.pyi] + +[case testMatchGuardReachability] +# flags: --warn-unreachable +def f1(e: int) -> int: + match e: + case x if True: + return x + case _: + return 0 # E: Statement is unreachable + e = 0 # E: Statement is unreachable + + +def f2(e: int) -> int: + match e: + case x if bool(): + return x + case _: + return 0 + e = 0 # E: Statement is unreachable + +def f3(e: int | str | bytes) -> int: + match e: + case x if isinstance(x, int): + return x + case [x]: + return 0 # E: Statement is unreachable + case str(x): + return 0 + reveal_type(e) # N: Revealed type is "builtins.bytes" + return 0 + +def f4(e: int | str | bytes) -> int: + match e: + case int(x): + pass + case [x]: + return 0 # E: Statement is unreachable + case x if isinstance(x, str): + return 0 + reveal_type(e) # N: Revealed type is "Union[builtins.int, builtins.bytes]" + return 0 + +[builtins 
fixtures/primitives.pyi] From 5d909f1cdd7dde5e58259dc153959a38d084b988 Mon Sep 17 00:00:00 2001 From: Max Murin Date: Sun, 20 Aug 2023 21:15:18 -0700 Subject: [PATCH 65/88] Sync typeshed for 1.6 (#15918) Use the sync-typeshed script to sync the latest typeshed before the 1.6 release. --------- Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Co-authored-by: AlexWaygood --- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 13 +++++++++++++ mypy/typeshed/stdlib/argparse.pyi | 17 ++++++++++++++++- mypy/typeshed/stdlib/os/__init__.pyi | 2 +- mypy/typeshed/stdlib/ssl.pyi | 4 ++-- 4 files changed, 32 insertions(+), 4 deletions(-) diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index 5d03142c6d71..7ae67292e8cd 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -36,6 +36,19 @@ Incomplete: TypeAlias = Any # To describe a function parameter that is unused and will work with anything. Unused: TypeAlias = object +# Used to mark arguments that default to a sentinel value. This prevents +# stubtest from complaining about the default value not matching. +# +# def foo(x: int | None = sentinel) -> None: ... +# +# In cases where the sentinel object is exported and can be used by user code, +# a construct like this is better: +# +# _SentinelType = NewType("_SentinelType", object) +# sentinel: _SentinelType +# def foo(x: int | None | _SentinelType = ...) -> None: ... +sentinel = Any # noqa: Y026 + # stable class IdentityFunction(Protocol): def __call__(self, __x: _T) -> _T: ... 
diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index b59dd56ab921..0004250b17a9 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import sentinel from collections.abc import Callable, Generator, Iterable, Sequence from re import Pattern from typing import IO, Any, Generic, NewType, NoReturn, Protocol, TypeVar, overload @@ -334,7 +335,21 @@ class Action(_AttributeHolder): if sys.version_info >= (3, 9): def format_usage(self) -> str: ... -if sys.version_info >= (3, 9): +if sys.version_info >= (3, 12): + class BooleanOptionalAction(Action): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = sentinel, # noqa: Y011 + choices: Iterable[_T] | None = sentinel, # noqa: Y011 + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = sentinel, # noqa: Y011 + ) -> None: ... + +elif sys.version_info >= (3, 9): class BooleanOptionalAction(Action): def __init__( self, diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index efe80d82ffba..994595aae781 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -912,7 +912,7 @@ else: @property def si_code(self) -> int: ... - def waitid(__idtype: int, __ident: int, __options: int) -> waitid_result: ... + def waitid(__idtype: int, __ident: int, __options: int) -> waitid_result | None: ... def wait3(options: int) -> tuple[int, int, Any]: ... def wait4(pid: int, options: int) -> tuple[int, int, Any]: ... 
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index dd7285196ed9..1c49b130e48f 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -436,7 +436,7 @@ class SSLContext: server_side: bool = False, do_handshake_on_connect: bool = True, suppress_ragged_eofs: bool = True, - server_hostname: str | None = None, + server_hostname: str | bytes | None = None, session: SSLSession | None = None, ) -> SSLSocket: ... def wrap_bio( @@ -444,7 +444,7 @@ class SSLContext: incoming: MemoryBIO, outgoing: MemoryBIO, server_side: bool = False, - server_hostname: str | None = None, + server_hostname: str | bytes | None = None, session: SSLSession | None = None, ) -> SSLObject: ... def session_stats(self) -> dict[str, int]: ... From 2c1fd97986064161c542956bb3d9d5043dc0a480 Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Mon, 21 Aug 2023 20:35:04 +1000 Subject: [PATCH 66/88] =?UTF-8?q?(=F0=9F=8E=81)=20`StubGenerator.add=5Ftyp?= =?UTF-8?q?ing=5Fimport`=20returns=20the=20name=20(#15912)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I noticed that almost all calls to `typing_name` also call `add_typing_import`. 
Co-authored-by: KotlinIsland --- mypy/stubgen.py | 55 ++++++++++++++++++++----------------------------- 1 file changed, 22 insertions(+), 33 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index b6fc3e8b7377..aca836c52ce8 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -786,25 +786,20 @@ def visit_func_def(self, o: FuncDef) -> None: elif o.name in KNOWN_MAGIC_METHODS_RETURN_TYPES: retname = KNOWN_MAGIC_METHODS_RETURN_TYPES[o.name] elif has_yield_expression(o) or has_yield_from_expression(o): - self.add_typing_import("Generator") + generator_name = self.add_typing_import("Generator") yield_name = "None" send_name = "None" return_name = "None" if has_yield_from_expression(o): - self.add_typing_import("Incomplete") - yield_name = send_name = self.typing_name("Incomplete") + yield_name = send_name = self.add_typing_import("Incomplete") else: for expr, in_assignment in all_yield_expressions(o): if expr.expr is not None and not self.is_none_expr(expr.expr): - self.add_typing_import("Incomplete") - yield_name = self.typing_name("Incomplete") + yield_name = self.add_typing_import("Incomplete") if in_assignment: - self.add_typing_import("Incomplete") - send_name = self.typing_name("Incomplete") + send_name = self.add_typing_import("Incomplete") if has_return_statement(o): - self.add_typing_import("Incomplete") - return_name = self.typing_name("Incomplete") - generator_name = self.typing_name("Generator") + return_name = self.add_typing_import("Incomplete") retname = f"{generator_name}[{yield_name}, {send_name}, {return_name}]" elif not has_return_statement(o) and o.abstract_status == NOT_ABSTRACT: retname = "None" @@ -965,21 +960,19 @@ def get_base_types(self, cdef: ClassDef) -> list[str]: nt_fields = self._get_namedtuple_fields(base) assert isinstance(base.args[0], StrExpr) typename = base.args[0].value - if nt_fields is not None: - fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields) - namedtuple_name = self.typing_name("NamedTuple") - 
base_types.append(f"{namedtuple_name}({typename!r}, [{fields_str}])") - self.add_typing_import("NamedTuple") - else: + if nt_fields is None: # Invalid namedtuple() call, cannot determine fields - base_types.append(self.typing_name("Incomplete")) + base_types.append(self.add_typing_import("Incomplete")) + continue + fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields) + namedtuple_name = self.add_typing_import("NamedTuple") + base_types.append(f"{namedtuple_name}({typename!r}, [{fields_str}])") elif self.is_typed_namedtuple(base): base_types.append(base.accept(p)) else: # At this point, we don't know what the base class is, so we # just use Incomplete as the base class. - base_types.append(self.typing_name("Incomplete")) - self.add_typing_import("Incomplete") + base_types.append(self.add_typing_import("Incomplete")) for name, value in cdef.keywords.items(): if name == "metaclass": continue # handled separately @@ -1059,9 +1052,9 @@ def _get_namedtuple_fields(self, call: CallExpr) -> list[tuple[str, str]] | None field_names.append(field.value) else: return None # Invalid namedtuple fields type - if field_names: - self.add_typing_import("Incomplete") - incomplete = self.typing_name("Incomplete") + if not field_names: + return [] + incomplete = self.add_typing_import("Incomplete") return [(field_name, incomplete) for field_name in field_names] elif self.is_typed_namedtuple(call): fields_arg = call.args[1] @@ -1092,8 +1085,7 @@ def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None: if fields is None: self.annotate_as_incomplete(lvalue) return - self.add_typing_import("NamedTuple") - bases = self.typing_name("NamedTuple") + bases = self.add_typing_import("NamedTuple") # TODO: Add support for generic NamedTuples. Requires `Generic` as base class. 
class_def = f"{self._indent}class {lvalue.name}({bases}):" if len(fields) == 0: @@ -1143,14 +1135,13 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None: total = arg else: items.append((arg_name, arg)) - self.add_typing_import("TypedDict") + bases = self.add_typing_import("TypedDict") p = AliasPrinter(self) if any(not key.isidentifier() or keyword.iskeyword(key) for key, _ in items): # Keep the call syntax if there are non-identifier or reserved keyword keys. self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n") self._state = VAR else: - bases = self.typing_name("TypedDict") # TODO: Add support for generic TypedDicts. Requires `Generic` as base class. if total is not None: bases += f", total={total.accept(p)}" @@ -1167,8 +1158,7 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None: self._state = CLASS def annotate_as_incomplete(self, lvalue: NameExpr) -> None: - self.add_typing_import("Incomplete") - self.add(f"{self._indent}{lvalue.name}: {self.typing_name('Incomplete')}\n") + self.add(f"{self._indent}{lvalue.name}: {self.add_typing_import('Incomplete')}\n") self._state = VAR def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: @@ -1384,13 +1374,14 @@ def typing_name(self, name: str) -> str: else: return name - def add_typing_import(self, name: str) -> None: + def add_typing_import(self, name: str) -> str: """Add a name to be imported for typing, unless it's imported already. The import will be internal to the stub. 
""" name = self.typing_name(name) self.import_tracker.require_name(name) + return name def add_import_line(self, line: str) -> None: """Add a line of text to the import section, unless it's already there.""" @@ -1448,11 +1439,9 @@ def get_str_type_of_node( if isinstance(rvalue, NameExpr) and rvalue.name in ("True", "False"): return "bool" if can_infer_optional and isinstance(rvalue, NameExpr) and rvalue.name == "None": - self.add_typing_import("Incomplete") - return f"{self.typing_name('Incomplete')} | None" + return f"{self.add_typing_import('Incomplete')} | None" if can_be_any: - self.add_typing_import("Incomplete") - return self.typing_name("Incomplete") + return self.add_typing_import("Incomplete") else: return "" From e804e8d740631ecbdb3a70330a3ea8497e114e3a Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 21 Aug 2023 15:32:35 +0300 Subject: [PATCH 67/88] Fix `assert_type` failures when some nodes are deferred (#15920) Now it is quite the same as `reveal_type`. Which is defined here: https://github.com/python/mypy/blob/2c1fd97986064161c542956bb3d9d5043dc0a480/mypy/checkexpr.py#L4297 Closes #15851 --- mypy/checkexpr.py | 3 +++ test-data/unit/check-expressions.test | 17 +++++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 420cfd990820..4d04390da84a 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4271,6 +4271,9 @@ def visit_assert_type_expr(self, expr: AssertTypeExpr) -> Type: allow_none_return=True, always_allow_any=True, ) + if self.chk.current_node_deferred: + return source_type + target_type = expr.type proper_source_type = get_proper_type(source_type) if ( diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 40ee28830b21..c213255997f8 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1045,6 +1045,23 @@ def reduce_it(s: Scalar) -> Scalar: assert_type(reduce_it(True), Scalar) [builtins 
fixtures/tuple.pyi] +[case testAssertTypeWithDeferredNodes] +from typing import Callable, TypeVar, assert_type + +T = TypeVar("T") + +def dec(f: Callable[[], T]) -> Callable[[], T]: + return f + +def func() -> None: + some = _inner_func() + assert_type(some, int) + +@dec +def _inner_func() -> int: + return 1 +[builtins fixtures/tuple.pyi] + -- None return type -- ---------------- From 7141d6bcff9e26e774e88712015ca6bbe8307c9e Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 21 Aug 2023 23:24:46 +0100 Subject: [PATCH 68/88] More principled approach for callable vs callable inference (#15910) Fixes https://github.com/python/mypy/issues/702 (one of the oldest open issues) The approach is quite simple, I essentially replicate the logic from subtyping check, while replacing each `is_subtype()` call with `infer_constraints()` call. Note that we don't have various options available in `constraints.py` so I use all checks, even those that may be skipped with some strictness flags (so we can infer as many constraints as possible). Depending on the output of `mypy_primer` we can try to tune this. Note that while I was looking at subtyping code, I noticed couple inconsistencies for ParamSpecs, I added TODOs for them (and updated some existing TODOs). I also deleted some code that should be dead code after my previous cleanup. Among inconsistencies most notably, subtyping between `Parameters` uses wrong (opposite) direction. Normally, `Parameters` entity behaves covariantly (w.r.t. types of individual arguments) as a single big argument, like a tuple plus a map. But then this entity appears in a contravariant position in `Callable`. This is how we handle it in `constraints.py`, `join.py`, `meet.py` etc. I tried to fix the left/right order in `visit_parameters()`, but then one test failed (and btw same test would also fail if I would try to fix variance in `visit_instance()`). I decided to leave this for separate PR(s). 
--- mypy/constraints.py | 132 +++++++++++++++++++------ mypy/subtypes.py | 32 +++---- mypy/types.py | 8 +- test-data/unit/check-inference.test | 133 ++++++++++++++++++++++++++ test-data/unit/check-overloading.test | 10 ++ 5 files changed, 260 insertions(+), 55 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 26504ed06b3e..47f312117264 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -595,15 +595,11 @@ def visit_parameters(self, template: Parameters) -> list[Constraint]: return self.infer_against_any(template.arg_types, self.actual) if type_state.infer_polymorphic and isinstance(self.actual, Parameters): # For polymorphic inference we need to be able to infer secondary constraints - # in situations like [x: T] <: P <: [x: int]. - res = [] - if len(template.arg_types) == len(self.actual.arg_types): - for tt, at in zip(template.arg_types, self.actual.arg_types): - # This avoids bogus constraints like T <: P.args - if isinstance(at, ParamSpecType): - continue - res.extend(infer_constraints(tt, at, self.direction)) - return res + # in situations like [x: T] <: P <: [x: int]. Note we invert direction, since + # this function expects direction between callables. 
+ return infer_callable_arguments_constraints( + template, self.actual, neg_op(self.direction) + ) raise RuntimeError("Parameters cannot be constrained to") # Non-leaf types @@ -722,7 +718,8 @@ def visit_instance(self, template: Instance) -> list[Constraint]: prefix = mapped_arg.prefix if isinstance(instance_arg, Parameters): # No such thing as variance for ParamSpecs, consider them invariant - # TODO: constraints between prefixes + # TODO: constraints between prefixes using + # infer_callable_arguments_constraints() suffix: Type = instance_arg.copy_modified( instance_arg.arg_types[len(prefix.arg_types) :], instance_arg.arg_kinds[len(prefix.arg_kinds) :], @@ -793,7 +790,8 @@ def visit_instance(self, template: Instance) -> list[Constraint]: prefix = template_arg.prefix if isinstance(mapped_arg, Parameters): # No such thing as variance for ParamSpecs, consider them invariant - # TODO: constraints between prefixes + # TODO: constraints between prefixes using + # infer_callable_arguments_constraints() suffix = mapped_arg.copy_modified( mapped_arg.arg_types[len(prefix.arg_types) :], mapped_arg.arg_kinds[len(prefix.arg_kinds) :], @@ -962,24 +960,12 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: unpack_constraints = build_constraints_for_simple_unpack( template_types, actual_types, neg_op(self.direction) ) - template_args = [] - cactual_args = [] res.extend(unpack_constraints) else: - template_args = template.arg_types - cactual_args = cactual.arg_types - # TODO: use some more principled "formal to actual" logic - # instead of this lock-step loop over argument types. This identical - # logic should be used in 5 places: in Parameters vs Parameters - # inference, in Instance vs Instance inference for prefixes (two - # branches), and in Callable vs Callable inference (two branches). 
- for t, a in zip(template_args, cactual_args): - # This avoids bogus constraints like T <: P.args - if isinstance(a, (ParamSpecType, UnpackType)): - # TODO: can we infer something useful for *T vs P? - continue # Negate direction due to function argument type contravariance. - res.extend(infer_constraints(t, a, neg_op(self.direction))) + res.extend( + infer_callable_arguments_constraints(template, cactual, self.direction) + ) else: prefix = param_spec.prefix prefix_len = len(prefix.arg_types) @@ -1028,11 +1014,9 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: arg_kinds=cactual.arg_kinds[:prefix_len], arg_names=cactual.arg_names[:prefix_len], ) - - for t, a in zip(prefix.arg_types, cactual_prefix.arg_types): - if isinstance(a, ParamSpecType): - continue - res.extend(infer_constraints(t, a, neg_op(self.direction))) + res.extend( + infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction) + ) template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type if template.type_guard is not None: @@ -1435,3 +1419,89 @@ def build_constraints_for_unpack( for template_arg, item in zip(template_unpack.items, mapped_middle): res.extend(infer_constraints(template_arg, item, direction)) return res, mapped_prefix + mapped_suffix, template_prefix + template_suffix + + +def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> list[Constraint]: + """Infer constraints between two arguments using direction between original callables.""" + if isinstance(left, (ParamSpecType, UnpackType)) or isinstance( + right, (ParamSpecType, UnpackType) + ): + # This avoids bogus constraints like T <: P.args + # TODO: can we infer something useful for *T vs P? + return [] + if direction == SUBTYPE_OF: + # We invert direction to account for argument contravariance. 
+ return infer_constraints(left, right, neg_op(direction)) + else: + return infer_constraints(right, left, neg_op(direction)) + + +def infer_callable_arguments_constraints( + template: CallableType | Parameters, actual: CallableType | Parameters, direction: int +) -> list[Constraint]: + """Infer constraints between argument types of two callables. + + This function essentially extracts four steps from are_parameters_compatible() in + subtypes.py that involve subtype checks between argument types. We keep the argument + matching logic, but ignore various strictness flags present there, and checks that + do not involve subtyping. Then in place of every subtype check we put an infer_constraints() + call for the same types. + """ + res = [] + if direction == SUBTYPE_OF: + left, right = template, actual + else: + left, right = actual, template + left_star = left.var_arg() + left_star2 = left.kw_arg() + right_star = right.var_arg() + right_star2 = right.kw_arg() + + # Numbering of steps below matches the one in are_parameters_compatible() for convenience. + # Phase 1a: compare star vs star arguments. + if left_star is not None and right_star is not None: + res.extend(infer_directed_arg_constraints(left_star.typ, right_star.typ, direction)) + if left_star2 is not None and right_star2 is not None: + res.extend(infer_directed_arg_constraints(left_star2.typ, right_star2.typ, direction)) + + # Phase 1b: compare left args with corresponding non-star right arguments. + for right_arg in right.formal_arguments(): + left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg) + if left_arg is None: + continue + res.extend(infer_directed_arg_constraints(left_arg.typ, right_arg.typ, direction)) + + # Phase 1c: compare left args with right *args. 
+ if right_star is not None: + right_by_position = right.try_synthesizing_arg_from_vararg(None) + assert right_by_position is not None + i = right_star.pos + assert i is not None + while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional(): + left_by_position = left.argument_by_position(i) + assert left_by_position is not None + res.extend( + infer_directed_arg_constraints( + left_by_position.typ, right_by_position.typ, direction + ) + ) + i += 1 + + # Phase 1d: compare left args with right **kwargs. + if right_star2 is not None: + right_names = {name for name in right.arg_names if name is not None} + left_only_names = set() + for name, kind in zip(left.arg_names, left.arg_kinds): + if name is None or kind.is_star() or name in right_names: + continue + left_only_names.add(name) + + right_by_name = right.try_synthesizing_arg_from_kwarg(None) + assert right_by_name is not None + for name in left_only_names: + left_by_name = left.argument_by_name(name) + assert left_by_name is not None + res.extend( + infer_directed_arg_constraints(left_by_name.typ, right_by_name.typ, direction) + ) + return res diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 11847858c62c..288de10cc234 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -590,6 +590,7 @@ def check_mixed( ): nominal = False else: + # TODO: everywhere else ParamSpecs are handled as invariant. if not check_type_parameter( lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context ): @@ -666,13 +667,12 @@ def visit_unpack_type(self, left: UnpackType) -> bool: return False def visit_parameters(self, left: Parameters) -> bool: - if isinstance(self.right, (Parameters, CallableType)): - right = self.right - if isinstance(right, CallableType): - right = right.with_unpacked_kwargs() + if isinstance(self.right, Parameters): + # TODO: direction here should be opposite, this function expects + # order of callables, while parameters are contravariant. 
return are_parameters_compatible( left, - right, + self.right, is_compat=self._is_subtype, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, ) @@ -723,14 +723,6 @@ def visit_callable_type(self, left: CallableType) -> bool: elif isinstance(right, TypeType): # This is unsound, we don't check the __init__ signature. return left.is_type_obj() and self._is_subtype(left.ret_type, right.item) - elif isinstance(right, Parameters): - # this doesn't check return types.... but is needed for is_equivalent - return are_parameters_compatible( - left.with_unpacked_kwargs(), - right, - is_compat=self._is_subtype, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - ) else: return False @@ -1456,7 +1448,6 @@ def g(x: int) -> int: ... right, is_compat=is_compat, ignore_pos_arg_names=ignore_pos_arg_names, - check_args_covariantly=check_args_covariantly, allow_partial_overlap=allow_partial_overlap, strict_concatenate_check=strict_concatenate_check, ) @@ -1480,7 +1471,6 @@ def are_parameters_compatible( *, is_compat: Callable[[Type, Type], bool], ignore_pos_arg_names: bool = False, - check_args_covariantly: bool = False, allow_partial_overlap: bool = False, strict_concatenate_check: bool = False, ) -> bool: @@ -1534,7 +1524,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N # Phase 1b: Check non-star args: for every arg right can accept, left must # also accept. The only exception is if we are allowing partial - # partial overlaps: in that case, we ignore optional args on the right. + # overlaps: in that case, we ignore optional args on the right. for right_arg in right.formal_arguments(): left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg) if left_arg is None: @@ -1548,7 +1538,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N # Phase 1c: Check var args. Right has an infinite series of optional positional # arguments. 
Get all further positional args of left, and make sure - # they're more general then the corresponding member in right. + # they're more general than the corresponding member in right. if right_star is not None: # Synthesize an anonymous formal argument for the right right_by_position = right.try_synthesizing_arg_from_vararg(None) @@ -1575,7 +1565,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N # Phase 1d: Check kw args. Right has an infinite series of optional named # arguments. Get all further named args of left, and make sure - # they're more general then the corresponding member in right. + # they're more general than the corresponding member in right. if right_star2 is not None: right_names = {name for name in right.arg_names if name is not None} left_only_names = set() @@ -1643,6 +1633,10 @@ def are_args_compatible( allow_partial_overlap: bool, is_compat: Callable[[Type, Type], bool], ) -> bool: + if left.required and right.required: + # If both arguments are required allow_partial_overlap has no effect. + allow_partial_overlap = False + def is_different(left_item: object | None, right_item: object | None) -> bool: """Checks if the left and right items are different. @@ -1670,7 +1664,7 @@ def is_different(left_item: object | None, right_item: object | None) -> bool: # If right's argument is optional, left's must also be # (unless we're relaxing the checks to allow potential - # rather then definite compatibility). + # rather than definite compatibility). if not allow_partial_overlap and not right.required and left.required: return False diff --git a/mypy/types.py b/mypy/types.py index d4e2fc7cb63c..301ce6e0cf18 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1545,9 +1545,6 @@ class FormalArgument(NamedTuple): required: bool -# TODO: should this take bound typevars too? what would this take? -# ex: class Z(Generic[P, T]): ...; Z[[V], V] -# What does a typevar even mean in this context? 
class Parameters(ProperType): """Type that represents the parameters to a function. @@ -1559,6 +1556,8 @@ class Parameters(ProperType): "arg_names", "min_args", "is_ellipsis_args", + # TODO: variables don't really belong here, but they are used to allow hacky support + # for forall . Foo[[x: T], T] by capturing generic callable with ParamSpec, see #15909 "variables", ) @@ -1602,7 +1601,7 @@ def copy_modified( variables=variables if variables is not _dummy else self.variables, ) - # the following are copied from CallableType. Is there a way to decrease code duplication? + # TODO: here is a lot of code duplication with Callable type, fix this. def var_arg(self) -> FormalArgument | None: """The formal argument for *args.""" for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)): @@ -2046,7 +2045,6 @@ def param_spec(self) -> ParamSpecType | None: return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix) def expand_param_spec(self, c: Parameters) -> CallableType: - # TODO: try deleting variables from Parameters after new type inference is default. variables = c.variables return self.copy_modified( arg_types=self.arg_types[:-2] + c.arg_types, diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 9ee30b4df859..56d3fe2b4ce7 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3553,3 +3553,136 @@ class E(D): ... reveal_type([E(), D()]) # N: Revealed type is "builtins.list[__main__.D]" reveal_type([D(), E()]) # N: Revealed type is "builtins.list[__main__.D]" + +[case testCallableInferenceAgainstCallablePosVsStar] +from typing import TypeVar, Callable, Tuple + +T = TypeVar('T') +S = TypeVar('S') + +def f(x: Callable[[T, S], None]) -> Tuple[T, S]: ... +def g(*x: int) -> None: ... 
+reveal_type(f(g)) # N: Revealed type is "Tuple[builtins.int, builtins.int]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallableStarVsPos] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T, S]): + def __call__(self, __x: T, *args: S) -> None: ... + +def f(x: Call[T, S]) -> Tuple[T, S]: ... +def g(*x: int) -> None: ... +reveal_type(f(g)) # N: Revealed type is "Tuple[builtins.int, builtins.int]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallableNamedVsStar] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T, S]): + def __call__(self, *, x: T, y: S) -> None: ... + +def f(x: Call[T, S]) -> Tuple[T, S]: ... +def g(**kwargs: int) -> None: ... +reveal_type(f(g)) # N: Revealed type is "Tuple[builtins.int, builtins.int]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallableStarVsNamed] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T, S]): + def __call__(self, *, x: T, **kwargs: S) -> None: ... + +def f(x: Call[T, S]) -> Tuple[T, S]: ... +def g(**kwargs: int) -> None: pass +reveal_type(f(g)) # N: Revealed type is "Tuple[builtins.int, builtins.int]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallableNamedVsNamed] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T, S]): + def __call__(self, *, x: T, y: S) -> None: ... + +def f(x: Call[T, S]) -> Tuple[T, S]: ... + +# Note: order of names is different w.r.t. 
protocol +def g(*, y: int, x: str) -> None: pass +reveal_type(f(g)) # N: Revealed type is "Tuple[builtins.str, builtins.int]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallablePosOnlyVsNamed] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T]): + def __call__(self, *, x: T) -> None: ... + +def f(x: Call[T]) -> Tuple[T, T]: ... + +def g(__x: str) -> None: pass +reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[str], None]"; expected "Call[]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallableNamedVsPosOnly] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T]): + def __call__(self, __x: T) -> None: ... + +def f(x: Call[T]) -> Tuple[T, T]: ... + +def g(*, x: str) -> None: pass +reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(str, 'x')], None]"; expected "Call[]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallablePosOnlyVsKwargs] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T]): + def __call__(self, __x: T) -> None: ... + +def f(x: Call[T]) -> Tuple[T, T]: ... + +def g(**x: str) -> None: pass +reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[KwArg(str)], None]"; expected "Call[]" +[builtins fixtures/list.pyi] + +[case testCallableInferenceAgainstCallableNamedVsArgs] +from typing import TypeVar, Callable, Tuple, Protocol + +T = TypeVar('T', contravariant=True) +S = TypeVar('S', contravariant=True) + +class Call(Protocol[T]): + def __call__(self, *, x: T) -> None: ... 
+ +def f(x: Call[T]) -> Tuple[T, T]: ... + +def g(*args: str) -> None: pass +reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[]" +[builtins fixtures/list.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index b778dc50b376..ede4a2e4cf62 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6640,3 +6640,13 @@ def bar(x): ... reveal_type(bar) # N: Revealed type is "Overload(def (builtins.int) -> builtins.float, def (builtins.str) -> builtins.str)" [builtins fixtures/paramspec.pyi] + +[case testOverloadOverlapWithNameOnlyArgs] +from typing import overload + +@overload +def d(x: int) -> int: ... +@overload +def d(f: int, *, x: int) -> str: ... +def d(*args, **kwargs): ... +[builtins fixtures/tuple.pyi] From 48835a362d86eb9964b0350e4453daf14c76fe8e Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Wed, 23 Aug 2023 06:47:32 +0100 Subject: [PATCH 69/88] Fix stubtest mypy enum.Flag edge case (#15933) Fix edge-case stubtest crashes when an instance of an enum.Flag that is not a member of that enum.Flag is used as a parameter default Fixes #15923. Note: the test cases I've added reproduce the crash, but only if you're using a compiled version of mypy. (Some of them only repro the crash on <=py310, but some repro it on py311+ as well.) We run stubtest tests in CI with compiled mypy, so they do repro the crash in the context of our CI. 
--- mypy/stubtest.py | 2 +- mypy/test/teststubtest.py | 103 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 101 insertions(+), 4 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 906a8c923b37..b2506e6dcc02 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1553,7 +1553,7 @@ def anytype() -> mypy.types.AnyType: value: bool | int | str if isinstance(runtime, bytes): value = bytes_to_human_readable_repr(runtime) - elif isinstance(runtime, enum.Enum): + elif isinstance(runtime, enum.Enum) and isinstance(runtime.name, str): value = runtime.name elif isinstance(runtime, (bool, int, str)): value = runtime diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index cd72bd9300d1..a6733a9e8bd0 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -64,6 +64,7 @@ def __init__(self, name: str) -> None: ... class Coroutine(Generic[_T_co, _S, _R]): ... class Iterable(Generic[_T_co]): ... +class Iterator(Iterable[_T_co]): ... class Mapping(Generic[_K, _V]): ... class Match(Generic[AnyStr]): ... class Sequence(Iterable[_T_co]): ... @@ -86,7 +87,9 @@ def __init__(self) -> None: pass def __repr__(self) -> str: pass class type: ... -class tuple(Sequence[T_co], Generic[T_co]): ... +class tuple(Sequence[T_co], Generic[T_co]): + def __ge__(self, __other: tuple[T_co, ...]) -> bool: pass + class dict(Mapping[KT, VT]): ... class function: pass @@ -105,6 +108,39 @@ def classmethod(f: T) -> T: ... def staticmethod(f: T) -> T: ... 
""" +stubtest_enum_stub = """ +import sys +from typing import Any, TypeVar, Iterator + +_T = TypeVar('_T') + +class EnumMeta(type): + def __len__(self) -> int: pass + def __iter__(self: type[_T]) -> Iterator[_T]: pass + def __reversed__(self: type[_T]) -> Iterator[_T]: pass + def __getitem__(self: type[_T], name: str) -> _T: pass + +class Enum(metaclass=EnumMeta): + def __new__(cls: type[_T], value: object) -> _T: pass + def __repr__(self) -> str: pass + def __str__(self) -> str: pass + def __format__(self, format_spec: str) -> str: pass + def __hash__(self) -> Any: pass + def __reduce_ex__(self, proto: Any) -> Any: pass + name: str + value: Any + +class Flag(Enum): + def __or__(self: _T, other: _T) -> _T: pass + def __and__(self: _T, other: _T) -> _T: pass + def __xor__(self: _T, other: _T) -> _T: pass + def __invert__(self: _T) -> _T: pass + if sys.version_info >= (3, 11): + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ +""" + def run_stubtest( stub: str, runtime: str, options: list[str], config_file: str | None = None @@ -114,6 +150,8 @@ def run_stubtest( f.write(stubtest_builtins_stub) with open("typing.pyi", "w") as f: f.write(stubtest_typing_stub) + with open("enum.pyi", "w") as f: + f.write(stubtest_enum_stub) with open(f"{TEST_MODULE_NAME}.pyi", "w") as f: f.write(stub) with open(f"{TEST_MODULE_NAME}.py", "w") as f: @@ -954,16 +992,15 @@ def fizz(self): pass @collect_cases def test_enum(self) -> Iterator[Case]: + yield Case(stub="import enum", runtime="import enum", error=None) yield Case( stub=""" - import enum class X(enum.Enum): a: int b: str c: str """, runtime=""" - import enum class X(enum.Enum): a = 1 b = "asdf" @@ -971,6 +1008,66 @@ class X(enum.Enum): """, error="X.c", ) + yield Case( + stub=""" + class Flags1(enum.Flag): + a: int + b: int + def foo(x: Flags1 = ...) -> None: ... 
+ """, + runtime=""" + class Flags1(enum.Flag): + a = 1 + b = 2 + def foo(x=Flags1.a|Flags1.b): pass + """, + error=None, + ) + yield Case( + stub=""" + class Flags2(enum.Flag): + a: int + b: int + def bar(x: Flags2 | None = None) -> None: ... + """, + runtime=""" + class Flags2(enum.Flag): + a = 1 + b = 2 + def bar(x=Flags2.a|Flags2.b): pass + """, + error="bar", + ) + yield Case( + stub=""" + class Flags3(enum.Flag): + a: int + b: int + def baz(x: Flags3 | None = ...) -> None: ... + """, + runtime=""" + class Flags3(enum.Flag): + a = 1 + b = 2 + def baz(x=Flags3(0)): pass + """, + error=None, + ) + yield Case( + stub=""" + class Flags4(enum.Flag): + a: int + b: int + def spam(x: Flags4 | None = None) -> None: ... + """, + runtime=""" + class Flags4(enum.Flag): + a = 1 + b = 2 + def spam(x=Flags4(0)): pass + """, + error="spam", + ) @collect_cases def test_decorator(self) -> Iterator[Case]: From 6f650cff9ab21f81069e0ae30c92eae94219ea63 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 23 Aug 2023 20:26:27 +0100 Subject: [PATCH 70/88] Streamline some elements of variadic types support (#15924) Fixes https://github.com/python/mypy/issues/13981 Fixes https://github.com/python/mypy/issues/15241 Fixes https://github.com/python/mypy/issues/15495 Fixes https://github.com/python/mypy/issues/15633 Fixes https://github.com/python/mypy/issues/15667 Fixes https://github.com/python/mypy/issues/15897 Fixes https://github.com/python/mypy/issues/15929 OK, I started following the plan outlined in https://github.com/python/mypy/pull/15879. In this PR I focused mostly on "kinematics". Here are some notes (in random order): * I decided to normalize `TupleType` and `Instance` items in `semanal_typeargs.py` (not in the type constructors, like for unions). It looks like a simpler way to normalize for now. After this, we can rely on the fact that only non-trivial (more below on what is trivial) variadic items in a type list is either `*Ts` or `*tuple[X, ...]`. 
A single top-level `TupleType` can appear in an unpack only as type of `*args`. * Callables turned out to be tricky. There is certain tight coupling between `FuncDef.type` and `FuncDef.arguments` that makes it hard to normalize prefix to be expressed as individual arguments _at definition_. I faced exactly the same problem when I implemented `**` unpacking for TypedDicts. So we have two choices: either handle prefixes everywhere, or use normalization helper in relevant code. I propose to go with the latter (it worked well for `**` unpacking). * I decided to switch `Unpack` to be disallowed by default in `typeanal.py`, only very specific potions are allowed now. Although this required plumbing `allow_unpack` all the way from `semanal.py`, conceptually it is simple. This is similar to how `ParamSpec` is handled. * This PR fixes all currently open crash issues (some intentionally, some accidentally) plus a bunch of TODOs I found in the tests (but not all). * I decided to simplify `TypeAliasExpr` (and made it simple reference to the `SymbolNode`, like e.g. `TypedDictExpr` and `NamedTupleExpr`). This is not strictly necessary for this PR, but it makes some parts of it a bit simpler, and I wanted to do it for long time. Here is a more detailed plan of what I am leaving for future PRs (in rough order of priority): * Close non-crash open issues (it looks like there are only three, and all seem to be straightforward) * Handle trivial items in `UnpackType` gracefully. These are `` and `Any` (and also potentially `object`). They can appear e.g. after a user error. Currently they can cause assert crashes. (Not sure what is the best way to do this). * Go over current places where `Unpack` is handled, and verify both possible variadic items are handled. * Audit variadic `Instance` constrains and subtyping (the latter is probably OK, but the former may be broken). * Audit `Callable` and `Tuple` subtyping for variadic-related edge cases (constraints seem OK for these). 
* Figure out story about `map_instance_to_supertype()` (if no changes are needed, add tests for subclassing). * Clear most remaining TODOs. * Go once more over the large scale picture and check whether we have some important parts missing (or unhandled interactions between those). * Verify various "advanced" typing features work well with `TypeVarTuple`s (and add some support if missing but looks important). * Enable this feature by default. I hope to finish these in next few weeks. --- mypy/checker.py | 5 +- mypy/checkexpr.py | 11 +- mypy/constraints.py | 46 ++++-- mypy/expandtype.py | 111 +++----------- mypy/message_registry.py | 3 +- mypy/mixedtraverser.py | 2 +- mypy/nodes.py | 17 +-- mypy/semanal.py | 18 ++- mypy/semanal_typeargs.py | 45 ++++-- mypy/server/astmerge.py | 5 - mypy/server/deps.py | 2 +- mypy/strconv.py | 2 +- mypy/subtypes.py | 18 ++- mypy/typeanal.py | 72 ++++++++-- mypy/typeops.py | 9 +- mypy/types.py | 80 ++++++++++- mypy/types_utils.py | 12 +- mypy/typevartuples.py | 15 +- test-data/unit/check-generics.test | 1 - test-data/unit/check-typevar-tuple.test | 183 +++++++++++++++++++----- test-data/unit/check-varargs.test | 2 +- test-data/unit/semanal-errors.test | 9 +- 22 files changed, 439 insertions(+), 229 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 87dff91758f5..a44601b83e21 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4665,10 +4665,7 @@ def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: isinstance(iterable, TupleType) and iterable.partial_fallback.type.fullname == "builtins.tuple" ): - joined: Type = UninhabitedType() - for item in iterable.items: - joined = join_types(joined, item) - return iterator, joined + return iterator, tuple_fallback(iterable).args[0] else: # Non-tuple iterable. 
return iterator, echk.check_method_call_by_name("__next__", iterator, [], [], expr)[0] diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 4d04390da84a..6de317f587cb 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -168,7 +168,7 @@ UninhabitedType, UnionType, UnpackType, - flatten_nested_tuples, + find_unpack_in_list, flatten_nested_unions, get_proper_type, get_proper_types, @@ -185,7 +185,6 @@ ) from mypy.typestate import type_state from mypy.typevars import fill_typevars -from mypy.typevartuples import find_unpack_in_list from mypy.util import split_module_names from mypy.visitor import ExpressionVisitor @@ -1600,7 +1599,7 @@ def check_callable_call( See the docstring of check_call for more information. """ # Always unpack **kwargs before checking a call. - callee = callee.with_unpacked_kwargs() + callee = callee.with_unpacked_kwargs().with_normalized_var_args() if callable_name is None and callee.name: callable_name = callee.name ret_type = get_proper_type(callee.ret_type) @@ -2409,7 +2408,12 @@ def check_argument_types( + unpacked_type.items[inner_unpack_index + 1 :] ) callee_arg_kinds = [ARG_POS] * len(actuals) + elif isinstance(unpacked_type, TypeVarTupleType): + callee_arg_types = [orig_callee_arg_type] + callee_arg_kinds = [ARG_STAR] else: + # TODO: Any and can appear in Unpack (as a result of user error), + # fail gracefully here and elsewhere (and/or normalize them away). assert isinstance(unpacked_type, Instance) assert unpacked_type.type.fullname == "builtins.tuple" callee_arg_types = [unpacked_type.args[0]] * len(actuals) @@ -4451,7 +4455,6 @@ class C(Generic[T, Unpack[Ts]]): ... 
prefix = next(i for (i, v) in enumerate(vars) if isinstance(v, TypeVarTupleType)) suffix = len(vars) - prefix - 1 - args = flatten_nested_tuples(args) if len(args) < len(vars) - 1: self.msg.incompatible_type_application(len(vars), len(args), ctx) return [AnyType(TypeOfAny.from_error)] * len(vars) diff --git a/mypy/constraints.py b/mypy/constraints.py index 47f312117264..edce11e778ab 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -49,6 +49,7 @@ UninhabitedType, UnionType, UnpackType, + find_unpack_in_list, get_proper_type, has_recursive_types, has_type_vars, @@ -57,7 +58,7 @@ ) from mypy.types_utils import is_union_with_any from mypy.typestate import type_state -from mypy.typevartuples import extract_unpack, find_unpack_in_list, split_with_mapped_and_template +from mypy.typevartuples import extract_unpack, split_with_mapped_and_template if TYPE_CHECKING: from mypy.infer import ArgumentInferContext @@ -155,16 +156,33 @@ def infer_constraints_for_callable( # not to hold we can always handle the prefixes too. inner_unpack = unpacked_type.items[0] assert isinstance(inner_unpack, UnpackType) - inner_unpacked_type = inner_unpack.type - assert isinstance(inner_unpacked_type, TypeVarTupleType) + inner_unpacked_type = get_proper_type(inner_unpack.type) suffix_len = len(unpacked_type.items) - 1 - constraints.append( - Constraint( - inner_unpacked_type, - SUPERTYPE_OF, - TupleType(actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback), + if isinstance(inner_unpacked_type, TypeVarTupleType): + # Variadic item can be either *Ts... + constraints.append( + Constraint( + inner_unpacked_type, + SUPERTYPE_OF, + TupleType( + actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback + ), + ) ) - ) + else: + # ...or it can be a homogeneous tuple. 
+ assert ( + isinstance(inner_unpacked_type, Instance) + and inner_unpacked_type.type.fullname == "builtins.tuple" + ) + for at in actual_types[:-suffix_len]: + constraints.extend( + infer_constraints(inner_unpacked_type.args[0], at, SUPERTYPE_OF) + ) + # Now handle the suffix (if any). + if suffix_len: + for tt, at in zip(unpacked_type.items[1:], actual_types[-suffix_len:]): + constraints.extend(infer_constraints(tt, at, SUPERTYPE_OF)) else: assert False, "mypy bug: unhandled constraint inference case" else: @@ -863,6 +881,16 @@ def visit_instance(self, template: Instance) -> list[Constraint]: and self.direction == SUPERTYPE_OF ): for item in actual.items: + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, TypeVarType): + # Cannot infer anything for T from [T, ...] <: *Ts + continue + assert ( + isinstance(unpacked, Instance) + and unpacked.type.fullname == "builtins.tuple" + ) + item = unpacked.args[0] cb = infer_constraints(template.args[0], item, SUPERTYPE_OF) res.extend(cb) return res diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 6f69e09936db..e71f6429d9c0 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -2,7 +2,7 @@ from typing import Final, Iterable, Mapping, Sequence, TypeVar, cast, overload -from mypy.nodes import ARG_POS, ARG_STAR, ArgKind, Var +from mypy.nodes import ARG_STAR, Var from mypy.state import state from mypy.types import ( ANY_STRATEGY, @@ -35,12 +35,11 @@ UninhabitedType, UnionType, UnpackType, - flatten_nested_tuples, flatten_nested_unions, get_proper_type, split_with_prefix_and_suffix, ) -from mypy.typevartuples import find_unpack_in_list, split_with_instance +from mypy.typevartuples import split_with_instance # Solving the import cycle: import mypy.type_visitor # ruff: isort: skip @@ -294,11 +293,10 @@ def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType def visit_parameters(self, t: Parameters) -> Type: return 
t.copy_modified(arg_types=self.expand_types(t.arg_types)) - # TODO: can we simplify this method? It is too long. - def interpolate_args_for_unpack( - self, t: CallableType, var_arg: UnpackType - ) -> tuple[list[str | None], list[ArgKind], list[Type]]: + def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> list[Type]: star_index = t.arg_kinds.index(ARG_STAR) + prefix = self.expand_types(t.arg_types[:star_index]) + suffix = self.expand_types(t.arg_types[star_index + 1 :]) var_arg_type = get_proper_type(var_arg.type) # We have something like Unpack[Tuple[Unpack[Ts], X1, X2]] @@ -306,89 +304,19 @@ def interpolate_args_for_unpack( expanded_tuple = var_arg_type.accept(self) assert isinstance(expanded_tuple, ProperType) and isinstance(expanded_tuple, TupleType) expanded_items = expanded_tuple.items + fallback = var_arg_type.partial_fallback else: # We have plain Unpack[Ts] + assert isinstance(var_arg_type, TypeVarTupleType) + fallback = var_arg_type.tuple_fallback expanded_items_res = self.expand_unpack(var_arg) if isinstance(expanded_items_res, list): expanded_items = expanded_items_res else: # We got Any or - arg_types = ( - t.arg_types[:star_index] + [expanded_items_res] + t.arg_types[star_index + 1 :] - ) - return t.arg_names, t.arg_kinds, arg_types - - expanded_unpack_index = find_unpack_in_list(expanded_items) - # This is the case where we just have Unpack[Tuple[X1, X2, X3]] - # (for example if either the tuple had no unpacks, or the unpack in the - # tuple got fully expanded to something with fixed length) - if expanded_unpack_index is None: - arg_names = ( - t.arg_names[:star_index] - + [None] * len(expanded_items) - + t.arg_names[star_index + 1 :] - ) - arg_kinds = ( - t.arg_kinds[:star_index] - + [ARG_POS] * len(expanded_items) - + t.arg_kinds[star_index + 1 :] - ) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded_items - + self.expand_types(t.arg_types[star_index + 1 :]) - ) - else: - # If Unpack[Ts] simplest 
form still has an unpack or is a - # homogenous tuple, then only the prefix can be represented as - # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] - # as the star arg, for example. - expanded_unpack = expanded_items[expanded_unpack_index] - assert isinstance(expanded_unpack, UnpackType) - - # Extract the TypeVarTuple, so we can get a tuple fallback from it. - expanded_unpacked_tvt = expanded_unpack.type - if isinstance(expanded_unpacked_tvt, TypeVarTupleType): - fallback = expanded_unpacked_tvt.tuple_fallback - else: - # This can happen when tuple[Any, ...] is used to "patch" a variadic - # generic type without type arguments provided, or when substitution is - # homogeneous tuple. - assert isinstance(expanded_unpacked_tvt, ProperType) - assert isinstance(expanded_unpacked_tvt, Instance) - assert expanded_unpacked_tvt.type.fullname == "builtins.tuple" - fallback = expanded_unpacked_tvt - - prefix_len = expanded_unpack_index - arg_names = t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] - arg_kinds = ( - t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:] - ) - if ( - len(expanded_items) == 1 - and isinstance(expanded_unpack.type, ProperType) - and isinstance(expanded_unpack.type, Instance) - ): - assert expanded_unpack.type.type.fullname == "builtins.tuple" - # Normalize *args: *tuple[X, ...] -> *args: X - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + [expanded_unpack.type.args[0]] - + self.expand_types(t.arg_types[star_index + 1 :]) - ) - else: - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded_items[:prefix_len] - # Constructing the Unpack containing the tuple without the prefix. 
- + [ - UnpackType(TupleType(expanded_items[prefix_len:], fallback)) - if len(expanded_items) - prefix_len > 1 - else expanded_items[prefix_len] - ] - + self.expand_types(t.arg_types[star_index + 1 :]) - ) - return arg_names, arg_kinds, arg_types + return prefix + [expanded_items_res] + suffix + new_unpack = UnpackType(TupleType(expanded_items, fallback)) + return prefix + [new_unpack] + suffix def visit_callable_type(self, t: CallableType) -> CallableType: param_spec = t.param_spec() @@ -427,20 +355,20 @@ def visit_callable_type(self, t: CallableType) -> CallableType: ) var_arg = t.var_arg() + needs_normalization = False if var_arg is not None and isinstance(var_arg.typ, UnpackType): - arg_names, arg_kinds, arg_types = self.interpolate_args_for_unpack(t, var_arg.typ) + needs_normalization = True + arg_types = self.interpolate_args_for_unpack(t, var_arg.typ) else: - arg_names = t.arg_names - arg_kinds = t.arg_kinds arg_types = self.expand_types(t.arg_types) - - return t.copy_modified( + expanded = t.copy_modified( arg_types=arg_types, - arg_names=arg_names, - arg_kinds=arg_kinds, ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) + if needs_normalization: + return expanded.with_normalized_var_args() + return expanded def visit_overloaded(self, t: Overloaded) -> Type: items: list[CallableType] = [] @@ -460,9 +388,6 @@ def expand_types_with_unpack( indicates use of Any or some error occurred earlier. In this case callers should simply propagate the resulting type. """ - # TODO: this will cause a crash on aliases like A = Tuple[int, Unpack[A]]. - # Although it is unlikely anyone will write this, we should fail gracefully. 
- typs = flatten_nested_tuples(typs) items: list[Type] = [] for item in typs: if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): diff --git a/mypy/message_registry.py b/mypy/message_registry.py index bd3b8571b69e..713ec2e3c759 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -171,7 +171,8 @@ def with_additional_msg(self, info: str) -> ErrorMessage: IMPLICIT_GENERIC_ANY_BUILTIN: Final = ( 'Implicit generic "Any". Use "{}" and specify generic parameters' ) -INVALID_UNPACK = "{} cannot be unpacked (must be tuple or TypeVarTuple)" +INVALID_UNPACK: Final = "{} cannot be unpacked (must be tuple or TypeVarTuple)" +INVALID_UNPACK_POSITION: Final = "Unpack is only valid in a variadic position" # TypeVar INCOMPATIBLE_TYPEVAR_VALUE: Final = 'Value of type variable "{}" of {} cannot be {}' diff --git a/mypy/mixedtraverser.py b/mypy/mixedtraverser.py index 771f87fc6bd6..dfde41859c67 100644 --- a/mypy/mixedtraverser.py +++ b/mypy/mixedtraverser.py @@ -49,7 +49,7 @@ def visit_class_def(self, o: ClassDef) -> None: def visit_type_alias_expr(self, o: TypeAliasExpr) -> None: super().visit_type_alias_expr(o) self.in_type_alias_expr = True - o.type.accept(self) + o.node.target.accept(self) self.in_type_alias_expr = False def visit_type_var_expr(self, o: TypeVarExpr) -> None: diff --git a/mypy/nodes.py b/mypy/nodes.py index 452a4f643255..7efb01c1b18e 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2625,27 +2625,14 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr: class TypeAliasExpr(Expression): """Type alias expression (rvalue).""" - __slots__ = ("type", "tvars", "no_args", "node") + __slots__ = ("node",) - __match_args__ = ("type", "tvars", "no_args", "node") + __match_args__ = ("node",) - # The target type. - type: mypy.types.Type - # Names of type variables used to define the alias - tvars: list[str] - # Whether this alias was defined in bare form. 
Used to distinguish - # between - # A = List - # and - # A = List[Any] - no_args: bool node: TypeAlias def __init__(self, node: TypeAlias) -> None: super().__init__() - self.type = node.target - self.tvars = [v.name for v in node.alias_tvars] - self.no_args = node.no_args self.node = node def accept(self, visitor: ExpressionVisitor[T]) -> T: diff --git a/mypy/semanal.py b/mypy/semanal.py index ef66c9276664..55d4e6a3f506 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3680,7 +3680,10 @@ def disable_invalid_recursive_aliases( """Prohibit and fix recursive type aliases that are invalid/unsupported.""" messages = [] if is_invalid_recursive_alias({current_node}, current_node.target): - messages.append("Invalid recursive alias: a union item of itself") + target = ( + "tuple" if isinstance(get_proper_type(current_node.target), TupleType) else "union" + ) + messages.append(f"Invalid recursive alias: a {target} item of itself") if detect_diverging_alias( current_node, current_node.target, self.lookup_qualified, self.tvar_scope ): @@ -4213,6 +4216,7 @@ def get_typevarlike_argument( *, allow_unbound_tvars: bool = False, allow_param_spec_literals: bool = False, + allow_unpack: bool = False, report_invalid_typevar_arg: bool = True, ) -> ProperType | None: try: @@ -4224,6 +4228,7 @@ def get_typevarlike_argument( report_invalid_types=False, allow_unbound_tvars=allow_unbound_tvars, allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, ) if analyzed is None: # Type variables are special: we need to place them in the symbol table @@ -4375,6 +4380,7 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: s, allow_unbound_tvars=True, report_invalid_typevar_arg=False, + allow_unpack=True, ) default = tv_arg or AnyType(TypeOfAny.from_error) if not isinstance(default, UnpackType): @@ -5289,6 +5295,7 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: # Probably always allow Parameters literals, and 
validate in semanal_typeargs.py base = expr.base if isinstance(base, RefExpr) and isinstance(base.node, TypeAlias): + allow_unpack = base.node.tvar_tuple_index is not None alias = base.node if any(isinstance(t, ParamSpecType) for t in alias.alias_tvars): has_param_spec = True @@ -5297,9 +5304,11 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: has_param_spec = False num_args = -1 elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo): + allow_unpack = base.node.has_type_var_tuple_type has_param_spec = base.node.has_param_spec_type num_args = len(base.node.type_vars) else: + allow_unpack = False has_param_spec = False num_args = -1 @@ -5317,6 +5326,7 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: allow_unbound_tvars=self.allow_unbound_tvars, allow_placeholder=True, allow_param_spec_literals=has_param_spec, + allow_unpack=allow_unpack, ) if analyzed is None: return None @@ -6486,6 +6496,7 @@ def expr_to_analyzed_type( allow_type_any: bool = False, allow_unbound_tvars: bool = False, allow_param_spec_literals: bool = False, + allow_unpack: bool = False, ) -> Type | None: if isinstance(expr, CallExpr): # This is a legacy syntax intended mostly for Python 2, we keep it for @@ -6516,6 +6527,7 @@ def expr_to_analyzed_type( allow_type_any=allow_type_any, allow_unbound_tvars=allow_unbound_tvars, allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, ) def analyze_type_expr(self, expr: Expression) -> None: @@ -6537,6 +6549,7 @@ def type_analyzer( allow_placeholder: bool = False, allow_required: bool = False, allow_param_spec_literals: bool = False, + allow_unpack: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, allow_type_any: bool = False, @@ -6555,6 +6568,7 @@ def type_analyzer( allow_placeholder=allow_placeholder, allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, 
prohibit_self_type=prohibit_self_type, allow_type_any=allow_type_any, ) @@ -6575,6 +6589,7 @@ def anal_type( allow_placeholder: bool = False, allow_required: bool = False, allow_param_spec_literals: bool = False, + allow_unpack: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, allow_type_any: bool = False, @@ -6612,6 +6627,7 @@ def anal_type( allow_placeholder=allow_placeholder, allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, + allow_unpack=allow_unpack, report_invalid_types=report_invalid_types, prohibit_self_type=prohibit_self_type, allow_type_any=allow_type_any, diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index e188955dabbb..8d8ef66b5c69 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -14,13 +14,14 @@ from mypy.errors import Errors from mypy.messages import format_type from mypy.mixedtraverser import MixedTraverserVisitor -from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile +from mypy.nodes import ARG_STAR, Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile from mypy.options import Options from mypy.scope import Scope from mypy.subtypes import is_same_type, is_subtype from mypy.typeanal import set_any_tvars from mypy.types import ( AnyType, + CallableType, Instance, Parameters, ParamSpecType, @@ -116,20 +117,39 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None: # the expansion, most likely it will result in the same kind of error. get_proper_type(t).accept(self) + def visit_tuple_type(self, t: TupleType) -> None: + t.items = flatten_nested_tuples(t.items) + # We could also normalize Tuple[*tuple[X, ...]] -> tuple[X, ...] like in + # expand_type() but we can't do this here since it is not a translator visitor, + # and we need to return an Instance instead of TupleType. 
+ super().visit_tuple_type(t) + + def visit_callable_type(self, t: CallableType) -> None: + super().visit_callable_type(t) + # Normalize trivial unpack in var args as *args: *tuple[X, ...] -> *args: X + if t.is_var_arg: + star_index = t.arg_kinds.index(ARG_STAR) + star_type = t.arg_types[star_index] + if isinstance(star_type, UnpackType): + p_type = get_proper_type(star_type.type) + if isinstance(p_type, Instance): + assert p_type.type.fullname == "builtins.tuple" + t.arg_types[star_index] = p_type.args[0] + def visit_instance(self, t: Instance) -> None: # Type argument counts were checked in the main semantic analyzer pass. We assume # that the counts are correct here. info = t.type if isinstance(info, FakeInfo): return # https://github.com/python/mypy/issues/11079 + t.args = tuple(flatten_nested_tuples(t.args)) + # TODO: fix #15410 and #15411. self.validate_args(info.name, t.args, info.defn.type_vars, t) super().visit_instance(t) def validate_args( self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context ) -> bool: - # TODO: we need to do flatten_nested_tuples and validate arg count for instances - # similar to how do we do this for type aliases above, but this may have perf penalty. 
if any(isinstance(v, TypeVarTupleType) for v in type_vars): prefix = next(i for (i, v) in enumerate(type_vars) if isinstance(v, TypeVarTupleType)) tvt = type_vars[prefix] @@ -198,6 +218,7 @@ def validate_args( return is_error def visit_unpack_type(self, typ: UnpackType) -> None: + super().visit_unpack_type(typ) proper_type = get_proper_type(typ.type) if isinstance(proper_type, TupleType): return @@ -205,18 +226,14 @@ def visit_unpack_type(self, typ: UnpackType) -> None: return if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple": return - if ( - isinstance(proper_type, UnboundType) - or isinstance(proper_type, AnyType) - and proper_type.type_of_any == TypeOfAny.from_error - ): + if isinstance(proper_type, AnyType) and proper_type.type_of_any == TypeOfAny.from_error: return - - # TODO: Infer something when it can't be unpacked to allow rest of - # typechecking to work. - self.fail( - message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ - ) + if not isinstance(proper_type, UnboundType): + # Avoid extra errors if there were some errors already. 
+ self.fail( + message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ + ) + typ.type = AnyType(TypeOfAny.from_error) def check_type_var_values( self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index f58a4eedabc8..862c3898a383 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -73,7 +73,6 @@ SymbolNode, SymbolTable, TypeAlias, - TypeAliasExpr, TypedDictExpr, TypeInfo, Var, @@ -326,10 +325,6 @@ def visit_enum_call_expr(self, node: EnumCallExpr) -> None: self.process_synthetic_type_info(node.info) super().visit_enum_call_expr(node) - def visit_type_alias_expr(self, node: TypeAliasExpr) -> None: - self.fixup_type(node.type) - super().visit_type_alias_expr(node) - # Others def visit_var(self, node: Var) -> None: diff --git a/mypy/server/deps.py b/mypy/server/deps.py index ed85b74f2206..9ed2d4549629 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -472,7 +472,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: self.add_dependency(make_trigger(class_name + ".__init__")) self.add_dependency(make_trigger(class_name + ".__new__")) if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr): - self.add_type_dependencies(rvalue.analyzed.type) + self.add_type_dependencies(rvalue.analyzed.node.target) elif typ: self.add_type_dependencies(typ) else: diff --git a/mypy/strconv.py b/mypy/strconv.py index c428addd43aa..42a07c7f62fa 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -511,7 +511,7 @@ def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> str: return self.dump(a, o) def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> str: - return f"TypeAliasExpr({self.stringify_type(o.type)})" + return f"TypeAliasExpr({self.stringify_type(o.node.target)})" def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> str: return 
f"NamedTupleExpr:{o.line}({o.info.name}, {self.stringify_type(o.info.tuple_type) if o.info.tuple_type is not None else None})" diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 288de10cc234..58ae4efdf582 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -660,6 +660,8 @@ def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool: return self._is_subtype(left.upper_bound, self.right) def visit_unpack_type(self, left: UnpackType) -> bool: + # TODO: Ideally we should not need this (since it is not a real type). + # Instead callers (upper level types) should handle it when it appears in type list. if isinstance(self.right, UnpackType): return self._is_subtype(left.type, self.right.type) if isinstance(self.right, Instance) and self.right.type.fullname == "builtins.object": @@ -744,7 +746,15 @@ def visit_tuple_type(self, left: TupleType) -> bool: # TODO: We shouldn't need this special case. This is currently needed # for isinstance(x, tuple), though it's unclear why. return True - return all(self._is_subtype(li, iter_type) for li in left.items) + for li in left.items: + if isinstance(li, UnpackType): + unpack = get_proper_type(li.type) + if isinstance(unpack, Instance): + assert unpack.type.fullname == "builtins.tuple" + li = unpack.args[0] + if not self._is_subtype(li, iter_type): + return False + return True elif self._is_subtype(left.partial_fallback, right) and self._is_subtype( mypy.typeops.tuple_fallback(left), right ): @@ -752,6 +762,7 @@ def visit_tuple_type(self, left: TupleType) -> bool: return False elif isinstance(right, TupleType): if len(left.items) != len(right.items): + # TODO: handle tuple with variadic items better. return False if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): return False @@ -1385,8 +1396,8 @@ def g(x: int) -> int: ... whether or not we check the args covariantly. """ # Normalize both types before comparing them. 
- left = left.with_unpacked_kwargs() - right = right.with_unpacked_kwargs() + left = left.with_unpacked_kwargs().with_normalized_var_args() + right = right.with_unpacked_kwargs().with_normalized_var_args() if is_compat_return is None: is_compat_return = is_compat @@ -1539,6 +1550,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N # Phase 1c: Check var args. Right has an infinite series of optional positional # arguments. Get all further positional args of left, and make sure # they're more general than the corresponding member in right. + # TODO: are we handling UnpackType correctly here? if right_star is not None: # Synthesize an anonymous formal argument for the right right_by_position = right.try_synthesizing_arg_from_vararg(None) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index b15b5c7654ba..14b37539afea 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -195,6 +195,7 @@ def __init__( allow_placeholder: bool = False, allow_required: bool = False, allow_param_spec_literals: bool = False, + allow_unpack: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, allowed_alias_tvars: list[TypeVarLikeType] | None = None, @@ -241,6 +242,8 @@ def __init__( self.prohibit_self_type = prohibit_self_type # Allow variables typed as Type[Any] and type (useful for base classes). 
self.allow_type_any = allow_type_any + self.allow_type_var_tuple = False + self.allow_unpack = allow_unpack def lookup_qualified( self, name: str, ctx: Context, suppress_errors: bool = False @@ -277,7 +280,10 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) return PlaceholderType( node.fullname, self.anal_array( - t.args, allow_param_spec=True, allow_param_spec_literals=True + t.args, + allow_param_spec=True, + allow_param_spec_literals=True, + allow_unpack=True, ), t.line, ) @@ -365,6 +371,13 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.fail(f'TypeVarTuple "{t.name}" is unbound', t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) assert isinstance(tvar_def, TypeVarTupleType) + if not self.allow_type_var_tuple: + self.fail( + f'TypeVarTuple "{t.name}" is only valid with an unpack', + t, + code=codes.VALID_TYPE, + ) + return AnyType(TypeOfAny.from_error) if len(t.args) > 0: self.fail( f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE @@ -390,6 +403,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) t.args, allow_param_spec=True, allow_param_spec_literals=node.has_param_spec_type, + allow_unpack=node.tvar_tuple_index is not None, ) if node.has_param_spec_type and len(node.alias_tvars) == 1: an_args = self.pack_paramspec_args(an_args) @@ -531,7 +545,7 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ instance = self.named_type("builtins.tuple", [self.anal_type(t.args[0])]) instance.line = t.line return instance - return self.tuple_type(self.anal_array(t.args)) + return self.tuple_type(self.anal_array(t.args, allow_unpack=True)) elif fullname == "typing.Union": items = self.anal_array(t.args) return UnionType.make_union(items) @@ -631,7 +645,13 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ if len(t.args) != 1: self.fail("Unpack[...] 
requires exactly one type argument", t) return AnyType(TypeOfAny.from_error) - return UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column) + if not self.allow_unpack: + self.fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + self.allow_type_var_tuple = True + result = UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column) + self.allow_type_var_tuple = False + return result elif fullname in SELF_TYPE_NAMES: if t.args: self.fail("Self type cannot have type arguments", t) @@ -666,7 +686,7 @@ def analyze_type_with_type_info( if len(args) > 0 and info.fullname == "builtins.tuple": fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line) - return TupleType(self.anal_array(args), fallback, ctx.line) + return TupleType(self.anal_array(args, allow_unpack=True), fallback, ctx.line) # Analyze arguments and (usually) construct Instance type. The # number of type arguments and their values are @@ -679,7 +699,10 @@ def analyze_type_with_type_info( instance = Instance( info, self.anal_array( - args, allow_param_spec=True, allow_param_spec_literals=info.has_param_spec_type + args, + allow_param_spec=True, + allow_param_spec_literals=info.has_param_spec_type, + allow_unpack=info.has_type_var_tuple_type, ), ctx.line, ctx.column, @@ -715,7 +738,7 @@ def analyze_type_with_type_info( if info.special_alias: return instantiate_type_alias( info.special_alias, - # TODO: should we allow NamedTuples generic in ParamSpec? + # TODO: should we allow NamedTuples generic in ParamSpec and TypeVarTuple? self.anal_array(args), self.fail, False, @@ -723,7 +746,9 @@ def analyze_type_with_type_info( self.options, use_standard_error=True, ) - return tup.copy_modified(items=self.anal_array(tup.items), fallback=instance) + return tup.copy_modified( + items=self.anal_array(tup.items, allow_unpack=True), fallback=instance + ) td = info.typeddict_type if td is not None: # The class has a TypedDict[...] 
base class so it will be @@ -940,7 +965,23 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: self.anal_star_arg_type(t.arg_types[-1], ARG_STAR2, nested=nested), ] else: - arg_types = self.anal_array(t.arg_types, nested=nested) + arg_types = self.anal_array(t.arg_types, nested=nested, allow_unpack=True) + star_index = None + if ARG_STAR in arg_kinds: + star_index = arg_kinds.index(ARG_STAR) + star2_index = None + if ARG_STAR2 in arg_kinds: + star2_index = arg_kinds.index(ARG_STAR2) + validated_args: list[Type] = [] + for i, at in enumerate(arg_types): + if isinstance(at, UnpackType) and i not in (star_index, star2_index): + self.fail( + message_registry.INVALID_UNPACK_POSITION, at, code=codes.VALID_TYPE + ) + validated_args.append(AnyType(TypeOfAny.from_error)) + else: + validated_args.append(at) + arg_types = validated_args # If there were multiple (invalid) unpacks, the arg types list will become shorter, # we need to trim the kinds/names as well to avoid crashes. 
arg_kinds = t.arg_kinds[: len(arg_types)] @@ -1012,7 +1053,7 @@ def anal_star_arg_type(self, t: Type, kind: ArgKind, nested: bool) -> Type: line=t.line, column=t.column, ) - return self.anal_type(t, nested=nested) + return self.anal_type(t, nested=nested, allow_unpack=True) def visit_overloaded(self, t: Overloaded) -> Type: # Overloaded types are manually constructed in semanal.py by analyzing the @@ -1051,7 +1092,7 @@ def visit_tuple_type(self, t: TupleType) -> Type: if t.partial_fallback.type else self.named_type("builtins.tuple", [any_type]) ) - return TupleType(self.anal_array(t.items), fallback, t.line) + return TupleType(self.anal_array(t.items, allow_unpack=True), fallback, t.line) def visit_typeddict_type(self, t: TypedDictType) -> Type: items = { @@ -1534,12 +1575,17 @@ def anal_array( *, allow_param_spec: bool = False, allow_param_spec_literals: bool = False, + allow_unpack: bool = False, ) -> list[Type]: old_allow_param_spec_literals = self.allow_param_spec_literals self.allow_param_spec_literals = allow_param_spec_literals res: list[Type] = [] for t in a: - res.append(self.anal_type(t, nested, allow_param_spec=allow_param_spec)) + res.append( + self.anal_type( + t, nested, allow_param_spec=allow_param_spec, allow_unpack=allow_unpack + ) + ) self.allow_param_spec_literals = old_allow_param_spec_literals return self.check_unpacks_in_list(res) @@ -1549,6 +1595,7 @@ def anal_type( nested: bool = True, *, allow_param_spec: bool = False, + allow_unpack: bool = False, allow_ellipsis: bool = False, ) -> Type: if nested: @@ -1557,6 +1604,8 @@ def anal_type( self.allow_required = False old_allow_ellipsis = self.allow_ellipsis self.allow_ellipsis = allow_ellipsis + old_allow_unpack = self.allow_unpack + self.allow_unpack = allow_unpack try: analyzed = t.accept(self) finally: @@ -1564,6 +1613,7 @@ def anal_type( self.nesting_level -= 1 self.allow_required = old_allow_required self.allow_ellipsis = old_allow_ellipsis + self.allow_unpack = old_allow_unpack if ( not 
allow_param_spec and isinstance(analyzed, ParamSpecType) diff --git a/mypy/typeops.py b/mypy/typeops.py index e01aad950573..0e0bc348942e 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -105,19 +105,18 @@ def tuple_fallback(typ: TupleType) -> Instance: unpacked_type = get_proper_type(item.type) if isinstance(unpacked_type, TypeVarTupleType): items.append(unpacked_type.upper_bound) - elif isinstance(unpacked_type, TupleType): - # TODO: might make sense to do recursion here to support nested unpacks - # of tuple constants - items.extend(unpacked_type.items) elif ( isinstance(unpacked_type, Instance) and unpacked_type.type.fullname == "builtins.tuple" ): items.append(unpacked_type.args[0]) + elif isinstance(unpacked_type, (AnyType, UninhabitedType)): + continue else: - raise NotImplementedError + raise NotImplementedError(unpacked_type) else: items.append(item) + # TODO: we should really use a union here, tuple types are special. return Instance(info, [join_type_list(items)], extra_attrs=typ.partial_fallback.extra_attrs) diff --git a/mypy/types.py b/mypy/types.py index 301ce6e0cf18..c71412f4ea58 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2075,6 +2075,68 @@ def with_unpacked_kwargs(self) -> NormalizedCallableType: ) ) + def with_normalized_var_args(self) -> Self: + var_arg = self.var_arg() + if not var_arg or not isinstance(var_arg.typ, UnpackType): + return self + unpacked = get_proper_type(var_arg.typ.type) + if not isinstance(unpacked, TupleType): + # Note that we don't normalize *args: *tuple[X, ...] -> *args: X, + # this should be done once in semanal_typeargs.py for user-defined types, + # and we ourselves should never construct such type. + return self + unpack_index = find_unpack_in_list(unpacked.items) + if unpack_index == 0 and len(unpacked.items) > 1: + # Already normalized. 
+ return self + + # Boilerplate: + var_arg_index = self.arg_kinds.index(ARG_STAR) + types_prefix = self.arg_types[:var_arg_index] + kinds_prefix = self.arg_kinds[:var_arg_index] + names_prefix = self.arg_names[:var_arg_index] + types_suffix = self.arg_types[var_arg_index + 1 :] + kinds_suffix = self.arg_kinds[var_arg_index + 1 :] + names_suffix = self.arg_names[var_arg_index + 1 :] + no_name: str | None = None # to silence mypy + + # Now we have something non-trivial to do. + if unpack_index is None: + # Plain *Tuple[X, Y, Z] -> replace with ARG_POS completely + types_middle = unpacked.items + kinds_middle = [ARG_POS] * len(unpacked.items) + names_middle = [no_name] * len(unpacked.items) + else: + # *Tuple[X, *Ts, Y, Z] or *Tuple[X, *tuple[T, ...], X, Z], here + # we replace the prefix by ARG_POS (this is how some places expect + # Callables to be represented) + nested_unpack = unpacked.items[unpack_index] + assert isinstance(nested_unpack, UnpackType) + nested_unpacked = get_proper_type(nested_unpack.type) + if unpack_index == len(unpacked.items) - 1: + # Normalize also single item tuples like + # *args: *Tuple[*tuple[X, ...]] -> *args: X + # *args: *Tuple[*Ts] -> *args: *Ts + # This may be not strictly necessary, but these are very verbose. 
+ if isinstance(nested_unpacked, Instance): + assert nested_unpacked.type.fullname == "builtins.tuple" + new_unpack = nested_unpacked.args[0] + else: + assert isinstance(nested_unpacked, TypeVarTupleType) + new_unpack = nested_unpack + else: + new_unpack = UnpackType( + unpacked.copy_modified(items=unpacked.items[unpack_index:]) + ) + types_middle = unpacked.items[:unpack_index] + [new_unpack] + kinds_middle = [ARG_POS] * unpack_index + [ARG_STAR] + names_middle = [no_name] * unpack_index + [self.arg_names[var_arg_index]] + return self.copy_modified( + arg_types=types_prefix + types_middle + types_suffix, + arg_kinds=kinds_prefix + kinds_middle + kinds_suffix, + arg_names=names_prefix + names_middle + names_suffix, + ) + def __hash__(self) -> int: # self.is_type_obj() will fail if self.fallback.type is a FakeInfo if isinstance(self.fallback.type, FakeInfo): @@ -2259,10 +2321,6 @@ def __init__( ) -> None: super().__init__(line, column) self.partial_fallback = fallback - # TODO: flatten/normalize unpack items (very similar to unions) here. - # Probably also for instances, type aliases, callables, and Unpack itself. For example, - # tuple[*tuple[X, ...], ...] -> tuple[X, ...] and Tuple[*tuple[X, ...]] -> tuple[X, ...]. - # Currently normalization happens in expand_type() et al., which is sub-optimal. self.items = items self.implicit = implicit @@ -3426,6 +3484,20 @@ def flatten_nested_unions( return flat_items +def find_unpack_in_list(items: Sequence[Type]) -> int | None: + unpack_index: int | None = None + for i, item in enumerate(items): + if isinstance(item, UnpackType): + # We cannot fail here, so we must check this in an earlier + # semanal phase. + # Funky code here avoids mypyc narrowing the type of unpack_index. + old_index = unpack_index + assert old_index is None + # Don't return so that we can also sanity check there is only one. 
+ unpack_index = i + return unpack_index + + def flatten_nested_tuples(types: Sequence[Type]) -> list[Type]: """Recursively flatten TupleTypes nested with Unpack. diff --git a/mypy/types_utils.py b/mypy/types_utils.py index 7f2e38ef3753..f289ac3e9ed1 100644 --- a/mypy/types_utils.py +++ b/mypy/types_utils.py @@ -54,7 +54,7 @@ def strip_type(typ: Type) -> Type: def is_invalid_recursive_alias(seen_nodes: set[TypeAlias], target: Type) -> bool: - """Flag aliases like A = Union[int, A] (and similar mutual aliases). + """Flag aliases like A = Union[int, A], T = tuple[int, *T] (and similar mutual aliases). Such aliases don't make much sense, and cause problems in later phases. """ @@ -64,9 +64,15 @@ def is_invalid_recursive_alias(seen_nodes: set[TypeAlias], target: Type) -> bool assert target.alias, f"Unfixed type alias {target.type_ref}" return is_invalid_recursive_alias(seen_nodes | {target.alias}, get_proper_type(target)) assert isinstance(target, ProperType) - if not isinstance(target, UnionType): + if not isinstance(target, (UnionType, TupleType)): return False - return any(is_invalid_recursive_alias(seen_nodes, item) for item in target.items) + if isinstance(target, UnionType): + return any(is_invalid_recursive_alias(seen_nodes, item) for item in target.items) + for item in target.items: + if isinstance(item, UnpackType): + if is_invalid_recursive_alias(seen_nodes, item.type): + return True + return False def is_bad_type_type_item(item: Type) -> bool: diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index 29c800140eec..bcb5e96b615c 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -9,25 +9,12 @@ ProperType, Type, UnpackType, + find_unpack_in_list, get_proper_type, split_with_prefix_and_suffix, ) -def find_unpack_in_list(items: Sequence[Type]) -> int | None: - unpack_index: int | None = None - for i, item in enumerate(items): - if isinstance(item, UnpackType): - # We cannot fail here, so we must check this in an earlier - # semanal phase. 
- # Funky code here avoids mypyc narrowing the type of unpack_index. - old_index = unpack_index - assert old_index is None - # Don't return so that we can also sanity check there is only one. - unpack_index = i - return unpack_index - - def split_with_instance( typ: Instance, ) -> tuple[tuple[Type, ...], tuple[Type, ...], tuple[Type, ...]]: diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 95a7bdd2b2cd..93674c0c2d5c 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -3360,7 +3360,6 @@ class Foo(Generic[Unpack[Ts]]): ... class Bar(Generic[Unpack[Ts], T]): ... def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], List[T]]: ... -# TODO: do not crash on Foo[Us] (with missing Unpack), instead give an error. def f(*args: Unpack[Us]) -> Foo[Unpack[Us]]: ... reveal_type(dec(f)) # N: Revealed type is "def [Ts] (*Unpack[Ts`1]) -> builtins.list[__main__.Foo[Unpack[Ts`1]]]" g: Callable[[Unpack[Us]], Foo[Unpack[Us]]] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index b28b2ead45e7..58fc1265ae99 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -60,9 +60,10 @@ reveal_type(f(f_args2)) # N: Revealed type is "Tuple[builtins.str]" reveal_type(f(f_args3)) # N: Revealed type is "Tuple[builtins.str, builtins.str, builtins.bool]" f(empty) # E: Argument 1 to "f" has incompatible type "Tuple[()]"; expected "Tuple[int]" f(bad_args) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[int, str]" -# TODO: This hits a crash where we assert len(templates.items) == 1. See visit_tuple_type -# in mypy/constraints.py. -#f(var_len_tuple) + +# The reason for error in subtle: actual can be empty, formal cannot. 
+reveal_type(f(var_len_tuple)) # N: Revealed type is "Tuple[builtins.str, Unpack[builtins.tuple[builtins.int, ...]]]" \ + # E: Argument 1 to "f" has incompatible type "Tuple[int, ...]"; expected "Tuple[int, Unpack[Tuple[int, ...]]]" g_args: Tuple[str, int] reveal_type(g(g_args)) # N: Revealed type is "Tuple[builtins.str, builtins.str]" @@ -123,13 +124,10 @@ reveal_type(empty) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tup bad: Variadic[Unpack[Tuple[int, ...]], str, Unpack[Tuple[bool, ...]]] # E: More than one Unpack in a type is not allowed reveal_type(bad) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[builtins.int, ...]], builtins.str]" -# TODO: This is tricky to fix because we need typeanal to know whether the current -# location is valid for an Unpack or not. -# bad2: Unpack[Tuple[int, ...]] +bad2: Unpack[Tuple[int, ...]] # E: Unpack is only valid in a variadic position m1: Mixed1[int, str, bool] reveal_type(m1) # N: Revealed type is "__main__.Mixed1[builtins.int, builtins.str, builtins.bool]" - [builtins fixtures/tuple.pyi] [case testTypeVarTupleGenericClassWithFunctions] @@ -148,7 +146,6 @@ def foo(t: Variadic[int, Unpack[Ts], object]) -> Tuple[int, Unpack[Ts]]: v: Variadic[int, str, bool, object] reveal_type(foo(v)) # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.bool]" - [builtins fixtures/tuple.pyi] [case testTypeVarTupleGenericClassWithMethods] @@ -168,7 +165,6 @@ class Variadic(Generic[T, Unpack[Ts], S]): v: Variadic[float, str, bool, object] reveal_type(v.foo(0)) # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.bool]" - [builtins fixtures/tuple.pyi] [case testTypeVarTupleIsNotValidAliasTarget] @@ -211,8 +207,8 @@ shape = (Height(480), Width(640)) x: Array[Height, Width] = Array(shape) reveal_type(abs(x)) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" reveal_type(x + x) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" - [builtins 
fixtures/tuple.pyi] + [case testTypeVarTuplePep646ArrayExampleWithDType] from typing import Generic, Tuple, TypeVar, Protocol, NewType from typing_extensions import TypeVarTuple, Unpack @@ -247,7 +243,6 @@ shape = (Height(480), Width(640)) x: Array[float, Height, Width] = Array(shape) reveal_type(abs(x)) # N: Revealed type is "__main__.Array[builtins.float, __main__.Height, __main__.Width]" reveal_type(x + x) # N: Revealed type is "__main__.Array[builtins.float, __main__.Height, __main__.Width]" - [builtins fixtures/tuple.pyi] [case testTypeVarTuplePep646ArrayExampleInfer] @@ -293,8 +288,8 @@ c = del_batch_axis(b) reveal_type(c) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" d = add_batch_channels(a) reveal_type(d) # N: Revealed type is "__main__.Array[__main__.Batch, __main__.Height, __main__.Width, __main__.Channels]" - [builtins fixtures/tuple.pyi] + [case testTypeVarTuplePep646TypeVarConcatenation] from typing import Generic, TypeVar, NewType, Tuple from typing_extensions import TypeVarTuple, Unpack @@ -311,6 +306,7 @@ def prefix_tuple( z = prefix_tuple(x=0, y=(True, 'a')) reveal_type(z) # N: Revealed type is "Tuple[builtins.int, builtins.bool, builtins.str]" [builtins fixtures/tuple.pyi] + [case testTypeVarTuplePep646TypeVarTupleUnpacking] from typing import Generic, TypeVar, NewType, Any, Tuple from typing_extensions import TypeVarTuple, Unpack @@ -363,8 +359,6 @@ reveal_type(bad) # N: Revealed type is "def [Ts, Ts2] (x: Tuple[builtins.int, U def bad2(x: Tuple[int, Unpack[Tuple[int, ...]], str, Unpack[Tuple[str, ...]]]) -> None: # E: More than one Unpack in a type is not allowed ... 
reveal_type(bad2) # N: Revealed type is "def (x: Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.str])" - - [builtins fixtures/tuple.pyi] [case testTypeVarTuplePep646TypeVarStarArgsBasic] @@ -380,8 +374,8 @@ def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: return args reveal_type(args_to_tuple(1, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]" - [builtins fixtures/tuple.pyi] + [case testTypeVarTuplePep646TypeVarStarArgs] from typing import Tuple from typing_extensions import TypeVarTuple, Unpack @@ -410,8 +404,6 @@ with_prefix_suffix(*bad_t) # E: Too few arguments for "with_prefix_suffix" def foo(*args: Unpack[Ts]) -> None: reveal_type(with_prefix_suffix(True, "bar", *args, 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" - - [builtins fixtures/tuple.pyi] [case testTypeVarTuplePep646TypeVarStarArgsFixedLengthTuple] @@ -422,17 +414,23 @@ def foo(*args: Unpack[Tuple[int, str]]) -> None: reveal_type(args) # N: Revealed type is "Tuple[builtins.int, builtins.str]" foo(0, "foo") -foo(0, 1) # E: Argument 2 to "foo" has incompatible type "int"; expected "Unpack[Tuple[int, str]]" -foo("foo", "bar") # E: Argument 1 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, str]]" -foo(0, "foo", 1) # E: Invalid number of arguments -foo(0) # E: Invalid number of arguments -foo() # E: Invalid number of arguments +foo(0, 1) # E: Argument 2 to "foo" has incompatible type "int"; expected "str" +foo("foo", "bar") # E: Argument 1 to "foo" has incompatible type "str"; expected "int" +foo(0, "foo", 1) # E: Too many arguments for "foo" +foo(0) # E: Too few arguments for "foo" +foo() # E: Too few arguments for "foo" foo(*(0, "foo")) -# TODO: fix this case to do something sensible. 
-#def foo2(*args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None: -# reveal_type(args) +def foo2(*args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None: + reveal_type(args) # N: Revealed type is "Tuple[builtins.bool, builtins.int, builtins.str, builtins.bool]" + +# It is hard to normalize callable types in definition, because there is deep relation between `FuncDef.type` +# and `FuncDef.arguments`, therefore various typeops need to be sure to normalize Callable types before using them. +reveal_type(foo2) # N: Revealed type is "def (*args: Unpack[Tuple[builtins.bool, builtins.int, builtins.str, builtins.bool]])" +class C: + def foo2(self, *args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None: ... +reveal_type(C().foo2) # N: Revealed type is "def (*args: Unpack[Tuple[builtins.bool, builtins.int, builtins.str, builtins.bool]])" [builtins fixtures/tuple.pyi] [case testTypeVarTuplePep646TypeVarStarArgsVariableLengthTuple] @@ -443,8 +441,7 @@ def foo(*args: Unpack[Tuple[int, ...]]) -> None: reveal_type(args) # N: Revealed type is "builtins.tuple[builtins.int, ...]" foo(0, 1, 2) -# TODO: this should say 'expected "int"' rather than the unpack -foo(0, 1, "bar") # E: Argument 3 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, ...]]" +foo(0, 1, "bar") # E: Argument 3 to "foo" has incompatible type "str"; expected "int" def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None: @@ -453,9 +450,9 @@ def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None # reveal_type(args[1]) foo2("bar", 1, 2, 3, False, True) -foo2(0, 1, 2, 3, False, True) # E: Argument 1 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" -foo2("bar", "bar", 2, 3, False, True) # E: Argument 2 to "foo2" has incompatible type "str"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" -foo2("bar", 1, 2, 3, 4, True) # E: Argument 5 to "foo2" 
has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2(0, 1, 2, 3, False, True) # E: Argument 1 to "foo2" has incompatible type "int"; expected "str" +foo2("bar", "bar", 2, 3, False, True) # E: Argument 2 to "foo2" has incompatible type "str"; expected "Unpack[Tuple[Unpack[Tuple[int, ...]], bool, bool]]" +foo2("bar", 1, 2, 3, 4, True) # E: Argument 5 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[Unpack[Tuple[int, ...]], bool, bool]]" foo2(*("bar", 1, 2, 3, False, True)) [builtins fixtures/tuple.pyi] @@ -550,8 +547,7 @@ def call( *args: Unpack[Ts], ) -> None: ... - # TODO: exposes unhandled case in checkexpr - # target(*args) + target(*args) class A: def func(self, arg1: int, arg2: str) -> None: ... @@ -569,7 +565,6 @@ call(A().func, 0, 1) # E: Argument 1 to "call" has incompatible type "Callable[ call(A().func2, 0, 0) call(A().func3, 0, 1, 2) call(A().func3) - [builtins fixtures/tuple.pyi] [case testVariadicAliasBasicTuple] @@ -805,3 +800,125 @@ reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple y: A[Unpack[Tuple[bool, ...]]] reveal_type(y) # N: Revealed type is "Tuple[builtins.bool, Unpack[builtins.tuple[builtins.bool, ...]], builtins.bool, builtins.bool]" [builtins fixtures/tuple.pyi] + +[case testBanPathologicalRecursiveTuples] +from typing import Tuple +from typing_extensions import Unpack +A = Tuple[int, Unpack[A]] # E: Invalid recursive alias: a tuple item of itself +B = Tuple[int, Unpack[C]] # E: Invalid recursive alias: a tuple item of itself \ + # E: Name "C" is used before definition +C = Tuple[int, Unpack[B]] +x: A +y: B +z: C +reveal_type(x) # N: Revealed type is "Any" +reveal_type(y) # N: Revealed type is "Any" +reveal_type(z) # N: Revealed type is "Tuple[builtins.int, Unpack[Any]]" +[builtins fixtures/tuple.pyi] + +[case testInferenceAgainstGenericVariadicWithBadType] +# flags: --new-type-inference +from typing import TypeVar, Callable, Generic +from 
typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") +Us = TypeVarTuple("Us") + +class Foo(Generic[Unpack[Ts]]): ... + +def dec(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], T]: ... +def f(*args: Unpack[Us]) -> Foo[Us]: ... # E: TypeVarTuple "Us" is only valid with an unpack +dec(f) # No crash +[builtins fixtures/tuple.pyi] + +[case testHomogeneousGenericTupleUnpackInferenceNoCrash1] +from typing import Any, TypeVar, Tuple, Type, Optional +from typing_extensions import Unpack + +T = TypeVar("T") +def convert(obj: Any, *to_classes: Unpack[Tuple[Type[T], ...]]) -> Optional[T]: + ... + +x = convert(1, int, float) +reveal_type(x) # N: Revealed type is "Union[builtins.float, None]" +[builtins fixtures/tuple.pyi] + +[case testHomogeneousGenericTupleUnpackInferenceNoCrash2] +from typing import TypeVar, Tuple, Callable, Iterable +from typing_extensions import Unpack + +T = TypeVar("T") +def combine(x: T, y: T) -> T: ... +def reduce(fn: Callable[[T, T], T], xs: Iterable[T]) -> T: ... 
+ +def pipeline(*xs: Unpack[Tuple[int, Unpack[Tuple[str, ...]], bool]]) -> None: + reduce(combine, xs) +[builtins fixtures/tuple.pyi] + +[case testVariadicStarArgsCallNoCrash] +from typing import TypeVar, Callable, Tuple +from typing_extensions import TypeVarTuple, Unpack + +X = TypeVar("X") +Y = TypeVar("Y") +Xs = TypeVarTuple("Xs") +Ys = TypeVarTuple("Ys") + +def nil() -> Tuple[()]: + return () + +def cons( + f: Callable[[X], Y], + g: Callable[[Unpack[Xs]], Tuple[Unpack[Ys]]], +) -> Callable[[X, Unpack[Xs]], Tuple[Y, Unpack[Ys]]]: + def wrapped(x: X, *xs: Unpack[Xs]) -> Tuple[Y, Unpack[Ys]]: + y, ys = f(x), g(*xs) + return y, *ys + return wrapped + +def star(f: Callable[[X], Y]) -> Callable[[Unpack[Tuple[X, ...]]], Tuple[Y, ...]]: + def wrapped(*xs: X): + if not xs: + return nil() + return cons(f, star(f))(*xs) + return wrapped +[builtins fixtures/tuple.pyi] + +[case testInvalidTypeVarTupleUseNoCrash] +from typing_extensions import TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def f(x: Ts) -> Ts: # E: TypeVarTuple "Ts" is only valid with an unpack + return x + +v = f(1, 2, "A") # E: Too many arguments for "f" +reveal_type(v) # N: Revealed type is "Any" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleSimpleDecoratorWorks] +from typing import TypeVar, Callable +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +T = TypeVar("T") + +def decorator(f: Callable[[Unpack[Ts]], T]) -> Callable[[Unpack[Ts]], T]: + def wrapper(*args: Unpack[Ts]) -> T: + return f(*args) + return wrapper + +@decorator +def f(a: int, b: int) -> int: ... 
+reveal_type(f) # N: Revealed type is "def (builtins.int, builtins.int) -> builtins.int" +[builtins fixtures/tuple.pyi] + +[case testTupleWithUnpackIterator] +from typing import Tuple +from typing_extensions import Unpack + +def pipeline(*xs: Unpack[Tuple[int, Unpack[Tuple[float, ...]], bool]]) -> None: + for x in xs: + reveal_type(x) # N: Revealed type is "builtins.float" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 6e118597551f..fe09fb43c97c 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -775,7 +775,7 @@ class Person(TypedDict): name: str age: int -def foo(x: Unpack[Person]) -> None: # E: "Person" cannot be unpacked (must be tuple or TypeVarTuple) +def foo(x: Unpack[Person]) -> None: # E: Unpack is only valid in a variadic position ... def bar(x: int, *args: Unpack[Person]) -> None: # E: "Person" cannot be unpacked (must be tuple or TypeVarTuple) ... diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 09d4da54bff3..f21ba5253437 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -1457,7 +1457,7 @@ homogenous_tuple: Tuple[Unpack[Tuple[int, ...]]] bad: Tuple[Unpack[int]] # E: "int" cannot be unpacked (must be tuple or TypeVarTuple) [builtins fixtures/tuple.pyi] -[case testTypeVarTuple] +[case testTypeVarTupleErrors] from typing import Generic from typing_extensions import TypeVarTuple, Unpack @@ -1471,15 +1471,14 @@ TP5 = TypeVarTuple(t='TP5') # E: TypeVarTuple() expects a string literal as fir TP6 = TypeVarTuple('TP6', bound=int) # E: Unexpected keyword argument "bound" for "TypeVarTuple" x: TVariadic # E: TypeVarTuple "TVariadic" is unbound -y: Unpack[TVariadic] # E: TypeVarTuple "TVariadic" is unbound +y: Unpack[TVariadic] # E: Unpack is only valid in a variadic position class Variadic(Generic[Unpack[TVariadic], Unpack[TVariadic2]]): # E: Can only use one type var 
tuple in a class def pass -# TODO: this should generate an error -#def bad_args(*args: TVariadic): -# pass +def bad_args(*args: TVariadic): # E: TypeVarTuple "TVariadic" is only valid with an unpack + pass def bad_kwargs(**kwargs: Unpack[TVariadic]): # E: Unpack item in ** argument must be a TypedDict pass From 0b303b53479897e24d57affef6a8cdfffbd08e3d Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 24 Aug 2023 00:47:22 +0100 Subject: [PATCH 71/88] stubtest: error if typeshed is missing modules from the stdlib (#15729) We currently flag modules missing from third-party stubs in stubtest, but don't do similarly for typeshed's stdlib stubs. This PR adds that functionality for typeshed's stdlib stubs as well. --- mypy/stubtest.py | 83 ++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 74 insertions(+), 9 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index b2506e6dcc02..d8a613034b3a 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -11,6 +11,7 @@ import copy import enum import importlib +import importlib.machinery import inspect import os import pkgutil @@ -25,7 +26,7 @@ from contextlib import redirect_stderr, redirect_stdout from functools import singledispatch from pathlib import Path -from typing import Any, Generic, Iterator, TypeVar, Union +from typing import AbstractSet, Any, Generic, Iterator, TypeVar, Union from typing_extensions import get_origin, is_typeddict import mypy.build @@ -1639,7 +1640,7 @@ def get_stub(module: str) -> nodes.MypyFile | None: def get_typeshed_stdlib_modules( custom_typeshed_dir: str | None, version_info: tuple[int, int] | None = None -) -> list[str]: +) -> set[str]: """Returns a list of stdlib modules in typeshed (for current Python version).""" stdlib_py_versions = mypy.modulefinder.load_stdlib_py_versions(custom_typeshed_dir) if version_info is None: @@ -1661,14 +1662,75 @@ def exists_in_version(module: str) -> bool: typeshed_dir = Path(mypy.build.default_data_dir()) / "typeshed" stdlib_dir = 
typeshed_dir / "stdlib" - modules = [] + modules: set[str] = set() for path in stdlib_dir.rglob("*.pyi"): if path.stem == "__init__": path = path.parent module = ".".join(path.relative_to(stdlib_dir).parts[:-1] + (path.stem,)) if exists_in_version(module): - modules.append(module) - return sorted(modules) + modules.add(module) + return modules + + +def get_importable_stdlib_modules() -> set[str]: + """Return all importable stdlib modules at runtime.""" + all_stdlib_modules: AbstractSet[str] + if sys.version_info >= (3, 10): + all_stdlib_modules = sys.stdlib_module_names + else: + all_stdlib_modules = set(sys.builtin_module_names) + python_exe_dir = Path(sys.executable).parent + for m in pkgutil.iter_modules(): + finder = m.module_finder + if isinstance(finder, importlib.machinery.FileFinder): + finder_path = Path(finder.path) + if ( + python_exe_dir in finder_path.parents + and "site-packages" not in finder_path.parts + ): + all_stdlib_modules.add(m.name) + + importable_stdlib_modules: set[str] = set() + for module_name in all_stdlib_modules: + if module_name in ANNOYING_STDLIB_MODULES: + continue + + try: + runtime = silent_import_module(module_name) + except ImportError: + continue + else: + importable_stdlib_modules.add(module_name) + + try: + # some stdlib modules (e.g. `nt`) don't have __path__ set... + runtime_path = runtime.__path__ + runtime_name = runtime.__name__ + except AttributeError: + continue + + for submodule in pkgutil.walk_packages(runtime_path, runtime_name + "."): + submodule_name = submodule.name + + # There are many annoying *.__main__ stdlib modules, + # and including stubs for them isn't really that useful anyway: + # tkinter.__main__ opens a tkinter windows; unittest.__main__ raises SystemExit; etc. 
+ # + # The idlelib.* submodules are similarly annoying in opening random tkinter windows, + # and we're unlikely to ever add stubs for idlelib in typeshed + # (see discussion in https://github.com/python/typeshed/pull/9193) + if submodule_name.endswith(".__main__") or submodule_name.startswith("idlelib."): + continue + + try: + silent_import_module(submodule_name) + # importing multiprocessing.popen_forkserver on Windows raises AttributeError... + except Exception: + continue + else: + importable_stdlib_modules.add(submodule_name) + + return importable_stdlib_modules def get_allowlist_entries(allowlist_file: str) -> Iterator[str]: @@ -1699,6 +1761,10 @@ class _Arguments: version: str +# typeshed added a stub for __main__, but that causes stubtest to check itself +ANNOYING_STDLIB_MODULES: typing_extensions.Final = frozenset({"antigravity", "this", "__main__"}) + + def test_stubs(args: _Arguments, use_builtins_fixtures: bool = False) -> int: """This is stubtest! It's time to test the stubs!""" # Load the allowlist. 
This is a series of strings corresponding to Error.object_desc @@ -1721,10 +1787,9 @@ def test_stubs(args: _Arguments, use_builtins_fixtures: bool = False) -> int: "cannot pass both --check-typeshed and a list of modules", ) return 1 - modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir) - # typeshed added a stub for __main__, but that causes stubtest to check itself - annoying_modules = {"antigravity", "this", "__main__"} - modules = [m for m in modules if m not in annoying_modules] + typeshed_modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir) + runtime_modules = get_importable_stdlib_modules() + modules = sorted((typeshed_modules | runtime_modules) - ANNOYING_STDLIB_MODULES) if not modules: print(_style("error:", color="red", bold=True), "no modules to check") From 4077dc6c4b87b273bfd4552d75faaafa6c016c25 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 24 Aug 2023 08:29:00 +0100 Subject: [PATCH 72/88] stubtest: fix edge case for bytes enum subclasses (#15943) --- mypy/stubtest.py | 6 +++--- mypy/test/teststubtest.py | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index d8a613034b3a..34bb985b702e 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1552,10 +1552,10 @@ def anytype() -> mypy.types.AnyType: fallback = mypy.types.Instance(type_info, [anytype() for _ in type_info.type_vars]) value: bool | int | str - if isinstance(runtime, bytes): - value = bytes_to_human_readable_repr(runtime) - elif isinstance(runtime, enum.Enum) and isinstance(runtime.name, str): + if isinstance(runtime, enum.Enum) and isinstance(runtime.name, str): value = runtime.name + elif isinstance(runtime, bytes): + value = bytes_to_human_readable_repr(runtime) elif isinstance(runtime, (bool, int, str)): value = runtime else: diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index a6733a9e8bd0..a52d9ef5de31 100644 --- a/mypy/test/teststubtest.py +++ 
b/mypy/test/teststubtest.py @@ -1068,6 +1068,26 @@ def spam(x=Flags4(0)): pass """, error="spam", ) + yield Case( + stub=""" + from typing_extensions import Final, Literal + class BytesEnum(bytes, enum.Enum): + a: bytes + FOO: Literal[BytesEnum.a] + BAR: Final = BytesEnum.a + BAZ: BytesEnum + EGGS: bytes + """, + runtime=""" + class BytesEnum(bytes, enum.Enum): + a = b'foo' + FOO = BytesEnum.a + BAR = BytesEnum.a + BAZ = BytesEnum.a + EGGS = BytesEnum.a + """, + error=None, + ) @collect_cases def test_decorator(self) -> Iterator[Case]: From dc7344539bd6e40825486dfaaa3d0bc34a64784e Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Thu, 24 Aug 2023 19:56:13 +0300 Subject: [PATCH 73/88] Do not advertise to create your own `assert_never` helper (#15947) --- docs/source/literal_types.rst | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/docs/source/literal_types.rst b/docs/source/literal_types.rst index a66d300bd0fd..283bf7f9dba1 100644 --- a/docs/source/literal_types.rst +++ b/docs/source/literal_types.rst @@ -329,13 +329,10 @@ perform an exhaustiveness check, you need to update your code to use an .. 
code-block:: python from typing import Literal, NoReturn + from typing_extensions import assert_never PossibleValues = Literal['one', 'two'] - def assert_never(value: NoReturn) -> NoReturn: - # This also works at runtime as well - assert False, f'This code should never be reached, got: {value}' - def validate(x: PossibleValues) -> bool: if x == 'one': return True @@ -443,10 +440,7 @@ Let's start with a definition: from enum import Enum from typing import NoReturn - - def assert_never(value: NoReturn) -> NoReturn: - # This also works in runtime as well: - assert False, f'This code should never be reached, got: {value}' + from typing_extensions import assert_never class Direction(Enum): up = 'up' From 351371d20c0b9e014528238761a6eeedf8dfb926 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 24 Aug 2023 20:10:47 +0100 Subject: [PATCH 74/88] Fix type arguments validation for variadic instances (#15944) Fixes https://github.com/python/mypy/issues/15410 Fixes https://github.com/python/mypy/issues/15411 --- mypy/expandtype.py | 8 +-- mypy/semanal_typeargs.py | 23 +++++++- mypy/test/testtypes.py | 2 +- mypy/typeanal.py | 8 ++- mypy/types.py | 1 + test-data/unit/check-typevar-tuple.test | 78 +++++++++++++++++++++++++ 6 files changed, 112 insertions(+), 8 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index e71f6429d9c0..dc3dae670c1f 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -409,10 +409,10 @@ def visit_tuple_type(self, t: TupleType) -> Type: # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...] 
item = items[0] if isinstance(item, UnpackType): - assert isinstance(item.type, ProperType) - if isinstance(item.type, Instance): - assert item.type.type.fullname == "builtins.tuple" - return item.type + unpacked = get_proper_type(item.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + return unpacked fallback = t.partial_fallback.accept(self) assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) return t.copy_modified(items=items, fallback=fallback) diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 8d8ef66b5c69..1a37ac57be30 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -18,7 +18,7 @@ from mypy.options import Options from mypy.scope import Scope from mypy.subtypes import is_same_type, is_subtype -from mypy.typeanal import set_any_tvars +from mypy.typeanal import fix_type_var_tuple_argument, set_any_tvars from mypy.types import ( AnyType, CallableType, @@ -143,7 +143,26 @@ def visit_instance(self, t: Instance) -> None: if isinstance(info, FakeInfo): return # https://github.com/python/mypy/issues/11079 t.args = tuple(flatten_nested_tuples(t.args)) - # TODO: fix #15410 and #15411. + if t.type.has_type_var_tuple_type: + # Regular Instances are already validated in typeanal.py. + # TODO: do something with partial overlap (probably just reject). + # also in other places where split_with_prefix_and_suffix() is used. 
+ correct = len(t.args) >= len(t.type.type_vars) - 1 + if any( + isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance) + for a in t.args + ): + correct = True + if not correct: + exp_len = f"at least {len(t.type.type_vars) - 1}" + self.fail( + f"Bad number of arguments, expected: {exp_len}, given: {len(t.args)}", + t, + code=codes.TYPE_ARG, + ) + any_type = AnyType(TypeOfAny.from_error) + t.args = (any_type,) * len(t.type.type_vars) + fix_type_var_tuple_argument(any_type, t) self.validate_args(info.name, t.args, info.defn.type_vars, t) super().visit_instance(t) diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 56ac86058ce4..12e7b207b00a 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -1464,7 +1464,7 @@ def make_call(*items: tuple[str, str | None]) -> CallExpr: class TestExpandTypeLimitGetProperType(TestCase): # WARNING: do not increase this number unless absolutely necessary, # and you understand what you are doing. - ALLOWED_GET_PROPER_TYPES = 6 + ALLOWED_GET_PROPER_TYPES = 7 @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy") def test_count_get_proper_type(self) -> None: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 14b37539afea..806b9967039e 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -1795,6 +1795,13 @@ def fix_instance( fix_type_var_tuple_argument(any_type, t) return + + if t.type.has_type_var_tuple_type: + # This can be only correctly analyzed when all arguments are fully + # analyzed, because there may be a variadic item among them, so we + # do this in semanal_typeargs.py. + return + # Invalid number of type parameters. fail( wrong_type_arg_count(len(t.type.type_vars), str(len(t.args)), t.type.name), @@ -1805,7 +1812,6 @@ def fix_instance( # otherwise the type checker may crash as it expects # things to be right. 
t.args = tuple(AnyType(TypeOfAny.from_error) for _ in t.type.type_vars) - fix_type_var_tuple_argument(AnyType(TypeOfAny.from_error), t) t.invalid = True diff --git a/mypy/types.py b/mypy/types.py index c71412f4ea58..214978eab774 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -322,6 +322,7 @@ def _expand_once(self) -> Type: assert isinstance(self.alias.target, Instance) # type: ignore[misc] return self.alias.target.copy_modified(args=self.args) + # TODO: this logic duplicates the one in expand_type_by_instance(). if self.alias.tvar_tuple_index is None: mapping = {v.id: s for (v, s) in zip(self.alias.alias_tvars, self.args)} else: diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 58fc1265ae99..ee81597edadf 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -922,3 +922,81 @@ def pipeline(*xs: Unpack[Tuple[int, Unpack[Tuple[float, ...]], bool]]) -> None: for x in xs: reveal_type(x) # N: Revealed type is "builtins.float" [builtins fixtures/tuple.pyi] + +[case testFixedUnpackItemInInstanceArguments] +from typing import TypeVar, Callable, Tuple, Generic +from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") + +class C(Generic[T, Unpack[Ts], S]): + prefix: T + suffix: S + middle: Tuple[Unpack[Ts]] + +Ints = Tuple[int, int] +c: C[Unpack[Ints]] +reveal_type(c.prefix) # N: Revealed type is "builtins.int" +reveal_type(c.suffix) # N: Revealed type is "builtins.int" +reveal_type(c.middle) # N: Revealed type is "Tuple[()]" +[builtins fixtures/tuple.pyi] + +[case testVariadicUnpackItemInInstanceArguments] +from typing import TypeVar, Callable, Tuple, Generic +from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") + +class Other(Generic[Unpack[Ts]]): ... 
+class C(Generic[T, Unpack[Ts], S]): + prefix: T + suffix: S + x: Tuple[Unpack[Ts]] + y: Callable[[Unpack[Ts]], None] + z: Other[Unpack[Ts]] + +Ints = Tuple[int, ...] +c: C[Unpack[Ints]] +reveal_type(c.prefix) # N: Revealed type is "builtins.int" +reveal_type(c.suffix) # N: Revealed type is "builtins.int" +reveal_type(c.x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +reveal_type(c.y) # N: Revealed type is "def (*builtins.int)" +reveal_type(c.z) # N: Revealed type is "__main__.Other[Unpack[builtins.tuple[builtins.int, ...]]]" +[builtins fixtures/tuple.pyi] + +[case testTooFewItemsInInstanceArguments] +from typing import Generic, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +class C(Generic[T, Unpack[Ts], S]): ... + +c: C[int] # E: Bad number of arguments, expected: at least 2, given: 1 +reveal_type(c) # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]" +[builtins fixtures/tuple.pyi] + +[case testVariadicClassUpperBoundCheck] +from typing import Tuple, TypeVar, Generic +from typing_extensions import Unpack, TypeVarTuple + +class A: ... +class B: ... +class C: ... +class D: ... + +T = TypeVar("T", bound=int) +S = TypeVar("S", bound=str) +Ts = TypeVarTuple("Ts") + +class G(Generic[T, Unpack[Ts], S]): ... +First = Tuple[A, B] +Second = Tuple[C, D] +x: G[Unpack[First], Unpack[Second]] # E: Type argument "A" of "G" must be a subtype of "int" \ + # E: Type argument "D" of "G" must be a subtype of "str" +[builtins fixtures/tuple.pyi] From 9e1f4df133e155f213cf3714bf796bb9e8698907 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 24 Aug 2023 20:11:24 +0100 Subject: [PATCH 75/88] Use TypeVar refresh uniformly for class object access (#15945) Fixes https://github.com/python/mypy/issues/15934 I think this is a right thing to do, it may even fix some other rare accidental `TypeVar` clashes not involving self-types. 
This causes a bit of churn in tests, but not too much. --- mypy/checkmember.py | 4 +-- test-data/unit/check-classes.test | 2 +- test-data/unit/check-incremental.test | 26 +++++++++--------- .../unit/check-parameter-specification.test | 8 +++--- test-data/unit/check-plugin-attrs.test | 16 +++++------ test-data/unit/check-selftype.test | 27 ++++++++++++++++--- 6 files changed, 52 insertions(+), 31 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 2b0717f181a9..1bdc00a6eb59 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -1198,12 +1198,12 @@ class B(A[str]): pass # (i.e. appear in the return type of the class object on which the method was accessed). if isinstance(t, CallableType): tvars = original_vars if original_vars is not None else [] + t = freshen_all_functions_type_vars(t) if is_classmethod: - t = freshen_all_functions_type_vars(t) t = bind_self(t, original_type, is_classmethod=True) assert isuper is not None t = expand_type_by_instance(t, isuper) - freeze_all_type_vars(t) + freeze_all_type_vars(t) return t.copy_modified(variables=list(tvars) + list(t.variables)) elif isinstance(t, Overloaded): return Overloaded( diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index b9e65ef4ad20..04b51bb603c5 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -1162,7 +1162,7 @@ def test() -> None: reveal_type(x) # N: Revealed type is "T`-1" reveal_type(x.returns_int()) # N: Revealed type is "builtins.int" return foo - reveal_type(Foo.bar) # N: Revealed type is "def [T <: __main__.Foo@5] (self: __main__.Foo@5, foo: T`-1) -> T`-1" + reveal_type(Foo.bar) # N: Revealed type is "def [T <: __main__.Foo@5] (self: __main__.Foo@5, foo: T`1) -> T`1" [case testGenericClassWithInvalidTypevarUseWithinFunction] from typing import TypeVar diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 80f5e4e7d12d..fcab0545b982 100644 --- 
a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -3035,10 +3035,10 @@ main:15: error: Unsupported left operand type for >= ("NoCmp") [case testAttrsIncrementalDunder] from a import A reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> a.A" -reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(A.__le__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(A.__gt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(A.__ge__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" +reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`3, other: _AT`3) -> builtins.bool" +reveal_type(A.__le__) # N: Revealed type is "def [_AT] (self: _AT`4, other: _AT`4) -> builtins.bool" +reveal_type(A.__gt__) # N: Revealed type is "def [_AT] (self: _AT`5, other: _AT`5) -> builtins.bool" +reveal_type(A.__ge__) # N: Revealed type is "def [_AT] (self: _AT`6, other: _AT`6) -> builtins.bool" A(1) < A(2) A(1) <= A(2) @@ -3072,10 +3072,10 @@ class A: [stale] [out2] main:2: note: Revealed type is "def (a: builtins.int) -> a.A" -main:3: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -main:4: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -main:5: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -main:6: note: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" +main:3: note: Revealed type is "def [_AT] (self: _AT`1, other: _AT`1) -> builtins.bool" +main:4: note: Revealed type is "def [_AT] (self: _AT`2, other: _AT`2) -> builtins.bool" +main:5: note: Revealed type is "def [_AT] (self: _AT`3, other: _AT`3) -> builtins.bool" +main:6: note: Revealed type is "def [_AT] (self: _AT`4, other: _AT`4) -> builtins.bool" main:15: error: Unsupported 
operand types for < ("A" and "int") main:16: error: Unsupported operand types for <= ("A" and "int") main:17: error: Unsupported operand types for > ("A" and "int") @@ -3963,10 +3963,10 @@ class A: tmp/b.py:3: note: Revealed type is "def (a: builtins.int) -> a.A" tmp/b.py:4: note: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool" tmp/b.py:5: note: Revealed type is "def (builtins.object, builtins.object) -> builtins.bool" -tmp/b.py:6: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool" -tmp/b.py:7: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool" -tmp/b.py:8: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool" -tmp/b.py:9: note: Revealed type is "def [_DT] (self: _DT`-1, other: _DT`-1) -> builtins.bool" +tmp/b.py:6: note: Revealed type is "def [_DT] (self: _DT`1, other: _DT`1) -> builtins.bool" +tmp/b.py:7: note: Revealed type is "def [_DT] (self: _DT`2, other: _DT`2) -> builtins.bool" +tmp/b.py:8: note: Revealed type is "def [_DT] (self: _DT`3, other: _DT`3) -> builtins.bool" +tmp/b.py:9: note: Revealed type is "def [_DT] (self: _DT`4, other: _DT`4) -> builtins.bool" tmp/b.py:18: error: Unsupported operand types for < ("A" and "int") tmp/b.py:19: error: Unsupported operand types for <= ("A" and "int") tmp/b.py:20: error: Unsupported operand types for > ("A" and "int") @@ -6325,7 +6325,7 @@ reveal_type(D.meth) reveal_type(D().meth) [out] [out2] -tmp/m.py:4: note: Revealed type is "def [Self <: lib.C] (self: Self`0, other: Self`0) -> Self`0" +tmp/m.py:4: note: Revealed type is "def [Self <: lib.C] (self: Self`1, other: Self`1) -> Self`1" tmp/m.py:5: note: Revealed type is "def (other: m.D) -> m.D" [case testIncrementalNestedGenericCallableCrash] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index dee8a971f925..dba73974aef6 100644 --- 
a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -901,8 +901,8 @@ class A: def func(self, action: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... -reveal_type(A.func) # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`-2, *_P.args, **_P.kwargs) -> _R`-2" -reveal_type(A().func) # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`5, *_P.args, **_P.kwargs) -> _R`5" +reveal_type(A.func) # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`3, *_P.args, **_P.kwargs) -> _R`3" +reveal_type(A().func) # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`7, *_P.args, **_P.kwargs) -> _R`7" def f(x: int) -> int: ... @@ -934,8 +934,8 @@ class A: def func(self, action: Job[_P, None]) -> Job[_P, None]: ... -reveal_type(A.func) # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`-1, None]) -> __main__.Job[_P`-1, None]" -reveal_type(A().func) # N: Revealed type is "def [_P] (action: __main__.Job[_P`3, None]) -> __main__.Job[_P`3, None]" +reveal_type(A.func) # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`2, None]) -> __main__.Job[_P`2, None]" +reveal_type(A().func) # N: Revealed type is "def [_P] (action: __main__.Job[_P`4, None]) -> __main__.Job[_P`4, None]" reveal_type(A().func(Job(lambda x: x))) # N: Revealed type is "__main__.Job[[x: Any], None]" def f(x: int, y: int) -> None: ... 
diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test index 3534d206c060..7580531bebc9 100644 --- a/test-data/unit/check-plugin-attrs.test +++ b/test-data/unit/check-plugin-attrs.test @@ -185,10 +185,10 @@ from attr import attrib, attrs class A: a: int reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> __main__.A" -reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(A.__le__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(A.__gt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(A.__ge__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" +reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`3, other: _AT`3) -> builtins.bool" +reveal_type(A.__le__) # N: Revealed type is "def [_AT] (self: _AT`4, other: _AT`4) -> builtins.bool" +reveal_type(A.__gt__) # N: Revealed type is "def [_AT] (self: _AT`5, other: _AT`5) -> builtins.bool" +reveal_type(A.__ge__) # N: Revealed type is "def [_AT] (self: _AT`6, other: _AT`6) -> builtins.bool" A(1) < A(2) A(1) <= A(2) @@ -989,10 +989,10 @@ class C(A, B): pass @attr.s class D(A): pass -reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(B.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(C.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" -reveal_type(D.__lt__) # N: Revealed type is "def [_AT] (self: _AT`-1, other: _AT`-1) -> builtins.bool" +reveal_type(A.__lt__) # N: Revealed type is "def [_AT] (self: _AT`5, other: _AT`5) -> builtins.bool" +reveal_type(B.__lt__) # N: Revealed type is "def [_AT] (self: _AT`6, other: _AT`6) -> builtins.bool" +reveal_type(C.__lt__) # N: Revealed type is "def [_AT] (self: _AT`7, other: _AT`7) -> 
builtins.bool" +reveal_type(D.__lt__) # N: Revealed type is "def [_AT] (self: _AT`8, other: _AT`8) -> builtins.bool" A() < A() B() < B() diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 77d2d519214a..d5024412ca97 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1484,7 +1484,7 @@ class C: return self class D(C): ... -reveal_type(C.meth) # N: Revealed type is "def [Self <: __main__.C] (self: Self`0) -> builtins.list[Self`0]" +reveal_type(C.meth) # N: Revealed type is "def [Self <: __main__.C] (self: Self`1) -> builtins.list[Self`1]" C.attr # E: Access to generic instance variables via class is ambiguous reveal_type(D().meth()) # N: Revealed type is "builtins.list[__main__.D]" reveal_type(D().attr) # N: Revealed type is "builtins.list[__main__.D]" @@ -1793,7 +1793,7 @@ class C: def bar(self) -> Self: ... def foo(self, x: S) -> Tuple[Self, S]: ... -reveal_type(C.foo) # N: Revealed type is "def [Self <: __main__.C, S] (self: Self`0, x: S`-1) -> Tuple[Self`0, S`-1]" +reveal_type(C.foo) # N: Revealed type is "def [Self <: __main__.C, S] (self: Self`1, x: S`2) -> Tuple[Self`1, S`2]" reveal_type(C().foo(42)) # N: Revealed type is "Tuple[__main__.C, builtins.int]" [builtins fixtures/tuple.pyi] @@ -1903,7 +1903,7 @@ class C: class D(C): ... 
-reveal_type(D.f) # N: Revealed type is "def [T] (T`-1) -> T`-1" +reveal_type(D.f) # N: Revealed type is "def [T] (T`1) -> T`1" reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" [case testTypingSelfOnSuperTypeVarValues] @@ -2015,3 +2015,24 @@ class Add(Async): reveal_type(Add.async_func()) # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int" reveal_type(Add().async_func()) # N: Revealed type is "def (x: builtins.int, y: builtins.int) -> builtins.int" [builtins fixtures/classmethod.pyi] + +[case testSelfTypeMethodOnClassObject] +from typing import Self + +class Object: # Needed to mimic object in typeshed + ref: Self + +class Foo: + def foo(self) -> Self: + return self + +class Ben(Object): + MY_MAP = { + "foo": Foo.foo, + } + @classmethod + def doit(cls) -> Foo: + reveal_type(cls.MY_MAP) # N: Revealed type is "builtins.dict[builtins.str, def [Self <: __main__.Foo] (self: Self`4) -> Self`4]" + foo_method = cls.MY_MAP["foo"] + return foo_method(Foo()) +[builtins fixtures/isinstancelist.pyi] From f9b1db6519cd88a081e8b8597240e166eb513245 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 25 Aug 2023 21:41:25 +0100 Subject: [PATCH 76/88] Fix crash on invalid type variable with ParamSpec (#15953) Fixes https://github.com/python/mypy/issues/15948 The fix is straightforward: invalid type variable resulted in applying type arguments packing/simplification when we shouldn't. Making the latter more strict fixes the issue. --------- Co-authored-by: Jelle Zijlstra --- mypy/typeanal.py | 37 +++++++++++++++---- .../unit/check-parameter-specification.test | 23 ++++++++++++ 2 files changed, 53 insertions(+), 7 deletions(-) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 806b9967039e..e29cca09be63 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -458,11 +458,30 @@ def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]: # These do not support mypy_extensions VarArgs, etc. 
as they were already analyzed # TODO: should these be re-analyzed to get rid of this inconsistency? count = len(an_args) - if count > 0: - first_arg = get_proper_type(an_args[0]) - if not (count == 1 and isinstance(first_arg, (Parameters, ParamSpecType, AnyType))): - return [Parameters(an_args, [ARG_POS] * count, [None] * count)] - return list(an_args) + if count == 0: + return [] + if count == 1 and isinstance(get_proper_type(an_args[0]), AnyType): + # Single Any is interpreted as ..., rather that a single argument with Any type. + # I didn't find this in the PEP, but it sounds reasonable. + return list(an_args) + if any(isinstance(a, (Parameters, ParamSpecType)) for a in an_args): + if len(an_args) > 1: + first_wrong = next( + arg for arg in an_args if isinstance(arg, (Parameters, ParamSpecType)) + ) + self.fail( + "Nested parameter specifications are not allowed", + first_wrong, + code=codes.VALID_TYPE, + ) + return [AnyType(TypeOfAny.from_error)] + return list(an_args) + first = an_args[0] + return [ + Parameters( + an_args, [ARG_POS] * count, [None] * count, line=first.line, column=first.column + ) + ] def cannot_resolve_type(self, t: UnboundType) -> None: # TODO: Move error message generation to messages.py. 
We'd first @@ -503,7 +522,11 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type: names: list[str | None] = [None] * len(args) pre = Parameters( - args + pre.arg_types, [ARG_POS] * len(args) + pre.arg_kinds, names + pre.arg_names + args + pre.arg_types, + [ARG_POS] * len(args) + pre.arg_kinds, + names + pre.arg_names, + line=t.line, + column=t.column, ) return ps.copy_modified(prefix=pre) if isinstance(ps, ParamSpecType) else pre @@ -913,7 +936,7 @@ def visit_type_list(self, t: TypeList) -> Type: if params: ts, kinds, names = params # bind these types - return Parameters(self.anal_array(ts), kinds, names) + return Parameters(self.anal_array(ts), kinds, names, line=t.line, column=t.column) else: return AnyType(TypeOfAny.from_error) else: diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index dba73974aef6..257fb9241373 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1741,3 +1741,26 @@ def bar(x): ... reveal_type(bar) # N: Revealed type is "Overload(def (x: builtins.int) -> builtins.float, def (x: builtins.str) -> builtins.str)" [builtins fixtures/paramspec.pyi] + +[case testParamSpecDecoratorOverloadNoCrashOnInvalidTypeVar] +from typing import Any, Callable, List +from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = 1 + +Alias = Callable[P, List[T]] # type: ignore +def dec(fn: Callable[P, T]) -> Alias[P, T]: ... # type: ignore +f: Any +dec(f) # No crash +[builtins fixtures/paramspec.pyi] + +[case testParamSpecErrorNestedParams] +from typing import Generic +from typing_extensions import ParamSpec + +P = ParamSpec("P") +class C(Generic[P]): ... 
+c: C[int, [int, str], str] # E: Nested parameter specifications are not allowed +reveal_type(c) # N: Revealed type is "__main__.C[Any]" +[builtins fixtures/paramspec.pyi] From 7f65cc7570eaa4206ae086680e1c1d0489897efa Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 25 Aug 2023 22:30:39 +0100 Subject: [PATCH 77/88] Infer ParamSpec constraint from arguments (#15896) Fixes https://github.com/python/mypy/issues/12278 Fixes https://github.com/python/mypy/issues/13191 (more tricky nested use cases with optional/keyword args still don't work, but they are quite tricky to fix and may selectively fixed later) This unfortunately requires some special-casing, here is its summary: * If actual argument for `Callable[P, T]` is non-generic and non-lambda, do not put it into inference second pass. * If we are able to infer constraints for `P` without using arguments mapped to `*args: P.args` etc., do not add the constraint for `P` vs those arguments (this applies to both top-level callable constraints, and for nested callable constraints against callables that are known to have imprecise argument kinds). 
(Btw TODO I added is not related to this PR, I just noticed something obviously wrong) --- mypy/checkexpr.py | 41 +++++- mypy/constraints.py | 136 +++++++++++++----- mypy/expandtype.py | 2 + mypy/infer.py | 3 +- mypy/types.py | 22 +++ .../unit/check-parameter-specification.test | 82 +++++++++-- test-data/unit/fixtures/paramspec.pyi | 3 +- test-data/unit/typexport-basic.test | 24 ++-- 8 files changed, 244 insertions(+), 69 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 6de317f587cb..4430d0773cfa 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1987,7 +1987,7 @@ def infer_function_type_arguments( ) arg_pass_nums = self.get_arg_infer_passes( - callee_type.arg_types, formal_to_actual, len(args) + callee_type, args, arg_types, formal_to_actual, len(args) ) pass1_args: list[Type | None] = [] @@ -2001,6 +2001,7 @@ def infer_function_type_arguments( callee_type, pass1_args, arg_kinds, + arg_names, formal_to_actual, context=self.argument_infer_context(), strict=self.chk.in_checked_function(), @@ -2061,6 +2062,7 @@ def infer_function_type_arguments( callee_type, arg_types, arg_kinds, + arg_names, formal_to_actual, context=self.argument_infer_context(), strict=self.chk.in_checked_function(), @@ -2140,6 +2142,7 @@ def infer_function_type_arguments_pass2( callee_type, arg_types, arg_kinds, + arg_names, formal_to_actual, context=self.argument_infer_context(), ) @@ -2152,7 +2155,12 @@ def argument_infer_context(self) -> ArgumentInferContext: ) def get_arg_infer_passes( - self, arg_types: list[Type], formal_to_actual: list[list[int]], num_actuals: int + self, + callee: CallableType, + args: list[Expression], + arg_types: list[Type], + formal_to_actual: list[list[int]], + num_actuals: int, ) -> list[int]: """Return pass numbers for args for two-pass argument type inference. @@ -2163,8 +2171,28 @@ def get_arg_infer_passes( lambdas more effectively. 
""" res = [1] * num_actuals - for i, arg in enumerate(arg_types): - if arg.accept(ArgInferSecondPassQuery()): + for i, arg in enumerate(callee.arg_types): + skip_param_spec = False + p_formal = get_proper_type(callee.arg_types[i]) + if isinstance(p_formal, CallableType) and p_formal.param_spec(): + for j in formal_to_actual[i]: + p_actual = get_proper_type(arg_types[j]) + # This is an exception from the usual logic where we put generic Callable + # arguments in the second pass. If we have a non-generic actual, it is + # likely to infer good constraints, for example if we have: + # def run(Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ... + # def test(x: int, y: int) -> int: ... + # run(test, 1, 2) + # we will use `test` for inference, since it will allow to infer also + # argument *names* for P <: [x: int, y: int]. + if ( + isinstance(p_actual, CallableType) + and not p_actual.variables + and not isinstance(args[j], LambdaExpr) + ): + skip_param_spec = True + break + if not skip_param_spec and arg.accept(ArgInferSecondPassQuery()): for j in formal_to_actual[i]: res[j] = 2 return res @@ -4903,7 +4931,9 @@ def infer_lambda_type_using_context( self.chk.fail(message_registry.CANNOT_INFER_LAMBDA_TYPE, e) return None, None - return callable_ctx, callable_ctx + # Type of lambda must have correct argument names, to prevent false + # negatives when lambdas appear in `ParamSpec` context. + return callable_ctx.copy_modified(arg_names=e.arg_names), callable_ctx def visit_super_expr(self, e: SuperExpr) -> Type: """Type check a super expression (non-lvalue).""" @@ -5921,6 +5951,7 @@ def __init__(self) -> None: super().__init__(types.ANY_STRATEGY) def visit_callable_type(self, t: CallableType) -> bool: + # TODO: we need to check only for type variables of original callable. 
return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery()) diff --git a/mypy/constraints.py b/mypy/constraints.py index edce11e778ab..0e59b5459fd4 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -108,6 +108,7 @@ def infer_constraints_for_callable( callee: CallableType, arg_types: Sequence[Type | None], arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, formal_to_actual: list[list[int]], context: ArgumentInferContext, ) -> list[Constraint]: @@ -118,6 +119,20 @@ def infer_constraints_for_callable( constraints: list[Constraint] = [] mapper = ArgTypeExpander(context) + param_spec = callee.param_spec() + param_spec_arg_types = [] + param_spec_arg_names = [] + param_spec_arg_kinds = [] + + incomplete_star_mapping = False + for i, actuals in enumerate(formal_to_actual): + for actual in actuals: + if actual is None and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2): + # We can't use arguments to infer ParamSpec constraint, if only some + # are present in the current inference pass. + incomplete_star_mapping = True + break + for i, actuals in enumerate(formal_to_actual): if isinstance(callee.arg_types[i], UnpackType): unpack_type = callee.arg_types[i] @@ -194,11 +209,47 @@ def infer_constraints_for_callable( actual_type = mapper.expand_actual_type( actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i] ) - # TODO: if callee has ParamSpec, we need to collect all actuals that map to star - # args and create single constraint between P and resulting Parameters instead. - c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) - constraints.extend(c) - + if ( + param_spec + and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2) + and not incomplete_star_mapping + ): + # If actual arguments are mapped to ParamSpec type, we can't infer individual + # constraints, instead store them and infer single constraint at the end. + # It is impossible to map actual kind to formal kind, so use some heuristic. 
+ # This inference is used as a fallback, so relying on heuristic should be OK. + param_spec_arg_types.append( + mapper.expand_actual_type( + actual_arg_type, arg_kinds[actual], None, arg_kinds[actual] + ) + ) + actual_kind = arg_kinds[actual] + param_spec_arg_kinds.append( + ARG_POS if actual_kind not in (ARG_STAR, ARG_STAR2) else actual_kind + ) + param_spec_arg_names.append(arg_names[actual] if arg_names else None) + else: + c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) + constraints.extend(c) + if ( + param_spec + and not any(c.type_var == param_spec.id for c in constraints) + and not incomplete_star_mapping + ): + # Use ParamSpec constraint from arguments only if there are no other constraints, + # since as explained above it is quite ad-hoc. + constraints.append( + Constraint( + param_spec, + SUPERTYPE_OF, + Parameters( + arg_types=param_spec_arg_types, + arg_kinds=param_spec_arg_kinds, + arg_names=param_spec_arg_names, + imprecise_arg_kinds=True, + ), + ) + ) return constraints @@ -949,6 +1000,14 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: res: list[Constraint] = [] cactual = self.actual.with_unpacked_kwargs() param_spec = template.param_spec() + + template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type + if template.type_guard is not None: + template_ret_type = template.type_guard + if cactual.type_guard is not None: + cactual_ret_type = cactual.type_guard + res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction)) + if param_spec is None: # TODO: Erase template variables if it is generic? 
if ( @@ -1008,51 +1067,50 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: ) extra_tvars = True + # Compare prefixes as well + cactual_prefix = cactual.copy_modified( + arg_types=cactual.arg_types[:prefix_len], + arg_kinds=cactual.arg_kinds[:prefix_len], + arg_names=cactual.arg_names[:prefix_len], + ) + res.extend( + infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction) + ) + + param_spec_target: Type | None = None + skip_imprecise = ( + any(c.type_var == param_spec.id for c in res) and cactual.imprecise_arg_kinds + ) if not cactual_ps: max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)]) prefix_len = min(prefix_len, max_prefix_len) - res.append( - Constraint( - param_spec, - neg_op(self.direction), - Parameters( - arg_types=cactual.arg_types[prefix_len:], - arg_kinds=cactual.arg_kinds[prefix_len:], - arg_names=cactual.arg_names[prefix_len:], - variables=cactual.variables - if not type_state.infer_polymorphic - else [], - ), + # This logic matches top-level callable constraint exception, if we managed + # to get other constraints for ParamSpec, don't infer one with imprecise kinds + if not skip_imprecise: + param_spec_target = Parameters( + arg_types=cactual.arg_types[prefix_len:], + arg_kinds=cactual.arg_kinds[prefix_len:], + arg_names=cactual.arg_names[prefix_len:], + variables=cactual.variables + if not type_state.infer_polymorphic + else [], + imprecise_arg_kinds=cactual.imprecise_arg_kinds, ) - ) else: - if len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types): - cactual_ps = cactual_ps.copy_modified( + if ( + len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types) + and not skip_imprecise + ): + param_spec_target = cactual_ps.copy_modified( prefix=Parameters( arg_types=cactual_ps.prefix.arg_types[prefix_len:], arg_kinds=cactual_ps.prefix.arg_kinds[prefix_len:], arg_names=cactual_ps.prefix.arg_names[prefix_len:], + 
imprecise_arg_kinds=cactual_ps.prefix.imprecise_arg_kinds, ) ) - res.append(Constraint(param_spec, neg_op(self.direction), cactual_ps)) - - # Compare prefixes as well - cactual_prefix = cactual.copy_modified( - arg_types=cactual.arg_types[:prefix_len], - arg_kinds=cactual.arg_kinds[:prefix_len], - arg_names=cactual.arg_names[:prefix_len], - ) - res.extend( - infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction) - ) - - template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type - if template.type_guard is not None: - template_ret_type = template.type_guard - if cactual.type_guard is not None: - cactual_ret_type = cactual.type_guard - - res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction)) + if param_spec_target is not None: + res.append(Constraint(param_spec, neg_op(self.direction), param_spec_target)) if extra_tvars: for c in res: c.extra_tvars += cactual.variables diff --git a/mypy/expandtype.py b/mypy/expandtype.py index dc3dae670c1f..7168d7c30b0d 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -336,6 +336,7 @@ def visit_callable_type(self, t: CallableType) -> CallableType: arg_types=self.expand_types(t.arg_types), ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), + imprecise_arg_kinds=(t.imprecise_arg_kinds or repl.imprecise_arg_kinds), ) elif isinstance(repl, ParamSpecType): # We're substituting one ParamSpec for another; this can mean that the prefix @@ -352,6 +353,7 @@ def visit_callable_type(self, t: CallableType) -> CallableType: arg_names=t.arg_names[:-2] + prefix.arg_names + t.arg_names[-2:], ret_type=t.ret_type.accept(self), from_concatenate=t.from_concatenate or bool(repl.prefix.arg_types), + imprecise_arg_kinds=(t.imprecise_arg_kinds or prefix.imprecise_arg_kinds), ) var_arg = t.var_arg() diff --git a/mypy/infer.py b/mypy/infer.py index f34087910e4b..ba4a1d2bc9b1 100644 --- a/mypy/infer.py +++ b/mypy/infer.py @@ 
-33,6 +33,7 @@ def infer_function_type_arguments( callee_type: CallableType, arg_types: Sequence[Type | None], arg_kinds: list[ArgKind], + arg_names: Sequence[str | None] | None, formal_to_actual: list[list[int]], context: ArgumentInferContext, strict: bool = True, @@ -53,7 +54,7 @@ def infer_function_type_arguments( """ # Infer constraints. constraints = infer_constraints_for_callable( - callee_type, arg_types, arg_kinds, formal_to_actual, context + callee_type, arg_types, arg_kinds, arg_names, formal_to_actual, context ) # Solve constraints. diff --git a/mypy/types.py b/mypy/types.py index 214978eab774..cf2c343655dd 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1560,6 +1560,7 @@ class Parameters(ProperType): # TODO: variables don't really belong here, but they are used to allow hacky support # for forall . Foo[[x: T], T] by capturing generic callable with ParamSpec, see #15909 "variables", + "imprecise_arg_kinds", ) def __init__( @@ -1570,6 +1571,7 @@ def __init__( *, variables: Sequence[TypeVarLikeType] | None = None, is_ellipsis_args: bool = False, + imprecise_arg_kinds: bool = False, line: int = -1, column: int = -1, ) -> None: @@ -1582,6 +1584,7 @@ def __init__( self.min_args = arg_kinds.count(ARG_POS) self.is_ellipsis_args = is_ellipsis_args self.variables = variables or [] + self.imprecise_arg_kinds = imprecise_arg_kinds def copy_modified( self, @@ -1591,6 +1594,7 @@ def copy_modified( *, variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, is_ellipsis_args: Bogus[bool] = _dummy, + imprecise_arg_kinds: Bogus[bool] = _dummy, ) -> Parameters: return Parameters( arg_types=arg_types if arg_types is not _dummy else self.arg_types, @@ -1600,6 +1604,11 @@ def copy_modified( is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args ), variables=variables if variables is not _dummy else self.variables, + imprecise_arg_kinds=( + imprecise_arg_kinds + if imprecise_arg_kinds is not _dummy + else self.imprecise_arg_kinds + ), ) # TODO: here is 
a lot of code duplication with Callable type, fix this. @@ -1696,6 +1705,7 @@ def serialize(self) -> JsonDict: "arg_kinds": [int(x.value) for x in self.arg_kinds], "arg_names": self.arg_names, "variables": [tv.serialize() for tv in self.variables], + "imprecise_arg_kinds": self.imprecise_arg_kinds, } @classmethod @@ -1706,6 +1716,7 @@ def deserialize(cls, data: JsonDict) -> Parameters: [ArgKind(x) for x in data["arg_kinds"]], data["arg_names"], variables=[cast(TypeVarLikeType, deserialize_type(v)) for v in data["variables"]], + imprecise_arg_kinds=data["imprecise_arg_kinds"], ) def __hash__(self) -> int: @@ -1762,6 +1773,7 @@ class CallableType(FunctionLike): "type_guard", # T, if -> TypeGuard[T] (ret_type is bool in this case). "from_concatenate", # whether this callable is from a concatenate object # (this is used for error messages) + "imprecise_arg_kinds", "unpack_kwargs", # Was an Unpack[...] with **kwargs used to define this callable? ) @@ -1786,6 +1798,7 @@ def __init__( def_extras: dict[str, Any] | None = None, type_guard: Type | None = None, from_concatenate: bool = False, + imprecise_arg_kinds: bool = False, unpack_kwargs: bool = False, ) -> None: super().__init__(line, column) @@ -1812,6 +1825,7 @@ def __init__( self.special_sig = special_sig self.from_type_type = from_type_type self.from_concatenate = from_concatenate + self.imprecise_arg_kinds = imprecise_arg_kinds if not bound_args: bound_args = () self.bound_args = bound_args @@ -1854,6 +1868,7 @@ def copy_modified( def_extras: Bogus[dict[str, Any]] = _dummy, type_guard: Bogus[Type | None] = _dummy, from_concatenate: Bogus[bool] = _dummy, + imprecise_arg_kinds: Bogus[bool] = _dummy, unpack_kwargs: Bogus[bool] = _dummy, ) -> CT: modified = CallableType( @@ -1879,6 +1894,11 @@ def copy_modified( from_concatenate=( from_concatenate if from_concatenate is not _dummy else self.from_concatenate ), + imprecise_arg_kinds=( + imprecise_arg_kinds + if imprecise_arg_kinds is not _dummy + else 
self.imprecise_arg_kinds + ), unpack_kwargs=unpack_kwargs if unpack_kwargs is not _dummy else self.unpack_kwargs, ) # Optimization: Only NewTypes are supported as subtypes since @@ -2191,6 +2211,7 @@ def serialize(self) -> JsonDict: "def_extras": dict(self.def_extras), "type_guard": self.type_guard.serialize() if self.type_guard is not None else None, "from_concatenate": self.from_concatenate, + "imprecise_arg_kinds": self.imprecise_arg_kinds, "unpack_kwargs": self.unpack_kwargs, } @@ -2214,6 +2235,7 @@ def deserialize(cls, data: JsonDict) -> CallableType: deserialize_type(data["type_guard"]) if data["type_guard"] is not None else None ), from_concatenate=data["from_concatenate"], + imprecise_arg_kinds=data["imprecise_arg_kinds"], unpack_kwargs=data["unpack_kwargs"], ) diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 257fb9241373..ed1d59b376d2 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -239,7 +239,6 @@ reveal_type(f(g, 1, y='x')) # N: Revealed type is "None" f(g, 'x', y='x') # E: Argument 2 to "f" has incompatible type "str"; expected "int" f(g, 1, y=1) # E: Argument "y" to "f" has incompatible type "int"; expected "str" f(g) # E: Missing positional arguments "x", "y" in call to "f" - [builtins fixtures/dict.pyi] [case testParamSpecSpecialCase] @@ -415,14 +414,19 @@ P = ParamSpec('P') T = TypeVar('T') # Similar to atexit.register -def register(f: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> Callable[P, T]: ... # N: "register" defined here +def register(f: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> Callable[P, T]: ... 
def f(x: int) -> None: pass +def g(x: int, y: str) -> None: pass reveal_type(register(lambda: f(1))) # N: Revealed type is "def ()" -reveal_type(register(lambda x: f(x), x=1)) # N: Revealed type is "def (x: Any)" -register(lambda x: f(x)) # E: Missing positional argument "x" in call to "register" -register(lambda x: f(x), y=1) # E: Unexpected keyword argument "y" for "register" +reveal_type(register(lambda x: f(x), x=1)) # N: Revealed type is "def (x: Literal[1]?)" +register(lambda x: f(x)) # E: Cannot infer type of lambda \ + # E: Argument 1 to "register" has incompatible type "Callable[[Any], None]"; expected "Callable[[], None]" +register(lambda x: f(x), y=1) # E: Argument 1 to "register" has incompatible type "Callable[[Arg(int, 'x')], None]"; expected "Callable[[Arg(int, 'y')], None]" +reveal_type(register(lambda x: f(x), 1)) # N: Revealed type is "def (Literal[1]?)" +reveal_type(register(lambda x, y: g(x, y), 1, "a")) # N: Revealed type is "def (Literal[1]?, Literal['a']?)" +reveal_type(register(lambda x, y: g(x, y), 1, y="a")) # N: Revealed type is "def (Literal[1]?, y: Literal['a']?)" [builtins fixtures/dict.pyi] [case testParamSpecInvalidCalls] @@ -909,8 +913,7 @@ def f(x: int) -> int: reveal_type(A().func(f, 42)) # N: Revealed type is "builtins.int" -# TODO: this should reveal `int` -reveal_type(A().func(lambda x: x + x, 42)) # N: Revealed type is "Any" +reveal_type(A().func(lambda x: x + x, 42)) # N: Revealed type is "builtins.int" [builtins fixtures/paramspec.pyi] [case testParamSpecConstraintOnOtherParamSpec] @@ -1355,7 +1358,6 @@ P = ParamSpec('P') class Some(Generic[P]): def call(self, *args: P.args, **kwargs: P.kwargs): ... -# TODO: this probably should be reported. def call(*args: P.args, **kwargs: P.kwargs): ... 
[builtins fixtures/paramspec.pyi] @@ -1631,7 +1633,41 @@ dec(test_with_bound)(0) # E: Value of type variable "T" of function cannot be " dec(test_with_bound)(A()) # OK [builtins fixtures/paramspec.pyi] +[case testParamSpecArgumentParamInferenceRegular] +from typing import TypeVar, Generic +from typing_extensions import ParamSpec + +P = ParamSpec("P") +class Foo(Generic[P]): + def call(self, *args: P.args, **kwargs: P.kwargs) -> None: ... +def test(*args: P.args, **kwargs: P.kwargs) -> Foo[P]: ... + +reveal_type(test(1, 2)) # N: Revealed type is "__main__.Foo[[Literal[1]?, Literal[2]?]]" +reveal_type(test(x=1, y=2)) # N: Revealed type is "__main__.Foo[[x: Literal[1]?, y: Literal[2]?]]" +ints = [1, 2, 3] +reveal_type(test(*ints)) # N: Revealed type is "__main__.Foo[[*builtins.int]]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecArgumentParamInferenceGeneric] +# flags: --new-type-inference +from typing import Callable, TypeVar +from typing_extensions import ParamSpec + +P = ParamSpec("P") +R = TypeVar("R") +def call(f: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R: + return f(*args, **kwargs) + +T = TypeVar("T") +def identity(x: T) -> T: + return x + +reveal_type(call(identity, 2)) # N: Revealed type is "builtins.int" +y: int = call(identity, 2) +[builtins fixtures/paramspec.pyi] + [case testParamSpecNestedApplyNoCrash] +# flags: --new-type-inference from typing import Callable, TypeVar from typing_extensions import ParamSpec @@ -1639,9 +1675,33 @@ P = ParamSpec("P") T = TypeVar("T") def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: ... -def test() -> None: ... -# TODO: avoid this error, although it may be non-trivial. -apply(apply, test) # E: Argument 2 to "apply" has incompatible type "Callable[[], None]"; expected "Callable[P, T]" +def test() -> int: ... 
+reveal_type(apply(apply, test)) # N: Revealed type is "builtins.int" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecNestedApplyPosVsNamed] +from typing import Callable, TypeVar +from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + +def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> None: ... +def test(x: int) -> int: ... +apply(apply, test, x=42) # OK +apply(apply, test, 42) # Also OK (but requires some special casing) +[builtins fixtures/paramspec.pyi] + +[case testParamSpecApplyPosVsNamedOptional] +from typing import Callable, TypeVar +from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + +def apply(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> None: ... +def test(x: str = ..., y: int = ...) -> int: ... +apply(test, y=42) # OK [builtins fixtures/paramspec.pyi] [case testParamSpecPrefixSubtypingGenericInvalid] diff --git a/test-data/unit/fixtures/paramspec.pyi b/test-data/unit/fixtures/paramspec.pyi index 5e4b8564e238..9b0089f6a7e9 100644 --- a/test-data/unit/fixtures/paramspec.pyi +++ b/test-data/unit/fixtures/paramspec.pyi @@ -30,7 +30,8 @@ class list(Sequence[T], Generic[T]): def __iter__(self) -> Iterator[T]: ... class int: - def __neg__(self) -> 'int': ... + def __neg__(self) -> int: ... + def __add__(self, other: int) -> int: ... class bool(int): ... class float: ... 
diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index cd2afe2c1c75..c4c3a1d36f83 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -727,7 +727,7 @@ class A: pass class B: a = None # type: A [out] -LambdaExpr(2) : def (B) -> A +LambdaExpr(2) : def (x: B) -> A MemberExpr(2) : A NameExpr(2) : B @@ -756,7 +756,7 @@ class B: a = None # type: A [builtins fixtures/list.pyi] [out] -LambdaExpr(2) : def (B) -> builtins.list[A] +LambdaExpr(2) : def (x: B) -> builtins.list[A] ListExpr(2) : builtins.list[A] [case testLambdaAndHigherOrderFunction] @@ -775,7 +775,7 @@ map( CallExpr(9) : builtins.list[B] NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] CallExpr(10) : B -LambdaExpr(10) : def (A) -> B +LambdaExpr(10) : def (x: A) -> B NameExpr(10) : def (a: A) -> B NameExpr(10) : builtins.list[A] NameExpr(10) : A @@ -795,7 +795,7 @@ map( [builtins fixtures/list.pyi] [out] NameExpr(10) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(11) : def (A) -> builtins.list[B] +LambdaExpr(11) : def (x: A) -> builtins.list[B] ListExpr(11) : builtins.list[B] NameExpr(11) : def (a: A) -> B NameExpr(11) : builtins.list[A] @@ -817,7 +817,7 @@ map( -- context. Perhaps just fail instead? 
CallExpr(7) : builtins.list[Any] NameExpr(7) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] -LambdaExpr(8) : def (A) -> A +LambdaExpr(8) : def (x: A) -> A ListExpr(8) : builtins.list[def (A) -> Any] NameExpr(8) : A NameExpr(9) : builtins.list[A] @@ -838,7 +838,7 @@ map( [out] CallExpr(9) : builtins.list[B] NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(10) : def (A) -> B +LambdaExpr(10) : def (x: A) -> B MemberExpr(10) : B NameExpr(10) : A NameExpr(11) : builtins.list[A] @@ -860,7 +860,7 @@ map( CallExpr(9) : builtins.list[B] NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] NameExpr(10) : builtins.list[A] -LambdaExpr(11) : def (A) -> B +LambdaExpr(11) : def (x: A) -> B MemberExpr(11) : B NameExpr(11) : A @@ -1212,7 +1212,7 @@ f( [builtins fixtures/list.pyi] [out] NameExpr(8) : Overload(def (x: builtins.int, f: def (builtins.int) -> builtins.int), def (x: builtins.str, f: def (builtins.str) -> builtins.str)) -LambdaExpr(9) : def (builtins.int) -> builtins.int +LambdaExpr(9) : def (x: builtins.int) -> builtins.int NameExpr(9) : builtins.int [case testExportOverloadArgTypeNested] @@ -1231,10 +1231,10 @@ f( lambda x: x) [builtins fixtures/list.pyi] [out] -LambdaExpr(9) : def (builtins.int) -> builtins.int -LambdaExpr(10) : def (builtins.int) -> builtins.int -LambdaExpr(12) : def (builtins.str) -> builtins.str -LambdaExpr(13) : def (builtins.str) -> builtins.str +LambdaExpr(9) : def (y: builtins.int) -> builtins.int +LambdaExpr(10) : def (x: builtins.int) -> builtins.int +LambdaExpr(12) : def (y: builtins.str) -> builtins.str +LambdaExpr(13) : def (x: builtins.str) -> builtins.str -- TODO -- From 29abf398d6a9e88e899df8a1941019105821f9f0 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 26 Aug 2023 21:30:13 +0100 Subject: [PATCH 78/88] Support PEP 646 syntax for Callable (#15951) Fixes https://github.com/python/mypy/issues/15412 Two new things here as 
specified by PEP 646: * Using star for an (explicit) type unpacking in callables, like `Callable[[str, *tuple[int, ...]], None]` * Allowing suffix items after a variadic item, like `Callable[[X, Unpack[Ys], Z], bool]` Implementation is straightforward. Btw while working on this I accidentally fixed a nasty bug, tuple types were often not given any line/column numbers, so if such type becomes a location of an error, it is impossible to ignore. --- mypy/exprtotype.py | 10 +++- mypy/fastparse.py | 14 ++++- mypy/typeanal.py | 73 +++++++++++++++++++------ test-data/unit/check-typevar-tuple.test | 58 +++++++++++++++----- 4 files changed, 123 insertions(+), 32 deletions(-) diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py index bbc284a5188a..b82d35607ef1 100644 --- a/mypy/exprtotype.py +++ b/mypy/exprtotype.py @@ -17,6 +17,7 @@ NameExpr, OpExpr, RefExpr, + StarExpr, StrExpr, TupleExpr, UnaryExpr, @@ -35,6 +36,7 @@ TypeOfAny, UnboundType, UnionType, + UnpackType, ) @@ -56,6 +58,7 @@ def expr_to_unanalyzed_type( options: Options | None = None, allow_new_syntax: bool = False, _parent: Expression | None = None, + allow_unpack: bool = False, ) -> ProperType: """Translate an expression to the corresponding type. 
@@ -163,7 +166,10 @@ def expr_to_unanalyzed_type( return CallableArgument(typ, name, arg_const, expr.line, expr.column) elif isinstance(expr, ListExpr): return TypeList( - [expr_to_unanalyzed_type(t, options, allow_new_syntax, expr) for t in expr.items], + [ + expr_to_unanalyzed_type(t, options, allow_new_syntax, expr, allow_unpack=True) + for t in expr.items + ], line=expr.line, column=expr.column, ) @@ -189,5 +195,7 @@ def expr_to_unanalyzed_type( return RawExpressionType(None, "builtins.complex", line=expr.line, column=expr.column) elif isinstance(expr, EllipsisExpr): return EllipsisType(expr.line) + elif allow_unpack and isinstance(expr, StarExpr): + return UnpackType(expr_to_unanalyzed_type(expr.expr, options, allow_new_syntax)) else: raise TypeTranslationError() diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 3a26cfe7d6ff..6aa626afb81e 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -115,6 +115,7 @@ TypeOfAny, UnboundType, UnionType, + UnpackType, ) from mypy.util import bytes_to_human_readable_repr, unnamed_function @@ -1730,6 +1731,7 @@ def __init__( self.override_column = override_column self.node_stack: list[AST] = [] self.is_evaluated = is_evaluated + self.allow_unpack = False def convert_column(self, column: int) -> int: """Apply column override if defined; otherwise return column. @@ -2006,10 +2008,20 @@ def visit_Attribute(self, n: Attribute) -> Type: else: return self.invalid_type(n) + # Used for Callable[[X *Ys, Z], R] + def visit_Starred(self, n: ast3.Starred) -> Type: + return UnpackType(self.visit(n.value)) + # List(expr* elts, expr_context ctx) def visit_List(self, n: ast3.List) -> Type: assert isinstance(n.ctx, ast3.Load) - return self.translate_argument_list(n.elts) + old_allow_unpack = self.allow_unpack + # We specifically only allow starred expressions in a list to avoid + # confusing errors for top-level unpacks (e.g. in base classes). 
+ self.allow_unpack = True + result = self.translate_argument_list(n.elts) + self.allow_unpack = old_allow_unpack + return result def stringify_name(n: AST) -> str | None: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index e29cca09be63..1955d2bc3c43 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -568,7 +568,9 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ instance = self.named_type("builtins.tuple", [self.anal_type(t.args[0])]) instance.line = t.line return instance - return self.tuple_type(self.anal_array(t.args, allow_unpack=True)) + return self.tuple_type( + self.anal_array(t.args, allow_unpack=True), line=t.line, column=t.column + ) elif fullname == "typing.Union": items = self.anal_array(t.args) return UnionType.make_union(items) @@ -968,7 +970,10 @@ def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: return t def visit_unpack_type(self, t: UnpackType) -> Type: - raise NotImplementedError + if not self.allow_unpack: + self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE) + return AnyType(TypeOfAny.from_error) + return UnpackType(self.anal_type(t.type)) def visit_parameters(self, t: Parameters) -> Type: raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars") @@ -1364,12 +1369,22 @@ def analyze_callable_type(self, t: UnboundType) -> Type: assert isinstance(ret, CallableType) return ret.accept(self) + def refers_to_full_names(self, arg: UnboundType, names: Sequence[str]) -> bool: + sym = self.lookup_qualified(arg.name, arg) + if sym is not None: + if sym.fullname in names: + return True + return False + def analyze_callable_args( self, arglist: TypeList ) -> tuple[list[Type], list[ArgKind], list[str | None]] | None: args: list[Type] = [] kinds: list[ArgKind] = [] names: list[str | None] = [] + seen_unpack = False + unpack_types: list[Type] = [] + invalid_unpacks = [] for arg in arglist.items: if isinstance(arg, CallableArgument): args.append(arg.typ) @@ 
-1390,20 +1405,42 @@ def analyze_callable_args( if arg.name is not None and kind.is_star(): self.fail(f"{arg.constructor} arguments should not have names", arg) return None - elif isinstance(arg, UnboundType): - kind = ARG_POS - # Potentially a unpack. - sym = self.lookup_qualified(arg.name, arg) - if sym is not None: - if sym.fullname in ("typing_extensions.Unpack", "typing.Unpack"): - kind = ARG_STAR - args.append(arg) - kinds.append(kind) - names.append(None) + elif ( + isinstance(arg, UnboundType) + and self.refers_to_full_names(arg, ("typing_extensions.Unpack", "typing.Unpack")) + or isinstance(arg, UnpackType) + ): + if seen_unpack: + # Multiple unpacks, preserve them, so we can give an error later. + invalid_unpacks.append(arg) + continue + seen_unpack = True + unpack_types.append(arg) + else: + if seen_unpack: + unpack_types.append(arg) + else: + args.append(arg) + kinds.append(ARG_POS) + names.append(None) + if seen_unpack: + if len(unpack_types) == 1: + args.append(unpack_types[0]) else: - args.append(arg) - kinds.append(ARG_POS) - names.append(None) + first = unpack_types[0] + if isinstance(first, UnpackType): + # UnpackType doesn't have its own line/column numbers, + # so use the unpacked type for error messages. + first = first.type + args.append( + UnpackType(self.tuple_type(unpack_types, line=first.line, column=first.column)) + ) + kinds.append(ARG_STAR) + names.append(None) + for arg in invalid_unpacks: + args.append(arg) + kinds.append(ARG_STAR) + names.append(None) # Note that arglist below is only used for error context. 
check_arg_names(names, [arglist] * len(args), self.fail, "Callable") check_arg_kinds(kinds, [arglist] * len(args), self.fail) @@ -1713,9 +1750,11 @@ def check_unpacks_in_list(self, items: list[Type]) -> list[Type]: self.fail("More than one Unpack in a type is not allowed", final_unpack) return new_items - def tuple_type(self, items: list[Type]) -> TupleType: + def tuple_type(self, items: list[Type], line: int, column: int) -> TupleType: any_type = AnyType(TypeOfAny.special_form) - return TupleType(items, fallback=self.named_type("builtins.tuple", [any_type])) + return TupleType( + items, fallback=self.named_type("builtins.tuple", [any_type]), line=line, column=column + ) TypeVarLikeList = List[Tuple[str, TypeVarLikeExpr]] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index ee81597edadf..c7716f3e8346 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -509,6 +509,51 @@ call_prefix(target=func_prefix, args=(0, 'foo')) call_prefix(target=func2_prefix, args=(0, 'foo')) # E: Argument "target" to "call_prefix" has incompatible type "Callable[[str, int, str], None]"; expected "Callable[[bytes, int, str], None]" [builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646CallableSuffixSyntax] +from typing import Callable, Tuple, TypeVar +from typing_extensions import Unpack, TypeVarTuple + +x: Callable[[str, Unpack[Tuple[int, ...]], bool], None] +reveal_type(x) # N: Revealed type is "def (builtins.str, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.bool]])" + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +A = Callable[[T, Unpack[Ts], S], int] +y: A[int, str, bool] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str, builtins.bool) -> builtins.int" +z: A[Unpack[Tuple[int, ...]]] +reveal_type(z) # N: Revealed type is "def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.int]]) -> builtins.int" 
+[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646CallableInvalidSyntax] +from typing import Callable, Tuple, TypeVar +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") +Us = TypeVarTuple("Us") +a: Callable[[Unpack[Ts], Unpack[Us]], int] # E: Var args may not appear after named or var args \ + # E: More than one Unpack in a type is not allowed +reveal_type(a) # N: Revealed type is "def [Ts, Us] (*Unpack[Ts`-1]) -> builtins.int" +b: Callable[[Unpack], int] # E: Unpack[...] requires exactly one type argument +reveal_type(b) # N: Revealed type is "def (*Any) -> builtins.int" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646CallableNewSyntax] +from typing import Callable, Generic, Tuple +from typing_extensions import ParamSpec + +x: Callable[[str, *Tuple[int, ...]], None] +reveal_type(x) # N: Revealed type is "def (builtins.str, *builtins.int)" +y: Callable[[str, *Tuple[int, ...], bool], None] +reveal_type(y) # N: Revealed type is "def (builtins.str, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.bool]])" + +P = ParamSpec("P") +class C(Generic[P]): ... 
+bad: C[[int, *Tuple[int, ...], int]] # E: Unpack is only valid in a variadic position +reveal_type(bad) # N: Revealed type is "__main__.C[[builtins.int, *Any]]" +[builtins fixtures/tuple.pyi] + [case testTypeVarTuplePep646UnspecifiedParameters] from typing import Tuple, Generic, TypeVar from typing_extensions import Unpack, TypeVarTuple @@ -635,19 +680,6 @@ x: A[str, str] reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str, builtins.str]" [builtins fixtures/tuple.pyi] -[case testVariadicAliasWrongCallable] -from typing import TypeVar, Callable -from typing_extensions import Unpack, TypeVarTuple - -T = TypeVar("T") -S = TypeVar("S") -Ts = TypeVarTuple("Ts") - -A = Callable[[T, Unpack[Ts], S], int] # E: Required positional args may not appear after default, named or var args -x: A[int, str, int, str] -reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.str, builtins.int, builtins.str) -> builtins.int" -[builtins fixtures/tuple.pyi] - [case testVariadicAliasMultipleUnpacks] from typing import Tuple, Generic, Callable from typing_extensions import Unpack, TypeVarTuple From efecd591e4198232f35e1db66bf99e56fc2f068b Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 26 Aug 2023 21:34:38 +0100 Subject: [PATCH 79/88] Support user defined variadic tuple types (#15961) Fixes https://github.com/python/mypy/issues/15946 Note this actually adds support also for variadic NamedTuples and variadic TypedDicts. Not that anyone requested this, but since generic NamedTuples and generic TypedDicts are supported using the same mechanism (special aliases) as generic tuple types (like `class A(Tuple[T, S]): ...` in the issue), it looked more risky and arbitrary to _not_ support them. Btw the implementation is simple, but while I was working on this, I accidentally found a problem with my general idea of doing certain type normalizations in `semanal_typeargs.py`. 
The problem is that sometimes we can call `get_proper_type()` during semantic analysis, so all the code that gets triggered by this (mostly `expand_type()`) can't really rely on types being normalized. Fortunately, with just a few tweaks I managed to make the code mostly robust to such scenarios (TBH there are a few possible holes left, but this is getting really complex, I think it is better to release this, and see if people will ever hit such scenarios, then fix accordingly). --- mypy/expandtype.py | 7 +- mypy/maptype.py | 1 - mypy/nodes.py | 12 +++- mypy/semanal.py | 10 ++- mypy/semanal_typeargs.py | 14 ++-- mypy/semanal_typeddict.py | 1 + mypy/typeanal.py | 12 ++-- mypy/types.py | 26 ++++++-- test-data/unit/check-typevar-tuple.test | 87 +++++++++++++++++++++++++ 9 files changed, 149 insertions(+), 21 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 7168d7c30b0d..ef8ebe1a9128 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -269,7 +269,8 @@ def visit_unpack_type(self, t: UnpackType) -> Type: # instead. # However, if the item is a variadic tuple, we can simply carry it over. # In particular, if we expand A[*tuple[T, ...]] with substitutions {T: str}, - # it is hard to assert this without getting proper type. + # it is hard to assert this without getting proper type. Another important + # example is non-normalized types when called from semanal.py. return UnpackType(t.type.accept(self)) def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType: @@ -414,6 +415,10 @@ def visit_tuple_type(self, t: TupleType) -> Type: unpacked = get_proper_type(item.type) if isinstance(unpacked, Instance): assert unpacked.type.fullname == "builtins.tuple" + if t.partial_fallback.type.fullname != "builtins.tuple": + # If it is a subtype (like named tuple) we need to preserve it, + # this essentially mimics the logic in tuple_fallback(). 
+ return t.partial_fallback.accept(self) return unpacked fallback = t.partial_fallback.accept(self) assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) diff --git a/mypy/maptype.py b/mypy/maptype.py index cae904469fed..4951306573c2 100644 --- a/mypy/maptype.py +++ b/mypy/maptype.py @@ -113,6 +113,5 @@ def instance_to_type_environment(instance: Instance) -> dict[TypeVarId, Type]: required number of type arguments. So this environment consists of the class's type variables mapped to the Instance's actual arguments. The type variables are mapped by their `id`. - """ return {binder.id: arg for binder, arg in zip(instance.type.defn.type_vars, instance.args)} diff --git a/mypy/nodes.py b/mypy/nodes.py index 7efb01c1b18e..9b4ba5e76667 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -3546,7 +3546,12 @@ def from_tuple_type(cls, info: TypeInfo) -> TypeAlias: assert info.tuple_type # TODO: is it possible to refactor this to set the correct type vars here? return TypeAlias( - info.tuple_type.copy_modified(fallback=mypy.types.Instance(info, info.defn.type_vars)), + info.tuple_type.copy_modified( + # Create an Instance similar to fill_typevars(). + fallback=mypy.types.Instance( + info, mypy.types.type_vars_as_args(info.defn.type_vars) + ) + ), info.fullname, info.line, info.column, @@ -3563,7 +3568,10 @@ def from_typeddict_type(cls, info: TypeInfo) -> TypeAlias: # TODO: is it possible to refactor this to set the correct type vars here? return TypeAlias( info.typeddict_type.copy_modified( - fallback=mypy.types.Instance(info, info.defn.type_vars) + # Create an Instance similar to fill_typevars(). 
+ fallback=mypy.types.Instance( + info, mypy.types.type_vars_as_args(info.defn.type_vars) + ) ), info.fullname, info.line, diff --git a/mypy/semanal.py b/mypy/semanal.py index 55d4e6a3f506..be7e733a0816 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -277,6 +277,7 @@ get_proper_types, is_named_instance, remove_dups, + type_vars_as_args, ) from mypy.types_utils import is_invalid_recursive_alias, store_argument_type from mypy.typevars import fill_typevars @@ -1702,12 +1703,17 @@ def setup_type_vars(self, defn: ClassDef, tvar_defs: list[TypeVarLikeType]) -> N def setup_alias_type_vars(self, defn: ClassDef) -> None: assert defn.info.special_alias is not None defn.info.special_alias.alias_tvars = list(defn.type_vars) + # It is a bit unfortunate that we need to inline some logic from TypeAlias constructor, + # but it is required, since type variables may change during semantic analyzer passes. + for i, t in enumerate(defn.type_vars): + if isinstance(t, TypeVarTupleType): + defn.info.special_alias.tvar_tuple_index = i target = defn.info.special_alias.target assert isinstance(target, ProperType) if isinstance(target, TypedDictType): - target.fallback.args = tuple(defn.type_vars) + target.fallback.args = type_vars_as_args(defn.type_vars) elif isinstance(target, TupleType): - target.partial_fallback.args = tuple(defn.type_vars) + target.partial_fallback.args = type_vars_as_args(defn.type_vars) else: assert False, f"Unexpected special alias type: {type(target)}" diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 1a37ac57be30..1ae6fada8f38 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -86,31 +86,31 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None: # correct aliases. Also, variadic aliases are better to check when fully analyzed, # so we do this here. 
assert t.alias is not None, f"Unfixed type alias {t.type_ref}" - args = flatten_nested_tuples(t.args) + # TODO: consider moving this validation to typeanal.py, expanding invalid aliases + # during semantic analysis may cause crashes. if t.alias.tvar_tuple_index is not None: - correct = len(args) >= len(t.alias.alias_tvars) - 1 + correct = len(t.args) >= len(t.alias.alias_tvars) - 1 if any( isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance) - for a in args + for a in t.args ): correct = True else: - correct = len(args) == len(t.alias.alias_tvars) + correct = len(t.args) == len(t.alias.alias_tvars) if not correct: if t.alias.tvar_tuple_index is not None: exp_len = f"at least {len(t.alias.alias_tvars) - 1}" else: exp_len = f"{len(t.alias.alias_tvars)}" self.fail( - f"Bad number of arguments for type alias, expected: {exp_len}, given: {len(args)}", + "Bad number of arguments for type alias," + f" expected: {exp_len}, given: {len(t.args)}", t, code=codes.TYPE_ARG, ) t.args = set_any_tvars( t.alias, t.line, t.column, self.options, from_error=True, fail=self.fail ).args - else: - t.args = args is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t) if not is_error: # If there was already an error for the alias itself, there is no point in checking diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index aba5bf69b130..fb3fa713e3fb 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -252,6 +252,7 @@ def map_items_to_base( if not tvars: mapped_items[key] = type_in_base continue + # TODO: simple zip can't be used for variadic types. 
mapped_items[key] = expand_type( type_in_base, {t.id: a for (t, a) in zip(tvars, base_args)} ) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 1955d2bc3c43..ed1a8073887b 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -82,6 +82,7 @@ UnionType, UnpackType, callable_with_ellipsis, + flatten_nested_tuples, flatten_nested_unions, get_proper_type, has_type_vars, @@ -763,8 +764,8 @@ def analyze_type_with_type_info( if info.special_alias: return instantiate_type_alias( info.special_alias, - # TODO: should we allow NamedTuples generic in ParamSpec and TypeVarTuple? - self.anal_array(args), + # TODO: should we allow NamedTuples generic in ParamSpec? + self.anal_array(args, allow_unpack=True), self.fail, False, ctx, @@ -782,7 +783,7 @@ def analyze_type_with_type_info( return instantiate_type_alias( info.special_alias, # TODO: should we allow TypedDicts generic in ParamSpec? - self.anal_array(args), + self.anal_array(args, allow_unpack=True), self.fail, False, ctx, @@ -1948,7 +1949,10 @@ def instantiate_type_alias( # TODO: we need to check args validity w.r.t alias.alias_tvars. # Otherwise invalid instantiations will be allowed in runtime context. # Note: in type context, these will be still caught by semanal_typeargs. - typ = TypeAliasType(node, args, ctx.line, ctx.column) + # Type aliases are special, since they can be expanded during semantic analysis, + # so we need to normalize them as soon as possible. + # TODO: can this cause an infinite recursion? + typ = TypeAliasType(node, flatten_nested_tuples(args), ctx.line, ctx.column) assert typ.alias is not None # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here. if ( diff --git a/mypy/types.py b/mypy/types.py index cf2c343655dd..fb360fb892f1 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1045,9 +1045,12 @@ class UnpackType(ProperType): """Type operator Unpack from PEP646. Can be either with Unpack[] or unpacking * syntax. 
- The inner type should be either a TypeVarTuple, a constant size - tuple, or a variable length tuple. Type aliases to these are not allowed, - except during semantic analysis. + The inner type should be either a TypeVarTuple, or a variable length tuple. + In an exceptional case of callable star argument it can be a fixed length tuple. + + Note: the above restrictions are only guaranteed by normalizations after semantic + analysis, if your code needs to handle UnpackType *during* semantic analysis, it is + wild west, technically anything can be present in the wrapped type. """ __slots__ = ["type"] @@ -2143,7 +2146,11 @@ def with_normalized_var_args(self) -> Self: assert nested_unpacked.type.fullname == "builtins.tuple" new_unpack = nested_unpacked.args[0] else: - assert isinstance(nested_unpacked, TypeVarTupleType) + if not isinstance(nested_unpacked, TypeVarTupleType): + # We found a non-nomralized tuple type, this means this method + # is called during semantic analysis (e.g. from get_proper_type()) + # there is no point in normalizing callables at this stage. + return self new_unpack = nested_unpack else: new_unpack = UnpackType( @@ -3587,6 +3594,17 @@ def remove_dups(types: list[T]) -> list[T]: return new_types +def type_vars_as_args(type_vars: Sequence[TypeVarLikeType]) -> tuple[Type, ...]: + """Represent type variables as they would appear in a type argument list.""" + args: list[Type] = [] + for tv in type_vars: + if isinstance(tv, TypeVarTupleType): + args.append(UnpackType(tv)) + else: + args.append(tv) + return tuple(args) + + # This cyclic import is unfortunate, but to avoid it we would need to move away all uses # of get_proper_type() from types.py. Majority of them have been removed, but few remaining # are quite tricky to get rid of, but ultimately we want to do it at some point. 
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index c7716f3e8346..a36c4d4d6741 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1032,3 +1032,90 @@ Second = Tuple[C, D] x: G[Unpack[First], Unpack[Second]] # E: Type argument "A" of "G" must be a subtype of "int" \ # E: Type argument "D" of "G" must be a subtype of "str" [builtins fixtures/tuple.pyi] + +[case testVariadicTupleType] +from typing import Tuple, Callable +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class A(Tuple[Unpack[Ts]]): + fn: Callable[[Unpack[Ts]], None] + +x: A[int] +reveal_type(x) # N: Revealed type is "Tuple[builtins.int, fallback=__main__.A[builtins.int]]" +reveal_type(x[0]) # N: Revealed type is "builtins.int" +reveal_type(x.fn) # N: Revealed type is "def (builtins.int)" + +y: A[int, str] +reveal_type(y) # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.A[builtins.int, builtins.str]]" +reveal_type(y[0]) # N: Revealed type is "builtins.int" +reveal_type(y.fn) # N: Revealed type is "def (builtins.int, builtins.str)" + +z: A[Unpack[Tuple[int, ...]]] +reveal_type(z) # N: Revealed type is "__main__.A[Unpack[builtins.tuple[builtins.int, ...]]]" +# TODO: this requires fixing map_instance_to_supertype(). 
+# reveal_type(z[0]) +reveal_type(z.fn) # N: Revealed type is "def (*builtins.int)" + +t: A[int, Unpack[Tuple[int, str]], str] +reveal_type(t) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str, builtins.str, fallback=__main__.A[builtins.int, builtins.int, builtins.str, builtins.str]]" +reveal_type(t[0]) # N: Revealed type is "builtins.int" +reveal_type(t.fn) # N: Revealed type is "def (builtins.int, builtins.int, builtins.str, builtins.str)" +[builtins fixtures/tuple.pyi] + +[case testVariadicNamedTuple] +from typing import Tuple, Callable, NamedTuple, Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class A(NamedTuple, Generic[Unpack[Ts], T]): + fn: Callable[[Unpack[Ts]], None] + val: T + +y: A[int, str] +reveal_type(y) # N: Revealed type is "Tuple[def (builtins.int), builtins.str, fallback=__main__.A[builtins.int, builtins.str]]" +reveal_type(y[0]) # N: Revealed type is "def (builtins.int)" +reveal_type(y.fn) # N: Revealed type is "def (builtins.int)" + +z: A[Unpack[Tuple[int, ...]]] +reveal_type(z) # N: Revealed type is "Tuple[def (*builtins.int), builtins.int, fallback=__main__.A[Unpack[builtins.tuple[builtins.int, ...]], builtins.int]]" +reveal_type(z.fn) # N: Revealed type is "def (*builtins.int)" + +t: A[int, Unpack[Tuple[int, str]], str] +reveal_type(t) # N: Revealed type is "Tuple[def (builtins.int, builtins.int, builtins.str), builtins.str, fallback=__main__.A[builtins.int, builtins.int, builtins.str, builtins.str]]" + +def test(x: int, y: str) -> None: ... +nt = A(fn=test, val=42) +reveal_type(nt) # N: Revealed type is "Tuple[def (builtins.int, builtins.str), builtins.int, fallback=__main__.A[builtins.int, builtins.str, builtins.int]]" + +def bad() -> int: ... 
+nt2 = A(fn=bad, val=42) # E: Argument "fn" to "A" has incompatible type "Callable[[], int]"; expected "Callable[[], None]" +[builtins fixtures/tuple.pyi] + +[case testVariadicTypedDict] +from typing import Tuple, Callable, Generic +from typing_extensions import TypeVarTuple, Unpack, TypedDict + +Ts = TypeVarTuple("Ts") +class A(TypedDict, Generic[Unpack[Ts], T]): + fn: Callable[[Unpack[Ts]], None] + val: T + +y: A[int, str] +reveal_type(y) # N: Revealed type is "TypedDict('__main__.A', {'fn': def (builtins.int), 'val': builtins.str})" +reveal_type(y["fn"]) # N: Revealed type is "def (builtins.int)" + +z: A[Unpack[Tuple[int, ...]]] +reveal_type(z) # N: Revealed type is "TypedDict('__main__.A', {'fn': def (*builtins.int), 'val': builtins.int})" +reveal_type(z["fn"]) # N: Revealed type is "def (*builtins.int)" + +t: A[int, Unpack[Tuple[int, str]], str] +reveal_type(t) # N: Revealed type is "TypedDict('__main__.A', {'fn': def (builtins.int, builtins.int, builtins.str), 'val': builtins.str})" + +def test(x: int, y: str) -> None: ... +td = A({"fn": test, "val": 42}) +reveal_type(td) # N: Revealed type is "TypedDict('__main__.A', {'fn': def (builtins.int, builtins.str), 'val': builtins.int})" + +def bad() -> int: ... +td2 = A({"fn": bad, "val": 42}) # E: Incompatible types (expression has type "Callable[[], int]", TypedDict item "fn" has type "Callable[[], None]") +[builtins fixtures/tuple.pyi] From d7b24514d7301f86031b7d1e2215cf8c2476bec0 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sun, 27 Aug 2023 23:20:13 +0100 Subject: [PATCH 80/88] Fixes to stubtest's new check for missing stdlib modules (#15960) - It's not easy to predict where stdlib modules are going to be located. (It varies between platforms, and between venvs and conda envs; on some platforms it's in a completely different directory to the Python executable.) 
- Some modules appear to raise `SystemExit` when stubtest tries to import them in CI, leading stubtest to instantly exit without logging a message to the terminal. - Importing some `test.*` submodules leads to unraisable exceptions being printed to the terminal at the end of the stubtest run, which is somewhat annoying. --- mypy/stubtest.py | 41 ++++++++++++++++++++++++++++++----------- 1 file changed, 30 insertions(+), 11 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 34bb985b702e..a804835a632b 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -23,6 +23,7 @@ import typing import typing_extensions import warnings +from collections import defaultdict from contextlib import redirect_stderr, redirect_stdout from functools import singledispatch from pathlib import Path @@ -1679,16 +1680,22 @@ def get_importable_stdlib_modules() -> set[str]: all_stdlib_modules = sys.stdlib_module_names else: all_stdlib_modules = set(sys.builtin_module_names) - python_exe_dir = Path(sys.executable).parent + modules_by_finder: defaultdict[importlib.machinery.FileFinder, set[str]] = defaultdict(set) for m in pkgutil.iter_modules(): - finder = m.module_finder - if isinstance(finder, importlib.machinery.FileFinder): - finder_path = Path(finder.path) - if ( - python_exe_dir in finder_path.parents - and "site-packages" not in finder_path.parts - ): - all_stdlib_modules.add(m.name) + if isinstance(m.module_finder, importlib.machinery.FileFinder): + modules_by_finder[m.module_finder].add(m.name) + for finder, module_group in modules_by_finder.items(): + if ( + "site-packages" not in Path(finder.path).parents + # if "_queue" is present, it's most likely the module finder + # for stdlib extension modules; + # if "queue" is present, it's most likely the module finder + # for pure-Python stdlib modules. + # In either case, we'll want to add all the modules that the finder has to offer us. + # This is a bit hacky, but seems to work well in a cross-platform way. 
+ and {"_queue", "queue"} & module_group + ): + all_stdlib_modules.update(module_group) importable_stdlib_modules: set[str] = set() for module_name in all_stdlib_modules: @@ -1719,13 +1726,25 @@ def get_importable_stdlib_modules() -> set[str]: # The idlelib.* submodules are similarly annoying in opening random tkinter windows, # and we're unlikely to ever add stubs for idlelib in typeshed # (see discussion in https://github.com/python/typeshed/pull/9193) - if submodule_name.endswith(".__main__") or submodule_name.startswith("idlelib."): + # + # test.* modules do weird things like raising exceptions in __del__ methods, + # leading to unraisable exceptions being logged to the terminal + # as a warning at the end of the stubtest run + if ( + submodule_name.endswith(".__main__") + or submodule_name.startswith("idlelib.") + or submodule_name.startswith("test.") + ): continue try: silent_import_module(submodule_name) + except KeyboardInterrupt: + raise # importing multiprocessing.popen_forkserver on Windows raises AttributeError... - except Exception: + # some submodules also appear to raise SystemExit as well on some Python versions + # (not sure exactly which) + except BaseException: continue else: importable_stdlib_modules.add(submodule_name) From 807bd3990920271dddd64a368056c30f78dcba76 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 30 Aug 2023 03:08:05 +0100 Subject: [PATCH 81/88] Fix ParamSpec inference for callback protocols (#15986) Fixes https://github.com/python/mypy/issues/15984 Fix is straightforward, `ParamSpec` inference special-casing should put instances with `__call__` and callable types on same ground. 
--- mypy/checkexpr.py | 4 ++++ test-data/unit/check-parameter-specification.test | 15 +++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 4430d0773cfa..218568007b9e 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2185,6 +2185,10 @@ def get_arg_infer_passes( # run(test, 1, 2) # we will use `test` for inference, since it will allow to infer also # argument *names* for P <: [x: int, y: int]. + if isinstance(p_actual, Instance): + call_method = find_member("__call__", p_actual, p_actual, is_operator=True) + if call_method is not None: + p_actual = get_proper_type(call_method) if ( isinstance(p_actual, CallableType) and not p_actual.variables diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index ed1d59b376d2..a98c92ce14e7 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1824,3 +1824,18 @@ class C(Generic[P]): ... c: C[int, [int, str], str] # E: Nested parameter specifications are not allowed reveal_type(c) # N: Revealed type is "__main__.C[Any]" [builtins fixtures/paramspec.pyi] + +[case testParamSpecInferenceWithCallbackProtocol] +from typing import Protocol, Callable, ParamSpec + +class CB(Protocol): + def __call__(self, x: str, y: int) -> None: ... + +P = ParamSpec('P') +def g(fn: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ... 
+ +cb: CB +g(cb, y=0, x='a') # OK +g(cb, y='a', x=0) # E: Argument "y" to "g" has incompatible type "str"; expected "int" \ + # E: Argument "x" to "g" has incompatible type "int"; expected "str" +[builtins fixtures/paramspec.pyi] From b33373c1f4877ba5749e7f79a5efe842f53c4ce0 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 29 Aug 2023 17:46:46 -0700 Subject: [PATCH 82/88] Try upgrading tox (#15992) Fixes #15990 --- .github/workflows/docs.yml | 2 +- .github/workflows/test.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5dc86a1159f4..8beb293c2d76 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -27,7 +27,7 @@ jobs: with: python-version: '3.8' - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + run: pip install --upgrade 'setuptools!=50' tox==4.11.0 - name: Setup tox environment run: tox run -e ${{ env.TOXENV }} --notest - name: Test diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f594353ed05a..76d9cc6ab570 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -130,7 +130,7 @@ jobs: ./misc/build-debug-python.sh $PYTHONVERSION $PYTHONDIR $VENV source $VENV/bin/activate - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + run: pip install --upgrade 'setuptools!=50' tox==4.11.0 - name: Compiled with mypyc if: ${{ matrix.test_mypyc }} run: | @@ -182,7 +182,7 @@ jobs: default: 3.11.1 command: python -c "import platform; print(f'{platform.architecture()=} {platform.machine()=}');" - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + run: pip install --upgrade 'setuptools!=50' tox==4.11.0 - name: Setup tox environment run: tox run -e py --notest - name: Test From 00cba138a382f8e64b3bbc291c9a4b2ba25a6cb0 Mon Sep 17 00:00:00 2001 From: Marc Mueller 
<30130371+cdce8p@users.noreply.github.com> Date: Wed, 6 Sep 2023 19:43:24 +0200 Subject: [PATCH 83/88] Clear cache when adding --new-type-inference (#16059) Add `new_type_inference` to the list of options affecting the cache. --- mypy/options.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mypy/options.py b/mypy/options.py index 5e451c0aa0a3..007ae0a78aa1 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -62,6 +62,7 @@ class BuildType: | { "platform", "bazel", + "new_type_inference", "plugins", "disable_bytearray_promotion", "disable_memoryview_promotion", From d3766331e89e4cf9f58d7d094b8df10acf63a0e5 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 17 Sep 2023 05:22:59 -0700 Subject: [PATCH 84/88] Fix mypyc regression with pretty (#16124) Fixes #15877 Regression was introduced by #15070. Previously Errors objects created in mypyc build would just use all the default values, now they use the actual options object involved --- mypy/errors.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mypy/errors.py b/mypy/errors.py index 680b7f1d31ea..a47a8385c735 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -909,8 +909,7 @@ def file_messages(self, path: str) -> list[str]: return [] self.flushed_files.add(path) source_lines = None - if self.options.pretty: - assert self.read_source + if self.options.pretty and self.read_source: source_lines = self.read_source(path) return self.format_messages(self.error_info_map[path], source_lines) From e78df552ebf84c3e89fbc7dab0cbc459a7ca306c Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 1 Sep 2023 22:47:37 -0700 Subject: [PATCH 85/88] Match note error codes to import error codes (#16004) Fixes #16003. 
Follow up to #14740 --- mypy/build.py | 2 +- mypy/errors.py | 2 +- mypy/report.py | 2 +- mypy/test/testcheck.py | 2 +- mypy/test/testcmdline.py | 2 +- mypy/test/testreports.py | 4 ++-- test-data/unit/pep561.test | 1 + 7 files changed, 8 insertions(+), 7 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 525d5f436e7e..39629c2dc455 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2798,7 +2798,7 @@ def module_not_found( for note in notes: if "{stub_dist}" in note: note = note.format(stub_dist=stub_distribution_name(module)) - errors.report(line, 0, note, severity="note", only_once=True, code=codes.IMPORT) + errors.report(line, 0, note, severity="note", only_once=True, code=code) if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: manager.missing_stub_packages.add(stub_distribution_name(module)) errors.set_import_context(save_import_context) diff --git a/mypy/errors.py b/mypy/errors.py index a47a8385c735..4e62a48aeb27 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -469,7 +469,7 @@ def _add_error_info(self, file: str, info: ErrorInfo) -> None: self.error_info_map[file].append(info) if info.blocker: self.has_blockers.add(file) - if info.code is IMPORT: + if info.code in (IMPORT, IMPORT_UNTYPED, IMPORT_NOT_FOUND): self.seen_import_error = True def _filter_error(self, file: str, info: ErrorInfo) -> bool: diff --git a/mypy/report.py b/mypy/report.py index 5d93351aa37d..d5f16464c0fb 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -25,7 +25,7 @@ from mypy.version import __version__ try: - from lxml import etree # type: ignore[import] + from lxml import etree # type: ignore[import-untyped] LXML_INSTALLED = True except ImportError: diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 7b81deeafe9d..98328e070232 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -26,7 +26,7 @@ from mypy.test.update_data import update_testcase_output try: - import lxml # type: ignore[import] + import lxml # type: 
ignore[import-untyped] except ImportError: lxml = None diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 30ecef07a821..9bc02d319964 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -20,7 +20,7 @@ ) try: - import lxml # type: ignore[import] + import lxml # type: ignore[import-untyped] except ImportError: lxml = None diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py index a422b4bb2a7b..5ff315f83ba8 100644 --- a/mypy/test/testreports.py +++ b/mypy/test/testreports.py @@ -7,7 +7,7 @@ from mypy.test.helpers import Suite, assert_equal try: - import lxml # type: ignore[import] + import lxml # type: ignore[import-untyped] except ImportError: lxml = None @@ -22,7 +22,7 @@ def test_get_line_rate(self) -> None: @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?") def test_as_xml(self) -> None: - import lxml.etree as etree # type: ignore[import] + import lxml.etree as etree # type: ignore[import-untyped] cobertura_package = CoberturaPackage("foobar") cobertura_package.covered_lines = 21 diff --git a/test-data/unit/pep561.test b/test-data/unit/pep561.test index e8ebbd03dca7..9969c2894c36 100644 --- a/test-data/unit/pep561.test +++ b/test-data/unit/pep561.test @@ -167,6 +167,7 @@ a.bf(False) b.bf(False) a.bf(1) b.bf(1) +import typedpkg_ns.whatever as c # type: ignore[import-untyped] [out] testNamespacePkgWStubs.py:4: error: Skipping analyzing "typedpkg_ns.b.bbb": module is installed, but missing library stubs or py.typed marker testNamespacePkgWStubs.py:4: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports From 4c963c9973d319e145be540ce40637d1f43506cd Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 1 Sep 2023 19:16:11 -0700 Subject: [PATCH 86/88] Make PEP 695 constructs give a reasonable error message (#16013) Mypy does not yet support PEP 695 Fixes #16011, linking #15238 --- mypy/fastparse.py | 31 
+++++++++++++++ mypy/test/helpers.py | 4 +- mypy/test/testcheck.py | 2 + test-data/unit/check-python312.test | 59 +++++++++++++++++++++++++++++ 4 files changed, 95 insertions(+), 1 deletion(-) create mode 100644 test-data/unit/check-python312.test diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 6aa626afb81e..a96e697d40bf 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -144,6 +144,11 @@ def ast3_parse( NamedExpr = ast3.NamedExpr Constant = ast3.Constant +if sys.version_info >= (3, 12): + ast_TypeAlias = ast3.TypeAlias +else: + ast_TypeAlias = Any + if sys.version_info >= (3, 10): Match = ast3.Match MatchValue = ast3.MatchValue @@ -936,6 +941,14 @@ def do_func_def( arg_types = [AnyType(TypeOfAny.from_error)] * len(args) return_type = AnyType(TypeOfAny.from_error) else: + if sys.version_info >= (3, 12) and n.type_params: + self.fail( + ErrorMessage("PEP 695 generics are not yet supported", code=codes.VALID_TYPE), + n.type_params[0].lineno, + n.type_params[0].col_offset, + blocker=False, + ) + arg_types = [a.type_annotation for a in args] return_type = TypeConverter( self.errors, line=n.returns.lineno if n.returns else lineno @@ -1110,6 +1123,14 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: self.class_and_function_stack.append("C") keywords = [(kw.arg, self.visit(kw.value)) for kw in n.keywords if kw.arg] + if sys.version_info >= (3, 12) and n.type_params: + self.fail( + ErrorMessage("PEP 695 generics are not yet supported", code=codes.VALID_TYPE), + n.type_params[0].lineno, + n.type_params[0].col_offset, + blocker=False, + ) + cdef = ClassDef( n.name, self.as_required_block(n.body), @@ -1717,6 +1738,16 @@ def visit_MatchOr(self, n: MatchOr) -> OrPattern: node = OrPattern([self.visit(pattern) for pattern in n.patterns]) return self.set_line(node, n) + def visit_TypeAlias(self, n: ast_TypeAlias) -> AssignmentStmt: + self.fail( + ErrorMessage("PEP 695 type aliases are not yet supported", code=codes.VALID_TYPE), + n.lineno, + n.col_offset, 
+ blocker=False, + ) + node = AssignmentStmt([NameExpr(n.name.id)], self.visit(n.value)) + return self.set_line(node, n) + class TypeConverter: def __init__( diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index d1850219e60a..7447391593d5 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -241,7 +241,9 @@ def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int: def testfile_pyversion(path: str) -> tuple[int, int]: - if path.endswith("python311.test"): + if path.endswith("python312.test"): + return 3, 12 + elif path.endswith("python311.test"): return 3, 11 elif path.endswith("python310.test"): return 3, 10 diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 98328e070232..85fbe5dc2990 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -43,6 +43,8 @@ typecheck_files.remove("check-python310.test") if sys.version_info < (3, 11): typecheck_files.remove("check-python311.test") +if sys.version_info < (3, 12): + typecheck_files.remove("check-python312.test") # Special tests for platforms with case-insensitive filesystems. 
if sys.platform not in ("darwin", "win32"): diff --git a/test-data/unit/check-python312.test b/test-data/unit/check-python312.test new file mode 100644 index 000000000000..91aca7794071 --- /dev/null +++ b/test-data/unit/check-python312.test @@ -0,0 +1,59 @@ +[case test695TypeAlias] +type MyInt = int # E: PEP 695 type aliases are not yet supported + +def f(x: MyInt) -> MyInt: + return reveal_type(x) # N: Revealed type is "builtins.int" + +type MyList[T] = list[T] # E: PEP 695 type aliases are not yet supported \ + # E: Name "T" is not defined + +def g(x: MyList[int]) -> MyList[int]: # E: Variable "__main__.MyList" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases + return reveal_type(x) # N: Revealed type is "MyList?[builtins.int]" + +[case test695Class] +class MyGen[T]: # E: PEP 695 generics are not yet supported + def __init__(self, x: T) -> None: # E: Name "T" is not defined + self.x = x + +def f(x: MyGen[int]): # E: "MyGen" expects no type arguments, but 1 given + reveal_type(x.x) # N: Revealed type is "Any" + +[case test695Function] +def f[T](x: T) -> T: # E: PEP 695 generics are not yet supported \ + # E: Name "T" is not defined + return reveal_type(x) # N: Revealed type is "Any" + +reveal_type(f(1)) # N: Revealed type is "Any" + +async def g[T](x: T) -> T: # E: PEP 695 generics are not yet supported \ + # E: Name "T" is not defined + return reveal_type(x) # N: Revealed type is "Any" + +reveal_type(g(1)) # E: Value of type "Coroutine[Any, Any, Any]" must be used \ + # N: Are you missing an await? 
\ + # N: Revealed type is "typing.Coroutine[Any, Any, Any]" + +[case test695TypeVar] +from typing import Callable +type Alias1[T: int] = list[T] # E: PEP 695 type aliases are not yet supported +type Alias2[**P] = Callable[P, int] # E: PEP 695 type aliases are not yet supported \ + # E: Value of type "int" is not indexable \ + # E: Name "P" is not defined +type Alias3[*Ts] = tuple[*Ts] # E: PEP 695 type aliases are not yet supported \ + # E: Type expected within [...] \ + # E: The type "Type[Tuple[Any, ...]]" is not generic and not indexable \ + # E: Name "Ts" is not defined + +class Cls1[T: int]: ... # E: PEP 695 generics are not yet supported +class Cls2[**P]: ... # E: PEP 695 generics are not yet supported +class Cls3[*Ts]: ... # E: PEP 695 generics are not yet supported + +def func1[T: int](x: T) -> T: ... # E: PEP 695 generics are not yet supported +def func2[**P](x: Callable[P, int]) -> Callable[P, str]: ... # E: PEP 695 generics are not yet supported \ + # E: The first argument to Callable must be a list of types, parameter specification, or "..." \ + # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas \ + # E: Name "P" is not defined +def func3[*Ts](x: tuple[*Ts]) -> tuple[int, *Ts]: ... # E: PEP 695 generics are not yet supported \ + # E: Name "Ts" is not defined +[builtins fixtures/tuple.pyi] From 5b488ab8ad82ce11388a63367a60e1443f359f04 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 29 Aug 2023 10:17:52 +0100 Subject: [PATCH 87/88] Optimize Unpack for failures (#15967) This is a small but possibly important PR. Wherever possible we should represent user error and/or failed type inference as `*tuple[Any, ...]`/`*tuple[, ...]`, rather than `Unpack[Any]`/`Unpack[]` or plain `Any`/``. This way we will not need any special casing for failure conditions in various places without risking a crash instead of a graceful failure (error message). 
--- mypy/expandtype.py | 23 ++++++----------------- mypy/semanal_main.py | 2 ++ mypy/semanal_typeargs.py | 21 ++++++++++++++------- test-data/unit/check-typevar-tuple.test | 5 ++--- 4 files changed, 24 insertions(+), 27 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index ef8ebe1a9128..26353c043cb7 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -273,7 +273,7 @@ def visit_unpack_type(self, t: UnpackType) -> Type: # example is non-normalized types when called from semanal.py. return UnpackType(t.type.accept(self)) - def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType: + def expand_unpack(self, t: UnpackType) -> list[Type]: assert isinstance(t.type, TypeVarTupleType) repl = get_proper_type(self.variables.get(t.type.id, t.type)) if isinstance(repl, TupleType): @@ -285,9 +285,9 @@ def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType ): return [UnpackType(typ=repl)] elif isinstance(repl, (AnyType, UninhabitedType)): - # tuple[Any, ...] for Any would be better, but we don't have - # the type info to construct that type here. - return repl + # Replace *Ts = Any with *Ts = *tuple[Any, ...] and same for <nothing>. + # These types may appear here as a result of user error or failed inference.
+ return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))] else: raise RuntimeError(f"Invalid type replacement to expand: {repl}") @@ -310,12 +310,7 @@ def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> l # We have plain Unpack[Ts] assert isinstance(var_arg_type, TypeVarTupleType) fallback = var_arg_type.tuple_fallback - expanded_items_res = self.expand_unpack(var_arg) - if isinstance(expanded_items_res, list): - expanded_items = expanded_items_res - else: - # We got Any or <nothing> - return prefix + [expanded_items_res] + suffix + expanded_items = self.expand_unpack(var_arg) new_unpack = UnpackType(TupleType(expanded_items, fallback)) return prefix + [new_unpack] + suffix @@ -394,14 +389,8 @@ def expand_types_with_unpack( items: list[Type] = [] for item in typs: if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): - unpacked_items = self.expand_unpack(item) - if isinstance(unpacked_items, (AnyType, UninhabitedType)): - # TODO: better error for <nothing>, something like tuple of unknown? - return unpacked_items - else: - items.extend(unpacked_items) + items.extend(self.expand_unpack(item)) else: - # Must preserve original aliases when possible.
items.append(item.accept(self)) return items diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 51a7014fac1a..ec09deb0952f 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -381,6 +381,7 @@ def check_type_arguments(graph: Graph, scc: list[str], errors: Errors) -> None: errors, state.options, is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + state.manager.semantic_analyzer.named_type, ) with state.wrap_context(): with mypy.state.state.strict_optional_set(state.options.strict_optional): @@ -399,6 +400,7 @@ def check_type_arguments_in_targets( errors, state.options, is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + state.manager.semantic_analyzer.named_type, ) with state.wrap_context(): with mypy.state.state.strict_optional_set(state.options.strict_optional): diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 1ae6fada8f38..749b02391e06 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import Sequence +from typing import Callable, Sequence from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode @@ -42,11 +42,18 @@ class TypeArgumentAnalyzer(MixedTraverserVisitor): - def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None: + def __init__( + self, + errors: Errors, + options: Options, + is_typeshed_file: bool, + named_type: Callable[[str, list[Type]], Instance], + ) -> None: super().__init__() self.errors = errors self.options = options self.is_typeshed_file = is_typeshed_file + self.named_type = named_type self.scope = Scope() # Should we also analyze function definitions, or only module top-levels? 
self.recurse_into_functions = True @@ -243,16 +250,16 @@ def visit_unpack_type(self, typ: UnpackType) -> None: return if isinstance(proper_type, TypeVarTupleType): return + # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere. if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple": return - if isinstance(proper_type, AnyType) and proper_type.type_of_any == TypeOfAny.from_error: - return - if not isinstance(proper_type, UnboundType): - # Avoid extra errors if there were some errors already. + if not isinstance(proper_type, (UnboundType, AnyType)): - # Avoid extra errors if there were some errors already. Also interpret plain Any + # as tuple[Any, ...] (this is better for the code in type checker). self.fail( message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ ) - typ.type = AnyType(TypeOfAny.from_error) + typ.type = self.named_type("builtins.tuple", [AnyType(TypeOfAny.from_error)]) def check_type_var_values( self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index a36c4d4d6741..c8b33ec96b06 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -17,8 +17,7 @@ reveal_type(f(args)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" reveal_type(f(varargs)) # N: Revealed type is "builtins.tuple[builtins.int, ...]" -if object(): - f(0) # E: Argument 1 to "f" has incompatible type "int"; expected <nothing> +f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[<nothing>, ...]" def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: return a @@ -26,7 +25,7 @@ def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: reveal_type(g(args, args)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" reveal_type(g(args, args2)) # N: Revealed type is "Tuple[builtins.int,
builtins.str]" reveal_type(g(args, args3)) # N: Revealed type is "builtins.tuple[builtins.object, ...]" -reveal_type(g(any, any)) # N: Revealed type is "Any" +reveal_type(g(any, any)) # N: Revealed type is "builtins.tuple[Any, ...]" [builtins fixtures/tuple.pyi] [case testTypeVarTupleMixed] From ac2d56f9145a4e15e7c0d1fbcd9a466ad0cec9a1 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 10 Oct 2023 17:06:00 +0100 Subject: [PATCH 88/88] Remove +dev from version --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index 512890ce7d2b..9271eba74aa1 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". -__version__ = "1.6.0+dev" +__version__ = "1.6.0" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))