diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 4256387e1209..006809c4a0e1 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -2,3 +2,5 @@ 97c5ee99bc98dc475512e549b252b23a6e7e0997 # Use builtin generics and PEP 604 for type annotations wherever possible (#13427) 23ee1e7aff357e656e3102435ad0fe3b5074571e +# Use variable annotations (#10723) +f98f78216ba9d6ab68c8e69c19e9f3c7926c5efe diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 8851f7fbb0f3..3e06b5e909f1 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -20,6 +20,7 @@ jobs: env: TOXENV: docs TOX_SKIP_MISSING_INTERPRETERS: False + VERIFY_MYPY_ERROR_CODES: 1 steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 31242d0ad0bc..297427e72aca 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -178,8 +178,6 @@ Classes class AuditedBankAccount(BankAccount): # You can optionally declare instance variables in the class body audit_log: list[str] - # This is an instance variable with a default value - auditor_name: str = "The Spanish Inquisition" def __init__(self, account_name: str, initial_balance: int = 0) -> None: super().__init__(account_name, initial_balance) diff --git a/docs/source/conf.py b/docs/source/conf.py index 80097ef5b3a8..683b2a6785b3 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -35,7 +35,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["sphinx.ext.intersphinx"] +extensions = ["sphinx.ext.intersphinx", "docs.source.html_builder"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 54dc31f2cfcb..1654c5910f98 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -8,6 +8,8 @@ with default options. See :ref:`error-codes` for general documentation about error codes. :ref:`error-codes-optional` documents additional error codes that you can enable. +.. _code-attr-defined: + Check that attribute exists [attr-defined] ------------------------------------------ @@ -43,6 +45,8 @@ A reference to a missing attribute is given the ``Any`` type. In the above example, the type of ``non_existent`` will be ``Any``, which can be important if you silence the error. +.. _code-union-attr: + Check that attribute exists in each union item [union-attr] ----------------------------------------------------------- @@ -75,6 +79,8 @@ You can often work around these errors by using ``assert isinstance(obj, ClassNa or ``assert obj is not None`` to tell mypy that you know that the type is more specific than what mypy thinks. +.. _code-name-defined: + Check that name is defined [name-defined] ----------------------------------------- @@ -89,6 +95,7 @@ This example accidentally calls ``sort()`` instead of :py:func:`sorted`: x = sort([3, 2, 4]) # Error: Name "sort" is not defined [name-defined] +.. _code-used-before-def: Check that a variable is not used before it's defined [used-before-def] ----------------------------------------------------------------------- @@ -105,6 +112,7 @@ Example: print(x) # Error: Name "x" is used before definition [used-before-def] x = 123 +.. 
_code-call-arg: Check arguments in calls [call-arg] ----------------------------------- @@ -124,6 +132,8 @@ Example: greet('jack') # OK greet('jill', 'jack') # Error: Too many arguments for "greet" [call-arg] +.. _code-arg-type: + Check argument types [arg-type] ------------------------------- @@ -144,6 +154,8 @@ Example: # expected "list[int]" [arg-type] print(first(t)) +.. _code-call-overload: + Check calls to overloaded functions [call-overload] --------------------------------------------------- @@ -175,6 +187,8 @@ Example: # Error: No overload variant of "inc_maybe" matches argument type "float" [call-overload] inc_maybe(1.2) +.. _code-valid-type: + Check validity of types [valid-type] ------------------------------------ @@ -207,6 +221,8 @@ You can use :py:data:`~typing.Callable` as the type for callable objects: for x in objs: f(x) +.. _code-var-annotated: + Require annotation if variable type is unclear [var-annotated] -------------------------------------------------------------- @@ -239,6 +255,8 @@ To address this, we add an explicit annotation: reveal_type(Bundle().items) # list[str] +.. _code-override: + Check validity of overrides [override] -------------------------------------- @@ -275,6 +293,8 @@ Example: arg: bool) -> int: ... +.. _code-return: + Check that function returns a value [return] -------------------------------------------- @@ -303,6 +323,8 @@ Example: else: raise ValueError('not defined for zero') +.. _code-return-value: + Check that return value is compatible [return-value] ---------------------------------------------------- @@ -317,6 +339,8 @@ Example: # Error: Incompatible return value type (got "int", expected "str") [return-value] return x + 1 +.. _code-assignment: + Check types in assignment statement [assignment] ------------------------------------------------ @@ -339,6 +363,8 @@ Example: # variable has type "str") [assignment] r.name = 5 +.. _code-method-assign: + Check that assignment target is not a method [method-assign] ------------------------------------------------------------ @@ -368,6 +394,8 @@ so only the second assignment will still generate an error. This error code is a subcode of the more general ``[assignment]`` code. +.. _code-type-var: + Check type variable values [type-var] ------------------------------------- @@ -390,6 +418,8 @@ Example: # Error: Value of type variable "T1" of "add" cannot be "str" [type-var] add('x', 'y') +.. _code-operator: + Check uses of various operators [operator] ------------------------------------------ @@ -404,6 +434,8 @@ Example: # Error: Unsupported operand types for + ("int" and "str") [operator] 1 + 'x' +.. _code-index: + Check indexing operations [index] --------------------------------- @@ -425,6 +457,8 @@ Example: # Error: Invalid index type "bytes" for "dict[str, int]"; expected type "str" [index] a[b'x'] = 4 +.. _code-list-item: + Check list items [list-item] ---------------------------- @@ -439,6 +473,8 @@ Example: # Error: List item 0 has incompatible type "int"; expected "str" [list-item] a: list[str] = [0] +.. _code-dict-item: + Check dict items [dict-item] ---------------------------- @@ -453,6 +489,8 @@ Example: # Error: Dict entry 0 has incompatible type "str": "str"; expected "str": "int" [dict-item] d: dict[str, int] = {'key': 'value'} +.. _code-typeddict-item: + Check TypedDict items [typeddict-item] -------------------------------------- @@ -477,6 +515,8 @@ Example: # TypedDict item "x" has type "int") [typeddict-item] p: Point = {'x': 1.2, 'y': 4} +.. 
_code-typeddict-unknown-key: + Check TypedDict Keys [typeddict-unknown-key] -------------------------------------------- @@ -533,6 +573,8 @@ runtime: This error code is a subcode of the wider ``[typeddict-item]`` code. +.. _code-has-type: + Check that type of target is known [has-type] --------------------------------------------- @@ -572,6 +614,8 @@ the issue: def set_y(self) -> None: self.y: int = self.x # Added annotation here +.. _code-import: + Check that import target can be found [import] ---------------------------------------------- @@ -587,6 +631,8 @@ Example: See :ref:`ignore-missing-imports` for how to work around these errors. +.. _code-no-redef: + Check that each name is defined once [no-redef] ----------------------------------------------- @@ -613,6 +659,8 @@ Example: # (the first definition wins!) A('x') +.. _code-func-returns-value: + Check that called function returns a value [func-returns-value] --------------------------------------------------------------- @@ -635,6 +683,8 @@ returns ``None``: if f(): print("not false") +.. _code-abstract: + Check instantiation of abstract classes [abstract] -------------------------------------------------- @@ -666,6 +716,8 @@ Example: # Error: Cannot instantiate abstract class "Thing" with abstract attribute "save" [abstract] t = Thing() +.. _code-type-abstract: + Safe handling of abstract type object types [type-abstract] ----------------------------------------------------------- @@ -692,6 +744,8 @@ Example: # Error: Only concrete class can be given where "Type[Config]" is expected [type-abstract] make_many(Config, 5) +.. _code-safe-super: + Check that call to an abstract method via super is valid [safe-super] --------------------------------------------------------------------- @@ -714,6 +768,8 @@ will cause runtime errors, so mypy prevents you from doing so: Mypy considers the following as trivial bodies: a ``pass`` statement, a literal ellipsis ``...``, a docstring, and a ``raise NotImplementedError`` statement. +.. _code-valid-newtype: + Check the target of NewType [valid-newtype] ------------------------------------------- @@ -738,6 +794,8 @@ To work around the issue, you can either give mypy access to the sources for ``acme`` or create a stub file for the module. See :ref:`ignore-missing-imports` for more information. +.. _code-exit-return: + Check the return type of __exit__ [exit-return] ----------------------------------------------- @@ -794,6 +852,8 @@ You can also use ``None``: def __exit__(self, exc, value, tb) -> None: # Also OK print('exit') +.. _code-name-match: + Check that naming is consistent [name-match] -------------------------------------------- @@ -807,6 +867,8 @@ consistently when using the call-based syntax. Example: # Error: First argument to namedtuple() should be "Point2D", not "Point" Point2D = NamedTuple("Point", [("x", int), ("y", int)]) +.. _code-literal-required: + Check that literal is used where expected [literal-required] ------------------------------------------------------------ @@ -836,6 +898,8 @@ or ``Literal`` variables. Example: # expected one of ("x", "y") [literal-required] p[key] +.. _code-no-overload-impl: + Check that overloaded functions have an implementation [no-overload-impl] ------------------------------------------------------------------------- @@ -858,6 +922,8 @@ implementation. def func(value): pass # actual implementation +.. 
_code-unused-coroutine: + Check that coroutine return value is used [unused-coroutine] ------------------------------------------------------------ @@ -881,6 +947,8 @@ otherwise unused variable: _ = f() # No error +.. _code-assert-type: + Check types in assert_type [assert-type] ---------------------------------------- @@ -895,6 +963,8 @@ the provided type. assert_type([1], list[str]) # Error +.. _code-truthy-function: + Check that function isn't used in boolean context [truthy-function] ------------------------------------------------------------------- @@ -908,6 +978,8 @@ Functions will always evaluate to true in boolean contexts. if f: # Error: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] pass +.. _code-str-bytes-safe: + Check for implicit bytes coercions [str-bytes-safe] ------------------------------------------------------------------- @@ -926,6 +998,8 @@ Warn about cases where a bytes object may be converted to a string in an unexpec print(f"The alphabet starts with {b!r}") # The alphabet starts with b'abc' print(f"The alphabet starts with {b.decode('utf-8')}") # The alphabet starts with abc +.. _code-syntax: + Report syntax errors [syntax] ----------------------------- @@ -933,6 +1007,8 @@ If the code being checked is not syntactically valid, mypy issues a syntax error. Most, but not all, syntax errors are *blocking errors*: they can't be ignored with a ``# type: ignore`` comment. +.. _code-misc: + Miscellaneous checks [misc] --------------------------- diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index 8be2ac0b1d73..11f463f93018 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -15,6 +15,8 @@ error codes that are enabled by default. options by using a :ref:`configuration file ` or :ref:`command-line options `. +.. _code-type-arg: + Check that type arguments exist [type-arg] ------------------------------------------ @@ -34,6 +36,8 @@ Example: def remove_dups(items: list) -> list: ... +.. _code-no-untyped-def: + Check that every function has an annotation [no-untyped-def] ------------------------------------------------------------ @@ -62,6 +66,8 @@ Example: def __init__(self) -> None: self.value = 0 +.. _code-redundant-cast: + Check that cast is not redundant [redundant-cast] ------------------------------------------------- @@ -82,6 +88,8 @@ Example: # Error: Redundant cast to "int" [redundant-cast] return cast(int, x) +.. _code-redundant-self: + Check that methods do not have redundant Self annotations [redundant-self] -------------------------------------------------------------------------- @@ -104,6 +112,8 @@ Example: def copy(self: Self) -> Self: return type(self)() +.. _code-comparison-overlap: + Check that comparisons are overlapping [comparison-overlap] ----------------------------------------------------------- @@ -135,6 +145,8 @@ literal: def is_magic(x: bytes) -> bool: return x == b'magic' # OK +.. _code-no-untyped-call: + Check that no untyped functions are called [no-untyped-call] ------------------------------------------------------------ @@ -154,6 +166,7 @@ Example: def bad(): ... +.. _code-no-any-return: Check that function does not return Any value [no-any-return] ------------------------------------------------------------- @@ -175,6 +188,8 @@ Example: # Error: Returning Any from function declared to return "str" [no-any-return] return fields(x)[0] +.. 
_code-no-any-unimported: + Check that types have no Any components due to missing imports [no-any-unimported] ---------------------------------------------------------------------------------- @@ -195,6 +210,8 @@ that ``Cat`` falls back to ``Any`` in a type annotation: def feed(cat: Cat) -> None: ... +.. _code-unreachable: + Check that statement or expression is unreachable [unreachable] --------------------------------------------------------------- @@ -214,6 +231,8 @@ incorrect control flow or conditional checks that are accidentally always true o # Error: Statement is unreachable [unreachable] print('unreachable') +.. _code-redundant-expr: + Check that expression is redundant [redundant-expr] --------------------------------------------------- @@ -236,6 +255,8 @@ mypy generates an error if it thinks that an expression is redundant. [i for i in range(x) if isinstance(i, int)] +.. _code-truthy-bool: + Check that expression is not implicitly true in boolean context [truthy-bool] ----------------------------------------------------------------------------- @@ -259,6 +280,7 @@ Using an iterable value in a boolean context has a separate error code if foo: ... +.. _code-truthy-iterable: Check that iterable is not implicitly true in boolean context [truthy-iterable] ------------------------------------------------------------------------------- @@ -286,8 +308,7 @@ items`` check is actually valid. If that is the case, it is recommended to annotate ``items`` as ``Collection[int]`` instead of ``Iterable[int]``. - -.. _ignore-without-code: +.. _code-ignore-without-code: Check that ``# type: ignore`` include an error code [ignore-without-code] -------------------------------------------------------------------------- @@ -319,6 +340,8 @@ Example: # Error: "Foo" has no attribute "nme"; maybe "name"? f.nme = 42 # type: ignore[assignment] +.. _code-unused-awaitable: + Check that awaitable return value is used [unused-awaitable] ------------------------------------------------------------ @@ -348,6 +371,8 @@ silence the error: async def g() -> None: _ = asyncio.create_task(f()) # No error +.. _code-unused-ignore: + Check that ``# type: ignore`` comment is used [unused-ignore] ------------------------------------------------------------- diff --git a/docs/source/error_codes.rst b/docs/source/error_codes.rst index c8a2728b5697..65ae0e5816e8 100644 --- a/docs/source/error_codes.rst +++ b/docs/source/error_codes.rst @@ -32,7 +32,7 @@ or config ``hide_error_codes = True`` to hide error codes. Error codes are shown prog.py:1: error: "str" has no attribute "trim" [attr-defined] It's also possible to require error codes for ``type: ignore`` comments. -See :ref:`ignore-without-code` for more information. +See :ref:`ignore-without-code <code-ignore-without-code>` for more information. ..
_silence-error-codes: diff --git a/docs/source/html_builder.py b/docs/source/html_builder.py new file mode 100644 index 000000000000..405b80ac53d2 --- /dev/null +++ b/docs/source/html_builder.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +import json +import os +import textwrap +from pathlib import Path +from typing import Any + +from sphinx.addnodes import document +from sphinx.application import Sphinx +from sphinx.builders.html import StandaloneHTMLBuilder + + +class MypyHTMLBuilder(StandaloneHTMLBuilder): + def __init__(self, app: Sphinx) -> None: + super().__init__(app) + self._ref_to_doc = {} + + def write_doc(self, docname: str, doctree: document) -> None: + super().write_doc(docname, doctree) + self._ref_to_doc.update({_id: docname for _id in doctree.ids}) + + def _verify_error_codes(self) -> None: + from mypy.errorcodes import error_codes + + known_missing = { + # TODO: fix these before next release + "annotation-unchecked", + "empty-body", + "possibly-undefined", + "str-format", + "top-level-await", + } + missing_error_codes = { + c + for c in error_codes + if f"code-{c}" not in self._ref_to_doc and c not in known_missing + } + if missing_error_codes: + raise ValueError( + f"Some error codes are not documented: {', '.join(sorted(missing_error_codes))}" + ) + + def _write_ref_redirector(self) -> None: + if os.getenv("VERIFY_MYPY_ERROR_CODES"): + self._verify_error_codes() + p = Path(self.outdir) / "_refs.html" + data = f""" + + + + + + """ + p.write_text(textwrap.dedent(data)) + + def finish(self) -> None: + super().finish() + self._write_ref_redirector() + + +def setup(app: Sphinx) -> dict[str, Any]: + app.add_builder(MypyHTMLBuilder, override=True) + + return {"version": "0.1", "parallel_read_safe": True, "parallel_write_safe": True} diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index fc6cbc1d88e7..a8aca425696a 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -179,10 +179,10 @@ def main() -> None: print("Created typeshed sync commit.") commits_to_cherry_pick = [ - "c844270a4", # LiteralString reverts - "9ebe5fd49", # sum reverts - "d1987191f", # ctypes reverts - "b1761f4c9", # ParamSpec for functools.wraps + "6f913a148", # LiteralString reverts + "475a46a78", # sum reverts + "f5e5c117d", # ctypes reverts + "9f3bbbeb1", # ParamSpec for functools.wraps ] for commit in commits_to_cherry_pick: subprocess.run(["git", "cherry-pick", commit], check=True) diff --git a/mypy/build.py b/mypy/build.py index c239afb56236..7913eae9c6ed 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -116,7 +116,10 @@ "types", "typing_extensions", "mypy_extensions", - "_importlib_modulespec", + "_typeshed", + "_collections_abc", + "collections", + "collections.abc", "sys", "abc", } @@ -659,8 +662,6 @@ def __init__( for module in CORE_BUILTIN_MODULES: if options.use_builtins_fixtures: continue - if module == "_importlib_modulespec": - continue path = self.find_module_cache.find_module(module) if not isinstance(path, str): raise CompileError( @@ -2637,7 +2638,7 @@ def find_module_and_diagnose( result.endswith(".pyi") # Stubs are always normal and not options.follow_imports_for_stubs # except when they aren't ) - or id in mypy.semanal_main.core_modules # core is always normal + or id in CORE_BUILTIN_MODULES # core is always normal ): follow_imports = "normal" if skip_diagnose: diff --git a/mypy/checker.py b/mypy/checker.py index c1c31538b7de..1026376cce63 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4270,6 +4270,7 @@ def check_return_stmt(self, s: ReturnStmt) 
-> None: isinstance(return_type, Instance) and return_type.type.fullname == "builtins.object" ) + and not is_lambda ): self.msg.incorrectly_returning_any(return_type, s) return @@ -6793,6 +6794,9 @@ def has_valid_attribute(self, typ: Type, name: str) -> bool: ) return not watcher.has_new_errors() + def get_expression_type(self, node: Expression, type_context: Type | None = None) -> Type: + return self.expr_checker.accept(node, type_context=type_context) + class CollectArgTypeVarTypes(TypeTraverserVisitor): """Collects the non-nested argument types in a set.""" diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index cd0ff1100183..4b204a80c130 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -5197,6 +5197,15 @@ def visit_temp_node(self, e: TempNode) -> Type: return e.type def visit_type_var_expr(self, e: TypeVarExpr) -> Type: + p_default = get_proper_type(e.default) + if not ( + isinstance(p_default, AnyType) + and p_default.type_of_any == TypeOfAny.from_omitted_generics + ): + if not is_subtype(p_default, e.upper_bound): + self.chk.fail("TypeVar default must be a subtype of the bound type", e) + if e.values and not any(p_default == value for value in e.values): + self.chk.fail("TypeVar default must be one of the constraint types", e) return AnyType(TypeOfAny.special_form) def visit_paramspec_expr(self, e: ParamSpecExpr) -> Type: diff --git a/mypy/checkmember.py b/mypy/checkmember.py index c2c6b3555805..7af61a532b7b 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -388,7 +388,7 @@ def analyze_type_callable_member_access(name: str, typ: FunctionLike, mx: Member # See https://github.com/python/mypy/pull/1787 for more info. # TODO: do not rely on same type variables being present in all constructor overloads. result = analyze_class_attribute_access( - ret_type, name, mx, original_vars=typ.items[0].variables + ret_type, name, mx, original_vars=typ.items[0].variables, mcs_fallback=typ.fallback ) if result: return result @@ -434,17 +434,21 @@ def analyze_type_type_member_access( if isinstance(typ.item.item, Instance): item = typ.item.item.type.metaclass_type ignore_messages = False + + if item is not None: + fallback = item.type.metaclass_type or fallback + if item and not mx.is_operator: # See comment above for why operators are skipped - result = analyze_class_attribute_access(item, name, mx, override_info) + result = analyze_class_attribute_access( + item, name, mx, mcs_fallback=fallback, override_info=override_info + ) if result: if not (isinstance(get_proper_type(result), AnyType) and item.type.fallback_to_any): return result else: # We don't want errors on metaclass lookup for classes with Any fallback ignore_messages = True - if item is not None: - fallback = item.type.metaclass_type or fallback with mx.msg.filter_errors(filter_errors=ignore_messages): return _analyze_member_access(name, fallback, mx, override_info) @@ -893,6 +897,8 @@ def analyze_class_attribute_access( itype: Instance, name: str, mx: MemberContext, + *, + mcs_fallback: Instance, override_info: TypeInfo | None = None, original_vars: Sequence[TypeVarLikeType] | None = None, ) -> Type | None: @@ -919,6 +925,22 @@ def analyze_class_attribute_access( return apply_class_attr_hook(mx, hook, AnyType(TypeOfAny.special_form)) return None + if ( + isinstance(node.node, Var) + and not node.node.is_classvar + and not hook + and mcs_fallback.type.get(name) + ): + # If the same attribute is declared on the metaclass and the class but with different types, + # and the attribute on the class is not a ClassVar, + # 
the type of the attribute on the metaclass should take priority + over the type of the attribute on the class, + when the attribute is being accessed from the class object itself. + # + # Return `None` here to signify that the name should be looked up + # on the class object itself rather than the instance. + return None + is_decorated = isinstance(node.node, Decorator) is_method = is_decorated or isinstance(node.node, FuncBase) if mx.is_lvalue: diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index c742f3116402..54544f4c01ce 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -330,7 +330,7 @@ def cmd_run( header=argparse.SUPPRESS, ) # Signal that we need to restart if the options have changed - if self.options_snapshot != options.snapshot(): + if not options.compare_stable(self.options_snapshot): return {"restart": "configuration changed"} if __version__ != version: return {"restart": "mypy version changed"} @@ -857,6 +857,21 @@ def _find_changed( assert path removed.append((source.module, path)) + # Always add modules that were (re-)added, since they may be detected as not changed by + # fswatcher (if they were actually not changed), but they may still need to be checked + # in case they had errors before they were deleted from sources on previous runs. + previous_modules = {source.module for source in self.previous_sources} + changed_set = set(changed) + changed.extend( + [ + (source.module, source.path) + for source in sources + if source.path + and source.module not in previous_modules + and (source.module, source.path) not in changed_set + ] + ) + # Find anything that has had its module path change because of added or removed __init__s last = {s.path: s.module for s in self.previous_sources} for s in sources: diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 50a82be9816d..e87b04b6f473 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -37,6 +37,14 @@ def __init__( def __str__(self) -> str: return f"<ErrorCode {self.code}>" + def __eq__(self, other: object) -> bool: + if not isinstance(other, ErrorCode): + return False + return self.code == other.code + + def __hash__(self) -> int: + return hash((self.code,)) + ATTR_DEFINED: Final = ErrorCode("attr-defined", "Check that attribute exists", "General") NAME_DEFINED: Final = ErrorCode("name-defined", "Check that name is defined", "General") diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 776d0bfef213..c5164d48fd13 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -181,7 +181,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: INVALID_TYPEVAR_ARG_BOUND: Final = 'Type argument {} of "{}" must be a subtype of {}' INVALID_TYPEVAR_ARG_VALUE: Final = 'Invalid type argument value for "{}"' TYPEVAR_VARIANCE_DEF: Final = 'TypeVar "{}" may only be a literal bool' -TYPEVAR_BOUND_MUST_BE_TYPE: Final = 'TypeVar "bound" must be a type' +TYPEVAR_ARG_MUST_BE_TYPE: Final = '{} "{}" must be a type' TYPEVAR_UNEXPECTED_ARGUMENT: Final = 'Unexpected argument to "TypeVar()"' UNBOUND_TYPEVAR: Final = ( "A function returning TypeVar should receive at least " diff --git a/mypy/messages.py b/mypy/messages.py index a732d612123c..9d703a1a974a 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2437,7 +2437,7 @@ def format_literal_value(typ: LiteralType) -> str: if isinstance(typ, Instance): itype = typ # Get the short name of the type.
- if itype.type.fullname in ("types.ModuleType", "_importlib_modulespec.ModuleType"): + if itype.type.fullname == "types.ModuleType": # Make some common error messages simpler and tidier. base_str = "Module" if itype.extra_attrs and itype.extra_attrs.mod_name and module_names: @@ -2989,8 +2989,9 @@ def _real_quick_ratio(a: str, b: str) -> float: def best_matches(current: str, options: Collection[str], n: int) -> list[str]: + if not current: + return [] # narrow down options cheaply - assert current options = [o for o in options if _real_quick_ratio(current, o) > 0.75] if len(options) >= 50: options = [o for o in options if abs(len(o) - len(current)) <= 1] diff --git a/mypy/nodes.py b/mypy/nodes.py index 8457e39b6aa1..4e29a434d7fe 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2493,7 +2493,7 @@ class TypeVarExpr(TypeVarLikeExpr): __slots__ = ("values",) - __match_args__ = ("name", "values", "upper_bound") + __match_args__ = ("name", "values", "upper_bound", "default") # Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. @@ -2541,7 +2541,7 @@ def deserialize(cls, data: JsonDict) -> TypeVarExpr: class ParamSpecExpr(TypeVarLikeExpr): __slots__ = () - __match_args__ = ("name", "upper_bound") + __match_args__ = ("name", "upper_bound", "default") def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_paramspec_expr(self) @@ -2575,7 +2575,7 @@ class TypeVarTupleExpr(TypeVarLikeExpr): tuple_fallback: mypy.types.Instance - __match_args__ = ("name", "upper_bound") + __match_args__ = ("name", "upper_bound", "default") def __init__( self, diff --git a/mypy/options.py b/mypy/options.py index 45591597ba69..2785d2034c54 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -373,7 +373,7 @@ def use_or_syntax(self) -> bool: def new_semantic_analyzer(self) -> bool: return True - def snapshot(self) -> object: + def snapshot(self) -> dict[str, object]: """Produce a comparable snapshot of this Option""" # Under mypyc, we don't have a __dict__, so we need to do worse things. d = dict(getattr(self, "__dict__", ())) @@ -388,6 +388,7 @@ def __repr__(self) -> str: return f"Options({pprint.pformat(self.snapshot())})" def apply_changes(self, changes: dict[str, object]) -> Options: + # Note: effects of this method *must* be idempotent. new_options = Options() # Under mypyc, we don't have a __dict__, so we need to do worse things. replace_object_state(new_options, self, copy_dict=True) @@ -413,6 +414,17 @@ def apply_changes(self, changes: dict[str, object]) -> Options: return new_options + def compare_stable(self, other_snapshot: dict[str, object]) -> bool: + """Compare options in a way that is stable for snapshot() -> apply_changes() roundtrip. + + This is needed because apply_changes() has non-trivial effects for some flags, so + Options().apply_changes(options.snapshot()) may result in a (slightly) different object. 
+ """ + return ( + Options().apply_changes(self.snapshot()).snapshot() + == Options().apply_changes(other_snapshot).snapshot() + ) + def build_per_module_cache(self) -> None: self._per_module_cache = {} diff --git a/mypy/plugin.py b/mypy/plugin.py index cf124b45d04f..4d62c2bd184b 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -250,6 +250,11 @@ def named_generic_type(self, name: str, args: list[Type]) -> Instance: """Construct an instance of a builtin type with given type arguments.""" raise NotImplementedError + @abstractmethod + def get_expression_type(self, node: Expression, type_context: Type | None = None) -> Type: + """Checks the type of the given expression.""" + raise NotImplementedError + @trait class SemanticAnalyzerPluginInterface: diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index afd9423d6820..e05a8e44b2f8 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -9,7 +9,6 @@ import mypy.plugin # To avoid circular imports. from mypy.applytype import apply_generic_arguments -from mypy.checker import TypeChecker from mypy.errorcodes import LITERAL_REQ from mypy.expandtype import expand_type, expand_type_by_instance from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type @@ -1048,13 +1047,7 @@ def evolve_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> Callabl return ctx.default_signature # leave it to the type checker to complain inst_arg = ctx.args[0][0] - - # - assert isinstance(ctx.api, TypeChecker) - inst_type = ctx.api.expr_checker.accept(inst_arg) - # - - inst_type = get_proper_type(inst_type) + inst_type = get_proper_type(ctx.api.get_expression_type(inst_arg)) inst_type_str = format_type_bare(inst_type, ctx.api.options) attr_types = _get_expanded_attr_types(ctx, inst_type, inst_type, inst_type) @@ -1074,14 +1067,10 @@ def evolve_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> Callabl def fields_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> CallableType: """Provide the signature for `attrs.fields`.""" - if not ctx.args or len(ctx.args) != 1 or not ctx.args[0] or not ctx.args[0][0]: + if len(ctx.args) != 1 or len(ctx.args[0]) != 1: return ctx.default_signature - # - assert isinstance(ctx.api, TypeChecker) - inst_type = ctx.api.expr_checker.accept(ctx.args[0][0]) - # - proper_type = get_proper_type(inst_type) + proper_type = get_proper_type(ctx.api.get_expression_type(ctx.args[0][0])) # fields(Any) -> Any, fields(type[Any]) -> Any if ( @@ -1098,7 +1087,7 @@ def fields_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> Callabl inner = get_proper_type(proper_type.upper_bound) if isinstance(inner, Instance): # We need to work arg_types to compensate for the attrs stubs. - arg_types = [inst_type] + arg_types = [proper_type] cls = inner.type elif isinstance(proper_type, CallableType): cls = proper_type.type_object() diff --git a/mypy/semanal.py b/mypy/semanal.py index 8934dc9321b7..073bde661617 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -375,7 +375,7 @@ class SemanticAnalyzer( missing_names: list[set[str]] # Callbacks that will be called after semantic analysis to tweak things. patches: list[tuple[int, Callable[[], None]]] - loop_depth = 0 # Depth of breakable loops + loop_depth: list[int] # Depth of breakable loops cur_mod_id = "" # Current module id (or None) (phase 2) _is_stub_file = False # Are we analyzing a stub file? _is_typeshed_stub_file = False # Are we analyzing a typeshed stub file? 
@@ -428,7 +428,7 @@ def __init__( self.tvar_scope = TypeVarLikeScope() self.function_stack = [] self.block_depth = [0] - self.loop_depth = 0 + self.loop_depth = [0] self.errors = errors self.modules = modules self.msg = MessageBuilder(errors, modules) @@ -1337,6 +1337,8 @@ def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) - first_item.var.is_settable_property = True # Get abstractness from the original definition. item.func.abstract_status = first_item.func.abstract_status + if node.name == "deleter": + item.func.abstract_status = first_item.func.abstract_status else: self.fail( f"Only supported top decorator is @{first_item.func.name}.setter", item @@ -1808,12 +1810,14 @@ def enter_class(self, info: TypeInfo) -> None: self.locals.append(None) # Add class scope self.is_comprehension_stack.append(False) self.block_depth.append(-1) # The class body increments this to 0 + self.loop_depth.append(0) self._type = info self.missing_names.append(set()) def leave_class(self) -> None: """Restore analyzer state.""" self.block_depth.pop() + self.loop_depth.pop() self.locals.pop() self.is_comprehension_stack.pop() self._type = self.type_stack.pop() @@ -2189,6 +2193,10 @@ def configure_base_classes( if not self.verify_base_classes(defn): self.set_dummy_mro(defn.info) return + if not self.verify_duplicate_base_classes(defn): + # We don't want to block the typechecking process, + # so, we just insert `Any` as the base class and show an error. + self.set_any_mro(defn.info) self.calculate_class_mro(defn, self.object_type) def configure_tuple_base_class(self, defn: ClassDef, base: TupleType) -> Instance: @@ -2218,6 +2226,11 @@ def set_dummy_mro(self, info: TypeInfo) -> None: info.mro = [info, self.object_type().type] info.bad_mro = True + def set_any_mro(self, info: TypeInfo) -> None: + # Give it an MRO consisting direct `Any` subclass. + info.fallback_to_any = True + info.mro = [info, self.object_type().type] + def calculate_class_mro( self, defn: ClassDef, obj_type: Callable[[], Instance] | None = None ) -> None: @@ -2298,12 +2311,14 @@ def verify_base_classes(self, defn: ClassDef) -> bool: if self.is_base_class(info, baseinfo): self.fail("Cycle in inheritance hierarchy", defn) cycle = True - dup = find_duplicate(info.direct_base_classes()) - if dup: - self.fail(f'Duplicate base class "{dup.name}"', defn, blocker=True) - return False return not cycle + def verify_duplicate_base_classes(self, defn: ClassDef) -> bool: + dup = find_duplicate(defn.info.direct_base_classes()) + if dup: + self.fail(f'Duplicate base class "{dup.name}"', defn) + return not dup + def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool: """Determine if t is a base class of s (but do not use mro).""" # Search the base class graph for t, starting from s. 
@@ -3054,6 +3069,12 @@ def analyze_namedtuple_assign(self, s: AssignmentStmt) -> bool: if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], (NameExpr, MemberExpr)): return False lvalue = s.lvalues[0] + if isinstance(lvalue, MemberExpr): + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr): + fullname = s.rvalue.callee.fullname + if fullname == "collections.namedtuple" or fullname in TYPED_NAMEDTUPLE_NAMES: + self.fail("NamedTuple type as an attribute is not supported", lvalue) + return False name = lvalue.name namespace = self.qualified_name(name) with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): @@ -3062,9 +3083,6 @@ def analyze_namedtuple_assign(self, s: AssignmentStmt) -> bool: ) if internal_name is None: return False - if isinstance(lvalue, MemberExpr): - self.fail("NamedTuple type as an attribute is not supported", lvalue) - return False if internal_name != name: self.fail( 'First argument to namedtuple() should be "{}", not "{}"'.format( @@ -3208,7 +3226,7 @@ def unwrap_final(self, s: AssignmentStmt) -> bool: if lval.is_new_def: lval.is_inferred_def = s.type is None - if self.loop_depth > 0: + if self.loop_depth[-1] > 0: self.fail("Cannot use Final inside a loop", s) if self.type and self.type.is_protocol: self.msg.protocol_members_cant_be_final(s) @@ -4124,28 +4142,15 @@ def process_typevar_parameters( if has_values: self.fail("TypeVar cannot have both values and an upper bound", context) return None - try: - # We want to use our custom error message below, so we suppress - # the default error message for invalid types here. - analyzed = self.expr_to_analyzed_type( - param_value, allow_placeholder=True, report_invalid_types=False - ) - if analyzed is None: - # Type variables are special: we need to place them in the symbol table - # soon, even if upper bound is not ready yet. Otherwise avoiding - # a "deadlock" in this common pattern would be tricky: - # T = TypeVar('T', bound=Custom[Any]) - # class Custom(Generic[T]): - # ... - analyzed = PlaceholderType(None, [], context.line) - upper_bound = get_proper_type(analyzed) - if isinstance(upper_bound, AnyType) and upper_bound.is_from_error: - self.fail(message_registry.TYPEVAR_BOUND_MUST_BE_TYPE, param_value) - # Note: we do not return 'None' here -- we want to continue - # using the AnyType as the upper bound. - except TypeTranslationError: - self.fail(message_registry.TYPEVAR_BOUND_MUST_BE_TYPE, param_value) + tv_arg = self.get_typevarlike_argument("TypeVar", param_name, param_value, context) + if tv_arg is None: return None + upper_bound = tv_arg + elif param_name == "default": + tv_arg = self.get_typevarlike_argument( + "TypeVar", param_name, param_value, context, allow_unbound_tvars=True + ) + default = tv_arg or AnyType(TypeOfAny.from_error) elif param_name == "values": # Probably using obsolete syntax with values=(...). Explain the current syntax. self.fail('TypeVar "values" argument not supported', context) @@ -4173,6 +4178,52 @@ def process_typevar_parameters( variance = INVARIANT return variance, upper_bound, default + def get_typevarlike_argument( + self, + typevarlike_name: str, + param_name: str, + param_value: Expression, + context: Context, + *, + allow_unbound_tvars: bool = False, + allow_param_spec_literals: bool = False, + report_invalid_typevar_arg: bool = True, + ) -> ProperType | None: + try: + # We want to use our custom error message below, so we suppress + # the default error message for invalid types here. 
+ analyzed = self.expr_to_analyzed_type( + param_value, + allow_placeholder=True, + report_invalid_types=False, + allow_unbound_tvars=allow_unbound_tvars, + allow_param_spec_literals=allow_param_spec_literals, + ) + if analyzed is None: + # Type variables are special: we need to place them in the symbol table + # soon, even if upper bound is not ready yet. Otherwise avoiding + # a "deadlock" in this common pattern would be tricky: + # T = TypeVar('T', bound=Custom[Any]) + # class Custom(Generic[T]): + # ... + analyzed = PlaceholderType(None, [], context.line) + typ = get_proper_type(analyzed) + if report_invalid_typevar_arg and isinstance(typ, AnyType) and typ.is_from_error: + self.fail( + message_registry.TYPEVAR_ARG_MUST_BE_TYPE.format(typevarlike_name, param_name), + param_value, + ) + # Note: we do not return 'None' here -- we want to continue + # using the AnyType. + return typ + except TypeTranslationError: + if report_invalid_typevar_arg: + self.fail( + message_registry.TYPEVAR_ARG_MUST_BE_TYPE.format(typevarlike_name, param_name), + param_value, + ) + return None + def extract_typevarlike_name(self, s: AssignmentStmt, call: CallExpr) -> str | None: if not call: return None @@ -4205,13 +4256,50 @@ def process_paramspec_declaration(self, s: AssignmentStmt) -> bool: if name is None: return False - # ParamSpec is different from a regular TypeVar: - # arguments are not semantically valid. But, allowed in runtime. - # So, we need to warn users about possible invalid usage. - if len(call.args) > 1: - self.fail("Only the first argument to ParamSpec has defined semantics", s) + n_values = call.arg_kinds[1:].count(ARG_POS) + if n_values != 0: + self.fail('Too many positional arguments for "ParamSpec"', s) default: Type = AnyType(TypeOfAny.from_omitted_generics) + for param_value, param_name in zip( + call.args[1 + n_values :], call.arg_names[1 + n_values :] + ): + if param_name == "default": + tv_arg = self.get_typevarlike_argument( + "ParamSpec", + param_name, + param_value, + s, + allow_unbound_tvars=True, + allow_param_spec_literals=True, + report_invalid_typevar_arg=False, + ) + default = tv_arg or AnyType(TypeOfAny.from_error) + if isinstance(tv_arg, Parameters): + for i, arg_type in enumerate(tv_arg.arg_types): + typ = get_proper_type(arg_type) + if isinstance(typ, AnyType) and typ.is_from_error: + self.fail( + f"Argument {i} of ParamSpec default must be a type", param_value + ) + elif ( + isinstance(default, AnyType) + and default.is_from_error + or not isinstance(default, (AnyType, UnboundType)) + ): + self.fail( + "The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec", + param_value, + ) + default = AnyType(TypeOfAny.from_error) + else: + # ParamSpec is different from a regular TypeVar: + # arguments are not semantically valid. But, allowed in runtime. + # So, we need to warn users about possible invalid usage. + self.fail( + "The variance and bound arguments to ParamSpec do not have defined semantics yet", + s, + ) # PEP 612 reserves the right to define bound, covariant and contravariant arguments to # ParamSpec in a later PEP. 
If and when that happens, we should do something @@ -4245,10 +4333,32 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: if not call: return False - if len(call.args) > 1: - self.fail("Only the first argument to TypeVarTuple has defined semantics", s) + n_values = call.arg_kinds[1:].count(ARG_POS) + if n_values != 0: + self.fail('Too many positional arguments for "TypeVarTuple"', s) default: Type = AnyType(TypeOfAny.from_omitted_generics) + for param_value, param_name in zip( + call.args[1 + n_values :], call.arg_names[1 + n_values :] + ): + if param_name == "default": + tv_arg = self.get_typevarlike_argument( + "TypeVarTuple", + param_name, + param_value, + s, + allow_unbound_tvars=True, + report_invalid_typevar_arg=False, + ) + default = tv_arg or AnyType(TypeOfAny.from_error) + if not isinstance(default, UnpackType): + self.fail( + "The default argument to TypeVarTuple must be an Unpacked tuple", + param_value, + ) + default = AnyType(TypeOfAny.from_error) + else: + self.fail(f'Unexpected keyword argument "{param_name}" for "TypeVarTuple"', s) if not self.incomplete_feature_enabled(TYPE_VAR_TUPLE, s): return False @@ -4595,9 +4705,9 @@ def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: def visit_while_stmt(self, s: WhileStmt) -> None: self.statement = s s.expr.accept(self) - self.loop_depth += 1 + self.loop_depth[-1] += 1 s.body.accept(self) - self.loop_depth -= 1 + self.loop_depth[-1] -= 1 self.visit_block_maybe(s.else_body) def visit_for_stmt(self, s: ForStmt) -> None: @@ -4619,20 +4729,20 @@ def visit_for_stmt(self, s: ForStmt) -> None: self.store_declared_types(s.index, analyzed) s.index_type = analyzed - self.loop_depth += 1 + self.loop_depth[-1] += 1 self.visit_block(s.body) - self.loop_depth -= 1 + self.loop_depth[-1] -= 1 self.visit_block_maybe(s.else_body) def visit_break_stmt(self, s: BreakStmt) -> None: self.statement = s - if self.loop_depth == 0: + if self.loop_depth[-1] == 0: self.fail('"break" outside loop', s, serious=True, blocker=True) def visit_continue_stmt(self, s: ContinueStmt) -> None: self.statement = s - if self.loop_depth == 0: + if self.loop_depth[-1] == 0: self.fail('"continue" outside loop', s, serious=True, blocker=True) def visit_if_stmt(self, s: IfStmt) -> None: @@ -6127,6 +6237,7 @@ def enter( self.nonlocal_decls.append(set()) # -1 since entering block will increment this to 0. 
self.block_depth.append(-1) + self.loop_depth.append(0) self.missing_names.append(set()) try: yield @@ -6136,6 +6247,7 @@ def enter( self.global_decls.pop() self.nonlocal_decls.pop() self.block_depth.pop() + self.loop_depth.pop() self.missing_names.pop() def is_func_scope(self) -> bool: @@ -6348,6 +6460,8 @@ def expr_to_analyzed_type( report_invalid_types: bool = True, allow_placeholder: bool = False, allow_type_any: bool = False, + allow_unbound_tvars: bool = False, + allow_param_spec_literals: bool = False, ) -> Type | None: if isinstance(expr, CallExpr): # This is a legacy syntax intended mostly for Python 2, we keep it for @@ -6376,6 +6490,8 @@ def expr_to_analyzed_type( report_invalid_types=report_invalid_types, allow_placeholder=allow_placeholder, allow_type_any=allow_type_any, + allow_unbound_tvars=allow_unbound_tvars, + allow_param_spec_literals=allow_param_spec_literals, ) def analyze_type_expr(self, expr: Expression) -> None: diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 4e038cfd75d1..f06faa962b07 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -209,7 +209,12 @@ def test_module(module_name: str) -> Iterator[Error]: except KeyboardInterrupt: raise except BaseException as e: - yield Error([module_name], f"failed to import, {type(e).__name__}: {e}", stub, MISSING) + note = "" + if isinstance(e, ModuleNotFoundError): + note = " Maybe install the runtime package or alter PYTHONPATH?" + yield Error( + [module_name], f"failed to import.{note} {type(e).__name__}: {e}", stub, MISSING + ) return with warnings.catch_warnings(): @@ -668,7 +673,7 @@ def _verify_arg_default_value( def maybe_strip_cls(name: str, args: list[nodes.Argument]) -> list[nodes.Argument]: - if name in ("__init_subclass__", "__class_getitem__"): + if args and name in ("__init_subclass__", "__class_getitem__"): # These are implicitly classmethods. If the stub chooses not to have @classmethod, we # should remove the cls argument if args[0].variable.name == "cls": diff --git a/mypy/test/data.py b/mypy/test/data.py index daf815dbdbdc..9b3931ee8be6 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -12,11 +12,12 @@ from abc import abstractmethod from dataclasses import dataclass from pathlib import Path -from typing import Any, Iterator, NamedTuple, Pattern, Union +from typing import Any, Iterator, NamedTuple, NoReturn, Pattern, Union from typing_extensions import Final, TypeAlias as _TypeAlias import pytest +from mypy import defaults from mypy.test.config import PREFIX, test_data_prefix, test_temp_dir root_dir = os.path.normpath(PREFIX) @@ -77,11 +78,19 @@ def parse_test_case(case: DataDrivenTestCase) -> None: targets: dict[int, list[str]] = {} # Fine-grained targets (per fine-grained update) test_modules: list[str] = [] # Modules which are deemed "test" (vs "fixture") + def _case_fail(msg: str) -> NoReturn: + pytest.fail(f"{case.file}:{case.line}: {msg}", pytrace=False) + # Process the parsed items. Each item has a header of form [id args], # optionally followed by lines of text. item = first_item = test_items[0] test_modules.append("__main__") for item in test_items[1:]: + + def _item_fail(msg: str) -> NoReturn: + item_abs_line = case.line + item.line - 2 + pytest.fail(f"{case.file}:{item_abs_line}: {msg}", pytrace=False) + if item.id in {"file", "fixture", "outfile", "outfile-re"}: # Record an extra file needed for the test case. 
assert item.arg is not None @@ -132,9 +141,11 @@ def parse_test_case(case: DataDrivenTestCase) -> None: # File/directory to delete during a multi-step test case assert item.arg is not None m = re.match(r"(.*)\.([0-9]+)$", item.arg) - assert m, f"Invalid delete section: {item.arg}" + if m is None: + _item_fail(f"Invalid delete section {item.arg!r}") num = int(m.group(2)) - assert num >= 2, f"Can't delete during step {num}" + if num < 2: + _item_fail(f"Can't delete during step {num}") full = join(base_path, m.group(1)) deleted_paths.setdefault(num, set()).add(full) elif re.match(r"out[0-9]*$", item.id): @@ -150,29 +161,22 @@ def parse_test_case(case: DataDrivenTestCase) -> None: if arg.startswith("version"): compare_op = arg[7:9] if compare_op not in {">=", "=="}: - raise ValueError( - "{}, line {}: Only >= and == version checks are currently supported".format( - case.file, item.line - ) - ) + _item_fail("Only >= and == version checks are currently supported") version_str = arg[9:] try: version = tuple(int(x) for x in version_str.split(".")) except ValueError: - raise ValueError( - '{}, line {}: "{}" is not a valid python version'.format( - case.file, item.line, version_str - ) + _item_fail(f"{version_str!r} is not a valid python version") + if version < defaults.PYTHON3_VERSION: + _item_fail( + f"Version check against {version}; must be >= {defaults.PYTHON3_VERSION}" ) if compare_op == ">=": version_check = sys.version_info >= version elif compare_op == "==": if not 1 < len(version) < 4: - raise ValueError( - "{}, line {}: Only minor or patch version checks " - 'are currently supported with "==": "{}"'.format( - case.file, item.line, version_str - ) + _item_fail( + f'Only minor or patch version checks are currently supported with "==": {version_str!r}' ) version_check = sys.version_info[: len(version)] == version if version_check: @@ -189,10 +193,11 @@ def parse_test_case(case: DataDrivenTestCase) -> None: elif item.id == "triggered" and item.arg is None: triggered = item.data else: - raise ValueError(f"Invalid section header {item.id} in {case.file}:{item.line}") + section_str = item.id + (f" {item.arg}" if item.arg else "") + _item_fail(f"Invalid section header [{section_str}] in case {case.name!r}") if out_section_missing: - raise ValueError(f"{case.file}, line {first_item.line}: Required output section not found") + _case_fail(f"Required output section not found in case {case.name!r}") for passnum in stale_modules.keys(): if passnum not in rechecked_modules: @@ -204,11 +209,7 @@ def parse_test_case(case: DataDrivenTestCase) -> None: and passnum in rechecked_modules and not stale_modules[passnum].issubset(rechecked_modules[passnum]) ): - raise ValueError( - ( - "Stale modules after pass {} must be a subset of rechecked modules ({}:{})" - ).format(passnum, case.file, first_item.line) - ) + _case_fail(f"Stale modules after pass {passnum} must be a subset of rechecked modules") output_inline_start = len(output) input = first_item.data @@ -219,10 +220,7 @@ def parse_test_case(case: DataDrivenTestCase) -> None: seen_files = set() for file, _ in files: if file in seen_files: - raise ValueError( - f"{case.file}, line {first_item.line}: Duplicated filename {file}. Did you include" - " it multiple times?" - ) + _case_fail(f"Duplicated filename {file}. 
Did you include it multiple times?") + seen_files.add(file) @@ -367,12 +365,13 @@ def setup(self) -> None: self.steps = [steps.get(num, []) for num in range(2, max_step + 1)] def teardown(self) -> None: - assert self.old_cwd is not None and self.tmpdir is not None, "test was not properly set up" - os.chdir(self.old_cwd) - try: - self.tmpdir.cleanup() - except OSError: - pass + if self.old_cwd is not None: + os.chdir(self.old_cwd) + if self.tmpdir is not None: + try: + self.tmpdir.cleanup() + except OSError: + pass self.old_cwd = None self.tmpdir = None @@ -634,6 +633,16 @@ def pytest_pycollect_makeitem(collector: Any, name: str, obj: object) -> Any | N return None +_case_name_pattern = re.compile( + r"(?P<name>[a-zA-Z_0-9]+)" + r"(?P<writescache>-writescache)?" + r"(?P<only_when>-only_when_cache|-only_when_nocache)?" + r"(-(?P<platform>posix|windows))?" + r"(?P<skip>-skip)?" + r"(?P<xfail>-xfail)?" +) + + def split_test_cases( parent: DataFileCollector, suite: DataSuite, file: str ) -> Iterator[DataDrivenTestCase]: @@ -644,40 +653,33 @@ def split_test_cases( """ with open(file, encoding="utf-8") as f: data = f.read() - # number of groups in the below regex - NUM_GROUPS = 7 - cases = re.split( - r"^\[case ([a-zA-Z_0-9]+)" - r"(-writescache)?" - r"(-only_when_cache|-only_when_nocache)?" - r"(-posix|-windows)?" - r"(-skip)?" - r"(-xfail)?" - r"\][ \t]*$\n", - data, - flags=re.DOTALL | re.MULTILINE, - ) - line_no = cases[0].count("\n") + 1 + cases = re.split(r"^\[case ([^]+)]+)\][ \t]*$\n", data, flags=re.DOTALL | re.MULTILINE) + cases_iter = iter(cases) + line_no = next(cases_iter).count("\n") + 1 test_names = set() - for i in range(1, len(cases), NUM_GROUPS): - name, writescache, only_when, platform_flag, skip, xfail, data = cases[i : i + NUM_GROUPS] + for case_id in cases_iter: + data = next(cases_iter) + + m = _case_name_pattern.fullmatch(case_id) + if not m: + raise RuntimeError(f"Invalid testcase id {case_id!r}") + name = m.group("name") if name in test_names: raise RuntimeError( 'Found a duplicate test name "{}" in {} on line {}'.format( name, parent.name, line_no ) ) - platform = platform_flag[1:] if platform_flag else None yield DataDrivenTestCase.from_parent( parent=parent, suite=suite, file=file, name=add_test_name_suffix(name, suite.test_name_suffix), - writescache=bool(writescache), - only_when=only_when, - platform=platform, - skip=bool(skip), - xfail=bool(xfail), + writescache=bool(m.group("writescache")), + only_when=m.group("only_when"), + platform=m.group("platform"), + skip=bool(m.group("skip")), + xfail=bool(m.group("xfail")), data=data, line=line_no, ) diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 849ccdc376bd..630ba305f349 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -46,15 +46,9 @@ def run_mypy(args: list[str]) -> None: def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) -> None: """Assert that two string arrays are equal. - We consider "can't" and "cannot" equivalent, by replacing the - former with the latter before comparing. - Display any differences in a human-readable form.
""" actual = clean_up(actual) - actual = [line.replace("can't", "cannot") for line in actual] - expected = [line.replace("can't", "cannot") for line in expected] - if actual != expected: num_skip_start = num_skipped_prefix_lines(expected, actual) num_skip_end = num_skipped_suffix_lines(expected, actual) diff --git a/mypy/test/meta/__init__.py b/mypy/test/meta/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mypy/test/meta/test_parse_data.py b/mypy/test/meta/test_parse_data.py new file mode 100644 index 000000000000..a74b3d71d392 --- /dev/null +++ b/mypy/test/meta/test_parse_data.py @@ -0,0 +1,68 @@ +""" +A "meta test" which tests the parsing of .test files. This is not meant to become exhaustive +but to ensure we maintain a basic level of ergonomics for mypy contributors. +""" +import subprocess +import sys +import textwrap +from pathlib import Path + +from mypy.test.config import test_data_prefix +from mypy.test.helpers import Suite + + +class ParseTestDataSuite(Suite): + def _dedent(self, s: str) -> str: + return textwrap.dedent(s).lstrip() + + def _run_pytest(self, data_suite: str) -> str: + p_test_data = Path(test_data_prefix) + p_root = p_test_data.parent.parent + p = p_test_data / "check-__fixture__.test" + assert not p.exists() + try: + p.write_text(data_suite) + test_nodeid = f"mypy/test/testcheck.py::TypeCheckSuite::{p.name}" + args = [sys.executable, "-m", "pytest", "-n", "0", "-s", test_nodeid] + proc = subprocess.run(args, cwd=p_root, capture_output=True, check=False) + return proc.stdout.decode() + finally: + p.unlink() + + def test_parse_invalid_case(self) -> None: + # Arrange + data = self._dedent( + """ + [case abc] + s: str + [case foo-XFAIL] + s: str + """ + ) + + # Act + actual = self._run_pytest(data) + + # Assert + assert "Invalid testcase id 'foo-XFAIL'" in actual + + def test_parse_invalid_section(self) -> None: + # Arrange + data = self._dedent( + """ + [case abc] + s: str + [unknownsection] + abc + """ + ) + + # Act + actual = self._run_pytest(data) + + # Assert + expected_lineno = data.splitlines().index("[unknownsection]") + 1 + expected = ( + f".test:{expected_lineno}: Invalid section header [unknownsection] in case 'abc'" + ) + assert expected in actual diff --git a/mypy/test/testupdatedata.py b/mypy/test/meta/test_update_data.py similarity index 92% rename from mypy/test/testupdatedata.py rename to mypy/test/meta/test_update_data.py index 2a6d2462f10c..1239679072e6 100644 --- a/mypy/test/testupdatedata.py +++ b/mypy/test/meta/test_update_data.py @@ -1,3 +1,8 @@ +""" +A "meta test" which tests the `--update-data` feature for updating .test files. +Updating the expected output, especially when it's in the form of inline (comment) assertions, +can be brittle, which is why we're "meta-testing" here. +""" import shlex import subprocess import sys @@ -14,7 +19,9 @@ def _run_pytest_update_data(self, data_suite: str, *, max_attempts: int) -> str: Runs a suite of data test cases through 'pytest --update-data' until either tests pass or until a maximum number of attempts (needed for incremental tests). 
""" - p = Path(test_data_prefix) / "check-update-data.test" + p_test_data = Path(test_data_prefix) + p_root = p_test_data.parent.parent + p = p_test_data / "check-__fixture__.test" assert not p.exists() try: p.write_text(textwrap.dedent(data_suite).lstrip()) @@ -26,7 +33,7 @@ def _run_pytest_update_data(self, data_suite: str, *, max_attempts: int) -> str: else: cmd = " ".join(args) for i in range(max_attempts - 1, -1, -1): - res = subprocess.run(args) + res = subprocess.run(args, cwd=p_root) if res.returncode == 0: break print(f"`{cmd}` returned {res.returncode}: {i} attempts remaining") diff --git a/mypy/test/testpep561.py b/mypy/test/testpep561.py index ed8674e8d5bb..b8a11d7fc8af 100644 --- a/mypy/test/testpep561.py +++ b/mypy/test/testpep561.py @@ -47,22 +47,16 @@ def virtualenv(python_executable: str = sys.executable) -> Iterator[tuple[str, s def install_package( - pkg: str, python_executable: str = sys.executable, use_pip: bool = True, editable: bool = False + pkg: str, python_executable: str = sys.executable, editable: bool = False ) -> None: """Install a package from test-data/packages/pkg/""" working_dir = os.path.join(package_path, pkg) with tempfile.TemporaryDirectory() as dir: - if use_pip: - install_cmd = [python_executable, "-m", "pip", "install"] - if editable: - install_cmd.append("-e") - install_cmd.append(".") - else: - install_cmd = [python_executable, "setup.py"] - if editable: - install_cmd.append("develop") - else: - install_cmd.append("install") + install_cmd = [python_executable, "-m", "pip", "install"] + if editable: + install_cmd.append("-e") + install_cmd.append(".") + # Note that newer versions of pip (21.3+) don't # follow this env variable, but this is for compatibility env = {"PIP_BUILD": dir} @@ -82,21 +76,25 @@ def test_pep561(testcase: DataDrivenTestCase) -> None: assert testcase.old_cwd is not None, "test was not properly set up" python = sys.executable + if sys.version_info < (3, 8) and testcase.location[-1] == "testTypedPkgSimpleEditable": + # Python 3.7 doesn't ship with new enough pip to support PEP 660 + # This is a quick hack to skip the test; we'll drop Python 3.7 support soon enough + return + assert python is not None, "Should be impossible" pkgs, pip_args = parse_pkgs(testcase.input[0]) mypy_args = parse_mypy_args(testcase.input[1]) - use_pip = True editable = False for arg in pip_args: - if arg == "no-pip": - use_pip = False - elif arg == "editable": + if arg == "editable": editable = True + else: + raise ValueError(f"Unknown pip argument: {arg}") assert pkgs, "No packages to install for PEP 561 test?" with virtualenv(python) as venv: venv_dir, python_executable = venv for pkg in pkgs: - install_package(pkg, python_executable, use_pip, editable) + install_package(pkg, python_executable, editable) cmd_line = list(mypy_args) has_program = not ("-p" in cmd_line or "--package" in cmd_line) diff --git a/mypy/typeops.py b/mypy/typeops.py index ee544c6740bb..58a641a54ab7 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -76,12 +76,18 @@ def is_recursive_pair(s: Type, t: Type) -> bool: isinstance(get_proper_type(t), (Instance, UnionType)) or isinstance(t, TypeAliasType) and t.is_recursive + # Tuple types are special, they can cause an infinite recursion even if + # the other type is not recursive, because of the tuple fallback that is + # calculated "on the fly". 
+ or isinstance(get_proper_type(s), TupleType) ) if isinstance(t, TypeAliasType) and t.is_recursive: return ( isinstance(get_proper_type(s), (Instance, UnionType)) or isinstance(s, TypeAliasType) and s.is_recursive + # Same as above. + or isinstance(get_proper_type(t), TupleType) ) return False diff --git a/mypy/types.py b/mypy/types.py index 53f21e8c0222..5fbdd385826c 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3049,6 +3049,8 @@ def visit_type_var(self, t: TypeVarType) -> str: s = f"{t.name}`{t.id}" if self.id_mapper and t.upper_bound: s += f"(upper_bound={t.upper_bound.accept(self)})" + if t.has_default(): + s += f" = {t.default.accept(self)}" return s def visit_param_spec(self, t: ParamSpecType) -> str: @@ -3064,6 +3066,8 @@ def visit_param_spec(self, t: ParamSpecType) -> str: s += f"{t.name_with_suffix()}`{t.id}" if t.prefix.arg_types: s += "]" + if t.has_default(): + s += f" = {t.default.accept(self)}" return s def visit_parameters(self, t: Parameters) -> str: @@ -3102,6 +3106,8 @@ def visit_type_var_tuple(self, t: TypeVarTupleType) -> str: else: # Named type variable type. s = f"{t.name}`{t.id}" + if t.has_default(): + s += f" = {t.default.accept(self)}" return s def visit_callable_type(self, t: CallableType) -> str: @@ -3138,6 +3144,8 @@ def visit_callable_type(self, t: CallableType) -> str: if s: s += ", " s += f"*{n}.args, **{n}.kwargs" + if param_spec.has_default(): + s += f" = {param_spec.default.accept(self)}" s = f"({s})" @@ -3156,12 +3164,18 @@ def visit_callable_type(self, t: CallableType) -> str: vals = f"({', '.join(val.accept(self) for val in var.values)})" vs.append(f"{var.name} in {vals}") elif not is_named_instance(var.upper_bound, "builtins.object"): - vs.append(f"{var.name} <: {var.upper_bound.accept(self)}") + vs.append( + f"{var.name} <: {var.upper_bound.accept(self)}{f' = {var.default.accept(self)}' if var.has_default() else ''}" + ) else: - vs.append(var.name) + vs.append( + f"{var.name}{f' = {var.default.accept(self)}' if var.has_default() else ''}" + ) else: - # For other TypeVarLikeTypes, just use the name - vs.append(var.name) + # For other TypeVarLikeTypes, use the name and default + vs.append( + f"{var.name}{f' = {var.default.accept(self)}' if var.has_default() else ''}" + ) s = f"[{', '.join(vs)}] {s}" return f"def {s}" diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS index 86e8da78677c..49433e346765 100644 --- a/mypy/typeshed/stdlib/VERSIONS +++ b/mypy/typeshed/stdlib/VERSIONS @@ -112,7 +112,7 @@ dbm: 2.7- decimal: 2.7- difflib: 2.7- dis: 2.7- -distutils: 2.7- +distutils: 2.7-3.11 distutils.command.bdist_msi: 2.7-3.10 distutils.command.bdist_wininst: 2.7-3.9 doctest: 2.7- @@ -147,7 +147,7 @@ html: 3.0- http: 3.0- imaplib: 2.7- imghdr: 2.7- -imp: 2.7- +imp: 2.7-3.11 importlib: 2.7- importlib.metadata: 3.8- importlib.metadata._meta: 3.10- diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi index 7bc47266d713..05e2a08fdc88 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -1,13 +1,14 @@ import sys +import typing_extensions from typing import Any, ClassVar -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal PyCF_ONLY_AST: Literal[1024] if sys.version_info >= (3, 8): PyCF_TYPE_COMMENTS: Literal[4096] PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] -_Identifier: TypeAlias = str +_Identifier: typing_extensions.TypeAlias = str class AST: if sys.version_info >= (3, 10): @@ -59,31 +60,43 @@ class Expression(mod): class stmt(AST): ... 
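The comments added to is_recursive_pair above are the heart of the change: a tuple type's fallback instance is computed lazily, so comparing it against a recursive alias can loop even when only one side is recursive. A purely illustrative snippet of the kind of annotation involved (the alias name is made up; this is ordinary user code, not mypy internals):

    from typing import Union

    # A recursive alias whose right-hand side is a tuple type; mypy computes the
    # tuple's fallback "on the fly", which is what the TupleType guard above protects.
    Tree = Union[int, tuple["Tree", "Tree"]]

    def leaves(t: Tree) -> list[int]:
        if isinstance(t, int):
            return [t]
        return leaves(t[0]) + leaves(t[1])

    print(leaves((1, (2, 3))))  # [1, 2, 3]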
class FunctionDef(stmt): - if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") + elif sys.version_info >= (3, 10): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") name: _Identifier args: arguments body: list[stmt] decorator_list: list[expr] returns: expr | None + if sys.version_info >= (3, 12): + type_params: list[type_param] class AsyncFunctionDef(stmt): - if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") + elif sys.version_info >= (3, 10): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") name: _Identifier args: arguments body: list[stmt] decorator_list: list[expr] returns: expr | None + if sys.version_info >= (3, 12): + type_params: list[type_param] class ClassDef(stmt): - if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12): + __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") + elif sys.version_info >= (3, 10): __match_args__ = ("name", "bases", "keywords", "body", "decorator_list") name: _Identifier bases: list[expr] keywords: list[keyword] body: list[stmt] decorator_list: list[expr] + if sys.version_info >= (3, 12): + type_params: list[type_param] class Return(stmt): if sys.version_info >= (3, 10): @@ -366,10 +379,10 @@ class Attribute(expr): ctx: expr_context if sys.version_info >= (3, 9): - _Slice: TypeAlias = expr + _Slice: typing_extensions.TypeAlias = expr else: class slice(AST): ... - _Slice: TypeAlias = slice + _Slice: typing_extensions.TypeAlias = slice class Slice(_Slice): if sys.version_info >= (3, 10): @@ -526,7 +539,7 @@ if sys.version_info >= (3, 10): class pattern(AST): ... # Without the alias, Pyright complains variables named pattern are recursively defined - _Pattern: TypeAlias = pattern + _Pattern: typing_extensions.TypeAlias = pattern class match_case(AST): __match_args__ = ("pattern", "guard", "body") @@ -571,3 +584,25 @@ if sys.version_info >= (3, 10): class MatchOr(pattern): __match_args__ = ("patterns",) patterns: list[pattern] + +if sys.version_info >= (3, 12): + class type_param(AST): ... 
+ + class TypeVar(type_param): + __match_args__ = ("name", "bound") + name: _Identifier + bound: expr | None + + class ParamSpec(type_param): + __match_args__ = ("name",) + name: _Identifier + + class TypeVarTuple(type_param): + __match_args__ = ("name",) + name: _Identifier + + class TypeAlias(stmt): + __match_args__ = ("name", "typeparams", "value") + name: Name + type_params: list[type_param] + value: expr diff --git a/mypy/typeshed/stdlib/_csv.pyi b/mypy/typeshed/stdlib/_csv.pyi index c9b9f47e6217..19ea487e1530 100644 --- a/mypy/typeshed/stdlib/_csv.pyi +++ b/mypy/typeshed/stdlib/_csv.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import SupportsWrite from collections.abc import Iterable, Iterator from typing import Any @@ -9,6 +10,9 @@ QUOTE_ALL: Literal[1] QUOTE_MINIMAL: Literal[0] QUOTE_NONE: Literal[3] QUOTE_NONNUMERIC: Literal[2] +if sys.version_info >= (3, 12): + QUOTE_STRINGS: Literal[4] + QUOTE_NOTNULL: Literal[5] # Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC` # However, using literals in situations like these can cause false-positives (see #7258) diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 8e8bcbe84bd4..756ee86d3342 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -22,6 +22,9 @@ RTLD_LOCAL: int if sys.version_info >= (3, 11): CTYPES_MAX_ARGCOUNT: int +if sys.version_info >= (3, 12): + SIZEOF_TIME_T: int + if sys.platform == "win32": # Description, Source, HelpFile, HelpContext, scode _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None] diff --git a/mypy/typeshed/stdlib/_socket.pyi b/mypy/typeshed/stdlib/_socket.pyi index f7b0e6901bf4..7a0ede62838c 100644 --- a/mypy/typeshed/stdlib/_socket.pyi +++ b/mypy/typeshed/stdlib/_socket.pyi @@ -692,3 +692,28 @@ if sys.platform != "win32" or sys.version_info >= (3, 8): def if_nameindex() -> list[tuple[int, str]]: ... def if_nametoindex(__name: str) -> int: ... def if_indextoname(__index: int) -> str: ... + +if sys.version_info >= (3, 12): + IP_PKTINFO: int + IP_UNBLOCK_SOURCE: int + IP_BLOCK_SOURCE: int + IP_ADD_SOURCE_MEMBERSHIP: int + IP_DROP_SOURCE_MEMBERSHIP: int + if sys.platform == "win32": + AF_HYPERV: int + HV_PROTOCOL_RAW: int + HVSOCKET_CONNECT_TIMEOUT: int + HVSOCKET_CONNECT_TIMEOUT_MAX: int + HVSOCKET_CONNECTED_SUSPEND: int + HVSOCKET_ADDRESS_FLAG_PASSTHRU: int + HV_GUID_ZERO: str + HV_GUID_WILDCARD: str + HV_GUID_BROADCAST: str + HV_GUID_CHILDREN: str + HV_GUID_LOOPBACK: str + HV_GUID_PARENT: str + else: + ETHERTYPE_ARP: int + ETHERTYPE_IP: int + ETHERTYPE_IPV6: int + ETHERTYPE_VLAN: int diff --git a/mypy/typeshed/stdlib/_thread.pyi b/mypy/typeshed/stdlib/_thread.pyi index 152362edcaea..dba8664fbf13 100644 --- a/mypy/typeshed/stdlib/_thread.pyi +++ b/mypy/typeshed/stdlib/_thread.pyi @@ -43,3 +43,6 @@ if sys.version_info >= (3, 8): @property def thread(self) -> Thread | None: ... _excepthook: Callable[[_ExceptHookArgs], Any] + +if sys.version_info >= (3, 12): + def daemon_threads_allowed() -> bool: ... 
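The _ast additions above cover PEP 695: FunctionDef, AsyncFunctionDef and ClassDef grow a type_params list, and TypeVar, ParamSpec, TypeVarTuple and TypeAlias become AST nodes of their own. A quick guarded check of what parsing the 3.12 syntax produces:

    import ast
    import sys

    if sys.version_info >= (3, 12):
        fn = ast.parse("def first[T](xs: list[T]) -> T:\n    return xs[0]").body[0]
        assert isinstance(fn, ast.FunctionDef)
        print([tp.name for tp in fn.type_params])     # ['T']

        alias = ast.parse("type Pair[T] = tuple[T, T]").body[0]
        assert isinstance(alias, ast.TypeAlias)
        print(alias.name.id, len(alias.type_params))  # Pair 1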
diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index ca1e61f0f19f..b51d844701ac 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -137,6 +137,34 @@ if sys.platform == "win32": LCMAP_TRADITIONAL_CHINESE: int LCMAP_UPPERCASE: int + if sys.version_info >= (3, 12): + COPYFILE2_CALLBACK_CHUNK_STARTED: Literal[1] + COPYFILE2_CALLBACK_CHUNK_FINISHED: Literal[2] + COPYFILE2_CALLBACK_STREAM_STARTED: Literal[3] + COPYFILE2_CALLBACK_STREAM_FINISHED: Literal[4] + COPYFILE2_CALLBACK_POLL_CONTINUE: Literal[5] + COPYFILE2_CALLBACK_ERROR: Literal[6] + + COPYFILE2_PROGRESS_CONTINUE: Literal[0] + COPYFILE2_PROGRESS_CANCEL: Literal[1] + COPYFILE2_PROGRESS_STOP: Literal[2] + COPYFILE2_PROGRESS_QUIET: Literal[3] + COPYFILE2_PROGRESS_PAUSE: Literal[4] + + COPY_FILE_FAIL_IF_EXISTS: Literal[0x1] + COPY_FILE_RESTARTABLE: Literal[0x2] + COPY_FILE_OPEN_SOURCE_FOR_WRITE: Literal[0x4] + COPY_FILE_ALLOW_DECRYPTED_DESTINATION: Literal[0x8] + COPY_FILE_COPY_SYMLINK: Literal[0x800] + COPY_FILE_NO_BUFFERING: Literal[0x1000] + COPY_FILE_REQUEST_SECURITY_PRIVILEGES: Literal[0x2000] + COPY_FILE_RESUME_FROM_PAUSE: Literal[0x4000] + COPY_FILE_NO_OFFLOAD: Literal[0x40000] + COPY_FILE_REQUEST_COMPRESSED_TRAFFIC: Literal[0x10000000] + + ERROR_ACCESS_DENIED: Literal[5] + ERROR_PRIVILEGE_NOT_HELD: Literal[1314] + def CloseHandle(__handle: int) -> None: ... @overload def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... @@ -224,3 +252,6 @@ if sys.platform == "win32": def GetOverlappedResult(self, __wait: bool) -> tuple[int, int]: ... def cancel(self) -> None: ... def getbuffer(self) -> bytes | None: ... + + if sys.version_info >= (3, 12): + def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ... diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index c986b9cdb082..e41048516dd9 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -97,8 +97,16 @@ class _ActionsContainer: version: str = ..., **kwargs: Any, ) -> Action: ... - def add_argument_group(self, *args: Any, **kwargs: Any) -> _ArgumentGroup: ... - def add_mutually_exclusive_group(self, **kwargs: Any) -> _MutuallyExclusiveGroup: ... + def add_argument_group( + self, + title: str | None = None, + description: str | None = None, + *, + prefix_chars: str = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + ) -> _ArgumentGroup: ... + def add_mutually_exclusive_group(self, *, required: bool = False) -> _MutuallyExclusiveGroup: ... def _add_action(self, action: _ActionT) -> _ActionT: ... def _remove_action(self, action: Action) -> None: ... def _add_container_actions(self, container: _ActionsContainer) -> None: ... @@ -161,9 +169,9 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): add_help: bool = True, allow_abbrev: bool = True, ) -> None: ... - # Ignore errors about overlapping overloads + @overload - def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... # type: ignore[misc] + def parse_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... # type: ignore[misc] @overload def parse_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... @overload @@ -201,16 +209,27 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def print_help(self, file: IO[str] | None = None) -> None: ... def format_usage(self) -> str: ... 
def format_help(self) -> str: ... - def parse_known_args( - self, args: Sequence[str] | None = None, namespace: Namespace | None = None - ) -> tuple[Namespace, list[str]]: ... + @overload + def parse_known_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> tuple[Namespace, list[str]]: ... # type: ignore[misc] + @overload + def parse_known_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ... + @overload + def parse_known_args(self, *, namespace: _N) -> tuple[_N, list[str]]: ... def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... def error(self, message: str) -> NoReturn: ... - def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... - def parse_known_intermixed_args( - self, args: Sequence[str] | None = None, namespace: Namespace | None = None - ) -> tuple[Namespace, list[str]]: ... + @overload + def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... # type: ignore[misc] + @overload + def parse_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... + @overload + def parse_intermixed_args(self, *, namespace: _N) -> _N: ... + @overload + def parse_known_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> tuple[Namespace, list[str]]: ... # type: ignore[misc] + @overload + def parse_known_intermixed_args(self, args: Sequence[str] | None, namespace: _N) -> tuple[_N, list[str]]: ... + @overload + def parse_known_intermixed_args(self, *, namespace: _N) -> tuple[_N, list[str]]: ... # undocumented def _get_optional_actions(self) -> list[Action]: ... def _get_positional_actions(self) -> list[Action]: ... @@ -350,7 +369,14 @@ class _ArgumentGroup(_ActionsContainer): title: str | None _group_actions: list[Action] def __init__( - self, container: _ActionsContainer, title: str | None = None, description: str | None = None, **kwargs: Any + self, + container: _ActionsContainer, + title: str | None = None, + description: str | None = None, + *, + prefix_chars: str = ..., + argument_default: Any = ..., + conflict_handler: str = ..., ) -> None: ... # undocumented diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index ea899e150f97..377138141340 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -3,7 +3,7 @@ import sys from _ast import * from _typeshed import ReadableBuffer, Unused from collections.abc import Iterator -from typing import Any, TypeVar, overload +from typing import Any, TypeVar as _TypeVar, overload from typing_extensions import Literal if sys.version_info >= (3, 8): @@ -168,7 +168,7 @@ class NodeTransformer(NodeVisitor): # The usual return type is AST | None, but Iterable[AST] # is also allowed in some cases -- this needs to be mapped. 
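The argparse changes above replace the **kwargs signatures with the real keyword parameters of add_argument_group() and add_mutually_exclusive_group(), and add overloads so parse_known_args() (and the intermixed variants) keep the type of a caller-supplied namespace. A small usage sketch (the option names are made up):

    import argparse

    class Opts(argparse.Namespace):
        verbose: bool
        level: int

    parser = argparse.ArgumentParser()
    group = parser.add_argument_group(title="output", description="output options")
    group.add_argument("--verbose", action="store_true")
    group.add_argument("--level", type=int, default=0)

    # With the new overloads the result is typed as Opts, not plain Namespace.
    opts, rest = parser.parse_known_args(["--verbose", "--unknown"], Opts())
    print(opts.verbose, opts.level, rest)  # True 0 ['--unknown']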
-_T = TypeVar("_T", bound=AST) +_T = _TypeVar("_T", bound=AST) if sys.version_info >= (3, 8): @overload diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 0676aba1277e..4337a44c7c68 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -53,7 +53,17 @@ from typing import ( # noqa: Y022 overload, type_check_only, ) -from typing_extensions import Concatenate, Literal, ParamSpec, Self, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import ( # type: ignore + Concatenate, + Literal, + ParamSpec, + Self, + SupportsIndex, + TypeAlias, + TypeGuard, + TypeVarTuple, + final, +) if sys.version_info >= (3, 9): from types import GenericAlias @@ -187,6 +197,8 @@ class type: if sys.version_info >= (3, 10): def __or__(self, __value: Any) -> types.UnionType: ... def __ror__(self, __value: Any) -> types.UnionType: ... + if sys.version_info >= (3, 12): + __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] class super: @overload @@ -244,6 +256,9 @@ class int: signed: bool = False, ) -> Self: ... + if sys.version_info >= (3, 12): + def is_integer(self) -> Literal[True]: ... + def __add__(self, __value: int) -> int: ... def __sub__(self, __value: int) -> int: ... def __mul__(self, __value: int) -> int: ... @@ -431,7 +446,7 @@ class str(Sequence[str]): def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def format(self, *args: object, **kwargs: object) -> str: ... + def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def isalnum(self) -> bool: ... @@ -493,7 +508,7 @@ class str(Sequence[str]): def __le__(self, __value: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __value: str) -> bool: ... - def __mod__(self, __value: Any) -> str: ... + def __mod__(self, __value: Any) -> str: ... # type: ignore def __mul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __value: object) -> bool: ... def __rmul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] @@ -874,6 +889,8 @@ class function: if sys.version_info >= (3, 10): @property def __builtins__(self) -> dict[str, Any]: ... + if sys.version_info >= (3, 12): + __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] __module__: str # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any. @@ -1826,6 +1843,8 @@ class ImportError(Exception): name: str | None path: str | None msg: str # undocumented + if sys.version_info >= (3, 12): + name_from: str | None # undocumented class LookupError(Exception): ... class MemoryError(Exception): ... 
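Two of the builtins changes above are 3.12-only runtime additions: int.is_integer() (mirroring float.is_integer()) and the __type_params__ tuple on functions and classes. Guarded sketch:

    import sys

    assert (3.0).is_integer()
    if sys.version_info >= (3, 12):
        assert (3).is_integer()           # new in 3.12; always True for an int

        def identity(x):
            return x

        print(identity.__type_params__)   # () unless declared as `def identity[T](x): ...`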
diff --git a/mypy/typeshed/stdlib/calendar.pyi b/mypy/typeshed/stdlib/calendar.pyi index 255a12d3348a..3f881393e99f 100644 --- a/mypy/typeshed/stdlib/calendar.pyi +++ b/mypy/typeshed/stdlib/calendar.pyi @@ -1,4 +1,5 @@ import datetime +import enum import sys from _typeshed import Unused from collections.abc import Iterable, Sequence @@ -35,6 +36,23 @@ __all__ = [ if sys.version_info >= (3, 10): __all__ += ["FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"] +if sys.version_info >= (3, 12): + __all__ += [ + "Day", + "Month", + "JANUARY", + "FEBRUARY", + "MARCH", + "APRIL", + "MAY", + "JUNE", + "JULY", + "AUGUST", + "SEPTEMBER", + "OCTOBER", + "NOVEMBER", + "DECEMBER", + ] _LocaleType: TypeAlias = tuple[str | None, str | None] @@ -134,12 +152,55 @@ day_abbr: Sequence[str] month_name: Sequence[str] month_abbr: Sequence[str] -MONDAY: Literal[0] -TUESDAY: Literal[1] -WEDNESDAY: Literal[2] -THURSDAY: Literal[3] -FRIDAY: Literal[4] -SATURDAY: Literal[5] -SUNDAY: Literal[6] +if sys.version_info >= (3, 12): + class Month(enum.IntEnum): + JANUARY: Literal[1] + FEBRUARY: Literal[2] + MARCH: Literal[3] + APRIL: Literal[4] + MAY: Literal[5] + JUNE: Literal[6] + JULY: Literal[7] + AUGUST: Literal[8] + SEPTEMBER: Literal[9] + OCTOBER: Literal[10] + NOVEMBER: Literal[11] + DECEMBER: Literal[12] + JANUARY = Month.JANUARY + FEBRUARY = Month.FEBRUARY + MARCH = Month.MARCH + APRIL = Month.APRIL + MAY = Month.MAY + JUNE = Month.JUNE + JULY = Month.JULY + AUGUST = Month.AUGUST + SEPTEMBER = Month.SEPTEMBER + OCTOBER = Month.OCTOBER + NOVEMBER = Month.NOVEMBER + DECEMBER = Month.DECEMBER + + class Day(enum.IntEnum): + MONDAY: Literal[0] + TUESDAY: Literal[1] + WEDNESDAY: Literal[2] + THURSDAY: Literal[3] + FRIDAY: Literal[4] + SATURDAY: Literal[5] + SUNDAY: Literal[6] + MONDAY = Day.MONDAY + TUESDAY = Day.TUESDAY + WEDNESDAY = Day.WEDNESDAY + THURSDAY = Day.THURSDAY + FRIDAY = Day.FRIDAY + SATURDAY = Day.SATURDAY + SUNDAY = Day.SUNDAY +else: + MONDAY: Literal[0] + TUESDAY: Literal[1] + WEDNESDAY: Literal[2] + THURSDAY: Literal[3] + FRIDAY: Literal[4] + SATURDAY: Literal[5] + SUNDAY: Literal[6] EPOCH: Literal[1970] diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi index 59f2e7a3c96b..139ba7af2208 100644 --- a/mypy/typeshed/stdlib/csv.pyi +++ b/mypy/typeshed/stdlib/csv.pyi @@ -21,6 +21,9 @@ from _csv import ( unregister_dialect as unregister_dialect, writer as writer, ) + +if sys.version_info >= (3, 12): + from _csv import QUOTE_STRINGS as QUOTE_STRINGS, QUOTE_NOTNULL as QUOTE_NOTNULL from _typeshed import SupportsWrite from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence from typing import Any, Generic, TypeVar, overload @@ -57,6 +60,8 @@ __all__ = [ "DictWriter", "unix_dialect", ] +if sys.version_info >= (3, 12): + __all__ += ["QUOTE_STRINGS", "QUOTE_NOTNULL"] _T = TypeVar("_T") diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 7a185a5b523e..b14fb93c8163 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -181,6 +181,9 @@ class c_bool(_SimpleCData[bool]): if sys.platform == "win32": class HRESULT(_SimpleCData[int]): ... # TODO undocumented +if sys.version_info >= (3, 12): + c_time_t: type[c_int32 | c_int64] + class py_object(_CanCastTo, _SimpleCData[_T]): ... class BigEndianStructure(Structure): ... class LittleEndianStructure(Structure): ... 
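On 3.12 the calendar constants shown above become members of the new Day and Month IntEnums while the module-level names stay bound to the same values. For example:

    import calendar
    import sys

    assert calendar.MONDAY == 0 and calendar.SUNDAY == 6
    if sys.version_info >= (3, 12):
        assert calendar.JANUARY == calendar.Month.JANUARY == 1
        print(calendar.Day(calendar.SUNDAY).name)  # SUNDAY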
diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index d153771e676b..ab101a517a6f 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -29,9 +29,12 @@ __all__ = [ "opmap", "HAVE_ARGUMENT", "EXTENDED_ARG", - "hasnargs", "stack_effect", ] +if sys.version_info >= (3, 12): + __all__ += ["hasarg", "hasexc"] +else: + __all__ += ["hasnargs"] # Strictly this should not have to include Callable, but mypy doesn't use FunctionType # for functions (python/mypy#3171) diff --git a/mypy/typeshed/stdlib/distutils/__init__.pyi b/mypy/typeshed/stdlib/distutils/__init__.pyi index e69de29bb2d1..328a5b783441 100644 --- a/mypy/typeshed/stdlib/distutils/__init__.pyi +++ b/mypy/typeshed/stdlib/distutils/__init__.pyi @@ -0,0 +1,5 @@ +# Attempts to improve these stubs are probably not the best use of time: +# - distutils is deleted in Python 3.12 and newer +# - Most users already do not use stdlib distutils, due to setuptools monkeypatching +# - We have very little quality assurance on these stubs, since due to the two above issues +# we allowlist all distutils errors in stubtest. diff --git a/mypy/typeshed/stdlib/email/generator.pyi b/mypy/typeshed/stdlib/email/generator.pyi index 8362dd9c4ff6..faa6551fc925 100644 --- a/mypy/typeshed/stdlib/email/generator.pyi +++ b/mypy/typeshed/stdlib/email/generator.pyi @@ -1,11 +1,12 @@ from _typeshed import SupportsWrite from email.message import Message from email.policy import Policy +from typing_extensions import Self __all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] class Generator: - def clone(self, fp: SupportsWrite[str]) -> Generator: ... + def clone(self, fp: SupportsWrite[str]) -> Self: ... def write(self, s: str) -> None: ... def __init__( self, @@ -17,9 +18,7 @@ class Generator: ) -> None: ... def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... -class BytesGenerator: - def clone(self, fp: SupportsWrite[bytes]) -> BytesGenerator: ... - def write(self, s: str) -> None: ... +class BytesGenerator(Generator): def __init__( self, outfp: SupportsWrite[bytes], @@ -28,7 +27,6 @@ class BytesGenerator: *, policy: Policy | None = None, ) -> None: ... - def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... class DecodedGenerator(Generator): def __init__( diff --git a/mypy/typeshed/stdlib/email/policy.pyi b/mypy/typeshed/stdlib/email/policy.pyi index 4df3c1e48b07..dc7f18489bfa 100644 --- a/mypy/typeshed/stdlib/email/policy.pyi +++ b/mypy/typeshed/stdlib/email/policy.pyi @@ -5,6 +5,7 @@ from email.errors import MessageDefect from email.header import Header from email.message import Message from typing import Any +from typing_extensions import Self __all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] @@ -25,7 +26,7 @@ class Policy(metaclass=ABCMeta): mangle_from_: bool = ..., message_factory: Callable[[Policy], Message] | None = ..., ) -> None: ... - def clone(self, **kw: Any) -> Policy: ... + def clone(self, **kw: Any) -> Self: ... def handle_defect(self, obj: Message, defect: MessageDefect) -> None: ... def register_defect(self, obj: Message, defect: MessageDefect) -> None: ... def header_max_count(self, name: str) -> int | None: ... 
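With clone() now returning Self in the email stubs above, cloning a concrete policy keeps its precise type instead of degrading to the Policy base class. A short sketch:

    from email.message import EmailMessage
    from email.policy import EmailPolicy, default

    # `default` is an EmailPolicy; the Self-typed clone() keeps it that way.
    smtp_policy = default.clone(linesep="\r\n", raise_on_defect=True)
    assert isinstance(smtp_policy, EmailPolicy)

    msg = EmailMessage(policy=smtp_policy)
    msg["Subject"] = "hello"
    print(msg.as_bytes())  # headers end with CRLF because of the cloned policy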
diff --git a/mypy/typeshed/stdlib/fcntl.pyi b/mypy/typeshed/stdlib/fcntl.pyi index 01443083f48d..6aec7515f330 100644 --- a/mypy/typeshed/stdlib/fcntl.pyi +++ b/mypy/typeshed/stdlib/fcntl.pyi @@ -20,6 +20,9 @@ if sys.platform != "win32": F_SETOWN: int F_UNLCK: int F_WRLCK: int + + F_GETLEASE: int + F_SETLEASE: int if sys.platform == "darwin": F_FULLFSYNC: int F_NOCACHE: int @@ -30,11 +33,9 @@ if sys.platform != "win32": F_SETSIG: int F_SHLCK: int F_SETLK64: int - F_SETLEASE: int F_GETSIG: int F_NOTIFY: int F_EXLCK: int - F_GETLEASE: int F_GETLK64: int if sys.version_info >= (3, 8): F_ADD_SEALS: int diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi index 36a213d48680..f24d14fbf2b6 100644 --- a/mypy/typeshed/stdlib/ftplib.pyi +++ b/mypy/typeshed/stdlib/ftplib.pyi @@ -118,7 +118,20 @@ class FTP: def close(self) -> None: ... class FTP_TLS(FTP): - if sys.version_info >= (3, 9): + if sys.version_info >= (3, 12): + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + *, + context: SSLContext | None = None, + timeout: float = ..., + source_address: tuple[str, int] | None = None, + encoding: str = "utf-8", + ) -> None: ... + elif sys.version_info >= (3, 9): def __init__( self, host: str = "", diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index d01fd8ce55cb..1b4e59b7c120 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -70,22 +70,47 @@ if sys.version_info >= (3, 8): else: def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... -WRAPPER_ASSIGNMENTS: tuple[ - Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"] -] +if sys.version_info >= (3, 12): + WRAPPER_ASSIGNMENTS: tuple[ + Literal["__module__"], + Literal["__name__"], + Literal["__qualname__"], + Literal["__doc__"], + Literal["__annotations__"], + Literal["__type_params__"], + ] +else: + WRAPPER_ASSIGNMENTS: tuple[ + Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"] + ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] -def update_wrapper( - wrapper: _T, - wrapped: _AnyCallable, - assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), - updated: Sequence[str] = ("__dict__",), -) -> _T: ... -def wraps( - wrapped: _AnyCallable, - assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), - updated: Sequence[str] = ("__dict__",), -) -> IdentityFunction: ... +if sys.version_info >= (3, 12): + def update_wrapper( + wrapper: _T, + wrapped: _AnyCallable, + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Sequence[str] = ("__dict__",), + ) -> _T: ... + def wraps( + wrapped: _AnyCallable, + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Sequence[str] = ("__dict__",), + ) -> IdentityFunction: ... + +else: + def update_wrapper( + wrapper: _T, + wrapped: _AnyCallable, + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Sequence[str] = ("__dict__",), + ) -> _T: ... 
+ def wraps( + wrapped: _AnyCallable, + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Sequence[str] = ("__dict__",), + ) -> IdentityFunction: ... + def total_ordering(cls: type[_T]) -> type[_T]: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... diff --git a/mypy/typeshed/stdlib/http/__init__.pyi b/mypy/typeshed/stdlib/http/__init__.pyi index d4b44f2eb99b..4310c79b9269 100644 --- a/mypy/typeshed/stdlib/http/__init__.pyi +++ b/mypy/typeshed/stdlib/http/__init__.pyi @@ -79,6 +79,17 @@ class HTTPStatus(IntEnum): EARLY_HINTS: Literal[103] IM_A_TEAPOT: Literal[418] TOO_EARLY: Literal[425] + if sys.version_info >= (3, 12): + @property + def is_informational(self) -> bool: ... + @property + def is_success(self) -> bool: ... + @property + def is_redirection(self) -> bool: ... + @property + def is_client_error(self) -> bool: ... + @property + def is_server_error(self) -> bool: ... if sys.version_info >= (3, 11): class HTTPMethod(StrEnum): diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi index c6cc7cacfb1d..3753700ea889 100644 --- a/mypy/typeshed/stdlib/locale.pyi +++ b/mypy/typeshed/stdlib/locale.pyi @@ -15,7 +15,6 @@ __all__ = [ "str", "atof", "atoi", - "format", "format_string", "currency", "normalize", @@ -32,6 +31,9 @@ __all__ = [ if sys.version_info >= (3, 11): __all__ += ["getencoding"] +if sys.version_info < (3, 12): + __all__ += ["format"] + # This module defines a function "str()", which is why "str" can't be used # as a type annotation or type alias. from builtins import str as _str @@ -123,7 +125,12 @@ def normalize(localename: _str) -> _str: ... def resetlocale(category: int = ...) -> None: ... def strcoll(__os1: _str, __os2: _str) -> int: ... def strxfrm(__string: _str) -> _str: ... -def format(percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any) -> _str: ... + +if sys.version_info < (3, 12): + def format( + percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any + ) -> _str: ... + def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... def delocalize(string: _str) -> _str: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/connection.pyi index d034373712e0..28696fe6a3a3 100644 --- a/mypy/typeshed/stdlib/multiprocessing/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/connection.pyi @@ -52,7 +52,12 @@ class Listener: self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... -def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... +if sys.version_info >= (3, 12): + def deliver_challenge(connection: Connection, authkey: bytes, digest_name: str = "sha256") -> None: ... + +else: + def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... + def answer_challenge(connection: Connection, authkey: bytes) -> None: ... 
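The functools stubs above add __type_params__ to WRAPPER_ASSIGNMENTS on 3.12, so functools.wraps() also carries PEP 695 type parameters over to the wrapper. The behaviour it documents, in brief:

    import functools
    import sys

    def logged(func):
        @functools.wraps(func)  # copies WRAPPER_ASSIGNMENTS onto the wrapper
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapper

    @logged
    def greet(name: str) -> str:
        """Say hello."""
        return f"hello {name}"

    assert greet.__name__ == "greet" and greet.__doc__ == "Say hello."
    if sys.version_info >= (3, 12):
        print(greet.__type_params__)  # () here; non-empty for `def greet[T](...)`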
def wait( object_list: Iterable[Connection | socket.socket | int], timeout: float | None = None diff --git a/mypy/typeshed/stdlib/multiprocessing/queues.pyi b/mypy/typeshed/stdlib/multiprocessing/queues.pyi index a26ab7173232..8e72d15f25f6 100644 --- a/mypy/typeshed/stdlib/multiprocessing/queues.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/queues.pyi @@ -22,6 +22,8 @@ class Queue(Generic[_T]): def close(self) -> None: ... def join_thread(self) -> None: ... def cancel_join_thread(self) -> None: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class JoinableQueue(Queue[_T]): def task_done(self) -> None: ... diff --git a/mypy/typeshed/stdlib/opcode.pyi b/mypy/typeshed/stdlib/opcode.pyi index 1232454e71ea..f852489ffacf 100644 --- a/mypy/typeshed/stdlib/opcode.pyi +++ b/mypy/typeshed/stdlib/opcode.pyi @@ -14,9 +14,12 @@ __all__ = [ "opmap", "HAVE_ARGUMENT", "EXTENDED_ARG", - "hasnargs", "stack_effect", ] +if sys.version_info >= (3, 12): + __all__ += ["hasarg", "hasexc"] +else: + __all__ += ["hasnargs"] if sys.version_info >= (3, 9): cmp_op: tuple[Literal["<"], Literal["<="], Literal["=="], Literal["!="], Literal[">"], Literal[">="]] @@ -42,6 +45,11 @@ hasjabs: list[int] haslocal: list[int] hascompare: list[int] hasfree: list[int] +if sys.version_info >= (3, 12): + hasarg: list[int] + hasexc: list[int] +else: + hasnargs: list[int] opname: list[str] opmap: dict[str, int] @@ -53,5 +61,3 @@ if sys.version_info >= (3, 8): else: def stack_effect(__opcode: int, __oparg: int | None = None) -> int: ... - -hasnargs: list[int] diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 7aec66b584e3..3c2ae0fe7ab1 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -55,7 +55,11 @@ class PurePath(PathLike[str]): if sys.version_info >= (3, 9): def is_relative_to(self, *other: StrPath) -> bool: ... - def match(self, path_pattern: str) -> bool: ... + if sys.version_info >= (3, 12): + def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: ... + else: + def match(self, path_pattern: str) -> bool: ... + def relative_to(self, *other: StrPath) -> Self: ... def with_name(self, name: str) -> Self: ... if sys.version_info >= (3, 9): @@ -70,6 +74,9 @@ class PurePath(PathLike[str]): if sys.version_info >= (3, 9) and sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... + if sys.version_info >= (3, 12): + def with_segments(self, *args: StrPath) -> Self: ... + class PurePosixPath(PurePath): ... class PureWindowsPath(PurePath): ... @@ -86,8 +93,15 @@ class Path(PurePath): def stat(self) -> stat_result: ... def chmod(self, mode: int) -> None: ... - def exists(self) -> bool: ... - def glob(self, pattern: str) -> Generator[Self, None, None]: ... + if sys.version_info >= (3, 12): + def exists(self, *, follow_symlinks: bool = True) -> bool: ... + def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... + else: + def exists(self) -> bool: ... + def glob(self, pattern: str) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str) -> Generator[Self, None, None]: ... + def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def is_symlink(self) -> bool: ... @@ -95,6 +109,9 @@ class Path(PurePath): def is_fifo(self) -> bool: ... def is_block_device(self) -> bool: ... 
def is_char_device(self) -> bool: ... + if sys.version_info >= (3, 12): + def is_junction(self) -> bool: ... + def iterdir(self) -> Generator[Self, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> stat_result: ... @@ -159,6 +176,10 @@ class Path(PurePath): # so it's safer to pretend they don't exist def owner(self) -> str: ... def group(self) -> str: ... + + # This method does "exist" on Windows on <3.12, but always raises NotImplementedError + # On py312+, it works properly on Windows, as with all other platforms + if sys.platform != "win32" or sys.version_info >= (3, 12): def is_mount(self) -> bool: ... if sys.version_info >= (3, 9): @@ -171,7 +192,6 @@ class Path(PurePath): def replace(self, target: str | PurePath) -> None: ... def resolve(self, strict: bool = False) -> Self: ... - def rglob(self, pattern: str) -> Generator[Self, None, None]: ... def rmdir(self) -> None: ... def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ... if sys.version_info >= (3, 10): diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi index 405c45ca01ac..e0d69e7d30fa 100644 --- a/mypy/typeshed/stdlib/pdb.pyi +++ b/mypy/typeshed/stdlib/pdb.pyi @@ -168,7 +168,10 @@ class Pdb(Bdb, Cmd): def find_function(funcname: str, filename: str) -> tuple[str, str, int] | None: ... def main() -> None: ... def help() -> None: ... -def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ... + +if sys.version_info < (3, 10): + def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ... + def lasti2lineno(code: CodeType, lasti: int) -> int: ... class _rstr(str): diff --git a/mypy/typeshed/stdlib/pkgutil.pyi b/mypy/typeshed/stdlib/pkgutil.pyi index f9808c9e5de8..59f1f734cf90 100644 --- a/mypy/typeshed/stdlib/pkgutil.pyi +++ b/mypy/typeshed/stdlib/pkgutil.pyi @@ -12,12 +12,12 @@ __all__ = [ "walk_packages", "iter_modules", "get_data", - "ImpImporter", - "ImpLoader", "read_code", "extend_path", "ModuleInfo", ] +if sys.version_info < (3, 12): + __all__ += ["ImpImporter", "ImpLoader"] _PathT = TypeVar("_PathT", bound=Iterable[str]) @@ -28,11 +28,12 @@ class ModuleInfo(NamedTuple): def extend_path(path: _PathT, name: str) -> _PathT: ... -class ImpImporter: - def __init__(self, path: str | None = None) -> None: ... +if sys.version_info < (3, 12): + class ImpImporter: + def __init__(self, path: str | None = None) -> None: ... -class ImpLoader: - def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... + class ImpLoader: + def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... def find_loader(fullname: str) -> Loader | None: ... def get_importer(path_item: str) -> PathEntryFinder | None: ... diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index ef716d4049dd..38c50d51b129 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -2,6 +2,7 @@ import os import sys from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Sequence +from tarfile import _TarfileFilter from typing import Any, AnyStr, NamedTuple, Protocol, TypeVar, overload from typing_extensions import TypeAlias @@ -192,9 +193,9 @@ def register_archive_format( ) -> None: ... def unregister_archive_format(name: str) -> None: ... 
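The pathlib stubs above gate the 3.12 additions: case_sensitive on match()/glob()/rglob(), follow_symlinks on exists(), plus is_junction() and with_segments(). Guarded usage sketch:

    import sys
    from pathlib import Path

    cwd = Path(".")
    if sys.version_info >= (3, 12):
        hits = sorted(cwd.glob("*.PY", case_sensitive=False))  # also finds *.py
        print(cwd.exists(follow_symlinks=False), cwd.is_junction(), hits[:3])
    else:
        print(cwd.exists(), sorted(cwd.glob("*.py"))[:3])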
-if sys.version_info >= (3, 12): +if sys.version_info >= (3, 8): def unpack_archive( - filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None, *, filter: str | None = None + filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None, *, filter: _TarfileFilter | None = None ) -> None: ... else: diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index 6c897b919909..e1ffc573b52e 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -438,6 +438,36 @@ if sys.platform == "win32": SIO_LOOPBACK_FAST_PATH as SIO_LOOPBACK_FAST_PATH, SIO_RCVALL as SIO_RCVALL, ) +if sys.version_info >= (3, 12): + from _socket import ( + IP_ADD_SOURCE_MEMBERSHIP as IP_ADD_SOURCE_MEMBERSHIP, + IP_BLOCK_SOURCE as IP_BLOCK_SOURCE, + IP_DROP_SOURCE_MEMBERSHIP as IP_DROP_SOURCE_MEMBERSHIP, + IP_PKTINFO as IP_PKTINFO, + IP_UNBLOCK_SOURCE as IP_UNBLOCK_SOURCE, + ) + + if sys.platform == "win32": + from _socket import ( + HV_GUID_BROADCAST as HV_GUID_BROADCAST, + HV_GUID_CHILDREN as HV_GUID_CHILDREN, + HV_GUID_LOOPBACK as HV_GUID_LOOPBACK, + HV_GUID_PARENT as HV_GUID_PARENT, + HV_GUID_WILDCARD as HV_GUID_WILDCARD, + HV_GUID_ZERO as HV_GUID_ZERO, + HV_PROTOCOL_RAW as HV_PROTOCOL_RAW, + HVSOCKET_ADDRESS_FLAG_PASSTHRU as HVSOCKET_ADDRESS_FLAG_PASSTHRU, + HVSOCKET_CONNECT_TIMEOUT as HVSOCKET_CONNECT_TIMEOUT, + HVSOCKET_CONNECT_TIMEOUT_MAX as HVSOCKET_CONNECT_TIMEOUT_MAX, + HVSOCKET_CONNECTED_SUSPEND as HVSOCKET_CONNECTED_SUSPEND, + ) + else: + from _socket import ( + ETHERTYPE_ARP as ETHERTYPE_ARP, + ETHERTYPE_IP as ETHERTYPE_IP, + ETHERTYPE_IPV6 as ETHERTYPE_IPV6, + ETHERTYPE_VLAN as ETHERTYPE_VLAN, + ) # Re-exported from errno EBADF: int @@ -489,6 +519,8 @@ class AddressFamily(IntEnum): AF_LINK: int if sys.platform != "darwin": AF_BLUETOOTH: int + if sys.platform == "win32" and sys.version_info >= (3, 12): + AF_HYPERV: int AF_INET = AddressFamily.AF_INET AF_INET6 = AddressFamily.AF_INET6 @@ -540,6 +572,9 @@ if sys.platform != "win32" or sys.version_info >= (3, 9): if sys.platform != "darwin": AF_BLUETOOTH = AddressFamily.AF_BLUETOOTH +if sys.platform == "win32" and sys.version_info >= (3, 12): + AF_HYPERV = AddressFamily.AF_HYPERV + class SocketKind(IntEnum): SOCK_STREAM: int SOCK_DGRAM: int diff --git a/mypy/typeshed/stdlib/socketserver.pyi b/mypy/typeshed/stdlib/socketserver.pyi index 3799d82a0065..6a932f66cd09 100644 --- a/mypy/typeshed/stdlib/socketserver.pyi +++ b/mypy/typeshed/stdlib/socketserver.pyi @@ -28,6 +28,8 @@ if sys.platform != "win32": "UnixDatagramServer", "UnixStreamServer", ] + if sys.version_info >= (3, 12): + __all__ += ["ForkingUnixStreamServer", "ForkingUnixDatagramServer"] _RequestType: TypeAlias = _socket | tuple[bytes, _socket] _AfUnixAddress: TypeAlias = str | ReadableBuffer # address acceptable for an AF_UNIX socket @@ -124,6 +126,9 @@ class ThreadingMixIn: if sys.platform != "win32": class ForkingTCPServer(ForkingMixIn, TCPServer): ... class ForkingUDPServer(ForkingMixIn, UDPServer): ... + if sys.version_info >= (3, 12): + class ForkingUnixStreamServer(ForkingMixIn, UnixStreamServer): ... + class ForkingUnixDatagramServer(ForkingMixIn, UnixDatagramServer): ... class ThreadingTCPServer(ThreadingMixIn, TCPServer): ... class ThreadingUDPServer(ThreadingMixIn, UDPServer): ... 
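The widened unpack_archive() signature above reflects the backported tar extraction filters (PEP 706): the filter argument is accepted wherever the runtime carries the backport, not just on 3.12. A self-contained sketch (file names are made up; it only runs where the runtime supports filter=):

    import os
    import shutil
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, "src")
        os.makedirs(src)
        with open(os.path.join(src, "hello.txt"), "w") as f:
            f.write("hi\n")

        archive = shutil.make_archive(os.path.join(tmp, "release"), "gztar", root_dir=src)
        dest = os.path.join(tmp, "dest")
        # "data" rejects absolute paths, links escaping the destination, special files, ...
        shutil.unpack_archive(archive, extract_dir=dest, filter="data")
        print(sorted(os.listdir(dest)))  # ['hello.txt']

The same literal values ("fully_trusted", "tar", "data") or a callable make up the _TarfileFilter alias used by the tarfile stubs further down.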
diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi index c2fdbeccca72..ca049124053a 100644 --- a/mypy/typeshed/stdlib/sys.pyi +++ b/mypy/typeshed/stdlib/sys.pyi @@ -321,7 +321,7 @@ if sys.version_info < (3, 9): if sys.version_info >= (3, 8): # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. - class UnraisableHookArgs: + class UnraisableHookArgs(Protocol): exc_type: type[BaseException] exc_value: BaseException | None exc_traceback: TracebackType | None @@ -359,3 +359,13 @@ if sys.version_info < (3, 8): # as part of the response to CVE-2020-10735 def set_int_max_str_digits(maxdigits: int) -> None: ... def get_int_max_str_digits() -> int: ... + +if sys.version_info >= (3, 12): + def getunicodeinternedsize() -> int: ... + def deactivate_stack_trampoline() -> None: ... + def is_stack_trampoline_active() -> bool: ... + # It always exists, but raises on non-linux platforms: + if sys.platform == "linux": + def activate_stack_trampoline(__backend: str) -> None: ... + else: + def activate_stack_trampoline(__backend: str) -> NoReturn: ... diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index 5cf1d55cac63..d9d9641ac698 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -7,7 +7,7 @@ from collections.abc import Callable, Iterable, Iterator, Mapping from gzip import _ReadableFileobj as _GzipReadableFileobj, _WritableFileobj as _GzipWritableFileobj from types import TracebackType from typing import IO, ClassVar, Protocol, overload -from typing_extensions import Literal, Self +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "TarFile", @@ -26,6 +26,21 @@ __all__ = [ "DEFAULT_FORMAT", "open", ] +if sys.version_info >= (3, 12): + __all__ += [ + "fully_trusted_filter", + "data_filter", + "tar_filter", + "FilterError", + "AbsoluteLinkError", + "OutsideDestinationError", + "SpecialFileError", + "AbsolutePathError", + "LinkOutsideDestinationError", + ] + +_FilterFunction: TypeAlias = Callable[[TarInfo, str], TarInfo | None] +_TarfileFilter: TypeAlias = Literal["fully_trusted", "tar", "data"] | _FilterFunction class _Fileobj(Protocol): def read(self, __size: int) -> bytes: ... @@ -125,6 +140,7 @@ class TarFile: debug: int | None errorlevel: int | None offset: int # undocumented + extraction_filter: _FilterFunction | None def __init__( self, name: StrOrBytesPath | None = None, @@ -275,12 +291,32 @@ class TarFile: def getnames(self) -> _list[str]: ... def list(self, verbose: bool = True, *, members: _list[TarInfo] | None = None) -> None: ... def next(self) -> TarInfo | None: ... - def extractall( - self, path: StrOrBytesPath = ".", members: Iterable[TarInfo] | None = None, *, numeric_owner: bool = False - ) -> None: ... - def extract( - self, member: str | TarInfo, path: StrOrBytesPath = "", set_attrs: bool = True, *, numeric_owner: bool = False - ) -> None: ... + if sys.version_info >= (3, 8): + def extractall( + self, + path: StrOrBytesPath = ".", + members: Iterable[TarInfo] | None = None, + *, + numeric_owner: bool = False, + filter: _TarfileFilter | None = ..., + ) -> None: ... + def extract( + self, + member: str | TarInfo, + path: StrOrBytesPath = "", + set_attrs: bool = True, + *, + numeric_owner: bool = False, + filter: _TarfileFilter | None = ..., + ) -> None: ... + else: + def extractall( + self, path: StrOrBytesPath = ".", members: Iterable[TarInfo] | None = None, *, numeric_owner: bool = False + ) -> None: ... 
+ def extract( + self, member: str | TarInfo, path: StrOrBytesPath = "", set_attrs: bool = True, *, numeric_owner: bool = False + ) -> None: ... + def _extract_member( self, tarinfo: TarInfo, targetpath: str, set_attrs: bool = True, numeric_owner: bool = False ) -> None: ... # undocumented @@ -324,6 +360,31 @@ class StreamError(TarError): ... class ExtractError(TarError): ... class HeaderError(TarError): ... +if sys.version_info >= (3, 8): + class FilterError(TarError): + # This attribute is only set directly on the subclasses, but the documentation guarantees + # that it is always present on FilterError. + tarinfo: TarInfo + + class AbsolutePathError(FilterError): + def __init__(self, tarinfo: TarInfo) -> None: ... + + class OutsideDestinationError(FilterError): + def __init__(self, tarinfo: TarInfo, path: str) -> None: ... + + class SpecialFileError(FilterError): + def __init__(self, tarinfo: TarInfo) -> None: ... + + class AbsoluteLinkError(FilterError): + def __init__(self, tarinfo: TarInfo) -> None: ... + + class LinkOutsideDestinationError(FilterError): + def __init__(self, tarinfo: TarInfo, path: str) -> None: ... + + def fully_trusted_filter(member: TarInfo, dest_path: str) -> TarInfo: ... + def tar_filter(member: TarInfo, dest_path: str) -> TarInfo: ... + def data_filter(member: TarInfo, dest_path: str) -> TarInfo: ... + class TarInfo: name: str path: str @@ -353,6 +414,21 @@ class TarInfo: def linkpath(self) -> str: ... @linkpath.setter def linkpath(self, linkname: str) -> None: ... + if sys.version_info >= (3, 8): + def replace( + self, + *, + name: str = ..., + mtime: int = ..., + mode: int = ..., + linkname: str = ..., + uid: int = ..., + gid: int = ..., + uname: str = ..., + gname: str = ..., + deep: bool = True, + ) -> Self: ... + def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: ... if sys.version_info >= (3, 8): def tobuf(self, format: int | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi index cd27e91fbc75..b251f8b9d029 100644 --- a/mypy/typeshed/stdlib/tempfile.pyi +++ b/mypy/typeshed/stdlib/tempfile.pyi @@ -1,10 +1,21 @@ import io import sys -from _typeshed import BytesPath, GenericPath, ReadableBuffer, StrPath, WriteableBuffer +from _typeshed import ( + BytesPath, + GenericPath, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + StrPath, + WriteableBuffer, +) from collections.abc import Iterable, Iterator from types import TracebackType from typing import IO, Any, AnyStr, Generic, overload -from typing_extensions import Literal, Self, TypeAlias +from typing_extensions import Literal, Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -30,13 +41,54 @@ TMP_MAX: int tempdir: str | None template: str -_StrMode: TypeAlias = Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"] -_BytesMode: TypeAlias = Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] +if sys.version_info >= (3, 12): + @overload + def NamedTemporaryFile( + mode: OpenTextMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> _TemporaryFileWrapper[str]: ... 
+ @overload + def NamedTemporaryFile( + mode: OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> _TemporaryFileWrapper[bytes]: ... + @overload + def NamedTemporaryFile( + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> _TemporaryFileWrapper[Any]: ... -if sys.version_info >= (3, 8): +elif sys.version_info >= (3, 8): @overload def NamedTemporaryFile( - mode: _StrMode, + mode: OpenTextMode, buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -49,7 +101,7 @@ if sys.version_info >= (3, 8): ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( - mode: _BytesMode = "w+b", + mode: OpenBinaryMode = "w+b", buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -77,7 +129,7 @@ if sys.version_info >= (3, 8): else: @overload def NamedTemporaryFile( - mode: _StrMode, + mode: OpenTextMode, buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -88,7 +140,7 @@ else: ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( - mode: _BytesMode = "w+b", + mode: OpenBinaryMode = "w+b", buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -112,10 +164,11 @@ else: if sys.platform == "win32": TemporaryFile = NamedTemporaryFile else: + # See the comments for builtins.open() for an explanation of the overloads. if sys.version_info >= (3, 8): @overload def TemporaryFile( - mode: _StrMode, + mode: OpenTextMode, buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -124,11 +177,11 @@ else: dir: GenericPath[AnyStr] | None = None, *, errors: str | None = None, - ) -> IO[str]: ... + ) -> io.TextIOWrapper: ... @overload def TemporaryFile( - mode: _BytesMode = "w+b", - buffering: int = -1, + mode: OpenBinaryMode, + buffering: Literal[0], encoding: str | None = None, newline: str | None = None, suffix: AnyStr | None = None, @@ -136,7 +189,54 @@ else: dir: GenericPath[AnyStr] | None = None, *, errors: str | None = None, - ) -> IO[bytes]: ... + ) -> io.FileIO: ... + @overload + def TemporaryFile( + *, + buffering: Literal[0], + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + errors: str | None = None, + ) -> io.FileIO: ... + @overload + def TemporaryFile( + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.BufferedWriter: ... + @overload + def TemporaryFile( + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.BufferedReader: ... 
+ @overload + def TemporaryFile( + mode: OpenBinaryModeUpdating = "w+b", + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> io.BufferedRandom: ... @overload def TemporaryFile( mode: str = "w+b", @@ -152,40 +252,84 @@ else: else: @overload def TemporaryFile( - mode: _StrMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - ) -> IO[str]: ... + mode: OpenTextMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + ) -> io.TextIOWrapper: ... + @overload + def TemporaryFile( + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + ) -> io.FileIO: ... + @overload + def TemporaryFile( + *, + buffering: Literal[0], + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + ) -> io.FileIO: ... + @overload + def TemporaryFile( + mode: OpenBinaryModeUpdating = "w+b", + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + ) -> io.BufferedRandom: ... + @overload + def TemporaryFile( + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + ) -> io.BufferedWriter: ... @overload def TemporaryFile( - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - ) -> IO[bytes]: ... + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + ) -> io.BufferedReader: ... @overload def TemporaryFile( - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, ) -> IO[Any]: ... class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter name: str delete: bool - def __init__(self, file: IO[AnyStr], name: str, delete: bool = True) -> None: ... + if sys.version_info >= (3, 12): + def __init__(self, file: IO[AnyStr], name: str, delete: bool = True, delete_on_close: bool = True) -> None: ... + else: + def __init__(self, file: IO[AnyStr], name: str, delete: bool = True) -> None: ... + def __enter__(self) -> Self: ... 
def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... @@ -246,7 +390,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def __init__( self: SpooledTemporaryFile[bytes], max_size: int = 0, - mode: _BytesMode = "w+b", + mode: OpenBinaryMode = "w+b", buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -260,7 +404,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def __init__( self: SpooledTemporaryFile[str], max_size: int, - mode: _StrMode, + mode: OpenTextMode, buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -275,7 +419,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): self: SpooledTemporaryFile[str], max_size: int = 0, *, - mode: _StrMode, + mode: OpenTextMode, buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -319,7 +463,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def __init__( self: SpooledTemporaryFile[bytes], max_size: int = 0, - mode: _BytesMode = "w+b", + mode: OpenBinaryMode = "w+b", buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -331,7 +475,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def __init__( self: SpooledTemporaryFile[str], max_size: int, - mode: _StrMode, + mode: OpenTextMode, buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -344,7 +488,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): self: SpooledTemporaryFile[str], max_size: int = 0, *, - mode: _StrMode, + mode: OpenTextMode, buffering: int = -1, encoding: str | None = None, newline: str | None = None, @@ -425,7 +569,28 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): class TemporaryDirectory(Generic[AnyStr]): name: AnyStr - if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12): + @overload + def __init__( + self: TemporaryDirectory[str], + suffix: str | None = None, + prefix: str | None = None, + dir: StrPath | None = None, + ignore_cleanup_errors: bool = False, + *, + delete: bool = True, + ) -> None: ... + @overload + def __init__( + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ignore_cleanup_errors: bool = False, + *, + delete: bool = True, + ) -> None: ... + elif sys.version_info >= (3, 10): @overload def __init__( self: TemporaryDirectory[str], diff --git a/mypy/typeshed/stdlib/threading.pyi b/mypy/typeshed/stdlib/threading.pyi index 6275e4552630..badd09cae051 100644 --- a/mypy/typeshed/stdlib/threading.pyi +++ b/mypy/typeshed/stdlib/threading.pyi @@ -37,6 +37,9 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 10): __all__ += ["getprofile", "gettrace"] +if sys.version_info >= (3, 12): + __all__ += ["setprofile_all_threads", "settrace_all_threads"] + _profile_hook: ProfileFunction | None def active_count() -> int: ... @@ -53,6 +56,10 @@ if sys.version_info >= (3, 8): def settrace(func: TraceFunction) -> None: ... def setprofile(func: ProfileFunction | None) -> None: ... +if sys.version_info >= (3, 12): + def setprofile_all_threads(func: ProfileFunction | None) -> None: ... + def settrace_all_threads(func: TraceFunction) -> None: ... + if sys.version_info >= (3, 10): def gettrace() -> TraceFunction | None: ... def getprofile() -> ProfileFunction | None: ... 
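The reworked tempfile stubs above mirror the builtins.open() overloads, so TemporaryFile() calls now resolve to concrete io classes (io.TextIOWrapper, io.BufferedRandom, ...) instead of plain IO[...], and NamedTemporaryFile picks up the Python 3.12 delete_on_close flag. A small usage sketch, not part of the patch; the delete_on_close branch assumes Python 3.12:

import sys
import tempfile

# On non-Windows platforms, with the new overloads mypy sees this as
# io.TextIOWrapper rather than the old IO[str].
with tempfile.TemporaryFile("w+") as tf:
    tf.write("hello")
    tf.seek(0)
    print(tf.read())

if sys.version_info >= (3, 12):
    # With delete=True and delete_on_close=False the file survives close()
    # and is only removed when the context manager exits, so it can be
    # reopened by name in the meantime.
    with tempfile.NamedTemporaryFile("w+", delete=True, delete_on_close=False) as ntf:
        ntf.write("payload")
        ntf.close()
        with open(ntf.name) as reopened:
            print(reopened.read())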
diff --git a/mypy/typeshed/stdlib/token.pyi b/mypy/typeshed/stdlib/token.pyi index fcd6ef87d217..85867a2b9744 100644 --- a/mypy/typeshed/stdlib/token.pyi +++ b/mypy/typeshed/stdlib/token.pyi @@ -73,6 +73,9 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 10): __all__ += ["SOFT_KEYWORD"] +if sys.version_info >= (3, 12): + __all__ += ["EXCLAMATION", "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START"] + ENDMARKER: int NAME: int NUMBER: int @@ -145,6 +148,12 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 10): SOFT_KEYWORD: int +if sys.version_info >= (3, 12): + EXCLAMATION: int + FSTRING_END: int + FSTRING_MIDDLE: int + FSTRING_START: int + def ISTERMINAL(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... def ISEOF(x: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/tokenize.pyi b/mypy/typeshed/stdlib/tokenize.pyi index ba57402fb845..0028ed034ae6 100644 --- a/mypy/typeshed/stdlib/tokenize.pyi +++ b/mypy/typeshed/stdlib/tokenize.pyi @@ -83,6 +83,9 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 10): __all__ += ["SOFT_KEYWORD"] +if sys.version_info >= (3, 12): + __all__ += ["EXCLAMATION", "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START"] + if sys.version_info >= (3, 8): from token import EXACT_TOKEN_TYPES as EXACT_TOKEN_TYPES else: diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 43475d91279d..6909c765cb7d 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -17,7 +17,7 @@ from importlib.machinery import ModuleSpec # pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload # noqa: Y022 -from typing_extensions import Literal, ParamSpec, final +from typing_extensions import Literal, ParamSpec, TypeVarTuple, final __all__ = [ "FunctionType", @@ -94,6 +94,8 @@ class FunctionType: if sys.version_info >= (3, 10): @property def __builtins__(self) -> dict[str, Any]: ... + if sys.version_info >= (3, 12): + __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] __module__: str def __init__( @@ -374,6 +376,10 @@ class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): def ag_await(self) -> Awaitable[Any] | None: ... __name__: str __qualname__: str + if sys.version_info >= (3, 12): + @property + def ag_suspended(self) -> bool: ... + def __aiter__(self) -> AsyncGeneratorType[_T_co, _T_contra]: ... def __anext__(self) -> Coroutine[Any, Any, _T_co]: ... def asend(self, __val: _T_contra) -> Coroutine[Any, Any, _T_co]: ... diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index db042dc440ae..08d7da5e8622 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -631,6 +631,7 @@ class Mapping(Collection[_KT], Generic[_KT, _VT_co]): def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... def __contains__(self, __key: object) -> bool: ... + def __eq__(self, __other: object) -> bool: ... class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @abstractmethod @@ -853,9 +854,9 @@ class NamedTuple(tuple[Any, ...]): if sys.version_info >= (3, 12): __orig_bases__: ClassVar[tuple[Any, ...]] @overload - def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + def __init__(self, __typename: str, __fields: Iterable[tuple[str, Any]]) -> None: ... @overload - def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... 
+ def __init__(self, __typename: str, __fields: None = None, **kwargs: Any) -> None: ... @classmethod def _make(cls, iterable: Iterable[Any]) -> typing_extensions.Self: ... if sys.version_info >= (3, 8): diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi index 935e44e80dfa..74ce4ebd6b47 100644 --- a/mypy/typeshed/stdlib/uuid.pyi +++ b/mypy/typeshed/stdlib/uuid.pyi @@ -96,3 +96,6 @@ RESERVED_NCS: str RFC_4122: str RESERVED_MICROSOFT: str RESERVED_FUTURE: str + +if sys.version_info >= (3, 12): + def main() -> None: ... diff --git a/mypy/typeshed/stdlib/webbrowser.pyi b/mypy/typeshed/stdlib/webbrowser.pyi index 02edd42e7d59..99c7bb5846b6 100644 --- a/mypy/typeshed/stdlib/webbrowser.pyi +++ b/mypy/typeshed/stdlib/webbrowser.pyi @@ -43,8 +43,12 @@ class UnixBrowser(BaseBrowser): class Mozilla(UnixBrowser): ... -class Galeon(UnixBrowser): - raise_opts: list[str] +if sys.version_info < (3, 12): + class Galeon(UnixBrowser): + raise_opts: list[str] + + class Grail(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class Chrome(UnixBrowser): ... class Opera(UnixBrowser): ... @@ -53,9 +57,6 @@ class Elinks(UnixBrowser): ... class Konqueror(BaseBrowser): def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... -class Grail(BaseBrowser): - def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... - if sys.platform == "win32": class WindowsDefault(BaseBrowser): def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/mypy/typeshed/stdlib/zipimport.pyi b/mypy/typeshed/stdlib/zipimport.pyi index ee97faace379..0189bfe712b5 100644 --- a/mypy/typeshed/stdlib/zipimport.pyi +++ b/mypy/typeshed/stdlib/zipimport.pyi @@ -17,8 +17,10 @@ class zipimporter: else: def __init__(self, path: StrOrBytesPath) -> None: ... - def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... # undocumented - def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... + if sys.version_info < (3, 12): + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... # undocumented + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... + def get_code(self, fullname: str) -> CodeType: ... def get_data(self, pathname: str) -> bytes: ... def get_filename(self, fullname: str) -> str: ... diff --git a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi index fe994be3e8ff..a95530ed461a 100644 --- a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi @@ -17,9 +17,9 @@ class ZoneInfo(tzinfo): @classmethod def no_cache(cls, key: str) -> Self: ... @classmethod - def from_file(cls, __fobj: _IOBytes, key: str | None = ...) -> Self: ... + def from_file(cls, __fobj: _IOBytes, key: str | None = None) -> Self: ... @classmethod - def clear_cache(cls, *, only_keys: Iterable[str] | None = ...) -> None: ... + def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: ... def tzname(self, __dt: datetime | None) -> str | None: ... def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... def dst(self, __dt: datetime | None) -> timedelta | None: ... @@ -30,7 +30,7 @@ class ZoneInfo(tzinfo): def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: ... def available_timezones() -> set[str]: ... -TZPATH: Sequence[str] +TZPATH: tuple[str, ...] 
class ZoneInfoNotFoundError(KeyError): ... class InvalidTZPathWarning(RuntimeWarning): ... diff --git a/mypy/version.py b/mypy/version.py index 826ba0020100..42cda2fc7794 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". -__version__ = "1.4.0+dev" +__version__ = "1.5.0+dev" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index a2e3d9849dca..8114e0517219 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -345,12 +345,6 @@ def tuple_c_declaration(self, rtuple: RTuple) -> list[str]: result.append(f"{self.ctype_spaced(typ)}f{i};") i += 1 result.append(f"}} {rtuple.struct_name};") - values = self.tuple_undefined_value_helper(rtuple) - result.append( - "static {} {} = {{ {} }};".format( - self.ctype(rtuple), self.tuple_undefined_value(rtuple), "".join(values) - ) - ) result.append("#endif") result.append("") @@ -470,23 +464,20 @@ def tuple_undefined_check_cond( return check def tuple_undefined_value(self, rtuple: RTuple) -> str: - return "tuple_undefined_" + rtuple.unique_id + """Undefined tuple value suitable in an expression.""" + return f"({rtuple.struct_name}) {self.c_initializer_undefined_value(rtuple)}" - def tuple_undefined_value_helper(self, rtuple: RTuple) -> list[str]: - res = [] - # see tuple_c_declaration() - if len(rtuple.types) == 0: - return [self.c_undefined_value(int_rprimitive)] - for item in rtuple.types: - if not isinstance(item, RTuple): - res.append(self.c_undefined_value(item)) - else: - sub_list = self.tuple_undefined_value_helper(item) - res.append("{ ") - res.extend(sub_list) - res.append(" }") - res.append(", ") - return res[:-1] + def c_initializer_undefined_value(self, rtype: RType) -> str: + """Undefined value represented in a form suitable for variable initialization.""" + if isinstance(rtype, RTuple): + if not rtype.types: + # Empty tuples contain a flag so that they can still indicate + # error values. + return f"{{ {int_rprimitive.c_undefined} }}" + items = ", ".join([self.c_initializer_undefined_value(t) for t in rtype.types]) + return f"{{ {items} }}" + else: + return self.c_undefined_value(rtype) # Higher-level operations diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 6a272d1aee2b..eac7a2207972 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -578,7 +578,12 @@ def generate_setup_for_class( for base in reversed(cl.base_mro): for attr, rtype in base.attributes.items(): - emitter.emit_line(rf"self->{emitter.attr(attr)} = {emitter.c_undefined_value(rtype)};") + value = emitter.c_undefined_value(rtype) + + # We don't need to set this field to NULL since tp_alloc() already + # zero-initializes `self`. 
+ if value != "NULL": + emitter.emit_line(rf"self->{emitter.attr(attr)} = {value};") # Initialize attributes to default values, if necessary if defaults_fn is not None: diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 0e80ff6da1f2..f360fabbe8f6 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -51,7 +51,7 @@ from mypyc.ir.func_ir import FuncIR from mypyc.ir.module_ir import ModuleIR, ModuleIRs, deserialize_modules from mypyc.ir.ops import DeserMaps, LoadLiteral -from mypyc.ir.rtypes import RTuple, RType +from mypyc.ir.rtypes import RType from mypyc.irbuild.main import build_ir from mypyc.irbuild.mapper import Mapper from mypyc.irbuild.prepare import load_type_map @@ -1052,11 +1052,7 @@ def declare_finals( def final_definition(self, module: str, name: str, typ: RType, emitter: Emitter) -> str: static_name = emitter.static_name(name, module) # Here we rely on the fact that undefined value and error value are always the same - if isinstance(typ, RTuple): - # We need to inline because initializer must be static - undefined = "{{ {} }}".format("".join(emitter.tuple_undefined_value_helper(typ))) - else: - undefined = emitter.c_undefined_value(typ) + undefined = emitter.c_initializer_undefined_value(typ) return f"{emitter.ctype_spaced(typ)}{static_name} = {undefined};" def declare_static_pyobject(self, identifier: str, emitter: Emitter) -> None: diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 7a3e16fe9d65..30d93be60989 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -41,7 +41,6 @@ typedef struct tuple_T3OOO { PyObject *f1; PyObject *f2; } tuple_T3OOO; -static tuple_T3OOO tuple_undefined_T3OOO = { NULL, NULL, NULL }; #endif // Our return tuple wrapper for dictionary iteration helper. @@ -52,7 +51,6 @@ typedef struct tuple_T3CIO { CPyTagged f1; // Last dict offset PyObject *f2; // Next dictionary key or value } tuple_T3CIO; -static tuple_T3CIO tuple_undefined_T3CIO = { 2, CPY_INT_TAG, NULL }; #endif // Same as above but for both key and value. 
@@ -64,7 +62,6 @@ typedef struct tuple_T4CIOO { PyObject *f2; // Next dictionary key PyObject *f3; // Next dictionary value } tuple_T4CIOO; -static tuple_T4CIOO tuple_undefined_T4CIOO = { 2, CPY_INT_TAG, NULL, NULL }; #endif diff --git a/mypyc/test/test_emit.py b/mypyc/test/test_emit.py index 54bf4eef3c74..e4ace3ec01f0 100644 --- a/mypyc/test/test_emit.py +++ b/mypyc/test/test_emit.py @@ -4,7 +4,7 @@ from mypyc.codegen.emit import Emitter, EmitterContext from mypyc.ir.ops import BasicBlock, Register, Value -from mypyc.ir.rtypes import int_rprimitive +from mypyc.ir.rtypes import RTuple, bool_rprimitive, int_rprimitive, str_rprimitive from mypyc.namegen import NameGenerator @@ -49,3 +49,21 @@ def test_emit_line(self) -> None: CPyStatics[1]; /* [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29] */\n""" ) + + def test_emit_undefined_value_for_simple_type(self) -> None: + emitter = Emitter(self.context, {}) + assert emitter.c_undefined_value(int_rprimitive) == "CPY_INT_TAG" + assert emitter.c_undefined_value(str_rprimitive) == "NULL" + assert emitter.c_undefined_value(bool_rprimitive) == "2" + + def test_emit_undefined_value_for_tuple(self) -> None: + emitter = Emitter(self.context, {}) + assert ( + emitter.c_undefined_value(RTuple([str_rprimitive, int_rprimitive, bool_rprimitive])) + == "(tuple_T3OIC) { NULL, CPY_INT_TAG, 2 }" + ) + assert emitter.c_undefined_value(RTuple([str_rprimitive])) == "(tuple_T1O) { NULL }" + assert ( + emitter.c_undefined_value(RTuple([RTuple([str_rprimitive]), bool_rprimitive])) + == "(tuple_T2T1OC) { { NULL }, 2 }" + ) diff --git a/test-data/packages/typedpkg-stubs/pyproject.toml b/test-data/packages/typedpkg-stubs/pyproject.toml new file mode 100644 index 000000000000..125816151ef8 --- /dev/null +++ b/test-data/packages/typedpkg-stubs/pyproject.toml @@ -0,0 +1,11 @@ +[project] +name = 'typedpkg-stubs' +version = '0.1' +description = 'test' + +[tool.hatch.build] +include = ["**/*.pyi"] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/test-data/packages/typedpkg-stubs/setup.py b/test-data/packages/typedpkg-stubs/setup.py deleted file mode 100644 index 4948dc6a01df..000000000000 --- a/test-data/packages/typedpkg-stubs/setup.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -This setup file installs packages to test mypy's PEP 561 implementation -""" - -from setuptools import setup - -setup( - name='typedpkg-stubs', - author="The mypy team", - version='0.1', - package_data={'typedpkg-stubs': ['sample.pyi', '__init__.pyi', 'py.typed']}, - packages=['typedpkg-stubs'], -) diff --git a/test-data/packages/typedpkg/pyproject.toml b/test-data/packages/typedpkg/pyproject.toml new file mode 100644 index 000000000000..5269c94320e1 --- /dev/null +++ b/test-data/packages/typedpkg/pyproject.toml @@ -0,0 +1,8 @@ +[project] +name = 'typedpkg' +version = '0.1' +description = 'test' + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/test-data/packages/typedpkg/setup.py b/test-data/packages/typedpkg/setup.py deleted file mode 100644 index 11bcfb11a104..000000000000 --- a/test-data/packages/typedpkg/setup.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -This setup file installs packages to test mypy's PEP 561 implementation -""" - -from setuptools import setup - -setup( - name='typedpkg', - author="The mypy team", - version='0.1', - package_data={'typedpkg': ['py.typed']}, - packages=['typedpkg', 'typedpkg.pkg'], - include_package_data=True, - zip_safe=False, -) 
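The emitter changes above drop the per-type tuple_undefined_* statics from CPy.h and instead inline the undefined value as a brace initializer (see the new test_emit.py assertions). A self-contained toy sketch of that recursion follows; the helper name and the plain-tuple encoding are illustrative only, not the mypyc API:

def undefined_initializer(rtype: object) -> str:
    # Tuples stand in for RTuple members; strings stand in for a scalar
    # type's C undefined literal.
    if isinstance(rtype, tuple):
        if not rtype:
            # Empty tuples carry a flag so they can still signal an error value.
            return "{ CPY_INT_TAG }"
        return "{ " + ", ".join(undefined_initializer(t) for t in rtype) + " }"
    return str(rtype)

print(undefined_initializer(("NULL", "CPY_INT_TAG", "2")))  # { NULL, CPY_INT_TAG, 2 }
print(undefined_initializer((("NULL",), "2")))              # { { NULL }, 2 }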
diff --git a/test-data/packages/typedpkg_ns_a/pyproject.toml b/test-data/packages/typedpkg_ns_a/pyproject.toml new file mode 100644 index 000000000000..cc464af75b17 --- /dev/null +++ b/test-data/packages/typedpkg_ns_a/pyproject.toml @@ -0,0 +1,11 @@ +[project] +name = 'typedpkg_namespace.alpha' +version = '0.1' +description = 'test' + +[tool.hatch.build] +include = ["**/*.py", "**/*.pyi", "**/py.typed"] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/test-data/packages/typedpkg_ns_a/setup.py b/test-data/packages/typedpkg_ns_a/setup.py deleted file mode 100644 index 3dab731cada9..000000000000 --- a/test-data/packages/typedpkg_ns_a/setup.py +++ /dev/null @@ -1,10 +0,0 @@ -from setuptools import setup - -setup( - name='typedpkg_namespace.alpha', - version='1.0.0', - namespace_packages=['typedpkg_ns'], - zip_safe=False, - package_data={'typedpkg_ns.a': ['py.typed']}, - packages=['typedpkg_ns.a'], -) diff --git a/test-data/packages/typedpkg_ns_b-stubs/pyproject.toml b/test-data/packages/typedpkg_ns_b-stubs/pyproject.toml new file mode 100644 index 000000000000..d5275d1ed8b3 --- /dev/null +++ b/test-data/packages/typedpkg_ns_b-stubs/pyproject.toml @@ -0,0 +1,11 @@ +[project] +name = 'typedpkg_ns-stubs' +version = '0.1' +description = 'test' + +[tool.hatch.build] +include = ["**/*.pyi"] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/test-data/packages/typedpkg_ns_b-stubs/setup.py b/test-data/packages/typedpkg_ns_b-stubs/setup.py deleted file mode 100644 index a5d7df83eeea..000000000000 --- a/test-data/packages/typedpkg_ns_b-stubs/setup.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -This setup file installs packages to test mypy's PEP 561 implementation -""" - -from distutils.core import setup - -setup( - name='typedpkg_ns_b-stubs', - author="The mypy team", - version='0.1', - namespace_packages=['typedpkg_ns-stubs'], - package_data={'typedpkg_ns-stubs.b': ['__init__.pyi', 'bbb.pyi']}, - packages=['typedpkg_ns-stubs.b'], -) diff --git a/test-data/packages/typedpkg_ns_b/pyproject.toml b/test-data/packages/typedpkg_ns_b/pyproject.toml new file mode 100644 index 000000000000..8567af11152e --- /dev/null +++ b/test-data/packages/typedpkg_ns_b/pyproject.toml @@ -0,0 +1,8 @@ +[project] +name = 'typedpkg_namespace.beta' +version = '0.1' +description = 'test' + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/test-data/packages/typedpkg_ns_b/setup.py b/test-data/packages/typedpkg_ns_b/setup.py deleted file mode 100644 index 4f0d0d954a73..000000000000 --- a/test-data/packages/typedpkg_ns_b/setup.py +++ /dev/null @@ -1,10 +0,0 @@ -from setuptools import setup - -setup( - name='typedpkg_namespace.beta', - version='1.0.0', - namespace_packages=['typedpkg_ns'], - zip_safe=False, - package_data={'typedpkg_ns.b': []}, - packages=['typedpkg_ns.b'], -) diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index 8a13e5cb5760..3a0a30a14462 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -735,7 +735,44 @@ class A(metaclass=ABCMeta): def x(self) -> int: pass @x.setter def x(self, x: int) -> None: pass -[out] + +[case testReadWriteDeleteAbstractProperty] +from abc import ABC, abstractmethod +class Abstract(ABC): + @property + @abstractmethod + def prop(self) -> str: ... + + @prop.setter + @abstractmethod + def prop(self, code: str) -> None: ... + + @prop.deleter + @abstractmethod + def prop(self) -> None: ... 
+ +class Good(Abstract): + @property + def prop(self) -> str: ... + @prop.setter + def prop(self, code: str) -> None: ... + @prop.deleter + def prop(self) -> None: ... + +class Bad1(Abstract): + @property # E: Read-only property cannot override read-write property + def prop(self) -> str: ... + +class ThisShouldProbablyError(Abstract): + @property + def prop(self) -> str: ... + @prop.setter + def prop(self, code: str) -> None: ... + +a = Good() +reveal_type(a.prop) # N: Revealed type is "builtins.str" +a.prop = 123 # E: Incompatible types in assignment (expression has type "int", variable has type "str") +[builtins fixtures/property.pyi] [case testInstantiateClassWithReadOnlyAbstractProperty] from abc import abstractproperty, ABCMeta @@ -767,7 +804,7 @@ b = B() b.x() # E: "int" not callable [builtins fixtures/property.pyi] -[case testImplementReradWriteAbstractPropertyViaProperty] +[case testImplementReadWriteAbstractPropertyViaProperty] from abc import abstractproperty, ABCMeta class A(metaclass=ABCMeta): @abstractproperty diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index 8ae7f6555f9d..ab2f5f3f6b48 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -325,7 +325,10 @@ class X(NamedTuple): reveal_type(X._fields) # N: Revealed type is "Tuple[builtins.str, builtins.str]" reveal_type(X._field_types) # N: Revealed type is "builtins.dict[builtins.str, Any]" reveal_type(X._field_defaults) # N: Revealed type is "builtins.dict[builtins.str, Any]" -reveal_type(X.__annotations__) # N: Revealed type is "builtins.dict[builtins.str, Any]" + +# In typeshed's stub for builtins.pyi, __annotations__ is `dict[str, Any]`, +# but it's inferred as `Mapping[str, object]` here due to the fixture we're using +reveal_type(X.__annotations__) # N: Revealed type is "typing.Mapping[builtins.str, builtins.object]" [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 3af20bd1e7ea..2c80eb7b49bc 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -2019,7 +2019,7 @@ tmp/foo.pyi:5: note: @overload tmp/foo.pyi:5: note: def __add__(self, int, /) -> int tmp/foo.pyi:5: note: @overload tmp/foo.pyi:5: note: def __add__(self, str, /) -> str -tmp/foo.pyi:5: note: Overloaded operator methods cannot have wider argument types in overrides +tmp/foo.pyi:5: note: Overloaded operator methods can't have wider argument types in overrides [case testOperatorMethodOverrideWideningArgumentType] import typing @@ -2138,7 +2138,7 @@ tmp/foo.pyi:8: note: @overload tmp/foo.pyi:8: note: def __add__(self, str, /) -> A tmp/foo.pyi:8: note: @overload tmp/foo.pyi:8: note: def __add__(self, type, /) -> A -tmp/foo.pyi:8: note: Overloaded operator methods cannot have wider argument types in overrides +tmp/foo.pyi:8: note: Overloaded operator methods can't have wider argument types in overrides [case testOverloadedOperatorMethodOverrideWithSwitchedItemOrder] from foo import * @@ -4124,12 +4124,31 @@ int.__eq__(3, 4) main:33: error: Too few arguments for "__eq__" of "int" main:33: error: Unsupported operand types for == ("int" and "Type[int]") -[case testMroSetAfterError] -class C(str, str): - foo = 0 - bar = foo -[out] -main:1: error: Duplicate base class "str" +[case testDupBaseClasses] +class A: + def method(self) -> str: ... 
+ +class B(A, A): # E: Duplicate base class "A" + attr: int + +b: B + +reveal_type(b.method()) # N: Revealed type is "Any" +reveal_type(b.missing()) # N: Revealed type is "Any" +reveal_type(b.attr) # N: Revealed type is "builtins.int" + +[case testDupBaseClassesGeneric] +from typing import Generic, TypeVar + +T = TypeVar('T') +class A(Generic[T]): + def method(self) -> T: ... + +class B(A[int], A[str]): # E: Duplicate base class "A" + attr: int + +reveal_type(B().method()) # N: Revealed type is "Any" +reveal_type(B().attr) # N: Revealed type is "builtins.int" [case testCannotDetermineMro] class A: pass @@ -4521,6 +4540,39 @@ def f(TA: Type[A]): reveal_type(TA) # N: Revealed type is "Type[__main__.A]" reveal_type(TA.x) # N: Revealed type is "builtins.int" +[case testMetaclassConflictingInstanceVars] +from typing import ClassVar + +class Meta(type): + foo: int + bar: int + eggs: ClassVar[int] = 42 + spam: ClassVar[int] = 42 + +class Foo(metaclass=Meta): + foo: str + bar: ClassVar[str] = 'bar' + eggs: str + spam: ClassVar[str] = 'spam' + +reveal_type(Foo.foo) # N: Revealed type is "builtins.int" +reveal_type(Foo.bar) # N: Revealed type is "builtins.str" +reveal_type(Foo.eggs) # N: Revealed type is "builtins.int" +reveal_type(Foo.spam) # N: Revealed type is "builtins.str" + +class MetaSub(Meta): ... + +class Bar(metaclass=MetaSub): + foo: str + bar: ClassVar[str] = 'bar' + eggs: str + spam: ClassVar[str] = 'spam' + +reveal_type(Bar.foo) # N: Revealed type is "builtins.int" +reveal_type(Bar.bar) # N: Revealed type is "builtins.str" +reveal_type(Bar.eggs) # N: Revealed type is "builtins.int" +reveal_type(Bar.spam) # N: Revealed type is "builtins.str" + [case testSubclassMetaclass] class M1(type): x = 0 diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index c81de675d808..ec5bce219dbd 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -887,7 +887,10 @@ plugins=/test-data/unit/plugins/descriptor.py # flags: --config-file tmp/mypy.ini def dynamic_signature(arg1: str) -> str: ... -reveal_type(dynamic_signature(1)) # N: Revealed type is "builtins.int" +a: int = 1 +reveal_type(dynamic_signature(a)) # N: Revealed type is "builtins.int" +b: bytes = b'foo' +reveal_type(dynamic_signature(b)) # N: Revealed type is "builtins.bytes" [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/function_sig_hook.py diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 39fcef1db673..6ec0849146c0 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -2174,3 +2174,24 @@ def f(x: bytes, y: bytearray, z: memoryview) -> None: x in y x in z [builtins fixtures/primitives.pyi] + +[case testNoCrashFollowImportsForStubs] +# flags: --config-file tmp/mypy.ini +{**{"x": "y"}} + +[file mypy.ini] +\[mypy] +follow_imports = skip +follow_imports_for_stubs = true +[builtins fixtures/dict.pyi] + +[case testReturnAnyLambda] +# flags: --warn-return-any +from typing import Any, Callable + +def cb(f: Callable[[int], int]) -> None: ... 
+a: Any +cb(lambda x: a) # OK + +fn = lambda x: a +cb(fn) diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index 6b9f139f541c..5eed70246e8c 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -938,8 +938,9 @@ class A: def __init__(self) -> None: self.b = NamedTuple('x', [('s', str), ('n', int)]) # E: NamedTuple type as an attribute is not supported -reveal_type(A().b) # N: Revealed type is "Any" +reveal_type(A().b) # N: Revealed type is "typing.NamedTuple" [builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] [case testNamedTupleWrongfile] from typing import NamedTuple @@ -983,7 +984,7 @@ class Both2(Other, Bar): ... class Both3(Biz, Other): ... def print_namedtuple(obj: NamedTuple) -> None: - reveal_type(obj.name) # N: Revealed type is "builtins.str" + reveal_type(obj._fields) # N: Revealed type is "builtins.tuple[builtins.str, ...]" b1: Bar b2: Baz @@ -1337,3 +1338,12 @@ class SNT(NT[int]): ... reveal_type(SNT("test", 42).meth()) # N: Revealed type is "Tuple[builtins.str, builtins.int, fallback=__main__.SNT]" [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] + +[case testNoCrashUnsupportedNamedTuple] +from typing import NamedTuple +class Test: + def __init__(self, field) -> None: + self.Item = NamedTuple("x", [(field, str)]) # E: NamedTuple type as an attribute is not supported + self.item: self.Item # E: Name "self.Item" is not defined +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 4209f4ec9164..96fbaa77514e 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -4745,7 +4745,7 @@ main:12: note: @overload main:12: note: def __add__(self, Other, /) -> B main:12: note: @overload main:12: note: def __add__(self, A, /) -> A -main:12: note: Overloaded operator methods cannot have wider argument types in overrides +main:12: note: Overloaded operator methods can't have wider argument types in overrides main:32: note: Revealed type is "__main__.Other" [case testOverloadErrorMessageManyMatches] @@ -5405,26 +5405,26 @@ if False: def f2(g): ... reveal_type(f2(A())) # N: Revealed type is "__main__.A" reveal_type(f2(C())) # E: No overload variant of "f2" matches argument type "C" \ - # N: Possible overload variants: \ - # N: def f2(g: A) -> A \ - # N: def f2(g: B) -> B \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f2(g: A) -> A \ + # N: def f2(g: B) -> B \ + # N: Revealed type is "Any" @overload def f3(g: A) -> A: ... @overload def f3(g: B) -> B: ... -if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f3(g: C) -> C: ... def f3(g): ... reveal_type(f3(A())) # N: Revealed type is "__main__.A" reveal_type(f3(C())) # E: No overload variant of "f3" matches argument type "C" \ - # N: Possible overload variants: \ - # N: def f3(g: A) -> A \ - # N: def f3(g: B) -> B \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f3(g: A) -> A \ + # N: def f3(g: B) -> B \ + # N: Revealed type is "Any" if True: @overload @@ -5731,10 +5731,10 @@ def f1(x): ... 
reveal_type(f1(A())) # N: Revealed type is "__main__.A" reveal_type(f1(B())) # N: Revealed type is "__main__.B" reveal_type(f1(D())) # E: No overload variant of "f1" matches argument type "D" \ - # N: Possible overload variants: \ - # N: def f1(x: A) -> A \ - # N: def f1(x: B) -> B \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f1(x: A) -> A \ + # N: def f1(x: B) -> B \ + # N: Revealed type is "Any" @overload def f2(x: A) -> A: ... @@ -5751,14 +5751,14 @@ def f2(x): ... reveal_type(f2(A())) # N: Revealed type is "__main__.A" reveal_type(f2(B())) # N: Revealed type is "__main__.B" reveal_type(f2(C())) # E: No overload variant of "f2" matches argument type "C" \ - # N: Possible overload variants: \ - # N: def f2(x: A) -> A \ - # N: def f2(x: B) -> B \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f2(x: A) -> A \ + # N: def f2(x: B) -> B \ + # N: Revealed type is "Any" @overload # E: Single overload definition, multiple required def f3(x: A) -> A: ... -if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f3(x: B) -> B: ... @@ -5771,13 +5771,13 @@ else: def f3(x): ... reveal_type(f3(A())) # N: Revealed type is "__main__.A" reveal_type(f3(B())) # E: No overload variant of "f3" matches argument type "B" \ - # N: Possible overload variant: \ - # N: def f3(x: A) -> A \ - # N: Revealed type is "Any" + # N: Possible overload variant: \ + # N: def f3(x: A) -> A \ + # N: Revealed type is "Any" @overload # E: Single overload definition, multiple required def f4(x: A) -> A: ... -if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f4(x: B) -> B: ... @@ -5787,9 +5787,10 @@ else: def f4(x): ... reveal_type(f4(A())) # N: Revealed type is "__main__.A" reveal_type(f4(B())) # E: No overload variant of "f4" matches argument type "B" \ - # N: Possible overload variant: \ - # N: def f4(x: A) -> A \ - # N: Revealed type is "Any" + # N: Possible overload variant: \ + # N: def f4(x: A) -> A \ + # N: Revealed type is "Any" + [case testOverloadIfElse3] # flags: --always-false False @@ -5817,10 +5818,10 @@ else: def f1(x): ... reveal_type(f1(A())) # N: Revealed type is "__main__.A" reveal_type(f1(B())) # E: No overload variant of "f1" matches argument type "B" \ - # N: Possible overload variants: \ - # N: def f1(x: A) -> A \ - # N: def f1(x: D) -> D \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f1(x: A) -> A \ + # N: def f1(x: D) -> D \ + # N: Revealed type is "Any" reveal_type(f1(D())) # N: Revealed type is "__main__.D" @overload # E: Single overload definition, multiple required @@ -5828,7 +5829,7 @@ def f2(x: A) -> A: ... if False: @overload def f2(x: B) -> B: ... -elif maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +elif maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f2(x: C) -> C: ... @@ -5838,13 +5839,13 @@ else: def f2(x): ... 
reveal_type(f2(A())) # N: Revealed type is "__main__.A" reveal_type(f2(C())) # E: No overload variant of "f2" matches argument type "C" \ - # N: Possible overload variant: \ - # N: def f2(x: A) -> A \ - # N: Revealed type is "Any" + # N: Possible overload variant: \ + # N: def f2(x: A) -> A \ + # N: Revealed type is "Any" @overload # E: Single overload definition, multiple required def f3(x: A) -> A: ... -if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f3(x: B) -> B: ... @@ -5857,14 +5858,14 @@ else: def f3(x): ... reveal_type(f3(A())) # N: Revealed type is "__main__.A" reveal_type(f3(B())) # E: No overload variant of "f3" matches argument type "B" \ - # N: Possible overload variant: \ - # N: def f3(x: A) -> A \ - # N: Revealed type is "Any" + # N: Possible overload variant: \ + # N: def f3(x: A) -> A \ + # N: Revealed type is "Any" def g(bool_var: bool) -> None: @overload def f4(x: A) -> A: ... - if bool_var: # E: Condition cannot be inferred, unable to merge overloads + if bool_var: # E: Condition can't be inferred, unable to merge overloads @overload def f4(x: B) -> B: ... elif maybe_true: # E: Name "maybe_true" is not defined @@ -5880,10 +5881,11 @@ def g(bool_var: bool) -> None: def f4(x): ... reveal_type(f4(E())) # N: Revealed type is "__main__.E" reveal_type(f4(B())) # E: No overload variant of "f4" matches argument type "B" \ - # N: Possible overload variants: \ - # N: def f4(x: A) -> A \ - # N: def f4(x: E) -> E \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f4(x: A) -> A \ + # N: def f4(x: E) -> E \ + # N: Revealed type is "Any" + [case testOverloadIfSkipUnknownExecution] # flags: --always-true True @@ -5901,14 +5903,14 @@ class D: ... @overload # E: Single overload definition, multiple required def f1(x: A) -> A: ... -if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f1(x: B) -> B: ... def f1(x): ... reveal_type(f1(A())) # N: Revealed type is "__main__.A" -if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f2(x: A) -> A: ... @@ -5918,15 +5920,15 @@ def f2(x: B) -> B: ... def f2(x: C) -> C: ... def f2(x): ... reveal_type(f2(A())) # E: No overload variant of "f2" matches argument type "A" \ - # N: Possible overload variants: \ - # N: def f2(x: B) -> B \ - # N: def f2(x: C) -> C \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f2(x: B) -> B \ + # N: def f2(x: C) -> C \ + # N: Revealed type is "Any" if True: @overload # E: Single overload definition, multiple required def f3(x: A) -> A: ... - if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ + if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f3(x: B) -> B: ... @@ -5934,7 +5936,7 @@ if True: reveal_type(f3(A())) # N: Revealed type is "__main__.A" if True: - if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ + if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f4(x: A) -> A: ... 
@@ -5944,10 +5946,10 @@ if True: def f4(x: C) -> C: ... def f4(x): ... reveal_type(f4(A())) # E: No overload variant of "f4" matches argument type "A" \ - # N: Possible overload variants: \ - # N: def f4(x: B) -> B \ - # N: def f4(x: C) -> C \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f4(x: B) -> B \ + # N: def f4(x: C) -> C \ + # N: Revealed type is "Any" [case testOverloadIfDontSkipUnrelatedOverload] # flags: --always-true True @@ -6187,16 +6189,16 @@ if False: def f8(x): ... reveal_type(f8(A())) # N: Revealed type is "__main__.A" reveal_type(f8(C())) # E: No overload variant of "f8" matches argument type "C" \ - # N: Possible overload variants: \ - # N: def f8(x: A) -> A \ - # N: def f8(x: B) -> B \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f8(x: A) -> A \ + # N: def f8(x: B) -> B \ + # N: Revealed type is "Any" -if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f9(x: A) -> A: ... -if another_maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ +if another_maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "another_maybe_true" is not defined @overload def f9(x: B) -> B: ... @@ -6206,18 +6208,18 @@ def f9(x: C) -> C: ... def f9(x: D) -> D: ... def f9(x): ... reveal_type(f9(A())) # E: No overload variant of "f9" matches argument type "A" \ - # N: Possible overload variants: \ - # N: def f9(x: C) -> C \ - # N: def f9(x: D) -> D \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f9(x: C) -> C \ + # N: def f9(x: D) -> D \ + # N: Revealed type is "Any" reveal_type(f9(C())) # N: Revealed type is "__main__.C" if True: - if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ + if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f10(x: A) -> A: ... - if another_maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ + if another_maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "another_maybe_true" is not defined @overload def f10(x: B) -> B: ... @@ -6227,10 +6229,10 @@ if True: def f10(x: D) -> D: ... def f10(x): ... reveal_type(f10(A())) # E: No overload variant of "f10" matches argument type "A" \ - # N: Possible overload variants: \ - # N: def f10(x: C) -> C \ - # N: def f10(x: D) -> D \ - # N: Revealed type is "Any" + # N: Possible overload variants: \ + # N: def f10(x: C) -> C \ + # N: def f10(x: D) -> D \ + # N: Revealed type is "Any" reveal_type(f10(C())) # N: Revealed type is "__main__.C" if some_var: # E: Name "some_var" is not defined @@ -6251,6 +6253,7 @@ if True: def f12(x: B) -> B: ... def f12(x): ... reveal_type(f12(A())) # N: Revealed type is "__main__.A" + [typing fixtures/typing-medium.pyi] [case testOverloadIfUnconditionalFuncDef] @@ -6406,7 +6409,7 @@ def f1(g: A) -> A: ... if True: @overload # E: Single overload definition, multiple required def f1(g: B) -> B: ... - if maybe_true: # E: Condition cannot be inferred, unable to merge overloads \ + if maybe_true: # E: Condition can't be inferred, unable to merge overloads \ # E: Name "maybe_true" is not defined @overload def f1(g: C) -> C: ... @@ -6432,7 +6435,7 @@ if True: def f3(g: B) -> B: ... 
if True: pass # Some other node - @overload # E: Name "f3" already defined on line 32 \ + @overload # E: Name "f3" already defined on line 32 \ # E: An overloaded function outside a stub file must have an implementation def f3(g: C) -> C: ... @overload diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index fe66b18fbfea..901e73008d56 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -6,11 +6,11 @@ P = ParamSpec('P') [case testInvalidParamSpecDefinitions] from typing import ParamSpec -P1 = ParamSpec("P1", covariant=True) # E: Only the first argument to ParamSpec has defined semantics -P2 = ParamSpec("P2", contravariant=True) # E: Only the first argument to ParamSpec has defined semantics -P3 = ParamSpec("P3", bound=int) # E: Only the first argument to ParamSpec has defined semantics -P4 = ParamSpec("P4", int, str) # E: Only the first argument to ParamSpec has defined semantics -P5 = ParamSpec("P5", covariant=True, bound=int) # E: Only the first argument to ParamSpec has defined semantics +P1 = ParamSpec("P1", covariant=True) # E: The variance and bound arguments to ParamSpec do not have defined semantics yet +P2 = ParamSpec("P2", contravariant=True) # E: The variance and bound arguments to ParamSpec do not have defined semantics yet +P3 = ParamSpec("P3", bound=int) # E: The variance and bound arguments to ParamSpec do not have defined semantics yet +P4 = ParamSpec("P4", int, str) # E: Too many positional arguments for "ParamSpec" +P5 = ParamSpec("P5", covariant=True, bound=int) # E: The variance and bound arguments to ParamSpec do not have defined semantics yet [builtins fixtures/paramspec.pyi] [case testParamSpecLocations] diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index 75f2433c6d8c..dc1ae448c0d1 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -409,8 +409,8 @@ def local() -> None: x: L reveal_type(x) # N: Revealed type is "builtins.list[Union[builtins.int, Any]]" -S = Type[S] # E: Type[...] cannot contain another Type[...] -U = Type[Union[int, U]] # E: Type[...] cannot contain another Type[...] +S = Type[S] # E: Type[...] can't contain another Type[...] +U = Type[Union[int, U]] # E: Type[...] can't contain another Type[...] 
x: U reveal_type(x) # N: Revealed type is "Type[Any]" @@ -937,3 +937,12 @@ x: A[int, str] if last is not None: reveal_type(last) # N: Revealed type is "Tuple[builtins.int, builtins.str, Union[Tuple[builtins.int, builtins.str, Union[..., None]], None]]" [builtins fixtures/tuple.pyi] + +[case testRecursiveAliasLiteral] +from typing import Tuple +from typing_extensions import Literal + +NotFilter = Tuple[Literal["not"], "NotFilter"] +n: NotFilter +reveal_type(n[1][1][0]) # N: Revealed type is "Literal['not']" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 3cb8864f9207..29e2c5ba3266 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -2212,3 +2212,17 @@ main:1: error: Module "typing" has no attribute "_FutureFeatureFixture" main:1: note: Use `from typing_extensions import _FutureFeatureFixture` instead main:1: note: See https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module [builtins fixtures/tuple.pyi] + +[case testNoCrashOnBreakOutsideLoopFunction] +def foo(): + for x in [1, 2]: + def inner(): + break # E: "break" outside loop +[builtins fixtures/list.pyi] + +[case testNoCrashOnBreakOutsideLoopClass] +class Outer: + for x in [1, 2]: + class Inner: + break # E: "break" outside loop +[builtins fixtures/list.pyi] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index e843532a2560..f8d6573a56fe 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1367,42 +1367,36 @@ reveal_type(a + b) # N: Revealed type is "Tuple[builtins.int, builtins.str, bui from typing import Tuple # long initializer assignment with few mismatches -t: Tuple[int, ...] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", 11) \ - # E: Incompatible types in assignment (3 tuple items are incompatible) \ - # N: Expression tuple item 8 has type "str"; "int" expected; \ - # N: Expression tuple item 9 has type "str"; "int" expected; \ - # N: Expression tuple item 10 has type "str"; "int" expected; +t: Tuple[int, ...] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", 11) # E: Incompatible types in assignment (3 tuple items are incompatible) \ + # N: Expression tuple item 8 has type "str"; "int" expected; \ + # N: Expression tuple item 9 has type "str"; "int" expected; \ + # N: Expression tuple item 10 has type "str"; "int" expected; # long initializer assignment with more mismatches -t1: Tuple[int, ...] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str") \ - # E: Incompatible types in assignment (4 tuple items are incompatible; 1 items are omitted) \ - # N: Expression tuple item 8 has type "str"; "int" expected; \ - # N: Expression tuple item 9 has type "str"; "int" expected; \ - # N: Expression tuple item 10 has type "str"; "int" expected; +t1: Tuple[int, ...] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str") # E: Incompatible types in assignment (4 tuple items are incompatible; 1 items are omitted) \ + # N: Expression tuple item 8 has type "str"; "int" expected; \ + # N: Expression tuple item 9 has type "str"; "int" expected; \ + # N: Expression tuple item 10 has type "str"; "int" expected; # short tuple initializer assignment -t2: Tuple[int, ...] = (1, 2, "s", 4) \ - # E: Incompatible types in assignment (expression has type "Tuple[int, int, str, int]", variable has type "Tuple[int, ...]") +t2: Tuple[int, ...] 
= (1, 2, "s", 4) # E: Incompatible types in assignment (expression has type "Tuple[int, int, str, int]", variable has type "Tuple[int, ...]") # long initializer assignment with few mismatches, no ellipsis -t3: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, "str", "str") \ - # E: Incompatible types in assignment (2 tuple items are incompatible) \ - # N: Expression tuple item 10 has type "str"; "int" expected; \ - # N: Expression tuple item 11 has type "str"; "int" expected; +t3: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, "str", "str") # E: Incompatible types in assignment (2 tuple items are incompatible) \ + # N: Expression tuple item 10 has type "str"; "int" expected; \ + # N: Expression tuple item 11 has type "str"; "int" expected; # long initializer assignment with more mismatches, no ellipsis -t4: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str") \ - # E: Incompatible types in assignment (4 tuple items are incompatible; 1 items are omitted) \ - # N: Expression tuple item 8 has type "str"; "int" expected; \ - # N: Expression tuple item 9 has type "str"; "int" expected; \ - # N: Expression tuple item 10 has type "str"; "int" expected; +t4: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str") # E: Incompatible types in assignment (4 tuple items are incompatible; 1 items are omitted) \ + # N: Expression tuple item 8 has type "str"; "int" expected; \ + # N: Expression tuple item 9 has type "str"; "int" expected; \ + # N: Expression tuple item 10 has type "str"; "int" expected; # short tuple initializer assignment, no ellipsis t5: Tuple[int, int] = (1, 2, "s", 4) # E: Incompatible types in assignment (expression has type "Tuple[int, int, str, int]", variable has type "Tuple[int, int]") # long initializer assignment with mismatched pairs -t6: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str", 1, 1, 1, 1, 1) \ - # E: Incompatible types in assignment (expression has type Tuple[int, int, ... <15 more items>], variable has type Tuple[int, int, ... <10 more items>]) +t6: Tuple[int, int, int, int, int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str", 1, 1, 1, 1, 1) # E: Incompatible types in assignment (expression has type Tuple[int, int, ... <15 more items>], variable has type Tuple[int, int, ... 
<10 more items>]) [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 970dc05b488d..fc487d2d553d 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -2873,3 +2873,15 @@ foo({"foo": {"e": "foo"}}) # E: Type of TypedDict is ambiguous, none of ("A", " # E: Argument 1 to "foo" has incompatible type "Dict[str, Dict[str, str]]"; expected "Union[A, B]" [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + +[case testTypedDictMissingEmptyKey] +from typing_extensions import TypedDict + +class A(TypedDict): + my_attr_1: str + my_attr_2: int + +d: A +d[''] # E: TypedDict "A" has no key "" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/check-typevar-defaults.test b/test-data/unit/check-typevar-defaults.test new file mode 100644 index 000000000000..514186aa7518 --- /dev/null +++ b/test-data/unit/check-typevar-defaults.test @@ -0,0 +1,83 @@ +[case testTypeVarDefaultsBasic] +import builtins +from typing import Generic, TypeVar, ParamSpec, Callable, Tuple, List +from typing_extensions import TypeVarTuple, Unpack + +T1 = TypeVar("T1", default=int) +P1 = ParamSpec("P1", default=[int, str]) +Ts1 = TypeVarTuple("Ts1", default=Unpack[Tuple[int, str]]) + +def f1(a: T1) -> List[T1]: ... +reveal_type(f1) # N: Revealed type is "def [T1 = builtins.int] (a: T1`-1 = builtins.int) -> builtins.list[T1`-1 = builtins.int]" + +def f2(a: Callable[P1, None] ) -> Callable[P1, None]: ... +reveal_type(f2) # N: Revealed type is "def [P1 = [builtins.int, builtins.str]] (a: def (*P1.args, **P1.kwargs)) -> def (*P1.args, **P1.kwargs)" + +def f3(a: Tuple[Unpack[Ts1]]) -> Tuple[Unpack[Ts1]]: ... +reveal_type(f3) # N: Revealed type is "def [Ts1 = Unpack[Tuple[builtins.int, builtins.str]]] (a: Tuple[Unpack[Ts1`-1 = Unpack[Tuple[builtins.int, builtins.str]]]]) -> Tuple[Unpack[Ts1`-1 = Unpack[Tuple[builtins.int, builtins.str]]]]" + + +class ClassA1(Generic[T1]): ... +class ClassA2(Generic[P1]): ... +class ClassA3(Generic[Unpack[Ts1]]): ... + +reveal_type(ClassA1) # N: Revealed type is "def [T1 = builtins.int] () -> __main__.ClassA1[T1`1 = builtins.int]" +reveal_type(ClassA2) # N: Revealed type is "def [P1 = [builtins.int, builtins.str]] () -> __main__.ClassA2[P1`1 = [builtins.int, builtins.str]]" +reveal_type(ClassA3) # N: Revealed type is "def [Ts1 = Unpack[Tuple[builtins.int, builtins.str]]] () -> __main__.ClassA3[Unpack[Ts1`1 = Unpack[Tuple[builtins.int, builtins.str]]]]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarDefaultsValid] +from typing import TypeVar, ParamSpec, Any, List, Tuple +from typing_extensions import TypeVarTuple, Unpack + +S0 = TypeVar("S0") +S1 = TypeVar("S1", bound=int) + +P0 = ParamSpec("P0") +Ts0 = TypeVarTuple("Ts0") + +T1 = TypeVar("T1", default=int) +T2 = TypeVar("T2", bound=float, default=int) +T3 = TypeVar("T3", bound=List[Any], default=List[int]) +T4 = TypeVar("T4", int, str, default=int) +T5 = TypeVar("T5", default=S0) +T6 = TypeVar("T6", bound=float, default=S1) +# T7 = TypeVar("T7", bound=List[Any], default=List[S0]) # TODO + +P1 = ParamSpec("P1", default=[]) +P2 = ParamSpec("P2", default=...) 
+P3 = ParamSpec("P3", default=[int, str]) +P4 = ParamSpec("P4", default=P0) + +Ts1 = TypeVarTuple("Ts1", default=Unpack[Tuple[int]]) +Ts2 = TypeVarTuple("Ts2", default=Unpack[Tuple[int, ...]]) +# Ts3 = TypeVarTuple("Ts3", default=Unpack[Ts0]) # TODO +[builtins fixtures/tuple.pyi] + +[case testTypeVarDefaultsInvalid] +from typing import TypeVar, ParamSpec, Tuple +from typing_extensions import TypeVarTuple, Unpack + +T1 = TypeVar("T1", default=2) # E: TypeVar "default" must be a type +T2 = TypeVar("T2", default=[int, str]) # E: Bracketed expression "[...]" is not valid as a type \ + # N: Did you mean "List[...]"? \ + # E: TypeVar "default" must be a type + +P1 = ParamSpec("P1", default=int) # E: The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec +P2 = ParamSpec("P2", default=2) # E: The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec +P3 = ParamSpec("P3", default=(2, int)) # E: The default argument to ParamSpec must be a list expression, ellipsis, or a ParamSpec +P4 = ParamSpec("P4", default=[2, int]) # E: Argument 0 of ParamSpec default must be a type + +Ts1 = TypeVarTuple("Ts1", default=2) # E: The default argument to TypeVarTuple must be an Unpacked tuple +Ts2 = TypeVarTuple("Ts2", default=int) # E: The default argument to TypeVarTuple must be an Unpacked tuple +Ts3 = TypeVarTuple("Ts3", default=Tuple[int]) # E: The default argument to TypeVarTuple must be an Unpacked tuple +[builtins fixtures/tuple.pyi] + +[case testTypeVarDefaultsInvalid2] +from typing import TypeVar, List, Union + +T1 = TypeVar("T1", bound=str, default=int) # E: TypeVar default must be a subtype of the bound type +T2 = TypeVar("T2", bound=List[str], default=List[int]) # E: TypeVar default must be a subtype of the bound type +T3 = TypeVar("T3", int, str, default=bytes) # E: TypeVar default must be one of the constraint types +T4 = TypeVar("T4", int, str, default=Union[int, str]) # E: TypeVar default must be one of the constraint types +T5 = TypeVar("T5", float, str, default=int) # E: TypeVar default must be one of the constraint types diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index c2e98cdb74f9..b87bb7f17eb0 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1484,6 +1484,10 @@ note: A user-defined top-level module with name "typing" is not supported [file dir/stdlib/types.pyi] [file dir/stdlib/typing.pyi] [file dir/stdlib/typing_extensions.pyi] +[file dir/stdlib/_typeshed.pyi] +[file dir/stdlib/_collections_abc.pyi] +[file dir/stdlib/collections/abc.pyi] +[file dir/stdlib/collections/__init__.pyi] [file dir/stdlib/VERSIONS] [out] Failed to find builtin module mypy_extensions, perhaps typeshed is broken? 
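The new check-typevar-defaults.test cases above exercise PEP 696 type parameter defaults. A minimal usage sketch, assuming typing_extensions >= 4.4 (which backports the default= argument); the inline comments describe the intended fallback behaviour while mypy's inference support was still landing:

from typing import Generic, List
from typing_extensions import TypeVar

T = TypeVar("T", default=int)

class Box(Generic[T]):
    def __init__(self, items: List[T]) -> None:
        self.items = items

b: Box = Box([1, 2, 3])        # bare Box falls back to Box[int]
c: Box[str] = Box(["a", "b"])  # an explicit type argument overrides the default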
@@ -1523,6 +1527,10 @@ class dict: pass
 [file dir/stdlib/typing.pyi]
 [file dir/stdlib/mypy_extensions.pyi]
 [file dir/stdlib/typing_extensions.pyi]
+[file dir/stdlib/_typeshed.pyi]
+[file dir/stdlib/_collections_abc.pyi]
+[file dir/stdlib/collections/abc.pyi]
+[file dir/stdlib/collections/__init__.pyi]
 [file dir/stdlib/foo.pyi]
 1() # Errors are reported if the file was explicitly passed on the command line
 [file dir/stdlib/VERSIONS]
diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test
index c60068a44bec..1fae1514111c 100644
--- a/test-data/unit/daemon.test
+++ b/test-data/unit/daemon.test
@@ -28,6 +28,33 @@ Daemon stopped
 [file foo.py]
 def f(): pass

+[case testDaemonRunIgnoreMissingImports]
+$ dmypy run -- foo.py --follow-imports=error --ignore-missing-imports
+Daemon started
+Success: no issues found in 1 source file
+$ dmypy stop
+Daemon stopped
+[file foo.py]
+def f(): pass
+
+[case testDaemonRunErrorCodes]
+$ dmypy run -- foo.py --follow-imports=error --disable-error-code=type-abstract
+Daemon started
+Success: no issues found in 1 source file
+$ dmypy stop
+Daemon stopped
+[file foo.py]
+def f(): pass
+
+[case testDaemonRunCombinedOptions]
+$ dmypy run -- foo.py --follow-imports=error --ignore-missing-imports --disable-error-code=type-abstract
+Daemon started
+Success: no issues found in 1 source file
+$ dmypy stop
+Daemon stopped
+[file foo.py]
+def f(): pass
+
 [case testDaemonIgnoreConfigFiles]
 $ dmypy start -- --follow-imports=error
 Daemon started
@@ -35,6 +62,28 @@ Daemon started
 \[mypy]
 files = ./foo.py

+[case testDaemonRunMultipleStrict]
+$ dmypy run -- foo.py --strict --follow-imports=error
+Daemon started
+foo.py:1: error: Function is missing a return type annotation
+foo.py:1: note: Use "-> None" if function does not return a value
+Found 1 error in 1 file (checked 1 source file)
+== Return code: 1
+$ dmypy run -- bar.py --strict --follow-imports=error
+bar.py:1: error: Function is missing a return type annotation
+bar.py:1: note: Use "-> None" if function does not return a value
+Found 1 error in 1 file (checked 1 source file)
+== Return code: 1
+$ dmypy run -- foo.py --strict --follow-imports=error
+foo.py:1: error: Function is missing a return type annotation
+foo.py:1: note: Use "-> None" if function does not return a value
+Found 1 error in 1 file (checked 1 source file)
+== Return code: 1
+[file foo.py]
+def f(): pass
+[file bar.py]
+def f(): pass
+
 [case testDaemonRunRestart]
 $ dmypy run -- foo.py --follow-imports=error
 Daemon started
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
index 88a11be31f34..a0d7d302fd08 100644
--- a/test-data/unit/fine-grained.test
+++ b/test-data/unit/fine-grained.test
@@ -10340,3 +10340,24 @@ reveal_type(x)
 [out]
 ==
 a.py:3: note: Revealed type is "Union[def (x: builtins.int) -> builtins.int, def (*x: builtins.int) -> builtins.int]"
+
+[case testErrorInReAddedModule]
+# flags: --disallow-untyped-defs --follow-imports=error
+# cmd: mypy a.py
+# cmd2: mypy b.py
+# cmd3: mypy a.py
+
+[file a.py]
+def f(): pass
+[file b.py]
+def f(): pass
+[file unrelated.txt.3]
+[out]
+a.py:1: error: Function is missing a return type annotation
+a.py:1: note: Use "-> None" if function does not return a value
+==
+b.py:1: error: Function is missing a return type annotation
+b.py:1: note: Use "-> None" if function does not return a value
+==
+a.py:1: error: Function is missing a return type annotation
+a.py:1: note: Use "-> None" if function does not return a value
diff --git a/test-data/unit/fixtures/typing-namedtuple.pyi b/test-data/unit/fixtures/typing-namedtuple.pyi
index c8658a815a13..f4744575fc09 100644
--- a/test-data/unit/fixtures/typing-namedtuple.pyi
+++ b/test-data/unit/fixtures/typing-namedtuple.pyi
@@ -6,6 +6,8 @@ Type = 0
 Literal = 0
 Optional = 0
 Self = 0
+Tuple = 0
+ClassVar = 0

 T = TypeVar('T')
 T_co = TypeVar('T_co', covariant=True)
@@ -18,6 +20,9 @@ class Mapping(Iterable[KT], Generic[KT, T_co]):
     def keys(self) -> Iterable[T]: pass # Approximate return type
     def __getitem__(self, key: T) -> T_co: pass

-class Tuple(Sequence): pass
-class NamedTuple(Tuple):
-    name: str
+class NamedTuple(tuple[Any, ...]):
+    _fields: ClassVar[tuple[str, ...]]
+    @overload
+    def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ...
+    @overload
+    def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ...
diff --git a/test-data/unit/pep561.test b/test-data/unit/pep561.test
index 8c401cfc3c51..e8ebbd03dca7 100644
--- a/test-data/unit/pep561.test
+++ b/test-data/unit/pep561.test
@@ -72,15 +72,6 @@ reveal_type(a)
 [out]
 testStubPrecedence.py:5: note: Revealed type is "builtins.list[builtins.str]"

-[case testTypedPkgSimpleEgg]
-# pkgs: typedpkg; no-pip
-from typedpkg.sample import ex
-from typedpkg import dne
-a = ex([''])
-reveal_type(a)
-[out]
-testTypedPkgSimpleEgg.py:5: note: Revealed type is "builtins.tuple[builtins.str, ...]"
-
 [case testTypedPkgSimpleEditable]
 # pkgs: typedpkg; editable
 from typedpkg.sample import ex
@@ -90,15 +81,6 @@ reveal_type(a)
 [out]
 testTypedPkgSimpleEditable.py:5: note: Revealed type is "builtins.tuple[builtins.str, ...]"

-[case testTypedPkgSimpleEditableEgg]
-# pkgs: typedpkg; editable; no-pip
-from typedpkg.sample import ex
-from typedpkg import dne
-a = ex([''])
-reveal_type(a)
-[out]
-testTypedPkgSimpleEditableEgg.py:5: note: Revealed type is "builtins.tuple[builtins.str, ...]"
-
 [case testTypedPkgNamespaceImportFrom]
 # pkgs: typedpkg, typedpkg_ns_a
 from typedpkg.pkg.aaa import af
diff --git a/test-data/unit/plugins/function_sig_hook.py b/test-data/unit/plugins/function_sig_hook.py
index d83c7df26209..4d901b96716e 100644
--- a/test-data/unit/plugins/function_sig_hook.py
+++ b/test-data/unit/plugins/function_sig_hook.py
@@ -1,5 +1,5 @@
-from mypy.plugin import CallableType, CheckerPluginInterface, FunctionSigContext, Plugin
-from mypy.types import Instance, Type
+from mypy.plugin import CallableType, FunctionSigContext, Plugin
+

 class FunctionSigPlugin(Plugin):
     def get_function_signature_hook(self, fullname):
@@ -7,20 +7,17 @@ def get_function_signature_hook(self, fullname):
             return my_hook
         return None

-def _str_to_int(api: CheckerPluginInterface, typ: Type) -> Type:
-    if isinstance(typ, Instance):
-        if typ.type.fullname == 'builtins.str':
-            return api.named_generic_type('builtins.int', [])
-        elif typ.args:
-            return typ.copy_modified(args=[_str_to_int(api, t) for t in typ.args])
-
-    return typ

 def my_hook(ctx: FunctionSigContext) -> CallableType:
+    arg1_args = ctx.args[0]
+    if len(arg1_args) != 1:
+        return ctx.default_signature
+    arg1_type = ctx.api.get_expression_type(arg1_args[0])
     return ctx.default_signature.copy_modified(
-        arg_types=[_str_to_int(ctx.api, t) for t in ctx.default_signature.arg_types],
-        ret_type=_str_to_int(ctx.api, ctx.default_signature.ret_type),
+        arg_types=[arg1_type],
+        ret_type=arg1_type,
     )
+
 def plugin(version):
     return FunctionSigPlugin
diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test
index 7108734102d1..93ff2da9f7b9 100644
--- a/test-data/unit/pythoneval.test
+++ b/test-data/unit/pythoneval.test
@@ -1987,6 +1987,19 @@ def good9(foo1: Foo[Concatenate[int, P]], foo2: Foo[[int, str, bytes]], *args: P
 [out]
 _testStrictEqualitywithParamSpec.py:11: error: Non-overlapping equality check (left operand type: "Foo[[int]]", right operand type: "Bar[[int]]")
+
+[case testInferenceOfDunderDictOnClassObjects]
+class Foo: ...
+reveal_type(Foo.__dict__)
+reveal_type(Foo().__dict__)
+Foo.__dict__ = {}
+Foo().__dict__ = {}
+
+[out]
+_testInferenceOfDunderDictOnClassObjects.py:2: note: Revealed type is "types.MappingProxyType[builtins.str, Any]"
+_testInferenceOfDunderDictOnClassObjects.py:3: note: Revealed type is "builtins.dict[builtins.str, Any]"
+_testInferenceOfDunderDictOnClassObjects.py:4: error: Property "__dict__" defined in "type" is read-only
+_testInferenceOfDunderDictOnClassObjects.py:4: error: Incompatible types in assignment (expression has type "Dict[<nothing>, <nothing>]", variable has type "MappingProxyType[str, Any]")
+
 [case testTypeVarTuple]
 # flags: --enable-incomplete-feature=TypeVarTuple --enable-incomplete-feature=Unpack --python-version=3.11
 from typing import Any, Callable, Unpack, TypeVarTuple
diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test
index d09ed87d3afc..9c6ac2833c8b 100644
--- a/test-data/unit/semanal-errors.test
+++ b/test-data/unit/semanal-errors.test
@@ -362,41 +362,55 @@ main:2: error: "yield" outside function
 1 = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal
 [out version>=3.10]
-main:1: error: can't assign to literal here. Maybe you meant '==' instead of '='?
+main:1: error: cannot assign to literal here. Maybe you meant '==' instead of '='?

 [case testInvalidLvalues2]
 (1) = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal
 [out version>=3.10]
-main:1: error: can't assign to literal here. Maybe you meant '==' instead of '='?
+main:1: error: cannot assign to literal here. Maybe you meant '==' instead of '='?

 [case testInvalidLvalues3]
 (1, 1) = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal

 [case testInvalidLvalues4]
 [1, 1] = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal

 [case testInvalidLvalues6]
 x = y = z = 1 # ok
 x, (y, 1) = 1
 [out]
 main:2: error: can't assign to literal
+[out version>=3.8]
+main:2: error: cannot assign to literal

 [case testInvalidLvalues7]
 x, [y, 1] = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal

 [case testInvalidLvalues8]
 x, [y, [z, 1]] = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal

 [case testInvalidLvalues9]
 x, (y) = 1 # ok
@@ -404,41 +418,53 @@ x, (y, (z, z)) = 1 # ok
 x, (y, (z, 1)) = 1
 [out]
 main:3: error: can't assign to literal
+[out version>=3.8]
+main:3: error: cannot assign to literal

 [case testInvalidLvalues10]
 x + x = 1
 [out]
 main:1: error: can't assign to operator
+[out version>=3.8]
+main:1: error: cannot assign to operator
 [out version>=3.10]
-main:1: error: can't assign to expression here. Maybe you meant '==' instead of '='?
+main:1: error: cannot assign to expression here. Maybe you meant '==' instead of '='?
 [case testInvalidLvalues11]
 -x = 1
 [out]
 main:1: error: can't assign to operator
+[out version>=3.8]
+main:1: error: cannot assign to operator
 [out version>=3.10]
-main:1: error: can't assign to expression here. Maybe you meant '==' instead of '='?
+main:1: error: cannot assign to expression here. Maybe you meant '==' instead of '='?

 [case testInvalidLvalues12]
 1.1 = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal
 [out version>=3.10]
-main:1: error: can't assign to literal here. Maybe you meant '==' instead of '='?
+main:1: error: cannot assign to literal here. Maybe you meant '==' instead of '='?

 [case testInvalidLvalues13]
 'x' = 1
 [out]
 main:1: error: can't assign to literal
+[out version>=3.8]
+main:1: error: cannot assign to literal
 [out version>=3.10]
-main:1: error: can't assign to literal here. Maybe you meant '==' instead of '='?
+main:1: error: cannot assign to literal here. Maybe you meant '==' instead of '='?

 [case testInvalidLvalues14]
 x() = 1
 [out]
 main:1: error: can't assign to function call
+[out version>=3.8]
+main:1: error: cannot assign to function call
 [out version>=3.10]
-main:1: error: can't assign to function call here. Maybe you meant '==' instead of '='?
+main:1: error: cannot assign to function call here. Maybe you meant '==' instead of '='?

 [case testTwoStarExpressions]
 a, *b, *c = 1
@@ -490,20 +516,24 @@ main:2: error: Can use starred expression only as assignment target
 [case testInvalidDel1]
 x = 1
-del x(1) # E: can't delete function call
+del x(1)
 [out]
+main:2: error: can't delete function call
+[out version>=3.8]
+main:2: error: cannot delete function call

 [case testInvalidDel2]
 x = 1
 del x + 1
 [out]
+main:2: error: can't delete operator
+[out version>=3.8]
 main:2: error: cannot delete operator
 [out version>=3.10]
 main:2: error: cannot delete expression

 [case testInvalidDel3]
 del z # E: Name "z" is not defined
-[out]

 [case testFunctionTvarScope]
@@ -898,8 +928,10 @@ def f(): pass
 f() = 1 # type: int
 [out]
 main:3: error: can't assign to function call
+[out version>=3.8]
+main:3: error: cannot assign to function call
 [out version>=3.10]
-main:3: error: can't assign to function call here. Maybe you meant '==' instead of '='?
+main:3: error: cannot assign to function call here. Maybe you meant '==' instead of '='?

 [case testIndexedAssignmentWithTypeDeclaration]
 import typing
@@ -975,6 +1007,8 @@ x, y = 1, 2 # type: int # E: Tuple type expected for multiple variables
 a = 1
 a() = None # type: int
 [out]
+main:2: error: can't assign to function call
+[out version>=3.8]
 main:2: error: cannot assign to function call
 [out version>=3.10]
 main:2: error: cannot assign to function call here. Maybe you meant '==' instead of '='?
@@ -1275,8 +1309,11 @@ main:2: note: Did you forget to import it from "typing"? (Suggestion: "from typi
 [case testInvalidWithTarget]
 def f(): pass
-with f() as 1: pass # E: can't assign to literal
+with f() as 1: pass
 [out]
+main:2: error: can't assign to literal
+[out version>=3.8]
+main:2: error: cannot assign to literal

 [case testInvalidTypeAnnotation]
 import typing
@@ -1291,8 +1328,10 @@ def f() -> None:
     f() = 1 # type: int
 [out]
 main:3: error: can't assign to function call
+[out version>=3.8]
+main:3: error: cannot assign to function call
 [out version>=3.10]
-main:3: error: can't assign to function call here. Maybe you meant '==' instead of '='?
+main:3: error: cannot assign to function call here. Maybe you meant '==' instead of '='?
 [case testInvalidReferenceToAttributeOfOuterClass]
 class A:
@@ -1465,8 +1504,9 @@ TVariadic2 = TypeVarTuple('TVariadic2')
 TP = TypeVarTuple('?') # E: String argument 1 "?" to TypeVarTuple(...) does not match variable name "TP"
 TP2: int = TypeVarTuple('TP2') # E: Cannot declare the type of a TypeVar or similar construct
 TP3 = TypeVarTuple() # E: Too few arguments for TypeVarTuple()
-TP4 = TypeVarTuple('TP4', 'TP4') # E: Only the first argument to TypeVarTuple has defined semantics
+TP4 = TypeVarTuple('TP4', 'TP4') # E: Too many positional arguments for "TypeVarTuple"
 TP5 = TypeVarTuple(t='TP5') # E: TypeVarTuple() expects a string literal as first argument
+TP6 = TypeVarTuple('TP6', bound=int) # E: Unexpected keyword argument "bound" for "TypeVarTuple"

 x: TVariadic # E: TypeVarTuple "TVariadic" is unbound
 y: Unpack[TVariadic] # E: TypeVarTuple "TVariadic" is unbound
diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test
index f602c236c949..9b43a5b853c0 100644
--- a/test-data/unit/semanal-statements.test
+++ b/test-data/unit/semanal-statements.test
@@ -558,8 +558,10 @@ def f(x, y) -> None:
     del x, y + 1
 [out]
 main:2: error: can't delete operator
+[out version>=3.8]
+main:2: error: cannot delete operator
 [out version>=3.10]
-main:2: error: can't delete expression
+main:2: error: cannot delete expression

 [case testTry]
 class c: pass
diff --git a/tox.ini b/tox.ini
index 6d64cebaec6d..2ddda8281beb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -33,6 +33,8 @@ commands =
 [testenv:docs]
 description = invoke sphinx-build to build the HTML docs
+passenv =
+    VERIFY_MYPY_ERROR_CODES
 deps = -rdocs/requirements-docs.txt
 commands =
     sphinx-build -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs}