From 7bd6fdd326b6a06795edb46a5c74fc5cdeb87cc9 Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo
Date: Tue, 17 May 2022 18:33:56 +0100
Subject: [PATCH 01/80] [mypyc] Detect always defined attributes (#12600)

Use static analysis to find attributes that are always defined. Always
defined attributes don't require checks on each access. This makes them
faster and also reduces code size.

Attributes defined in the class body and assigned to in all code paths
in `__init__` are always defined. We need to know all subclasses
statically to determine whether `__init__` always defines an attribute
in every case, including in subclasses.

The analysis looks at `__init__` methods and supports limited
inter-procedural analysis over `super().__init__(...)` calls. Otherwise
we rely on intra-procedural analysis to keep the analysis fast.

As a side effect, `__init__` will now always be called when
constructing an object. This means that `copy.copy` (and others like
it) won't be supported for native classes unless `__init__` can be
called without arguments.

`mypyc/analysis/attrdefined.py` has more details about the algorithm in
docstrings.

Performance impact on selected benchmarks (with clang):

- richards +28%
- deltablue +10%
- hexiom +1%

The richards result is probably an outlier. This will also
significantly help with native integers (mypyc/mypyc#837), as tracking
undefined values would otherwise require extra memory use.

Closes mypyc/mypyc#836.
---
 mypy/copytype.py                            | 111 +++
 mypy/moduleinspect.py                       |  15 +-
 mypy/nodes.py                               |  17 +-
 mypy/stubtest.py                            |   2 +-
 mypy/typeops.py                             |   4 +-
 mypy/types.py                               |  11 -
 mypyc/analysis/attrdefined.py               | 377 ++++++++++
 mypyc/analysis/dataflow.py                  | 129 ++--
 mypyc/analysis/selfleaks.py                 | 153 ++++
 mypyc/codegen/emitclass.py                  |  65 +-
 mypyc/codegen/emitfunc.py                   |  81 ++-
 mypyc/doc/differences_from_python.rst       |  37 +
 mypyc/ir/class_ir.py                        |  44 ++
 mypyc/ir/ops.py                             |   8 +
 mypyc/ir/pprint.py                          |   9 +-
 mypyc/irbuild/classdef.py                   |  49 +-
 mypyc/irbuild/function.py                   |   2 +-
 mypyc/irbuild/main.py                       |   5 +-
 mypyc/irbuild/mapper.py                     |  16 +-
 mypyc/irbuild/prepare.py                    |   3 +
 mypyc/test-data/alwaysdefined.test          | 732 ++++++++++++++++++++
 mypyc/test-data/irbuild-basic.test          |  19 +-
 mypyc/test-data/irbuild-classes.test        |  79 ++-
 mypyc/test-data/irbuild-constant-fold.test  |   3 +-
 mypyc/test-data/irbuild-singledispatch.test |  41 +-
 mypyc/test-data/irbuild-statements.test     |   5 +-
 mypyc/test-data/run-classes.test            | 290 +++++++-
 mypyc/test-data/run-multimodule.test        |  63 ++
 mypyc/test/test_alwaysdefined.py            |  42 ++
 mypyc/test/test_run.py                      |   1 -
 mypyc/test/test_serialization.py            |   5 +-
 mypyc/test/testutil.py                      |   9 +-
 32 files changed, 2187 insertions(+), 240 deletions(-)
 create mode 100644 mypy/copytype.py
 create mode 100644 mypyc/analysis/attrdefined.py
 create mode 100644 mypyc/analysis/selfleaks.py
 create mode 100644 mypyc/test-data/alwaysdefined.test
 create mode 100644 mypyc/test/test_alwaysdefined.py

diff --git a/mypy/copytype.py b/mypy/copytype.py
new file mode 100644
index 000000000000..85d7d531c5a3
--- /dev/null
+++ b/mypy/copytype.py
@@ -0,0 +1,111 @@
+from typing import Any, cast
+
+from mypy.types import (
+    ProperType, UnboundType, AnyType, NoneType, UninhabitedType, ErasedType, DeletedType,
+    Instance, TypeVarType, ParamSpecType, PartialType, CallableType, TupleType, TypedDictType,
+    LiteralType, UnionType, Overloaded, TypeType, TypeAliasType, UnpackType, Parameters,
+    TypeVarTupleType
+)
+from mypy.type_visitor import TypeVisitor
+
+
+def copy_type(t: ProperType) -> ProperType:
+    """Create a shallow copy of a type.
+ + This can be used to mutate the copy with truthiness information. + + Classes compiled with mypyc don't support copy.copy(), so we need + a custom implementation. + """ + return t.accept(TypeShallowCopier()) + + +class TypeShallowCopier(TypeVisitor[ProperType]): + def visit_unbound_type(self, t: UnboundType) -> ProperType: + return t + + def visit_any(self, t: AnyType) -> ProperType: + return self.copy_common(t, AnyType(t.type_of_any, t.source_any, t.missing_import_name)) + + def visit_none_type(self, t: NoneType) -> ProperType: + return self.copy_common(t, NoneType()) + + def visit_uninhabited_type(self, t: UninhabitedType) -> ProperType: + dup = UninhabitedType(t.is_noreturn) + dup.ambiguous = t.ambiguous + return self.copy_common(t, dup) + + def visit_erased_type(self, t: ErasedType) -> ProperType: + return self.copy_common(t, ErasedType()) + + def visit_deleted_type(self, t: DeletedType) -> ProperType: + return self.copy_common(t, DeletedType(t.source)) + + def visit_instance(self, t: Instance) -> ProperType: + dup = Instance(t.type, t.args, last_known_value=t.last_known_value) + dup.invalid = t.invalid + return self.copy_common(t, dup) + + def visit_type_var(self, t: TypeVarType) -> ProperType: + dup = TypeVarType( + t.name, + t.fullname, + t.id, + values=t.values, + upper_bound=t.upper_bound, + variance=t.variance, + ) + return self.copy_common(t, dup) + + def visit_param_spec(self, t: ParamSpecType) -> ProperType: + dup = ParamSpecType(t.name, t.fullname, t.id, t.flavor, t.upper_bound, prefix=t.prefix) + return self.copy_common(t, dup) + + def visit_parameters(self, t: Parameters) -> ProperType: + dup = Parameters(t.arg_types, t.arg_kinds, t.arg_names, + variables=t.variables, + is_ellipsis_args=t.is_ellipsis_args) + return self.copy_common(t, dup) + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: + dup = TypeVarTupleType(t.name, t.fullname, t.id, t.upper_bound) + return self.copy_common(t, dup) + + def visit_unpack_type(self, t: UnpackType) -> ProperType: + dup = UnpackType(t.type) + return self.copy_common(t, dup) + + def visit_partial_type(self, t: PartialType) -> ProperType: + return self.copy_common(t, PartialType(t.type, t.var, t.value_type)) + + def visit_callable_type(self, t: CallableType) -> ProperType: + return self.copy_common(t, t.copy_modified()) + + def visit_tuple_type(self, t: TupleType) -> ProperType: + return self.copy_common(t, TupleType(t.items, t.partial_fallback, implicit=t.implicit)) + + def visit_typeddict_type(self, t: TypedDictType) -> ProperType: + return self.copy_common(t, TypedDictType(t.items, t.required_keys, t.fallback)) + + def visit_literal_type(self, t: LiteralType) -> ProperType: + return self.copy_common(t, LiteralType(value=t.value, fallback=t.fallback)) + + def visit_union_type(self, t: UnionType) -> ProperType: + return self.copy_common(t, UnionType(t.items)) + + def visit_overloaded(self, t: Overloaded) -> ProperType: + return self.copy_common(t, Overloaded(items=t.items)) + + def visit_type_type(self, t: TypeType) -> ProperType: + # Use cast since the type annotations in TypeType are imprecise. 
+ return self.copy_common(t, TypeType(cast(Any, t.item))) + + def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: + assert False, "only ProperTypes supported" + + def copy_common(self, t: ProperType, t2: ProperType) -> ProperType: + t2.line = t.line + t2.column = t.column + t2.can_be_false = t.can_be_false + t2.can_be_true = t.can_be_true + return t2 diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py index 2b2068e0b7c5..326876ec5d43 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -12,19 +12,20 @@ class ModuleProperties: + # Note that all __init__ args must have default values def __init__(self, - name: str, - file: Optional[str], - path: Optional[List[str]], - all: Optional[List[str]], - is_c_module: bool, - subpackages: List[str]) -> None: + name: str = "", + file: Optional[str] = None, + path: Optional[List[str]] = None, + all: Optional[List[str]] = None, + is_c_module: bool = False, + subpackages: Optional[List[str]] = None) -> None: self.name = name # __name__ attribute self.file = file # __file__ attribute self.path = path # __path__ attribute self.all = all # __all__ attribute self.is_c_module = is_c_module - self.subpackages = subpackages + self.subpackages = subpackages or [] def is_c_module(module: ModuleType) -> bool: diff --git a/mypy/nodes.py b/mypy/nodes.py index 4ffa3116a118..d510cbeeec62 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -668,16 +668,16 @@ class FuncItem(FuncBase): __deletable__ = ('arguments', 'max_pos', 'min_args') def __init__(self, - arguments: List[Argument], - body: 'Block', + arguments: Optional[List[Argument]] = None, + body: Optional['Block'] = None, typ: 'Optional[mypy.types.FunctionLike]' = None) -> None: super().__init__() - self.arguments = arguments - self.arg_names = [None if arg.pos_only else arg.variable.name for arg in arguments] + self.arguments = arguments or [] + self.arg_names = [None if arg.pos_only else arg.variable.name for arg in self.arguments] self.arg_kinds: List[ArgKind] = [arg.kind for arg in self.arguments] self.max_pos: int = ( self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT)) - self.body: 'Block' = body + self.body: 'Block' = body or Block([]) self.type = typ self.unanalyzed_type = typ self.is_overload: bool = False @@ -725,10 +725,11 @@ class FuncDef(FuncItem, SymbolNode, Statement): 'original_def', ) + # Note that all __init__ args must have default values def __init__(self, - name: str, # Function name - arguments: List[Argument], - body: 'Block', + name: str = '', # Function name + arguments: Optional[List[Argument]] = None, + body: Optional['Block'] = None, typ: 'Optional[mypy.types.FunctionLike]' = None) -> None: super().__init__(arguments, body, typ) self._name = name diff --git a/mypy/stubtest.py b/mypy/stubtest.py index ea0deb35092f..b7aa6367ef2d 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -895,7 +895,6 @@ def _resolve_funcitem_from_decorator(dec: nodes.OverloadPart) -> Optional[nodes. Returns None if we can't figure out what that would be. For convenience, this function also accepts FuncItems. 
- """ if isinstance(dec, nodes.FuncItem): return dec @@ -917,6 +916,7 @@ def apply_decorator_to_funcitem( return func if decorator.fullname == "builtins.classmethod": assert func.arguments[0].variable.name in ("cls", "metacls") + # FuncItem is written so that copy.copy() actually works, even when compiled ret = copy.copy(func) # Remove the cls argument, since it's not present in inspect.signature of classmethods ret.arguments = ret.arguments[1:] diff --git a/mypy/typeops.py b/mypy/typeops.py index e2e44b915c0c..e8171e2e85ab 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -14,8 +14,7 @@ TupleType, Instance, FunctionLike, Type, CallableType, TypeVarLikeType, Overloaded, TypeVarType, UninhabitedType, FormalArgument, UnionType, NoneType, AnyType, TypeOfAny, TypeType, ProperType, LiteralType, get_proper_type, get_proper_types, - copy_type, TypeAliasType, TypeQuery, ParamSpecType, Parameters, - ENUM_REMOVED_PROPS + TypeAliasType, TypeQuery, ParamSpecType, Parameters, ENUM_REMOVED_PROPS ) from mypy.nodes import ( FuncBase, FuncItem, FuncDef, OverloadedFuncDef, TypeInfo, ARG_STAR, ARG_STAR2, ARG_POS, @@ -23,6 +22,7 @@ ) from mypy.maptype import map_instance_to_supertype from mypy.expandtype import expand_type_by_instance, expand_type +from mypy.copytype import copy_type from mypy.typevars import fill_typevars diff --git a/mypy/types.py b/mypy/types.py index afe1a88e06b1..f0f7add2d92f 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1,6 +1,5 @@ """Classes for representing mypy types.""" -import copy import sys from abc import abstractmethod @@ -2893,16 +2892,6 @@ def is_named_instance(t: Type, fullnames: Union[str, Tuple[str, ...]]) -> bool: return isinstance(t, Instance) and t.type.fullname in fullnames -TP = TypeVar('TP', bound=Type) - - -def copy_type(t: TP) -> TP: - """ - Build a copy of the type; used to mutate the copy with truthiness information - """ - return copy.copy(t) - - class InstantiateAliasVisitor(TypeTranslator): def __init__(self, vars: List[str], subs: List[Type]) -> None: self.replacements = {v: s for (v, s) in zip(vars, subs)} diff --git a/mypyc/analysis/attrdefined.py b/mypyc/analysis/attrdefined.py new file mode 100644 index 000000000000..6187d143711f --- /dev/null +++ b/mypyc/analysis/attrdefined.py @@ -0,0 +1,377 @@ +"""Always defined attribute analysis. + +An always defined attribute has some statements in __init__ or the +class body that cause the attribute to be always initialized when an +instance is constructed. It must also not be possible to read the +attribute before initialization, and it can't be deletable. + +We can assume that the value is always defined when reading an always +defined attribute. Otherwise we'll need to raise AttributeError if the +value is undefined (i.e. has the error value). + +We use data flow analysis to figure out attributes that are always +defined. Example: + + class C: + def __init__(self) -> None: + self.x = 0 + if func(): + self.y = 1 + else: + self.y = 2 + self.z = 3 + +In this example, the attributes 'x' and 'y' are always defined, but 'z' +is not. The analysis assumes that we know that there won't be any subclasses. + +The analysis also works if there is a known, closed set of subclasses. +An attribute defined in a base class can only be always defined if it's +also always defined in all subclasses. + +As soon as __init__ contains an op that can 'leak' self to another +function, we will stop inferring always defined attributes, since the +analysis is mostly intra-procedural and only looks at __init__ methods. 
+The called code could read an uninitialized attribute. Example:
+
+  class C:
+      def __init__(self) -> None:
+          self.x = self.foo()
+
+      def foo(self) -> int:
+          ...
+
+Now we won't infer 'x' as always defined, since 'foo' might read 'x'
+before initialization.
+
+As an exception to the above limitation, we perform inter-procedural
+analysis of super().__init__ calls, since these are very common.
+
+Our analysis is somewhat optimistic. We assume that nobody calls a
+method of a partially uninitialized object through gc.get_objects(), in
+particular. Code like this could potentially cause a segfault with a null
+pointer dereference. This seems very unlikely to be an issue in practice,
+however.
+
+Accessing an attribute via getattr always checks for undefined attributes
+and thus works if the object is partially uninitialized. This can be used
+as a workaround if somebody ever needs to inspect partially uninitialized
+objects via gc.get_objects().
+
+The analysis runs after IR building as a separate pass. Since we only
+run this on __init__ methods, this analysis pass will be fairly quick.
+"""
+
+from typing import List, Set, Tuple
+from typing_extensions import Final
+
+from mypyc.ir.ops import (
+    Register, Assign, AssignMulti, SetMem, SetAttr, Branch, Return, Unreachable, GetAttr,
+    Call, RegisterOp, BasicBlock, ControlOp
+)
+from mypyc.ir.rtypes import RInstance
+from mypyc.ir.class_ir import ClassIR
+from mypyc.analysis.dataflow import (
+    BaseAnalysisVisitor, AnalysisResult, get_cfg, CFG, MAYBE_ANALYSIS, run_analysis
+)
+from mypyc.analysis.selfleaks import analyze_self_leaks
+
+
+# If True, print out all always-defined attributes of native classes (to aid
+# debugging and testing)
+dump_always_defined: Final = False
+
+
+def analyze_always_defined_attrs(class_irs: List[ClassIR]) -> None:
+    """Find always defined attributes in all classes of a compilation unit.
+
+    Also tag attribute initialization ops to not decref the previous
+    value (as this would read a NULL pointer and segfault).
+
+    Update the _always_initialized_attrs, _sometimes_initialized_attrs
+    and init_self_leak attributes in ClassIR instances.
+
+    This is the main entry point.
+    """
+    seen: Set[ClassIR] = set()
+
+    # First pass: only look at target class and classes in MRO
+    for cl in class_irs:
+        analyze_always_defined_attrs_in_class(cl, seen)
+
+    # Second pass: look at all derived classes
+    seen = set()
+    for cl in class_irs:
+        update_always_defined_attrs_using_subclasses(cl, seen)
+
+
+def analyze_always_defined_attrs_in_class(cl: ClassIR, seen: Set[ClassIR]) -> None:
+    if cl in seen:
+        return
+
+    seen.add(cl)
+
+    if (cl.is_trait
+            or cl.inherits_python
+            or cl.allow_interpreted_subclasses
+            or cl.builtin_base is not None
+            or cl.children is None
+            or cl.is_serializable()):
+        # Give up -- we can't enforce that attributes are always defined.
+        return
+
+    # First analyze all base classes. Track seen classes to avoid duplicate work.
+ for base in cl.mro[1:]: + analyze_always_defined_attrs_in_class(base, seen) + + m = cl.get_method('__init__') + if m is None: + cl._always_initialized_attrs = cl.attrs_with_defaults.copy() + cl._sometimes_initialized_attrs = cl.attrs_with_defaults.copy() + return + self_reg = m.arg_regs[0] + cfg = get_cfg(m.blocks) + dirty = analyze_self_leaks(m.blocks, self_reg, cfg) + maybe_defined = analyze_maybe_defined_attrs_in_init( + m.blocks, self_reg, cl.attrs_with_defaults, cfg) + all_attrs: Set[str] = set() + for base in cl.mro: + all_attrs.update(base.attributes) + maybe_undefined = analyze_maybe_undefined_attrs_in_init( + m.blocks, + self_reg, + initial_undefined=all_attrs - cl.attrs_with_defaults, + cfg=cfg) + + always_defined = find_always_defined_attributes( + m.blocks, self_reg, all_attrs, maybe_defined, maybe_undefined, dirty) + always_defined = {a for a in always_defined if not cl.is_deletable(a)} + + cl._always_initialized_attrs = always_defined + if dump_always_defined: + print(cl.name, sorted(always_defined)) + cl._sometimes_initialized_attrs = find_sometimes_defined_attributes( + m.blocks, self_reg, maybe_defined, dirty) + + mark_attr_initialiation_ops(m.blocks, self_reg, maybe_defined, dirty) + + # Check if __init__ can run unpredictable code (leak 'self'). + any_dirty = False + for b in m.blocks: + for i, op in enumerate(b.ops): + if dirty.after[b, i] and not isinstance(op, Return): + any_dirty = True + break + cl.init_self_leak = any_dirty + + +def find_always_defined_attributes(blocks: List[BasicBlock], + self_reg: Register, + all_attrs: Set[str], + maybe_defined: AnalysisResult[str], + maybe_undefined: AnalysisResult[str], + dirty: AnalysisResult[None]) -> Set[str]: + """Find attributes that are always initialized in some basic blocks. + + The analysis results are expected to be up-to-date for the blocks. + + Return a set of always defined attributes. + """ + attrs = all_attrs.copy() + for block in blocks: + for i, op in enumerate(block.ops): + # If an attribute we *read* may be undefined, it isn't always defined. + if isinstance(op, GetAttr) and op.obj is self_reg: + if op.attr in maybe_undefined.before[block, i]: + attrs.discard(op.attr) + # If an attribute we *set* may be sometimes undefined and + # sometimes defined, don't consider it always defined. Unlike + # the get case, it's fine for the attribute to be undefined. + # The set operation will then be treated as initialization. + if isinstance(op, SetAttr) and op.obj is self_reg: + if (op.attr in maybe_undefined.before[block, i] + and op.attr in maybe_defined.before[block, i]): + attrs.discard(op.attr) + # Treat an op that might run arbitrary code as an "exit" + # in terms of the analysis -- we can't do any inference + # afterwards reliably. + if dirty.after[block, i]: + if not dirty.before[block, i]: + attrs = attrs & (maybe_defined.after[block, i] - + maybe_undefined.after[block, i]) + break + if isinstance(op, ControlOp): + for target in op.targets(): + # Gotos/branches can also be "exits". 
+ if not dirty.after[block, i] and dirty.before[target, 0]: + attrs = attrs & (maybe_defined.after[target, 0] - + maybe_undefined.after[target, 0]) + return attrs + + +def find_sometimes_defined_attributes(blocks: List[BasicBlock], + self_reg: Register, + maybe_defined: AnalysisResult[str], + dirty: AnalysisResult[None]) -> Set[str]: + """Find attributes that are sometimes initialized in some basic blocks.""" + attrs: Set[str] = set() + for block in blocks: + for i, op in enumerate(block.ops): + # Only look at possibly defined attributes at exits. + if dirty.after[block, i]: + if not dirty.before[block, i]: + attrs = attrs | maybe_defined.after[block, i] + break + if isinstance(op, ControlOp): + for target in op.targets(): + if not dirty.after[block, i] and dirty.before[target, 0]: + attrs = attrs | maybe_defined.after[target, 0] + return attrs + + +def mark_attr_initialiation_ops(blocks: List[BasicBlock], + self_reg: Register, + maybe_defined: AnalysisResult[str], + dirty: AnalysisResult[None]) -> None: + """Tag all SetAttr ops in the basic blocks that initialize attributes. + + Initialization ops assume that the previous attribute value is the error value, + so there's no need to decref or check for definedness. + """ + for block in blocks: + for i, op in enumerate(block.ops): + if isinstance(op, SetAttr) and op.obj is self_reg: + attr = op.attr + if attr not in maybe_defined.before[block, i] and not dirty.after[block, i]: + op.mark_as_initializer() + + +GenAndKill = Tuple[Set[str], Set[str]] + + +def attributes_initialized_by_init_call(op: Call) -> Set[str]: + """Calculate attributes that are always initialized by a super().__init__ call.""" + self_type = op.fn.sig.args[0].type + assert isinstance(self_type, RInstance) + cl = self_type.class_ir + return {a for base in cl.mro for a in base.attributes if base.is_always_defined(a)} + + +def attributes_maybe_initialized_by_init_call(op: Call) -> Set[str]: + """Calculate attributes that may be initialized by a super().__init__ call.""" + self_type = op.fn.sig.args[0].type + assert isinstance(self_type, RInstance) + cl = self_type.class_ir + return attributes_initialized_by_init_call(op) | cl._sometimes_initialized_attrs + + +class AttributeMaybeDefinedVisitor(BaseAnalysisVisitor[str]): + """Find attributes that may have been defined via some code path. + + Consider initializations in class body and assignments to 'self.x' + and calls to base class '__init__'. 
+ """ + + def __init__(self, self_reg: Register) -> None: + self.self_reg = self_reg + + def visit_branch(self, op: Branch) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_return(self, op: Return) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> Tuple[Set[str], Set[str]]: + if isinstance(op, SetAttr) and op.obj is self.self_reg: + return {op.attr}, set() + if isinstance(op, Call) and op.fn.class_name and op.fn.name == '__init__': + return attributes_maybe_initialized_by_init_call(op), set() + return set(), set() + + def visit_assign(self, op: Assign) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_assign_multi(self, op: AssignMulti) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_set_mem(self, op: SetMem) -> Tuple[Set[str], Set[str]]: + return set(), set() + + +def analyze_maybe_defined_attrs_in_init(blocks: List[BasicBlock], + self_reg: Register, + attrs_with_defaults: Set[str], + cfg: CFG) -> AnalysisResult[str]: + return run_analysis(blocks=blocks, + cfg=cfg, + gen_and_kill=AttributeMaybeDefinedVisitor(self_reg), + initial=attrs_with_defaults, + backward=False, + kind=MAYBE_ANALYSIS) + + +class AttributeMaybeUndefinedVisitor(BaseAnalysisVisitor[str]): + """Find attributes that may be undefined via some code path. + + Consider initializations in class body, assignments to 'self.x' + and calls to base class '__init__'. + """ + + def __init__(self, self_reg: Register) -> None: + self.self_reg = self_reg + + def visit_branch(self, op: Branch) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_return(self, op: Return) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_unreachable(self, op: Unreachable) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_register_op(self, op: RegisterOp) -> Tuple[Set[str], Set[str]]: + if isinstance(op, SetAttr) and op.obj is self.self_reg: + return set(), {op.attr} + if isinstance(op, Call) and op.fn.class_name and op.fn.name == '__init__': + return set(), attributes_initialized_by_init_call(op) + return set(), set() + + def visit_assign(self, op: Assign) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_assign_multi(self, op: AssignMulti) -> Tuple[Set[str], Set[str]]: + return set(), set() + + def visit_set_mem(self, op: SetMem) -> Tuple[Set[str], Set[str]]: + return set(), set() + + +def analyze_maybe_undefined_attrs_in_init(blocks: List[BasicBlock], + self_reg: Register, + initial_undefined: Set[str], + cfg: CFG) -> AnalysisResult[str]: + return run_analysis(blocks=blocks, + cfg=cfg, + gen_and_kill=AttributeMaybeUndefinedVisitor(self_reg), + initial=initial_undefined, + backward=False, + kind=MAYBE_ANALYSIS) + + +def update_always_defined_attrs_using_subclasses(cl: ClassIR, seen: Set[ClassIR]) -> None: + """Remove attributes not defined in all subclasses from always defined attrs.""" + if cl in seen: + return + if cl.children is None: + # Subclasses are unknown + return + removed = set() + for attr in cl._always_initialized_attrs: + for child in cl.children: + update_always_defined_attrs_using_subclasses(child, seen) + if attr not in child._always_initialized_attrs: + removed.add(attr) + cl._always_initialized_attrs -= removed + seen.add(cl) diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py index 3b79f101a670..053efc733845 100644 --- a/mypyc/analysis/dataflow.py +++ 
b/mypyc/analysis/dataflow.py @@ -128,100 +128,100 @@ def __str__(self) -> str: return f'before: {self.before}\nafter: {self.after}\n' -GenAndKill = Tuple[Set[Value], Set[Value]] +GenAndKill = Tuple[Set[T], Set[T]] -class BaseAnalysisVisitor(OpVisitor[GenAndKill]): - def visit_goto(self, op: Goto) -> GenAndKill: +class BaseAnalysisVisitor(OpVisitor[GenAndKill[T]]): + def visit_goto(self, op: Goto) -> GenAndKill[T]: return set(), set() @abstractmethod - def visit_register_op(self, op: RegisterOp) -> GenAndKill: + def visit_register_op(self, op: RegisterOp) -> GenAndKill[T]: raise NotImplementedError @abstractmethod - def visit_assign(self, op: Assign) -> GenAndKill: + def visit_assign(self, op: Assign) -> GenAndKill[T]: raise NotImplementedError @abstractmethod - def visit_assign_multi(self, op: AssignMulti) -> GenAndKill: + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[T]: raise NotImplementedError @abstractmethod - def visit_set_mem(self, op: SetMem) -> GenAndKill: + def visit_set_mem(self, op: SetMem) -> GenAndKill[T]: raise NotImplementedError - def visit_call(self, op: Call) -> GenAndKill: + def visit_call(self, op: Call) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_method_call(self, op: MethodCall) -> GenAndKill: + def visit_method_call(self, op: MethodCall) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_load_error_value(self, op: LoadErrorValue) -> GenAndKill: + def visit_load_error_value(self, op: LoadErrorValue) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_load_literal(self, op: LoadLiteral) -> GenAndKill: + def visit_load_literal(self, op: LoadLiteral) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_get_attr(self, op: GetAttr) -> GenAndKill: + def visit_get_attr(self, op: GetAttr) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_set_attr(self, op: SetAttr) -> GenAndKill: + def visit_set_attr(self, op: SetAttr) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_load_static(self, op: LoadStatic) -> GenAndKill: + def visit_load_static(self, op: LoadStatic) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_init_static(self, op: InitStatic) -> GenAndKill: + def visit_init_static(self, op: InitStatic) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_tuple_get(self, op: TupleGet) -> GenAndKill: + def visit_tuple_get(self, op: TupleGet) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_tuple_set(self, op: TupleSet) -> GenAndKill: + def visit_tuple_set(self, op: TupleSet) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_box(self, op: Box) -> GenAndKill: + def visit_box(self, op: Box) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_unbox(self, op: Unbox) -> GenAndKill: + def visit_unbox(self, op: Unbox) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_cast(self, op: Cast) -> GenAndKill: + def visit_cast(self, op: Cast) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_raise_standard_error(self, op: RaiseStandardError) -> GenAndKill: + def visit_raise_standard_error(self, op: RaiseStandardError) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_call_c(self, op: CallC) -> GenAndKill: + def visit_call_c(self, op: CallC) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_truncate(self, op: Truncate) -> GenAndKill: + def visit_truncate(self, op: Truncate) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_load_global(self, op: LoadGlobal) -> 
GenAndKill: + def visit_load_global(self, op: LoadGlobal) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_int_op(self, op: IntOp) -> GenAndKill: + def visit_int_op(self, op: IntOp) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_comparison_op(self, op: ComparisonOp) -> GenAndKill: + def visit_comparison_op(self, op: ComparisonOp) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_load_mem(self, op: LoadMem) -> GenAndKill: + def visit_load_mem(self, op: LoadMem) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_get_element_ptr(self, op: GetElementPtr) -> GenAndKill: + def visit_get_element_ptr(self, op: GetElementPtr) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_load_address(self, op: LoadAddress) -> GenAndKill: + def visit_load_address(self, op: LoadAddress) -> GenAndKill[T]: return self.visit_register_op(op) - def visit_keep_alive(self, op: KeepAlive) -> GenAndKill: + def visit_keep_alive(self, op: KeepAlive) -> GenAndKill[T]: return self.visit_register_op(op) -class DefinedVisitor(BaseAnalysisVisitor): +class DefinedVisitor(BaseAnalysisVisitor[Value]): """Visitor for finding defined registers. Note that this only deals with registers and not temporaries, on @@ -240,19 +240,19 @@ class DefinedVisitor(BaseAnalysisVisitor): def __init__(self, strict_errors: bool = False) -> None: self.strict_errors = strict_errors - def visit_branch(self, op: Branch) -> GenAndKill: + def visit_branch(self, op: Branch) -> GenAndKill[Value]: return set(), set() - def visit_return(self, op: Return) -> GenAndKill: + def visit_return(self, op: Return) -> GenAndKill[Value]: return set(), set() - def visit_unreachable(self, op: Unreachable) -> GenAndKill: + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: return set(), set() - def visit_register_op(self, op: RegisterOp) -> GenAndKill: + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: return set(), set() - def visit_assign(self, op: Assign) -> GenAndKill: + def visit_assign(self, op: Assign) -> GenAndKill[Value]: # Loading an error value may undefine the register. if (isinstance(op.src, LoadErrorValue) and (op.src.undefines or self.strict_errors)): @@ -260,11 +260,11 @@ def visit_assign(self, op: Assign) -> GenAndKill: else: return {op.dest}, set() - def visit_assign_multi(self, op: AssignMulti) -> GenAndKill: + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: # Array registers are special and we don't track the definedness of them. 
return set(), set() - def visit_set_mem(self, op: SetMem) -> GenAndKill: + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: return set(), set() @@ -307,31 +307,31 @@ def analyze_must_defined_regs( universe=set(regs)) -class BorrowedArgumentsVisitor(BaseAnalysisVisitor): +class BorrowedArgumentsVisitor(BaseAnalysisVisitor[Value]): def __init__(self, args: Set[Value]) -> None: self.args = args - def visit_branch(self, op: Branch) -> GenAndKill: + def visit_branch(self, op: Branch) -> GenAndKill[Value]: return set(), set() - def visit_return(self, op: Return) -> GenAndKill: + def visit_return(self, op: Return) -> GenAndKill[Value]: return set(), set() - def visit_unreachable(self, op: Unreachable) -> GenAndKill: + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: return set(), set() - def visit_register_op(self, op: RegisterOp) -> GenAndKill: + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: return set(), set() - def visit_assign(self, op: Assign) -> GenAndKill: + def visit_assign(self, op: Assign) -> GenAndKill[Value]: if op.dest in self.args: return set(), {op.dest} return set(), set() - def visit_assign_multi(self, op: AssignMulti) -> GenAndKill: + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: return set(), set() - def visit_set_mem(self, op: SetMem) -> GenAndKill: + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: return set(), set() @@ -352,26 +352,26 @@ def analyze_borrowed_arguments( universe=borrowed) -class UndefinedVisitor(BaseAnalysisVisitor): - def visit_branch(self, op: Branch) -> GenAndKill: +class UndefinedVisitor(BaseAnalysisVisitor[Value]): + def visit_branch(self, op: Branch) -> GenAndKill[Value]: return set(), set() - def visit_return(self, op: Return) -> GenAndKill: + def visit_return(self, op: Return) -> GenAndKill[Value]: return set(), set() - def visit_unreachable(self, op: Unreachable) -> GenAndKill: + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: return set(), set() - def visit_register_op(self, op: RegisterOp) -> GenAndKill: + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: return set(), {op} if not op.is_void else set() - def visit_assign(self, op: Assign) -> GenAndKill: + def visit_assign(self, op: Assign) -> GenAndKill[Value]: return set(), {op.dest} - def visit_assign_multi(self, op: AssignMulti) -> GenAndKill: + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: return set(), {op.dest} - def visit_set_mem(self, op: SetMem) -> GenAndKill: + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: return set(), set() @@ -402,33 +402,33 @@ def non_trivial_sources(op: Op) -> Set[Value]: return result -class LivenessVisitor(BaseAnalysisVisitor): - def visit_branch(self, op: Branch) -> GenAndKill: +class LivenessVisitor(BaseAnalysisVisitor[Value]): + def visit_branch(self, op: Branch) -> GenAndKill[Value]: return non_trivial_sources(op), set() - def visit_return(self, op: Return) -> GenAndKill: + def visit_return(self, op: Return) -> GenAndKill[Value]: if not isinstance(op.value, Integer): return {op.value}, set() else: return set(), set() - def visit_unreachable(self, op: Unreachable) -> GenAndKill: + def visit_unreachable(self, op: Unreachable) -> GenAndKill[Value]: return set(), set() - def visit_register_op(self, op: RegisterOp) -> GenAndKill: + def visit_register_op(self, op: RegisterOp) -> GenAndKill[Value]: gen = non_trivial_sources(op) if not op.is_void: return gen, {op} else: return gen, set() - def visit_assign(self, op: Assign) 
-> GenAndKill: + def visit_assign(self, op: Assign) -> GenAndKill[Value]: return non_trivial_sources(op), {op.dest} - def visit_assign_multi(self, op: AssignMulti) -> GenAndKill: + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill[Value]: return non_trivial_sources(op), {op.dest} - def visit_set_mem(self, op: SetMem) -> GenAndKill: + def visit_set_mem(self, op: SetMem) -> GenAndKill[Value]: return non_trivial_sources(op), set() @@ -452,12 +452,9 @@ def analyze_live_regs(blocks: List[BasicBlock], MAYBE_ANALYSIS = 1 -# TODO the return type of this function is too complicated. Abstract it into its -# own class. - def run_analysis(blocks: List[BasicBlock], cfg: CFG, - gen_and_kill: OpVisitor[Tuple[Set[T], Set[T]]], + gen_and_kill: OpVisitor[GenAndKill[T]], initial: Set[T], kind: int, backward: bool, diff --git a/mypyc/analysis/selfleaks.py b/mypyc/analysis/selfleaks.py new file mode 100644 index 000000000000..ae3731a40ac3 --- /dev/null +++ b/mypyc/analysis/selfleaks.py @@ -0,0 +1,153 @@ +from typing import List, Set, Tuple + +from mypyc.ir.ops import ( + OpVisitor, Register, Goto, Assign, AssignMulti, SetMem, Call, MethodCall, LoadErrorValue, + LoadLiteral, GetAttr, SetAttr, LoadStatic, InitStatic, TupleGet, TupleSet, Box, Unbox, + Cast, RaiseStandardError, CallC, Truncate, LoadGlobal, IntOp, ComparisonOp, LoadMem, + GetElementPtr, LoadAddress, KeepAlive, Branch, Return, Unreachable, RegisterOp, BasicBlock +) +from mypyc.ir.rtypes import RInstance +from mypyc.analysis.dataflow import MAYBE_ANALYSIS, run_analysis, AnalysisResult, CFG + +GenAndKill = Tuple[Set[None], Set[None]] + +CLEAN: GenAndKill = (set(), set()) +DIRTY: GenAndKill = ({None}, {None}) + + +class SelfLeakedVisitor(OpVisitor[GenAndKill]): + """Analyze whether 'self' may be seen by arbitrary code in '__init__'. + + More formally, the set is not empty if along some path from IR entry point + arbitrary code could have been executed that has access to 'self'. + + (We don't consider access via 'gc.get_objects()'.) + """ + + def __init__(self, self_reg: Register) -> None: + self.self_reg = self_reg + + def visit_goto(self, op: Goto) -> GenAndKill: + return CLEAN + + def visit_branch(self, op: Branch) -> GenAndKill: + return CLEAN + + def visit_return(self, op: Return) -> GenAndKill: + # Consider all exits from the function 'dirty' since they implicitly + # cause 'self' to be returned. 
+ return DIRTY + + def visit_unreachable(self, op: Unreachable) -> GenAndKill: + return CLEAN + + def visit_assign(self, op: Assign) -> GenAndKill: + if op.src is self.self_reg or op.dest is self.self_reg: + return DIRTY + return CLEAN + + def visit_assign_multi(self, op: AssignMulti) -> GenAndKill: + return CLEAN + + def visit_set_mem(self, op: SetMem) -> GenAndKill: + return CLEAN + + def visit_call(self, op: Call) -> GenAndKill: + fn = op.fn + if fn.class_name and fn.name == '__init__': + self_type = op.fn.sig.args[0].type + assert isinstance(self_type, RInstance) + cl = self_type.class_ir + if not cl.init_self_leak: + return CLEAN + return self.check_register_op(op) + + def visit_method_call(self, op: MethodCall) -> GenAndKill: + return self.check_register_op(op) + + def visit_load_error_value(self, op: LoadErrorValue) -> GenAndKill: + return CLEAN + + def visit_load_literal(self, op: LoadLiteral) -> GenAndKill: + return CLEAN + + def visit_get_attr(self, op: GetAttr) -> GenAndKill: + cl = op.class_type.class_ir + if cl.get_method(op.attr): + # Property -- calls a function + return self.check_register_op(op) + return CLEAN + + def visit_set_attr(self, op: SetAttr) -> GenAndKill: + cl = op.class_type.class_ir + if cl.get_method(op.attr): + # Property - calls a function + return self.check_register_op(op) + return CLEAN + + def visit_load_static(self, op: LoadStatic) -> GenAndKill: + return CLEAN + + def visit_init_static(self, op: InitStatic) -> GenAndKill: + return self.check_register_op(op) + + def visit_tuple_get(self, op: TupleGet) -> GenAndKill: + return CLEAN + + def visit_tuple_set(self, op: TupleSet) -> GenAndKill: + return self.check_register_op(op) + + def visit_box(self, op: Box) -> GenAndKill: + return self.check_register_op(op) + + def visit_unbox(self, op: Unbox) -> GenAndKill: + return self.check_register_op(op) + + def visit_cast(self, op: Cast) -> GenAndKill: + return self.check_register_op(op) + + def visit_raise_standard_error(self, op: RaiseStandardError) -> GenAndKill: + return CLEAN + + def visit_call_c(self, op: CallC) -> GenAndKill: + return self.check_register_op(op) + + def visit_truncate(self, op: Truncate) -> GenAndKill: + return CLEAN + + def visit_load_global(self, op: LoadGlobal) -> GenAndKill: + return CLEAN + + def visit_int_op(self, op: IntOp) -> GenAndKill: + return CLEAN + + def visit_comparison_op(self, op: ComparisonOp) -> GenAndKill: + return CLEAN + + def visit_load_mem(self, op: LoadMem) -> GenAndKill: + return CLEAN + + def visit_get_element_ptr(self, op: GetElementPtr) -> GenAndKill: + return CLEAN + + def visit_load_address(self, op: LoadAddress) -> GenAndKill: + return CLEAN + + def visit_keep_alive(self, op: KeepAlive) -> GenAndKill: + return CLEAN + + def check_register_op(self, op: RegisterOp) -> GenAndKill: + if any(src is self.self_reg for src in op.sources()): + return DIRTY + return CLEAN + + +def analyze_self_leaks(blocks: List[BasicBlock], + self_reg: Register, + cfg: CFG) -> AnalysisResult[None]: + return run_analysis(blocks=blocks, + cfg=cfg, + gen_and_kill=SelfLeakedVisitor(self_reg), + initial=set(), + backward=False, + kind=MAYBE_ANALYSIS) diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 437b50444d63..ef36da3c414e 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -284,7 +284,8 @@ def emit_line() -> None: emitter.emit_line(native_function_header(cl.ctor, emitter) + ';') emit_line() - generate_new_for_class(cl, new_name, vtable_name, setup_name, emitter) + init_fn = 
cl.get_method('__init__') + generate_new_for_class(cl, new_name, vtable_name, setup_name, init_fn, emitter) emit_line() generate_traverse_for_class(cl, traverse_name, emitter) emit_line() @@ -539,7 +540,7 @@ def generate_setup_for_class(cl: ClassIR, for base in reversed(cl.base_mro): for attr, rtype in base.attributes.items(): - emitter.emit_line('self->{} = {};'.format( + emitter.emit_line(r'self->{} = {};'.format( emitter.attr(attr), emitter.c_undefined_value(rtype))) # Initialize attributes to default values, if necessary @@ -608,8 +609,11 @@ def generate_init_for_class(cl: ClassIR, emitter.emit_line( f'{func_name}(PyObject *self, PyObject *args, PyObject *kwds)') emitter.emit_line('{') - emitter.emit_line('return {}{}(self, args, kwds) != NULL ? 0 : -1;'.format( - PREFIX, init_fn.cname(emitter.names))) + if cl.allow_interpreted_subclasses or cl.builtin_base: + emitter.emit_line('return {}{}(self, args, kwds) != NULL ? 0 : -1;'.format( + PREFIX, init_fn.cname(emitter.names))) + else: + emitter.emit_line('return 0;') emitter.emit_line('}') return func_name @@ -619,6 +623,7 @@ def generate_new_for_class(cl: ClassIR, func_name: str, vtable_name: str, setup_name: str, + init_fn: Optional[FuncIR], emitter: Emitter) -> None: emitter.emit_line('static PyObject *') emitter.emit_line( @@ -633,7 +638,24 @@ def generate_new_for_class(cl: ClassIR, emitter.emit_line('return NULL;') emitter.emit_line('}') - emitter.emit_line(f'return {setup_name}(type);') + if (not init_fn + or cl.allow_interpreted_subclasses + or cl.builtin_base + or cl.is_serializable()): + # Match Python semantics -- __new__ doesn't call __init__. + emitter.emit_line(f'return {setup_name}(type);') + else: + # __new__ of a native class implicitly calls __init__ so that we + # can enforce that instances are always properly initialized. This + # is needed to support always defined attributes. + emitter.emit_line(f'PyObject *self = {setup_name}(type);') + emitter.emit_lines('if (self == NULL)', + ' return NULL;') + emitter.emit_line( + f'PyObject *ret = {PREFIX}{init_fn.cname(emitter.names)}(self, args, kwds);') + emitter.emit_lines('if (ret == NULL)', + ' return NULL;') + emitter.emit_line('return self;') emitter.emit_line('}') @@ -846,12 +868,19 @@ def generate_getter(cl: ClassIR, cl.struct_name(emitter.names))) emitter.emit_line('{') attr_expr = f'self->{attr_field}' - emitter.emit_undefined_attr_check(rtype, attr_expr, '==', unlikely=True) - emitter.emit_line('PyErr_SetString(PyExc_AttributeError,') - emitter.emit_line(' "attribute {} of {} undefined");'.format(repr(attr), - repr(cl.name))) - emitter.emit_line('return NULL;') - emitter.emit_line('}') + + # HACK: Don't consider refcounted values as always defined, since it's possible to + # access uninitialized values via 'gc.get_objects()'. Accessing non-refcounted + # values is benign. 
+ always_defined = cl.is_always_defined(attr) and not rtype.is_refcounted + + if not always_defined: + emitter.emit_undefined_attr_check(rtype, attr_expr, '==', unlikely=True) + emitter.emit_line('PyErr_SetString(PyExc_AttributeError,') + emitter.emit_line(' "attribute {} of {} undefined");'.format(repr(attr), + repr(cl.name))) + emitter.emit_line('return NULL;') + emitter.emit_line('}') emitter.emit_inc_ref(f'self->{attr_field}', rtype) emitter.emit_box(f'self->{attr_field}', 'retval', rtype, declare_dest=True) emitter.emit_line('return retval;') @@ -878,14 +907,22 @@ def generate_setter(cl: ClassIR, emitter.emit_line('return -1;') emitter.emit_line('}') + # HACK: Don't consider refcounted values as always defined, since it's possible to + # access uninitialized values via 'gc.get_objects()'. Accessing non-refcounted + # values is benign. + always_defined = cl.is_always_defined(attr) and not rtype.is_refcounted + if rtype.is_refcounted: attr_expr = f'self->{attr_field}' - emitter.emit_undefined_attr_check(rtype, attr_expr, '!=') - emitter.emit_dec_ref(f'self->{attr_field}', rtype) - emitter.emit_line('}') + if not always_defined: + emitter.emit_undefined_attr_check(rtype, attr_expr, '!=') + emitter.emit_dec_ref('self->{}'.format(attr_field), rtype) + if not always_defined: + emitter.emit_line('}') if deletable: emitter.emit_line('if (value != NULL) {') + if rtype.is_unboxed: emitter.emit_unbox('value', 'tmp', rtype, error=ReturnHandler('-1'), declare_dest=True) elif is_same_type(rtype, object_rprimitive): diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 91b3a539adf5..f4ed657c467f 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -12,7 +12,7 @@ LoadStatic, InitStatic, TupleGet, TupleSet, Call, IncRef, DecRef, Box, Cast, Unbox, BasicBlock, Value, MethodCall, Unreachable, NAMESPACE_STATIC, NAMESPACE_TYPE, NAMESPACE_MODULE, RaiseStandardError, CallC, LoadGlobal, Truncate, IntOp, LoadMem, GetElementPtr, - LoadAddress, ComparisonOp, SetMem, Register, LoadLiteral, AssignMulti, KeepAlive + LoadAddress, ComparisonOp, SetMem, Register, LoadLiteral, AssignMulti, KeepAlive, ERR_FALSE ) from mypyc.ir.rtypes import ( RType, RTuple, RArray, is_tagged, is_int32_rprimitive, is_int64_rprimitive, RStruct, @@ -131,6 +131,13 @@ def visit_goto(self, op: Goto) -> None: def visit_branch(self, op: Branch) -> None: true, false = op.true, op.false + if op.op == Branch.IS_ERROR and isinstance(op.value, GetAttr) and not op.negated: + op2 = op.value + if op2.class_type.class_ir.is_always_defined(op2.attr): + # Getting an always defined attribute never fails, so the branch can be omitted. + if false is not self.next_block: + self.emit_line('goto {};'.format(self.label(false))) + return negated = op.negated negated_rare = False if true is self.next_block and op.traceback_entry is None: @@ -302,37 +309,39 @@ def visit_get_attr(self, op: GetAttr) -> None: # Otherwise, use direct or offset struct access. attr_expr = self.get_attr_expr(obj, op, decl_cl) self.emitter.emit_line(f'{dest} = {attr_expr};') - self.emitter.emit_undefined_attr_check( - attr_rtype, dest, '==', unlikely=True - ) - exc_class = 'PyExc_AttributeError' + always_defined = cl.is_always_defined(op.attr) merged_branch = None - branch = self.next_branch() - if branch is not None: - if (branch.value is op - and branch.op == Branch.IS_ERROR - and branch.traceback_entry is not None - and not branch.negated): - # Generate code for the following branch here to avoid - # redundant branches in the generate code. 
- self.emit_attribute_error(branch, cl.name, op.attr) - self.emit_line('goto %s;' % self.label(branch.true)) - merged_branch = branch - self.emitter.emit_line('}') - if not merged_branch: - self.emitter.emit_line( - 'PyErr_SetString({}, "attribute {} of {} undefined");'.format( - exc_class, repr(op.attr), repr(cl.name))) + if not always_defined: + self.emitter.emit_undefined_attr_check( + attr_rtype, dest, '==', unlikely=True + ) + branch = self.next_branch() + if branch is not None: + if (branch.value is op + and branch.op == Branch.IS_ERROR + and branch.traceback_entry is not None + and not branch.negated): + # Generate code for the following branch here to avoid + # redundant branches in the generate code. + self.emit_attribute_error(branch, cl.name, op.attr) + self.emit_line('goto %s;' % self.label(branch.true)) + merged_branch = branch + self.emitter.emit_line('}') + if not merged_branch: + exc_class = 'PyExc_AttributeError' + self.emitter.emit_line( + 'PyErr_SetString({}, "attribute {} of {} undefined");'.format( + exc_class, repr(op.attr), repr(cl.name))) if attr_rtype.is_refcounted: - if not merged_branch: + if not merged_branch and not always_defined: self.emitter.emit_line('} else {') self.emitter.emit_inc_ref(dest, attr_rtype) if merged_branch: if merged_branch.false is not self.next_block: self.emit_line('goto %s;' % self.label(merged_branch.false)) self.op_index += 1 - else: + elif not always_defined: self.emitter.emit_line('}') def next_branch(self) -> Optional[Branch]: @@ -343,7 +352,8 @@ def next_branch(self) -> Optional[Branch]: return None def visit_set_attr(self, op: SetAttr) -> None: - dest = self.reg(op) + if op.error_kind == ERR_FALSE: + dest = self.reg(op) obj = self.reg(op.obj) src = self.reg(op.src) rtype = op.class_type @@ -351,6 +361,8 @@ def visit_set_attr(self, op: SetAttr) -> None: attr_rtype, decl_cl = cl.attr_details(op.attr) if cl.get_method(op.attr): # Again, use vtable access for properties... + assert not op.is_init and op.error_kind == ERR_FALSE, '%s %d %d %s' % ( + op.attr, op.is_init, op.error_kind, rtype) version = '_TRAIT' if cl.is_trait else '' self.emit_line('%s = CPY_SET_ATTR%s(%s, %s, %d, %s, %s, %s); /* %s */' % ( dest, @@ -365,15 +377,18 @@ def visit_set_attr(self, op: SetAttr) -> None: else: # ...and struct access for normal attributes. 
attr_expr = self.get_attr_expr(obj, op, decl_cl) - if attr_rtype.is_refcounted: - self.emitter.emit_undefined_attr_check(attr_rtype, attr_expr, '!=') - self.emitter.emit_dec_ref(attr_expr, attr_rtype) - self.emitter.emit_line('}') - # This steal the reference to src, so we don't need to increment the arg - self.emitter.emit_lines( - f'{attr_expr} = {src};', - f'{dest} = 1;', - ) + if not op.is_init: + always_defined = cl.is_always_defined(op.attr) + if not always_defined: + self.emitter.emit_undefined_attr_check(attr_rtype, attr_expr, '!=') + if attr_rtype.is_refcounted: + self.emitter.emit_dec_ref(attr_expr, attr_rtype) + if not always_defined: + self.emitter.emit_line('}') + # This steals the reference to src, so we don't need to increment the arg + self.emitter.emit_line(f'{attr_expr} = {src};') + if op.error_kind == ERR_FALSE: + self.emitter.emit_line(f'{dest} = 1;') PREFIX_MAP: Final = { NAMESPACE_STATIC: STATIC_PREFIX, diff --git a/mypyc/doc/differences_from_python.rst b/mypyc/doc/differences_from_python.rst index 3bebf4049e7c..16faae60303f 100644 --- a/mypyc/doc/differences_from_python.rst +++ b/mypyc/doc/differences_from_python.rst @@ -171,6 +171,43 @@ Examples of early and late binding:: var = x # Module-level variable lib.func() # Accessing library that is not compiled +Pickling and copying objects +---------------------------- + +Mypyc tries to enforce that instances native classes are properly +initialized by calling ``__init__`` implicitly when constructing +objects, even if objects are constructed through ``pickle``, +``copy.copy`` or ``copy.deepcopy``, for example. + +If a native class doesn't support calling ``__init__`` without arguments, +you can't pickle or copy instances of the class. Use the +``mypy_extensions.mypyc_attr`` class decorator to override this behavior +and enable pickling through the ``serializable`` flag:: + + from mypy_extensions import mypyc_attr + import pickle + + @mypyc_attr(serializable=True) + class Cls: + def __init__(self, n: int) -> None: + self.n = n + + data = pickle.dumps(Cls(5)) + obj = pickle.loads(data) # OK + +Additional notes: + +* All subclasses inherit the ``serializable`` flag. +* If a class has the ``allow_interpreted_subclasses`` attribute, it + implicitly supports serialization. +* Enabling serialization may slow down attribute access, since compiled + code has to be always prepared to raise ``AttributeError`` in case an + attribute is not defined at runtime. +* If you try to pickle an object without setting the ``serializable`` + flag, you'll get a ``TypeError`` about missing arguments to + ``__init__``. + + Monkey patching --------------- diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index 2e3e2b15c930..197b267633d7 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -106,6 +106,19 @@ def __init__(self, name: str, module_name: str, is_trait: bool = False, # Does this class need getseters to be generated for its attributes? (getseters are also # added if is_generated is False) self.needs_getseters = False + # Is this class declared as serializable (supports copy.copy + # and pickle) using @mypyc_attr(serializable=True)? + # + # Additionally, any class with this attribute False but with + # an __init__ that can be called without any arguments is + # *implicitly serializable*. In this case __init__ will be + # called during deserialization without arguments. If this is + # True, we match Python semantics and __init__ won't be called + # during deserialization. + # + # This impacts also all subclasses. 
Use is_serializable() to + # also consider base classes. + self._serializable = False # If this a subclass of some built-in python class, the name # of the object for that class. We currently only support this # in a few ad-hoc cases. @@ -153,6 +166,19 @@ def __init__(self, name: str, module_name: str, is_trait: bool = False, # None if separate compilation prevents this from working self.children: Optional[List[ClassIR]] = [] + # Instance attributes that are initialized in the class body. + self.attrs_with_defaults: Set[str] = set() + + # Attributes that are always initialized in __init__ or class body + # (inferred in mypyc.analysis.attrdefined using interprocedural analysis) + self._always_initialized_attrs: Set[str] = set() + + # Attributes that are sometimes initialized in __init__ + self._sometimes_initialized_attrs: Set[str] = set() + + # If True, __init__ can make 'self' visible to unanalyzed/arbitrary code + self.init_self_leak = False + def __repr__(self) -> str: return ( "ClassIR(" @@ -231,6 +257,11 @@ def is_deletable(self, name: str) -> bool: return True return False + def is_always_defined(self, name: str) -> bool: + if self.is_deletable(name): + return False + return name in self._always_initialized_attrs + def name_prefix(self, names: NameGenerator) -> str: return names.private_name(self.module_name, self.name) @@ -279,6 +310,9 @@ def concrete_subclasses(self) -> Optional[List['ClassIR']]: # to get stable order. return sorted(concrete, key=lambda c: (len(c.children or []), c.name)) + def is_serializable(self) -> bool: + return any(ci._serializable for ci in self.mro) + def serialize(self) -> JsonDict: return { 'name': self.name, @@ -292,6 +326,7 @@ def serialize(self) -> JsonDict: 'has_dict': self.has_dict, 'allow_interpreted_subclasses': self.allow_interpreted_subclasses, 'needs_getseters': self.needs_getseters, + '_serializable': self._serializable, 'builtin_base': self.builtin_base, 'ctor': self.ctor.serialize(), # We serialize dicts as lists to ensure order is preserved @@ -327,6 +362,10 @@ def serialize(self) -> JsonDict: cir.fullname for cir in self.children ] if self.children is not None else None, 'deletable': self.deletable, + 'attrs_with_defaults': sorted(self.attrs_with_defaults), + '_always_initialized_attrs': sorted(self._always_initialized_attrs), + '_sometimes_initialized_attrs': sorted(self._sometimes_initialized_attrs), + 'init_self_leak': self.init_self_leak, } @classmethod @@ -344,6 +383,7 @@ def deserialize(cls, data: JsonDict, ctx: 'DeserMaps') -> 'ClassIR': ir.has_dict = data['has_dict'] ir.allow_interpreted_subclasses = data['allow_interpreted_subclasses'] ir.needs_getseters = data['needs_getseters'] + ir._serializable = data['_serializable'] ir.builtin_base = data['builtin_base'] ir.ctor = FuncDecl.deserialize(data['ctor'], ctx) ir.attributes = OrderedDict( @@ -376,6 +416,10 @@ def deserialize(cls, data: JsonDict, ctx: 'DeserMaps') -> 'ClassIR': ir.base_mro = [ctx.classes[s] for s in data['base_mro']] ir.children = data['children'] and [ctx.classes[s] for s in data['children']] ir.deletable = data['deletable'] + ir.attrs_with_defaults = set(data['attrs_with_defaults']) + ir._always_initialized_attrs = set(data['_always_initialized_attrs']) + ir._sometimes_initialized_attrs = set(data['_sometimes_initialized_attrs']) + ir.init_self_leak = data['init_self_leak'] return ir diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index ecd2293c657f..786cb018f96b 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -630,6 +630,14 @@ def __init__(self, obj: Value, 
attr: str, src: Value, line: int) -> None: assert isinstance(obj.type, RInstance), 'Attribute access not supported: %s' % obj.type self.class_type = obj.type self.type = bool_rprimitive + # If True, we can safely assume that the attribute is previously undefined + # and we don't use a setter + self.is_init = False + + def mark_as_initializer(self) -> None: + self.is_init = True + self.error_kind = ERR_NEVER + self.type = void_rtype def sources(self) -> List[Value]: return [self.obj, self.src] diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index aab8dc86664f..753965cb1e9c 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -11,7 +11,7 @@ LoadStatic, InitStatic, TupleGet, TupleSet, IncRef, DecRef, Call, MethodCall, Cast, Box, Unbox, RaiseStandardError, CallC, Truncate, LoadGlobal, IntOp, ComparisonOp, LoadMem, SetMem, GetElementPtr, LoadAddress, Register, Value, OpVisitor, BasicBlock, ControlOp, LoadLiteral, - AssignMulti, KeepAlive, Op + AssignMulti, KeepAlive, Op, ERR_NEVER ) from mypyc.ir.func_ir import FuncIR, all_values_full from mypyc.ir.module_ir import ModuleIRs @@ -80,7 +80,12 @@ def visit_get_attr(self, op: GetAttr) -> str: return self.format('%r = %r.%s', op, op.obj, op.attr) def visit_set_attr(self, op: SetAttr) -> str: - return self.format('%r.%s = %r; %r = is_error', op.obj, op.attr, op.src, op) + if op.is_init: + assert op.error_kind == ERR_NEVER + # Initialization and direct struct access can never fail + return self.format('%r.%s = %r', op.obj, op.attr, op.src) + else: + return self.format('%r.%s = %r; %r = is_error', op.obj, op.attr, op.src, op) def visit_load_static(self, op: LoadStatic) -> str: ann = f' ({repr(op.ann)})' if op.ann else '' diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 9a458181dc6c..7cc08b73494f 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -1,7 +1,7 @@ """Transform class definitions from the mypy AST form to IR.""" from abc import abstractmethod -from typing import Callable, List, Optional, Tuple +from typing import Callable, List, Optional, Set, Tuple from typing_extensions import Final from mypy.nodes import ( @@ -214,7 +214,10 @@ def add_attr(self, lvalue: NameExpr, stmt: AssignmentStmt) -> None: self.builder.init_final_static(lvalue, value, self.cdef.name) def finalize(self, ir: ClassIR) -> None: - generate_attr_defaults(self.builder, self.cdef, self.skip_attr_default) + attrs_with_defaults, default_assignments = find_attr_initializers( + self.builder, self.cdef, self.skip_attr_default) + ir.attrs_with_defaults.update(attrs_with_defaults) + generate_attr_defaults_init(self.builder, self.cdef, default_assignments) create_ne_from_eq(self.builder, self.cdef) @@ -524,9 +527,11 @@ def add_non_ext_class_attr(builder: IRBuilder, attr_to_cache.append((lvalue, object_rprimitive)) -def generate_attr_defaults(builder: IRBuilder, cdef: ClassDef, - skip: Optional[Callable[[str, AssignmentStmt], bool]] = None) -> None: - """Generate an initialization method for default attr values (from class vars). +def find_attr_initializers(builder: IRBuilder, + cdef: ClassDef, + skip: Optional[Callable[[str, AssignmentStmt], bool]] = None, + ) -> Tuple[Set[str], List[AssignmentStmt]]: + """Find initializers of attributes in a class body. If provided, the skip arg should be a callable which will return whether to skip generating a default for an attribute. 
It will be passed the name of @@ -534,7 +539,9 @@ def generate_attr_defaults(builder: IRBuilder, cdef: ClassDef, """ cls = builder.mapper.type_to_ir[cdef.info] if cls.builtin_base: - return + return set(), [] + + attrs_with_defaults = set() # Pull out all assignments in classes in the mro so we can initialize them # TODO: Support nested statements @@ -558,10 +565,30 @@ def generate_attr_defaults(builder: IRBuilder, cdef: ClassDef, if skip is not None and skip(name, stmt): continue + attr_type = cls.attr_type(name) + + # If the attribute is initialized to None and type isn't optional, + # doesn't initialize it to anything (special case for "# type:" comments). + if isinstance(stmt.rvalue, RefExpr) and stmt.rvalue.fullname == 'builtins.None': + if (not is_optional_type(attr_type) and not is_object_rprimitive(attr_type) + and not is_none_rprimitive(attr_type)): + continue + + attrs_with_defaults.add(name) default_assignments.append(stmt) + return attrs_with_defaults, default_assignments + + +def generate_attr_defaults_init(builder: IRBuilder, + cdef: ClassDef, + default_assignments: List[AssignmentStmt]) -> None: + """Generate an initialization method for default attr values (from class vars).""" if not default_assignments: return + cls = builder.mapper.type_to_ir[cdef.info] + if cls.builtin_base: + return with builder.enter_method(cls, '__mypyc_defaults_setup', bool_rprimitive): self_var = builder.self() @@ -571,15 +598,11 @@ def generate_attr_defaults(builder: IRBuilder, cdef: ClassDef, if not stmt.is_final_def and not is_constant(stmt.rvalue): builder.warning('Unsupported default attribute value', stmt.rvalue.line) - # If the attribute is initialized to None and type isn't optional, - # don't initialize it to anything. attr_type = cls.attr_type(lvalue.name) - if isinstance(stmt.rvalue, RefExpr) and stmt.rvalue.fullname == 'builtins.None': - if (not is_optional_type(attr_type) and not is_object_rprimitive(attr_type) - and not is_none_rprimitive(attr_type)): - continue val = builder.coerce(builder.accept(stmt.rvalue), attr_type, stmt.line) - builder.add(SetAttr(self_var, lvalue.name, val, -1)) + init = SetAttr(self_var, lvalue.name, val, -1) + init.mark_as_initializer() + builder.add(init) builder.add(Return(builder.true())) diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index 275d3449f812..2c771df08809 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -11,7 +11,7 @@ """ from typing import ( - DefaultDict, NamedTuple, Optional, List, Sequence, Tuple, Union, Dict, + DefaultDict, NamedTuple, Optional, List, Sequence, Tuple, Union, Dict ) from mypy.nodes import ( diff --git a/mypyc/irbuild/main.py b/mypyc/irbuild/main.py index f2c49359b69a..52c9d5cf32df 100644 --- a/mypyc/irbuild/main.py +++ b/mypyc/irbuild/main.py @@ -40,6 +40,7 @@ def f(x: int) -> int: from mypyc.irbuild.builder import IRBuilder from mypyc.irbuild.visitor import IRBuilderVisitor from mypyc.irbuild.mapper import Mapper +from mypyc.analysis.attrdefined import analyze_always_defined_attrs # The stubs for callable contextmanagers are busted so cast it to the @@ -52,7 +53,7 @@ def f(x: int) -> int: def build_ir(modules: List[MypyFile], graph: Graph, types: Dict[Expression, Type], - mapper: 'Mapper', + mapper: Mapper, options: CompilerOptions, errors: Errors) -> ModuleIRs: """Build IR for a set of modules that have been type-checked by mypy.""" @@ -90,6 +91,8 @@ def build_ir(modules: List[MypyFile], result[module.fullname] = module_ir class_irs.extend(builder.classes) + 
analyze_always_defined_attrs(class_irs) + # Compute vtables. for cir in class_irs: if cir.is_ext_class: diff --git a/mypyc/irbuild/mapper.py b/mypyc/irbuild/mapper.py index 901ea49fc2fa..576eacc141df 100644 --- a/mypyc/irbuild/mapper.py +++ b/mypyc/irbuild/mapper.py @@ -2,7 +2,7 @@ from typing import Dict, Optional -from mypy.nodes import FuncDef, TypeInfo, SymbolNode, ArgKind, ARG_STAR, ARG_STAR2 +from mypy.nodes import FuncDef, TypeInfo, SymbolNode, RefExpr, ArgKind, ARG_STAR, ARG_STAR2, GDEF from mypy.types import ( Instance, Type, CallableType, LiteralType, TypedDictType, UnboundType, PartialType, UninhabitedType, Overloaded, UnionType, TypeType, AnyType, NoneTyp, TupleType, TypeVarType, @@ -160,3 +160,17 @@ def fdef_to_sig(self, fdef: FuncDef) -> FuncSignature: if fdef.name in ('__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__'): ret = object_rprimitive return FuncSignature(args, ret) + + def is_native_module(self, module: str) -> bool: + """Is the given module one compiled by mypyc?""" + return module in self.group_map + + def is_native_ref_expr(self, expr: RefExpr) -> bool: + if expr.node is None: + return False + if '.' in expr.node.fullname: + return self.is_native_module(expr.node.fullname.rpartition('.')[0]) + return True + + def is_native_module_ref_expr(self, expr: RefExpr) -> bool: + return self.is_native_ref_expr(expr) and expr.kind == GDEF diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index 2cb3deac9700..cc9505853db1 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -177,6 +177,9 @@ def prepare_class_def(path: str, module_name: str, cdef: ClassDef, attrs = get_mypyc_attrs(cdef) if attrs.get("allow_interpreted_subclasses") is True: ir.allow_interpreted_subclasses = True + if attrs.get("serializable") is True: + # Supports copy.copy and pickle (including subclasses) + ir._serializable = True # We sort the table for determinism here on Python 3.5 for name, node in sorted(info.names.items()): diff --git a/mypyc/test-data/alwaysdefined.test b/mypyc/test-data/alwaysdefined.test new file mode 100644 index 000000000000..e8c44d8fc548 --- /dev/null +++ b/mypyc/test-data/alwaysdefined.test @@ -0,0 +1,732 @@ +-- Test cases for always defined attributes. 
+-- +-- If class C has attributes x and y that are always defined, the output will +-- have a line like this: +-- +-- C: [x, y] + +[case testAlwaysDefinedSimple] +class C: + def __init__(self, x: int) -> None: + self.x = x +[out] +C: [x] + +[case testAlwaysDefinedFail] +class MethodCall: + def __init__(self, x: int) -> None: + self.f() + self.x = x + + def f(self) -> None: + pass + +class FuncCall: + def __init__(self, x: int) -> None: + f(x) + self.x = x + f(self) + self.y = x + +class GetAttr: + x: int + def __init__(self, x: int) -> None: + a = self.x + self.x = x + +class _Base: + def __init__(self) -> None: + f(self) + +class CallSuper(_Base): + def __init__(self, x: int) -> None: + super().__init__() + self.x = x + +class Lambda: + def __init__(self, x: int) -> None: + f = lambda x: x + 1 + self.x = x + g = lambda x: self + self.y = x + +class If: + def __init__(self, x: int) -> None: + self.a = 1 + if x: + self.x = x + else: + self.y = 1 + +class Deletable: + __deletable__ = ('x', 'y') + + def __init__(self) -> None: + self.x = 0 + self.y = 1 + self.z = 2 + +class PrimitiveWithSelf: + def __init__(self, s: str) -> None: + self.x = getattr(self, s) + +def f(a) -> None: pass +[out] +MethodCall: [] +FuncCall: [x] +GetAttr: [] +CallSuper: [] +Lambda: [] +If: [a] +Deletable: [z] +PrimitiveWithSelf: [] + +[case testAlwaysDefinedConditional] +class IfAlways: + def __init__(self, x: int, y: int) -> None: + if x: + self.x = x + self.y = y + elif y: + self.x = y + self.y = x + else: + self.x = 0 + self.y = 0 + self.z = 0 + +class IfSometimes1: + def __init__(self, x: int, y: int) -> None: + if x: + self.x = x + self.y = y + elif y: + self.z = y + self.y = x + else: + self.y = 0 + self.a = 0 + +class IfSometimes2: + def __init__(self, x: int, y: int) -> None: + if x: + self.x = x + self.y = y + +class IfStopAnalysis1: + def __init__(self, x: int, y: int) -> None: + if x: + self.x = x + f(self) + else: + self.x = x + self.y = y + +class IfStopAnalysis2: + def __init__(self, x: int, y: int) -> None: + if x: + self.x = x + else: + self.x = x + f(self) + self.y = y + +class IfStopAnalysis3: + def __init__(self, x: int, y: int) -> None: + if x: + self.x = x + else: + f(self) + self.x = x + self.y = y + +class IfConditionalAndNonConditional1: + def __init__(self, x: int) -> None: + self.x = 0 + if x: + self.x = x + +class IfConditionalAndNonConditional2: + def __init__(self, x: int) -> None: + # x is not considered always defined, since the second assignment may + # either initialize or update. 
+ if x: + self.x = x + self.x = 0 + +def f(a) -> None: pass +[out] +IfAlways: [x, y, z] +IfSometimes1: [y] +IfSometimes2: [y] +IfStopAnalysis1: [x] +IfStopAnalysis2: [x] +IfStopAnalysis3: [] +IfConditionalAndNonConditional1: [x] +IfConditionalAndNonConditional2: [] + +[case testAlwaysDefinedExpressions] +from typing import Dict, List, Set, Optional, cast +from typing_extensions import Final + +import other + +class C: pass + +class Collections: + def __init__(self, x: int) -> None: + self.l = [x] + self.d: Dict[str, str] = {} + self.s: Set[int] = set() + self.d2 = {'x': x} + self.s2 = {x} + self.l2 = [f(), None] * x + self.t = tuple(self.l2) + +class Comparisons: + def __init__(self, y: int, c: C, s: str, o: Optional[str]) -> None: + self.n1 = y < 5 + self.n2 = y == 5 + self.c1 = y is c + self.c2 = y is not c + self.o1 = o is None + self.o2 = o is not None + self.s = s < 'x' + +class BinaryOps: + def __init__(self, x: int, s: str) -> None: + self.a = x + 2 + self.b = x & 2 + self.c = x * 2 + self.d = -x + self.e = 'x' + s + self.f = x << x + +g = 2 + +class LocalsAndGlobals: + def __init__(self, x: int) -> None: + t = x + 1 + self.a = t - t + self.g = g + +class Booleans: + def __init__(self, x: int, b: bool) -> None: + self.a = True + self.b = False + self.c = not b + self.d = b or b + self.e = b and b + +F: Final = 3 + +class ModuleFinal: + def __init__(self) -> None: + self.a = F + self.b = other.Y + +class ClassFinal: + F: Final = 3 + + def __init__(self) -> None: + self.a = ClassFinal.F + +class Literals: + def __init__(self) -> None: + self.a = 'x' + self.b = b'x' + self.c = 2.2 + +class ListComprehension: + def __init__(self, x: List[int]) -> None: + self.a = [i + 1 for i in x] + +class Helper: + def __init__(self, arg) -> None: + self.x = 0 + + def foo(self, arg) -> int: + return 1 + +class AttrAccess: + def __init__(self, o: Helper) -> None: + self.x = o.x + o.x = o.x + 1 + self.y = o.foo(self.x) + o.foo(self) + self.z = 1 + +class Construct: + def __init__(self) -> None: + self.x = Helper(1) + self.y = Helper(self) + +class IsInstance: + def __init__(self, x: object) -> None: + if isinstance(x, str): + self.x = 0 + elif isinstance(x, Helper): + self.x = 1 + elif isinstance(x, (list, tuple)): + self.x = 2 + else: + self.x = 3 + +class Cast: + def __init__(self, x: object) -> None: + self.x = cast(int, x) + self.s = cast(str, x) + self.c = cast(Cast, x) + +class PropertyAccessGetter: + def __init__(self, other: PropertyAccessGetter) -> None: + self.x = other.p + self.y = 1 + self.z = self.p + + @property + def p(self) -> int: + return 0 + +class PropertyAccessSetter: + def __init__(self, other: PropertyAccessSetter) -> None: + other.p = 1 + self.y = 1 + self.z = self.p + + @property + def p(self) -> int: + return 0 + + @p.setter + def p(self, x: int) -> None: + pass + +def f() -> int: + return 0 + +[file other.py] +# Not compiled +from typing_extensions import Final + +Y: Final = 3 + +[out] +C: [] +Collections: [d, d2, l, l2, s, s2, t] +Comparisons: [c1, c2, n1, n2, o1, o2, s] +BinaryOps: [a, b, c, d, e, f] +LocalsAndGlobals: [a, g] +Booleans: [a, b, c, d, e] +ModuleFinal: [a, b] +ClassFinal: [F, a] +Literals: [a, b, c] +ListComprehension: [a] +Helper: [x] +AttrAccess: [x, y] +Construct: [x] +IsInstance: [x] +Cast: [c, s, x] +PropertyAccessGetter: [x, y] +PropertyAccessSetter: [y] + +[case testAlwaysDefinedExpressions2] +from typing import List, Tuple + +class C: + def __init__(self) -> None: + self.x = 0 + +class AttributeRef: + def __init__(self, c: C) -> None: + self.aa = c.x + 
self.bb = self.aa + if c is not None: + self.z = 0 + self.cc = 0 + self.dd = self.z + +class ListOps: + def __init__(self, x: List[int], n: int) -> None: + self.a = len(x) + self.b = x[n] + self.c = [y + 1 for y in x] + +class TupleOps: + def __init__(self, t: Tuple[int, str]) -> None: + x, y = t + self.x = x + self.y = t[0] + s = x, y + self.z = s + +class IfExpr: + def __init__(self, x: int) -> None: + self.a = 1 if x < 5 else 2 + +class Base: + def __init__(self, x: int) -> None: + self.x = x + +class Derived1(Base): + def __init__(self, y: int) -> None: + self.aa = y + super().__init__(y) + self.bb = y + +class Derived2(Base): + pass + +class Conditionals: + def __init__(self, b: bool, n: int) -> None: + if not (n == 5 or n >= n + 1): + self.a = b + else: + self.a = not b + if b: + self.b = 2 + else: + self.b = 4 + +[out] +C: [x] +AttributeRef: [aa, bb, cc, dd] +ListOps: [a, b, c] +TupleOps: [x, y, z] +IfExpr: [a] +Base: [x] +Derived1: [aa, bb, x] +Derived2: [x] +Conditionals: [a, b] + +[case testAlwaysDefinedStatements] +from typing import Any, List, Optional, Iterable + +class Return: + def __init__(self, x: int) -> None: + self.x = x + if x > 5: + self.y = 1 + return + self.y = 2 + self.z = x + +class While: + def __init__(self, x: int) -> None: + n = 2 + while x > 0: + n *=2 + x -= 1 + self.a = n + while x < 5: + self.b = 1 + self.b += 1 + +class Try: + def __init__(self, x: List[int]) -> None: + self.a = 0 + try: + self.b = x[0] + except: + self.c = x + self.d = 0 + try: + self.e = x[0] + except: + self.e = 1 + +class TryFinally: + def __init__(self, x: List[int]) -> None: + self.a = 0 + try: + self.b = x[0] + finally: + self.c = x + self.d = 0 + try: + self.e = x[0] + finally: + self.e = 1 + +class Assert: + def __init__(self, x: Optional[str], y: int) -> None: + assert x is not None + assert y < 5 + self.a = x + +class For: + def __init__(self, it: Iterable[int]) -> None: + self.x = 0 + for x in it: + self.x += x + for x in it: + self.y = x + +class Assignment1: + def __init__(self, other: Assignment1) -> None: + self.x = 0 + self = other # Give up after assignment to self + self.y = 1 + +class Assignment2: + def __init__(self) -> None: + self.x = 0 + other = self # Give up after self is aliased + self.y = other.x + +class With: + def __init__(self, x: Any) -> None: + self.a = 0 + with x: + self.b = 1 + self.c = 2 + +def f() -> None: + pass + +[out] +Return: [x, y] +While: [a] +-- We could infer 'e' as always defined, but this is tricky, since always defined attribute +-- analysis must be performed earlier than exception handling transform. This would be +-- easy to infer *after* exception handling transform. +Try: [a, d] +-- Again, 'e' could be always defined, but it would be a bit tricky to do it. +TryFinally: [a, c, d] +Assert: [a] +For: [x] +Assignment1: [x] +Assignment2: [x] +-- TODO: Why is not 'b' included? 
+With: [a, c] + +[case testAlwaysDefinedAttributeDefaults] +class Basic: + x = 0 + +class ClassBodyAndInit: + x = 0 + s = 'x' + + def __init__(self, n: int) -> None: + self.n = 0 + +class AttrWithDefaultAndInit: + x = 0 + + def __init__(self, x: int) -> None: + self.x = x + +class Base: + x = 0 + y = 1 + +class Derived(Base): + y = 2 + z = 3 +[out] +Basic: [x] +ClassBodyAndInit: [n, s, x] +AttrWithDefaultAndInit: [x] +Base: [x, y] +Derived: [x, y, z] + +[case testAlwaysDefinedWithInheritance] +class Base: + def __init__(self, x: int) -> None: + self.x = x + +class Deriv1(Base): + def __init__(self, x: int, y: str) -> None: + super().__init__(x) + self.y = y + +class Deriv2(Base): + def __init__(self, x: int, y: str) -> None: + self.y = y + super().__init__(x) + +class Deriv22(Deriv2): + def __init__(self, x: int, y: str, z: bool) -> None: + super().__init__(x, y) + self.z = False + +class Deriv3(Base): + def __init__(self) -> None: + super().__init__(1) + +class Deriv4(Base): + def __init__(self) -> None: + self.y = 1 + self.x = 2 + +def f(a): pass + +class BaseUnsafe: + def __init__(self, x: int, y: int) -> None: + self.x = x + f(self) # Unknown function + self.y = y + +class DerivUnsafe(BaseUnsafe): + def __init__(self, z: int, zz: int) -> None: + self.z = z + super().__init__(1, 2) # Calls unknown function + self.zz = zz + +class BaseWithDefault: + x = 1 + + def __init__(self) -> None: + self.y = 1 + +class DerivedWithDefault(BaseWithDefault): + def __init__(self) -> None: + super().__init__() + self.z = 1 + +class AlwaysDefinedInBase: + def __init__(self) -> None: + self.x = 1 + self.y = 1 + +class UndefinedInDerived(AlwaysDefinedInBase): + def __init__(self, x: bool) -> None: + self.x = 1 + if x: + self.y = 2 + +class UndefinedInDerived2(UndefinedInDerived): + def __init__(self, x: bool): + if x: + self.y = 2 +[out] +Base: [x] +Deriv1: [x, y] +Deriv2: [x, y] +Deriv22: [x, y, z] +Deriv3: [x] +Deriv4: [x, y] +BaseUnsafe: [x] +DerivUnsafe: [x, z] +BaseWithDefault: [x, y] +DerivedWithDefault: [x, y, z] +AlwaysDefinedInBase: [] +UndefinedInDerived: [] +UndefinedInDerived2: [] + +[case testAlwaysDefinedWithInheritance2] +from mypy_extensions import trait, mypyc_attr + +from interpreted import PythonBase + +class BasePartiallyDefined: + def __init__(self, x: int) -> None: + self.a = 0 + if x: + self.x = x + +class Derived1(BasePartiallyDefined): + def __init__(self, x: int) -> None: + super().__init__(x) + self.y = x + +class BaseUndefined: + x: int + +class DerivedAlwaysDefined(BaseUndefined): + def __init__(self) -> None: + super().__init__() + self.z = 0 + self.x = 2 + +@trait +class MyTrait: + def f(self) -> None: pass + +class SimpleTraitImpl(MyTrait): + def __init__(self) -> None: + super().__init__() + self.x = 0 + +@trait +class TraitWithAttr: + x: int + y: str + +class TraitWithAttrImpl(TraitWithAttr): + def __init__(self) -> None: + self.y = 'x' + +@trait +class TraitWithAttr2: + z: int + +class TraitWithAttrImpl2(TraitWithAttr, TraitWithAttr2): + def __init__(self) -> None: + self.y = 'x' + self.z = 2 + +@mypyc_attr(allow_interpreted_subclasses=True) +class BaseWithGeneralSubclassing: + x = 0 + y: int + def __init__(self, s: str) -> None: + self.s = s + +class Derived2(BaseWithGeneralSubclassing): + def __init__(self) -> None: + super().__init__('x') + self.z = 0 + +class SubclassPythonclass(PythonBase): + def __init__(self) -> None: + self.y = 1 + +class BaseWithSometimesDefined: + def __init__(self, b: bool) -> None: + if b: + self.x = 0 + +class 
Derived3(BaseWithSometimesDefined): + def __init__(self, b: bool) -> None: + super().__init__(b) + self.x = 1 + +[file interpreted.py] +class PythonBase: + def __init__(self) -> None: + self.x = 0 + +[out] +BasePartiallyDefined: [a] +Derived1: [a, y] +BaseUndefined: [] +DerivedAlwaysDefined: [x, z] +MyTrait: [] +SimpleTraitImpl: [x] +TraitWithAttr: [] +TraitWithAttrImpl: [y] +TraitWithAttr2: [] +TraitWithAttrImpl2: [y, z] +BaseWithGeneralSubclassing: [] +-- TODO: 's' could also be always defined +Derived2: [x, z] +-- Always defined attribute analysis is turned off when inheriting a non-native class. +SubclassPythonclass: [] +BaseWithSometimesDefined: [] +-- TODO: 'x' could also be always defined, but it is a bit tricky to support +Derived3: [] + +[case testAlwaysDefinedWithNesting] +class NestedFunc: + def __init__(self) -> None: + self.x = 0 + def f() -> None: + self.y = 0 + f() + self.z = 1 +[out] +-- TODO: Support nested functions. +NestedFunc: [] +f___init___NestedFunc_obj: [] diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index d3403addecfb..077abcf2939b 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -2212,11 +2212,11 @@ L3: def PropertyHolder.__init__(self, left, right, is_add): self :: __main__.PropertyHolder left, right :: int - is_add, r0, r1, r2 :: bool + is_add :: bool L0: - self.left = left; r0 = is_error - self.right = right; r1 = is_error - self.is_add = is_add; r2 = is_error + self.left = left + self.right = right + self.is_add = is_add return 1 def PropertyHolder.twice_value(self): self :: __main__.PropertyHolder @@ -2299,9 +2299,8 @@ L0: def BaseProperty.__init__(self, value): self :: __main__.BaseProperty value :: int - r0 :: bool L0: - self._incrementer = value; r0 = is_error + self._incrementer = value return 1 def DerivedProperty.value(self): self :: __main__.DerivedProperty @@ -2351,10 +2350,9 @@ def DerivedProperty.__init__(self, incr_func, value): incr_func :: object value :: int r0 :: None - r1 :: bool L0: r0 = BaseProperty.__init__(self, value) - self._incr_func = incr_func; r1 = is_error + self._incr_func = incr_func return 1 def AgainProperty.next(self): self :: __main__.AgainProperty @@ -3444,10 +3442,9 @@ def f(a: bool) -> int: [out] def C.__mypyc_defaults_setup(__mypyc_self__): __mypyc_self__ :: __main__.C - r0, r1 :: bool L0: - __mypyc_self__.x = 2; r0 = is_error - __mypyc_self__.y = 4; r1 = is_error + __mypyc_self__.x = 2 + __mypyc_self__.y = 4 return 1 def f(a): a :: bool diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index 77943045ffe3..ca1e289354b2 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -146,16 +146,14 @@ class B(A): [out] def A.__init__(self): self :: __main__.A - r0 :: bool L0: - self.x = 20; r0 = is_error + self.x = 20 return 1 def B.__init__(self): self :: __main__.B - r0, r1 :: bool L0: - self.x = 40; r0 = is_error - self.y = 60; r1 = is_error + self.x = 40 + self.y = 60 return 1 [case testAttrLvalue] @@ -169,9 +167,8 @@ def increment(o: O) -> O: [out] def O.__init__(self): self :: __main__.O - r0 :: bool L0: - self.x = 2; r0 = is_error + self.x = 2 return 1 def increment(o): o :: __main__.O @@ -702,18 +699,16 @@ class B(A): def A.__init__(self, x): self :: __main__.A x :: int - r0 :: bool L0: - self.x = x; r0 = is_error + self.x = x return 1 def B.__init__(self, x, y): self :: __main__.B x, y :: int r0 :: None - r1 :: bool L0: r0 = A.__init__(self, x) - self.y = y; r1 = 
is_error + self.y = y return 1 [case testClassMethod] @@ -760,18 +755,16 @@ class B(A): def A.__init__(self, x): self :: __main__.A x :: int - r0 :: bool L0: - self.x = x; r0 = is_error + self.x = x return 1 def B.__init__(self, x, y): self :: __main__.B x, y :: int r0 :: None - r1 :: bool L0: r0 = A.__init__(self, x) - self.y = y; r1 = is_error + self.y = y return 1 [case testSuper2] @@ -1077,30 +1070,26 @@ L0: return 1 def A.__mypyc_defaults_setup(__mypyc_self__): __mypyc_self__ :: __main__.A - r0 :: bool L0: - __mypyc_self__.x = 20; r0 = is_error + __mypyc_self__.x = 20 return 1 def B.__mypyc_defaults_setup(__mypyc_self__): __mypyc_self__ :: __main__.B - r0 :: bool - r1 :: dict - r2 :: str - r3 :: object - r4 :: str - r5 :: bool - r6 :: object - r7, r8 :: bool + r0 :: dict + r1 :: str + r2 :: object + r3 :: str + r4 :: object L0: - __mypyc_self__.x = 20; r0 = is_error - r1 = __main__.globals :: static - r2 = 'LOL' - r3 = CPyDict_GetItem(r1, r2) - r4 = cast(str, r3) - __mypyc_self__.y = r4; r5 = is_error - r6 = box(None, 1) - __mypyc_self__.z = r6; r7 = is_error - __mypyc_self__.b = 1; r8 = is_error + __mypyc_self__.x = 20 + r0 = __main__.globals :: static + r1 = 'LOL' + r2 = CPyDict_GetItem(r0, r1) + r3 = cast(str, r2) + __mypyc_self__.y = r3 + r4 = box(None, 1) + __mypyc_self__.z = r4 + __mypyc_self__.b = 1 return 1 [case testSubclassDictSpecalized] @@ -1229,3 +1218,25 @@ def g(c: Type[C], d: Type[D]) -> None: # N: (Hint: Use "x: Final = ..." or "x: ClassVar = ..." to define a class attribute) d.f d.c + +[case testSetAttributeWithDefaultInInit] +class C: + s = '' + + def __init__(self, s: str) -> None: + self.s = s +[out] +def C.__init__(self, s): + self :: __main__.C + s :: str + r0 :: bool +L0: + self.s = s; r0 = is_error + return 1 +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C + r0 :: str +L0: + r0 = '' + __mypyc_self__.s = r0 + return 1 diff --git a/mypyc/test-data/irbuild-constant-fold.test b/mypyc/test-data/irbuild-constant-fold.test index eab4df4e2b27..dd75c01443f1 100644 --- a/mypyc/test-data/irbuild-constant-fold.test +++ b/mypyc/test-data/irbuild-constant-fold.test @@ -234,9 +234,8 @@ def f() -> None: [out] def C.__mypyc_defaults_setup(__mypyc_self__): __mypyc_self__ :: __main__.C - r0 :: bool L0: - __mypyc_self__.X = 10; r0 = is_error + __mypyc_self__.X = 10 return 1 def f(): a :: int diff --git a/mypyc/test-data/irbuild-singledispatch.test b/mypyc/test-data/irbuild-singledispatch.test index 8b2e2abd507a..4e18bbf50d4e 100644 --- a/mypyc/test-data/irbuild-singledispatch.test +++ b/mypyc/test-data/irbuild-singledispatch.test @@ -14,19 +14,17 @@ L0: return 0 def f_obj.__init__(__mypyc_self__): __mypyc_self__ :: __main__.f_obj - r0 :: dict - r1 :: bool - r2 :: dict - r3 :: str - r4 :: int32 - r5 :: bit + r0, r1 :: dict + r2 :: str + r3 :: int32 + r4 :: bit L0: r0 = PyDict_New() - __mypyc_self__.registry = r0; r1 = is_error - r2 = PyDict_New() - r3 = 'dispatch_cache' - r4 = PyObject_SetAttr(__mypyc_self__, r3, r2) - r5 = r4 >= 0 :: signed + __mypyc_self__.registry = r0 + r1 = PyDict_New() + r2 = 'dispatch_cache' + r3 = PyObject_SetAttr(__mypyc_self__, r2, r1) + r4 = r3 >= 0 :: signed return 1 def f_obj.__call__(__mypyc_self__, arg): __mypyc_self__ :: __main__.f_obj @@ -148,19 +146,17 @@ L0: return 1 def f_obj.__init__(__mypyc_self__): __mypyc_self__ :: __main__.f_obj - r0 :: dict - r1 :: bool - r2 :: dict - r3 :: str - r4 :: int32 - r5 :: bit + r0, r1 :: dict + r2 :: str + r3 :: int32 + r4 :: bit L0: r0 = PyDict_New() - __mypyc_self__.registry = 
r0; r1 = is_error - r2 = PyDict_New() - r3 = 'dispatch_cache' - r4 = PyObject_SetAttr(__mypyc_self__, r3, r2) - r5 = r4 >= 0 :: signed + __mypyc_self__.registry = r0 + r1 = PyDict_New() + r2 = 'dispatch_cache' + r3 = PyObject_SetAttr(__mypyc_self__, r2, r1) + r4 = r3 >= 0 :: signed return 1 def f_obj.__call__(__mypyc_self__, x): __mypyc_self__ :: __main__.f_obj @@ -259,4 +255,3 @@ L0: r1 = f(r0) r2 = box(None, 1) return r2 - diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test index 98a6fa240359..ab947c956b74 100644 --- a/mypyc/test-data/irbuild-statements.test +++ b/mypyc/test-data/irbuild-statements.test @@ -894,10 +894,9 @@ def delAttributeMultiple() -> None: def Dummy.__init__(self, x, y): self :: __main__.Dummy x, y :: int - r0, r1 :: bool L0: - self.x = x; r0 = is_error - self.y = y; r1 = is_error + self.x = x + self.y = y return 1 def delAttribute(): r0, dummy :: __main__.Dummy diff --git a/mypyc/test-data/run-classes.test b/mypyc/test-data/run-classes.test index e238c2b02284..ac42aa26cf58 100644 --- a/mypyc/test-data/run-classes.test +++ b/mypyc/test-data/run-classes.test @@ -710,8 +710,7 @@ class B(A): class C(B): def __init__(self, x: int, y: int) -> None: - init = super(C, self).__init__ - init(x, y+1) + super(C, self).__init__(x, y + 1) def foo(self, x: int) -> int: # should go to A, not B @@ -1329,16 +1328,18 @@ assert Nothing2.X == 10 assert Nothing3.X == 10 [case testPickling] -from mypy_extensions import trait +from mypy_extensions import trait, mypyc_attr from typing import Any, TypeVar, Generic def dec(x: Any) -> Any: return x +@mypyc_attr(allow_interpreted_subclasses=True) class A: x: int y: str +@mypyc_attr(allow_interpreted_subclasses=True) class B(A): z: bool @@ -1865,10 +1866,28 @@ class F(D): # # def y(self, val : object) -> None: # # self._y = val +# No inheritance, just plain setter/getter +class G: + def __init__(self, x: int) -> None: + self._x = x + + @property + def x(self) -> int: + return self._x + + @x.setter + def x(self, x: int) -> None: + self._x = x + +class H: + def __init__(self, g: G) -> None: + self.g = g + self.g.x = 5 # Should not be treated as initialization + [file other.py] # Run in both interpreted and compiled mode -from native import A, B, C, D, E, F +from native import A, B, C, D, E, F, G a = A() assert a.x == 0 @@ -1898,6 +1917,9 @@ f = F() assert f.x == 20 f.x = 30 assert f.x == 50 +g = G(4) +g.x = 20 +assert g.x == 20 [file driver.py] # Run the tests in both interpreted and compiled mode @@ -1924,3 +1946,263 @@ from native import A, B, C a = A() b = B() c = C() + +[case testCopyAlwaysDefinedAttributes] +import copy +from typing import Union + +class A: pass + +class C: + def __init__(self, n: int = 0) -> None: + self.n = n + self.s = "" + self.t = ("", 0) + self.u: Union[str, bytes] = '' + self.a = A() + +def test_copy() -> None: + c1 = C() + c1.n = 1 + c1.s = "x" + c2 = copy.copy(c1) + assert c2.n == 1 + assert c2.s == "x" + assert c2.t == ("", 0) + assert c2.u == '' + assert c2.a is c1.a + +[case testNonNativeCallsToDunderNewAndInit] +from typing import Any +from testutil import assertRaises + +count_c = 0 + +class C: + def __init__(self) -> None: + self.x = 'a' # Always defined attribute + global count_c + count_c += 1 + + def get(self) -> str: + return self.x + +def test_no_init_args() -> None: + global count_c + count_c = 0 + + # Use Any to get non-native semantics + cls: Any = C + # __new__ implicitly calls __init__ for native classes + obj = cls.__new__(cls) + assert obj.get() == 'a' + 
assert count_c == 1 + # Make sure we don't call __init__ twice + obj2 = cls() + assert obj2.get() == 'a' + assert count_c == 2 + +count_d = 0 + +class D: + def __init__(self, x: str) -> None: + self.x = x # Always defined attribute + global count_d + count_d += 1 + + def get(self) -> str: + return self.x + +def test_init_arg() -> None: + global count_d + count_d = 0 + + # Use Any to get non-native semantics + cls: Any = D + # __new__ implicitly calls __init__ for native classes + obj = cls.__new__(cls, 'abc') + assert obj.get() == 'abc' + assert count_d == 1 + # Make sure we don't call __init__ twice + obj2 = cls('x') + assert obj2.get() == 'x' + assert count_d == 2 + # Keyword args should work + obj = cls.__new__(cls, x='abc') + assert obj.get() == 'abc' + assert count_d == 3 + +def test_invalid_init_args() -> None: + # Use Any to get non-native semantics + cls: Any = D + with assertRaises(TypeError): + cls() + with assertRaises(TypeError): + cls(y='x') + with assertRaises(TypeError): + cls(1) + +[case testTryDeletingAlwaysDefinedAttribute] +from typing import Any +from testutil import assertRaises + +class C: + def __init__(self) -> None: + self.x = 0 + +class D(C): + pass + +def test_try_deleting_always_defined_attr() -> None: + c: Any = C() + with assertRaises(AttributeError): + del c.x + d: Any = D() + with assertRaises(AttributeError): + del d.x + +[case testAlwaysDefinedAttributeAndAllowInterpretedSubclasses] +from mypy_extensions import mypyc_attr + +from m import define_interpreted_subclass + +@mypyc_attr(allow_interpreted_subclasses=True) +class Base: + x = 5 + y: int + def __init__(self, s: str) -> None: + self.s = s + +class DerivedNative(Base): + def __init__(self) -> None: + super().__init__('x') + self.z = 3 + +def test_native_subclass() -> None: + o = DerivedNative() + assert o.x == 5 + assert o.s == 'x' + assert o.z == 3 + +def test_interpreted_subclass() -> None: + define_interpreted_subclass(Base) + +[file m.py] +from testutil import assertRaises + +def define_interpreted_subclass(b): + class DerivedInterpreted1(b): + def __init__(self): + # Don't call base class __init__ + pass + d1 = DerivedInterpreted1() + assert d1.x == 5 + with assertRaises(AttributeError): + d1.y + with assertRaises(AttributeError): + d1.s + with assertRaises(AttributeError): + del d1.x + + class DerivedInterpreted1(b): + def __init__(self): + super().__init__('y') + d2 = DerivedInterpreted1() + assert d2.x == 5 + assert d2.s == 'y' + with assertRaises(AttributeError): + d2.y + with assertRaises(AttributeError): + del d2.x + +[case testBaseClassSometimesDefinesAttribute] +class C: + def __init__(self, b: bool) -> None: + if b: + self.x = [1] + +class D(C): + def __init__(self, b: bool) -> None: + super().__init__(b) + self.x = [2] + +def test_base_class() -> None: + c = C(True) + assert c.x == [1] + c = C(False) + try: + c.x + except AttributeError: + return + assert False + +def test_subclass() -> None: + d = D(True) + assert d.x == [2] + d = D(False) + assert d.x == [2] + +[case testSerializableClass] +from mypy_extensions import mypyc_attr +from typing import Any +import copy +from testutil import assertRaises + +@mypyc_attr(serializable=True) +class Base: + def __init__(self, s: str) -> None: + self.s = s + +class Derived(Base): + def __init__(self, s: str, n: int) -> None: + super().__init__(s) + self.n = n + +def test_copy_base() -> None: + o = Base('xyz') + o2 = copy.copy(o) + assert isinstance(o2, Base) + assert o2 is not o + assert o2.s == 'xyz' + +def test_copy_derived() -> None: + d = 
Derived('xyz', 5) + d2 = copy.copy(d) + assert isinstance(d2, Derived) + assert d2 is not d + assert d2.s == 'xyz' + assert d2.n == 5 + +class NonSerializable: + def __init__(self, s: str) -> None: + self.s = s + +@mypyc_attr(serializable=True) +class SerializableSub(NonSerializable): + def __init__(self, s: str, n: int) -> None: + super().__init__(s) + self.n = n + +def test_serializable_sub_class() -> None: + n = NonSerializable('xyz') + assert n.s == 'xyz' + + with assertRaises(TypeError): + copy.copy(n) + + s = SerializableSub('foo', 6) + s2 = copy.copy(s) + assert s2 is not s + assert s2.s == 'foo' + assert s2.n == 6 + +def test_serializable_sub_class_call_new() -> None: + t: Any = SerializableSub + sub: SerializableSub = t.__new__(t) + with assertRaises(AttributeError): + sub.s + with assertRaises(AttributeError): + sub.n + base: NonSerializable = sub + with assertRaises(AttributeError): + base.s diff --git a/mypyc/test-data/run-multimodule.test b/mypyc/test-data/run-multimodule.test index 6ffa166c57a1..418af66ba060 100644 --- a/mypyc/test-data/run-multimodule.test +++ b/mypyc/test-data/run-multimodule.test @@ -799,6 +799,69 @@ import native [rechecked native, other_a] +[case testSeparateCompilationWithUndefinedAttribute] +from other_a import A + +def f() -> None: + a = A() + if a.x == 5: + print(a.y) + print(a.m()) + else: + assert a.x == 6 + try: + print(a.y) + except AttributeError: + print('y undefined') + else: + assert False + + try: + print(a.m()) + except AttributeError: + print('y undefined') + else: + assert False + +[file other_a.py] +from other_b import B + +class A(B): + def __init__(self) -> None: + self.y = 9 + +[file other_a.py.2] +from other_b import B + +class A(B): + x = 6 + + def __init__(self) -> None: + pass + +[file other_b.py] +class B: + x = 5 + + def __init__(self) -> None: + self.y = 7 + + def m(self) -> int: + return self.y + +[file driver.py] +from native import f +f() + +[rechecked native, other_a] + +[out] +9 +9 +[out2] +y undefined +y undefined + [case testIncrementalCompilationWithDeletable] import other_a [file other_a.py] diff --git a/mypyc/test/test_alwaysdefined.py b/mypyc/test/test_alwaysdefined.py new file mode 100644 index 000000000000..f9a90fabf2a1 --- /dev/null +++ b/mypyc/test/test_alwaysdefined.py @@ -0,0 +1,42 @@ +"""Test cases for inferring always defined attributes in classes.""" + +import os.path + +from mypy.test.config import test_temp_dir +from mypy.test.data import DataDrivenTestCase +from mypy.errors import CompileError + +from mypyc.test.testutil import ( + ICODE_GEN_BUILTINS, use_custom_builtins, MypycDataSuite, build_ir_for_single_file2, + assert_test_output, infer_ir_build_options_from_test_name +) + +files = [ + 'alwaysdefined.test' +] + + +class TestAlwaysDefined(MypycDataSuite): + files = files + base_path = test_temp_dir + + def run_case(self, testcase: DataDrivenTestCase) -> None: + """Perform a runtime checking transformation test case.""" + options = infer_ir_build_options_from_test_name(testcase.name) + if options is None: + # Skipped test case + return + with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase): + try: + ir = build_ir_for_single_file2(testcase.input, options) + except CompileError as e: + actual = e.messages + else: + actual = [] + for cl in ir.classes: + if cl.name.startswith('_'): + continue + actual.append('{}: [{}]'.format( + cl.name, ', '.join(sorted(cl._always_initialized_attrs)))) + + assert_test_output(testcase, actual, 'Invalid test output', testcase.output) diff 
--git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index 466815534fdb..852de8edcf69 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -376,7 +376,6 @@ class TestRunSeparate(TestRun): This puts other.py and other_b.py into a compilation group named "stuff". Any files not mentioned in the comment will get single-file groups. """ - separate = True test_name_suffix = '_separate' files = [ diff --git a/mypyc/test/test_serialization.py b/mypyc/test/test_serialization.py index 683bb807620e..eeef6beb1305 100644 --- a/mypyc/test/test_serialization.py +++ b/mypyc/test/test_serialization.py @@ -58,7 +58,10 @@ def assert_blobs_same(x: Any, y: Any, trail: Tuple[Any, ...]) -> None: assert x.keys() == y.keys(), f"Keys mismatch at {trail}" for k in x.keys(): assert_blobs_same(x[k], y[k], trail + (k,)) - elif isinstance(x, Iterable) and not isinstance(x, str): + elif isinstance(x, Iterable) and not isinstance(x, (str, set)): + # Special case iterables to generate better assert error messages. + # We can't use this for sets since the ordering is unpredictable, + # and strings should be treated as atomic values. for i, (xv, yv) in enumerate(zip(x, y)): assert_blobs_same(xv, yv, trail + (i,)) elif isinstance(x, RType): diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py index c5dc2588a7e2..d5c5dea2d634 100644 --- a/mypyc/test/testutil.py +++ b/mypyc/test/testutil.py @@ -17,6 +17,7 @@ from mypyc.options import CompilerOptions from mypyc.analysis.ircheck import assert_func_ir_valid from mypyc.ir.func_ir import FuncIR +from mypyc.ir.module_ir import ModuleIR from mypyc.errors import Errors from mypyc.irbuild.main import build_ir from mypyc.irbuild.mapper import Mapper @@ -87,6 +88,12 @@ def perform_test(func: Callable[[DataDrivenTestCase], None], def build_ir_for_single_file(input_lines: List[str], compiler_options: Optional[CompilerOptions] = None) -> List[FuncIR]: + return build_ir_for_single_file2(input_lines, compiler_options).functions + + +def build_ir_for_single_file2(input_lines: List[str], + compiler_options: Optional[CompilerOptions] = None + ) -> ModuleIR: program_text = '\n'.join(input_lines) # By default generate IR compatible with the earliest supported Python C API. @@ -121,7 +128,7 @@ def build_ir_for_single_file(input_lines: List[str], module = list(modules.values())[0] for fn in module.functions: assert_func_ir_valid(fn) - return module.functions + return module def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> None: From 74cfa3d46ae6a9c0033bf95d88462619bc3bc044 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 18 May 2022 10:52:49 +0100 Subject: [PATCH 02/80] [mypyc] Borrow references during chained attribute access (#12805) If we have multiple native attribute access operations in succession, we can borrow the temporaries. This avoids an incref and decref. For example, when evaluating x.y.z, we don't need to incref the result of x.y. We need to make sure that the objects from which we borrow values are not freed too early by adding keep_alive ops. This is part of a wider reference counting optimization workstream. All the improvements together produced around 5% performance improvement in the richards benchmark. In carefully constructed microbenchmarks 50+% improvements are possible. 
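As an illustration (not part of this change; the class and function names below are made up), this is the shape of code that benefits. Compiled as a native module, the temporary produced by `o.inner` is now borrowed instead of increfed, and a keep_alive op keeps `o` live until the final attribute load has happened:

    class Inner:
        x: int

    class Outer:
        inner: Inner

    def get_x(o: Outer) -> int:
        # With this change, the intermediate result of o.inner is a
        # borrowed reference: no incref/decref pair is generated for it.
        return o.inner.x

The new test case testBorrowAttribute added below shows the generated IR: a "borrow" GetAttr followed by a keep_alive on the outer object.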
--- mypyc/codegen/emitfunc.py | 2 +- mypyc/ir/ops.py | 3 +- mypyc/ir/pprint.py | 6 +++- mypyc/irbuild/builder.py | 19 +++++++++-- mypyc/irbuild/expression.py | 18 +++++++++-- mypyc/irbuild/ll_builder.py | 15 +++++++-- mypyc/test-data/irbuild-classes.test | 48 ++++++++++++++++++++++++++++ mypyc/test-data/refcount.test | 33 +++++++++++++++++++ 8 files changed, 133 insertions(+), 11 deletions(-) diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index f4ed657c467f..540c6b646496 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -333,7 +333,7 @@ def visit_get_attr(self, op: GetAttr) -> None: 'PyErr_SetString({}, "attribute {} of {} undefined");'.format( exc_class, repr(op.attr), repr(cl.name))) - if attr_rtype.is_refcounted: + if attr_rtype.is_refcounted and not op.is_borrowed: if not merged_branch and not always_defined: self.emitter.emit_line('} else {') self.emitter.emit_inc_ref(dest, attr_rtype) diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 786cb018f96b..74a31153f5a4 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -599,13 +599,14 @@ class GetAttr(RegisterOp): error_kind = ERR_MAGIC - def __init__(self, obj: Value, attr: str, line: int) -> None: + def __init__(self, obj: Value, attr: str, line: int, *, borrow: bool = False) -> None: super().__init__(line) self.obj = obj self.attr = attr assert isinstance(obj.type, RInstance), 'Attribute access not supported: %s' % obj.type self.class_type = obj.type self.type = obj.type.attr_type(attr) + self.is_borrowed = borrow def sources(self) -> List[Value]: return [self.obj] diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index 753965cb1e9c..c009b1343392 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -77,7 +77,11 @@ def visit_load_literal(self, op: LoadLiteral) -> str: return self.format('%r = %s%s', op, prefix, repr(op.value)) def visit_get_attr(self, op: GetAttr) -> str: - return self.format('%r = %r.%s', op, op.obj, op.attr) + if op.is_borrowed: + borrow = 'borrow ' + else: + borrow = '' + return self.format('%r = %s%r.%s', op, borrow, op.obj, op.attr) def visit_set_attr(self, op: SetAttr) -> str: if op.is_init: diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 72c03801e326..0b97bdbee625 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -141,6 +141,8 @@ def __init__(self, # can also do quick lookups. self.imports: OrderedDict[str, None] = OrderedDict() + self.can_borrow = False + # High-level control def set_module(self, module_name: str, module_path: str) -> None: @@ -152,15 +154,23 @@ def set_module(self, module_name: str, module_path: str) -> None: self.module_path = module_path @overload - def accept(self, node: Expression) -> Value: ... + def accept(self, node: Expression, *, can_borrow: bool = False) -> Value: ... @overload def accept(self, node: Statement) -> None: ... - def accept(self, node: Union[Statement, Expression]) -> Optional[Value]: - """Transform an expression or a statement.""" + def accept(self, node: Union[Statement, Expression], *, + can_borrow: bool = False) -> Optional[Value]: + """Transform an expression or a statement. + + If can_borrow is true, prefer to generate a borrowed reference. + Borrowed references are faster since they don't require reference count + manipulation, but they are only safe to use in specific contexts. 
+ """ with self.catch_errors(node.line): if isinstance(node, Expression): + old_can_borrow = self.can_borrow + self.can_borrow = can_borrow try: res = node.accept(self.visitor) res = self.coerce(res, self.node_type(node), node.line) @@ -170,6 +180,9 @@ def accept(self, node: Union[Statement, Expression]) -> Optional[Value]: # from causing more downstream trouble. except UnsupportedException: res = Register(self.node_type(node)) + self.can_borrow = old_can_borrow + if not can_borrow: + self.builder.flush_keep_alives() return res else: try: diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index 1a74d71c3b27..e1d6e31619c8 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -21,7 +21,7 @@ Value, Register, TupleGet, TupleSet, BasicBlock, Assign, LoadAddress, RaiseStandardError ) from mypyc.ir.rtypes import ( - RTuple, object_rprimitive, is_none_rprimitive, int_rprimitive, is_int_rprimitive + RTuple, RInstance, object_rprimitive, is_none_rprimitive, int_rprimitive, is_int_rprimitive ) from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD from mypyc.irbuild.format_str_tokenizer import ( @@ -130,8 +130,19 @@ def transform_member_expr(builder: IRBuilder, expr: MemberExpr) -> Value: if isinstance(expr.node, MypyFile) and expr.node.fullname in builder.imports: return builder.load_module(expr.node.fullname) - obj = builder.accept(expr.expr) + obj_rtype = builder.node_type(expr.expr) + if (isinstance(obj_rtype, RInstance) + and obj_rtype.class_ir.is_ext_class + and obj_rtype.class_ir.has_attr(expr.name) + and not obj_rtype.class_ir.get_method(expr.name)): + # Direct attribute access -> can borrow object + can_borrow = True + else: + can_borrow = False + obj = builder.accept(expr.expr, can_borrow=can_borrow) + rtype = builder.node_type(expr) + # Special case: for named tuples transform attribute access to faster index access. typ = get_proper_type(builder.types.get(expr.expr)) if isinstance(typ, TupleType) and typ.partial_fallback.type.is_named_tuple: @@ -142,7 +153,8 @@ def transform_member_expr(builder: IRBuilder, expr: MemberExpr) -> Value: check_instance_attribute_access_through_class(builder, expr, typ) - return builder.builder.get_attr(obj, expr.name, rtype, expr.line) + borrow = can_borrow and builder.can_borrow + return builder.builder.get_attr(obj, expr.name, rtype, expr.line, borrow=borrow) def check_instance_attribute_access_through_class(builder: IRBuilder, diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 1927773489b1..bbc30c233039 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -105,6 +105,9 @@ def __init__( self.blocks: List[BasicBlock] = [] # Stack of except handler entry blocks self.error_handlers: List[Optional[BasicBlock]] = [None] + # Values that we need to keep alive as long as we have borrowed + # temporaries. Use flush_keep_alives() to mark the end of the live range. 
+ self.keep_alives: List[Value] = [] # Basic operations @@ -145,6 +148,11 @@ def self(self) -> Register: """ return self.args[0] + def flush_keep_alives(self) -> None: + if self.keep_alives: + self.add(KeepAlive(self.keep_alives[:])) + self.keep_alives = [] + # Type conversions def box(self, src: Value) -> Value: @@ -219,11 +227,14 @@ def coerce_nullable(self, src: Value, target_type: RType, line: int) -> Value: # Attribute access - def get_attr(self, obj: Value, attr: str, result_type: RType, line: int) -> Value: + def get_attr(self, obj: Value, attr: str, result_type: RType, line: int, *, + borrow: bool = False) -> Value: """Get a native or Python attribute of an object.""" if (isinstance(obj.type, RInstance) and obj.type.class_ir.is_ext_class and obj.type.class_ir.has_attr(attr)): - return self.add(GetAttr(obj, attr, line)) + if borrow: + self.keep_alives.append(obj) + return self.add(GetAttr(obj, attr, line, borrow=borrow)) elif isinstance(obj.type, RUnion): return self.union_get_attr(obj, obj.type, attr, result_type, line) else: diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index ca1e289354b2..a5f360928abe 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -1240,3 +1240,51 @@ L0: r0 = '' __mypyc_self__.s = r0 return 1 + +[case testBorrowAttribute] +def f(d: D) -> int: + return d.c.x + +class C: + x: int +class D: + c: C +[out] +def f(d): + d :: __main__.D + r0 :: __main__.C + r1 :: int +L0: + r0 = borrow d.c + r1 = r0.x + keep_alive d + return r1 + +[case testNoBorrowOverPropertyAccess] +class C: + d: D +class D: + @property + def e(self) -> E: + return E() +class E: + x: int +def f(c: C) -> int: + return c.d.e.x +[out] +def D.e(self): + self :: __main__.D + r0 :: __main__.E +L0: + r0 = E() + return r0 +def f(c): + c :: __main__.C + r0 :: __main__.D + r1 :: __main__.E + r2 :: int +L0: + r0 = c.d + r1 = r0.e + r2 = r1.x + return r2 diff --git a/mypyc/test-data/refcount.test b/mypyc/test-data/refcount.test index 909251741a30..965d4066c0b5 100644 --- a/mypyc/test-data/refcount.test +++ b/mypyc/test-data/refcount.test @@ -917,3 +917,36 @@ L0: r5 = unbox(int, r4) dec_ref r4 return r5 + +[case testBorrowAttribute] +def g() -> int: + d = D() + return d.c.x + +def f(d: D) -> int: + return d.c.x + +class C: + x: int +class D: + c: C +[out] +def g(): + r0, d :: __main__.D + r1 :: __main__.C + r2 :: int +L0: + r0 = D() + d = r0 + r1 = borrow d.c + r2 = r1.x + dec_ref d + return r2 +def f(d): + d :: __main__.D + r0 :: __main__.C + r1 :: int +L0: + r0 = borrow d.c + r1 = r0.x + return r1 From 8dac2210f5afedee1c45c6293f20bf9b5c5ec179 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 18 May 2022 13:03:32 +0100 Subject: [PATCH 03/80] Fix namedtuple crash in unannotated function (#12804) Fixes #11121. --- mypy/semanal_namedtuple.py | 11 +++++++--- test-data/unit/check-incremental.test | 29 +++++++++++++++++++++++++++ test-data/unit/check-namedtuple.test | 14 +++++++++++++ 3 files changed, 51 insertions(+), 3 deletions(-) diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 07863dea2efb..109ec17cbc89 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -186,11 +186,16 @@ def check_namedtuple(self, # Error. Construct dummy return value. 
if var_name: name = var_name + if is_func_scope: + name += '@' + str(call.line) else: - name = 'namedtuple@' + str(call.line) + name = var_name = 'namedtuple@' + str(call.line) info = self.build_namedtuple_typeinfo(name, [], [], {}, node.line) - self.store_namedtuple_info(info, name, call, is_typed) - return name, info + self.store_namedtuple_info(info, var_name, call, is_typed) + if name != var_name or is_func_scope: + # NOTE: we skip local namespaces since they are not serialized. + self.api.add_symbol_skip_local(name, info) + return var_name, info if not ok: # This is a valid named tuple but some types are not ready. return typename, None diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index c604b386691b..79fa1c92c52e 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -5711,3 +5711,32 @@ class C: [builtins fixtures/dict.pyi] [out2] tmp/a.py:2: error: "object" has no attribute "xyz" + +[case testIncrementalInvalidNamedTupleInUnannotatedFunction] +import a + +[file a.py] +import b + +[file a.py.2] +import b # f + +[file b.py] +from typing import NamedTuple + +def toplevel(fields): + TupleType = NamedTuple("TupleType", fields) + class InheritFromTuple(TupleType): + pass + NT2 = NamedTuple("bad", [('x', int)]) + nt2: NT2 = NT2(x=1) + +class C: + def method(self, fields): + TupleType = NamedTuple("TupleType", fields) + class InheritFromTuple(TupleType): + pass + NT2 = NamedTuple("bad", [('x', int)]) + nt2: NT2 = NT2(x=1) + +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index b95cc96f8115..c6f1fe3b1d04 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -1120,3 +1120,17 @@ def bar1(c: C1) -> None: reveal_type(c) # N: Revealed type is "Tuple[builtins.int, fallback=__main__.C1]" [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] + +[case testInvalidNamedTupleWithinFunction] +from collections import namedtuple + +def f(fields) -> None: + TupleType = namedtuple("TupleType", fields) \ + # E: List or tuple literal expected as the second argument to "namedtuple()" + class InheritFromTuple(TupleType): + pass + t: TupleType + it: InheritFromTuple + NT2 = namedtuple("bad", "x") # E: First argument to namedtuple() should be "NT2", not "bad" + nt2: NT2 = NT2(x=1) +[builtins fixtures/tuple.pyi] From 8e7e81759d3e43199bb0dced94f55157b72b2a49 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 18 May 2022 17:48:09 +0100 Subject: [PATCH 04/80] Update version to 0.970+dev (#12808) --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index c7b4e30f2420..b46cd5b82a66 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,7 +5,7 @@ # - Release versions have the form "0.NNN". # - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). # - For 1.0 we'll switch back to 1.2.3 form. -__version__ = '0.960+dev' +__version__ = '0.970+dev' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From f71dba7526b3f5244faaa210302fc20269691056 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 19 May 2022 11:08:50 +0100 Subject: [PATCH 05/80] [mypyc] Borrow operands of several primitives (#12810) Borrow an operand such as `x.y` (attribute of a native class) in various contexts when it's safe to do so. 
This reduces the number of incref/decref operations we need to perform. This continues work started in #12805. These cases now support borrowing (for some subexpressions, in some contexts): * `x.y is None` * Cast source value * `len(x.y)` (if the operand is a list) * `isinstance(x.y, C)` * `x.y[a.b]` * `x.y.z = 1` --- mypyc/ir/ops.py | 5 +- mypyc/ir/pprint.py | 11 +- mypyc/irbuild/builder.py | 18 +- mypyc/irbuild/expression.py | 56 +++-- mypyc/irbuild/ll_builder.py | 12 +- mypyc/irbuild/specialize.py | 20 +- mypyc/irbuild/statement.py | 5 +- mypyc/test-data/exceptions.test | 22 +- mypyc/test-data/irbuild-classes.test | 28 +-- mypyc/test-data/irbuild-isinstance.test | 40 ++++ mypyc/test-data/irbuild-optional.test | 47 ++-- mypyc/test-data/irbuild-str.test | 113 +++++---- mypyc/test-data/refcount.test | 291 ++++++++++++++++++++++++ mypyc/test-data/run-generators.test | 14 ++ 14 files changed, 539 insertions(+), 143 deletions(-) diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 74a31153f5a4..d36fcfb9e7eb 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -775,15 +775,18 @@ class Cast(RegisterOp): error_kind = ERR_MAGIC - def __init__(self, src: Value, typ: RType, line: int) -> None: + def __init__(self, src: Value, typ: RType, line: int, *, borrow: bool = False) -> None: super().__init__(line) self.src = src self.type = typ + self.is_borrowed = borrow def sources(self) -> List[Value]: return [self.src] def stolen(self) -> List[Value]: + if self.is_borrowed: + return [] return [self.src] def accept(self, visitor: 'OpVisitor[T]') -> T: diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index c009b1343392..40243dac96e9 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -77,11 +77,12 @@ def visit_load_literal(self, op: LoadLiteral) -> str: return self.format('%r = %s%s', op, prefix, repr(op.value)) def visit_get_attr(self, op: GetAttr) -> str: + return self.format('%r = %s%r.%s', op, self.borrow_prefix(op), op.obj, op.attr) + + def borrow_prefix(self, op: Op) -> str: if op.is_borrowed: - borrow = 'borrow ' - else: - borrow = '' - return self.format('%r = %s%r.%s', op, borrow, op.obj, op.attr) + return 'borrow ' + return '' def visit_set_attr(self, op: SetAttr) -> str: if op.is_init: @@ -142,7 +143,7 @@ def visit_method_call(self, op: MethodCall) -> str: return s def visit_cast(self, op: Cast) -> str: - return self.format('%r = cast(%s, %r)', op, op.type, op.src) + return self.format('%r = %scast(%s, %r)', op, self.borrow_prefix(op), op.type, op.src) def visit_box(self, op: Box) -> str: return self.format('%r = box(%s, %r)', op, op.src.type, op.src) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 0b97bdbee625..c7ef400236b3 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -182,7 +182,7 @@ def accept(self, node: Union[Statement, Expression], *, res = Register(self.node_type(node)) self.can_borrow = old_can_borrow if not can_borrow: - self.builder.flush_keep_alives() + self.flush_keep_alives() return res else: try: @@ -191,6 +191,9 @@ def accept(self, node: Union[Statement, Expression], *, pass return None + def flush_keep_alives(self) -> None: + self.builder.flush_keep_alives() + # Pass through methods for the most common low-level builder ops, for convenience. 
def add(self, op: Op) -> Value: @@ -234,7 +237,7 @@ def binary_op(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value: return self.builder.binary_op(lreg, rreg, expr_op, line) def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) -> Value: - return self.builder.coerce(src, target_type, line, force) + return self.builder.coerce(src, target_type, line, force, can_borrow=self.can_borrow) def none_object(self) -> Value: return self.builder.none_object() @@ -510,7 +513,8 @@ def get_assignment_target(self, lvalue: Lvalue, return AssignmentTargetIndex(base, index) elif isinstance(lvalue, MemberExpr): # Attribute assignment x.y = e - obj = self.accept(lvalue.expr) + can_borrow = self.is_native_attr_ref(lvalue) + obj = self.accept(lvalue.expr, can_borrow=can_borrow) return AssignmentTargetAttr(obj, lvalue.name) elif isinstance(lvalue, TupleExpr): # Multiple assignment a, ..., b = e @@ -1176,6 +1180,14 @@ def load_module_attr_by_fullname(self, fullname: str, line: int) -> Value: left = self.load_module(module) return self.py_get_attr(left, name, line) + def is_native_attr_ref(self, expr: MemberExpr) -> bool: + """Is expr a direct reference to a native (struct) attribute of an instance?""" + obj_rtype = self.node_type(expr.expr) + return (isinstance(obj_rtype, RInstance) + and obj_rtype.class_ir.is_ext_class + and obj_rtype.class_ir.has_attr(expr.name) + and not obj_rtype.class_ir.get_method(expr.name)) + # Lacks a good type because there wasn't a reasonable type in 3.5 :( def catch_errors(self, line: int) -> Any: return catch_errors(self.module_path, line) diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index e1d6e31619c8..e1feabb0a4f3 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -21,7 +21,8 @@ Value, Register, TupleGet, TupleSet, BasicBlock, Assign, LoadAddress, RaiseStandardError ) from mypyc.ir.rtypes import ( - RTuple, RInstance, object_rprimitive, is_none_rprimitive, int_rprimitive, is_int_rprimitive + RTuple, object_rprimitive, is_none_rprimitive, int_rprimitive, is_int_rprimitive, + is_list_rprimitive ) from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD from mypyc.irbuild.format_str_tokenizer import ( @@ -130,17 +131,8 @@ def transform_member_expr(builder: IRBuilder, expr: MemberExpr) -> Value: if isinstance(expr.node, MypyFile) and expr.node.fullname in builder.imports: return builder.load_module(expr.node.fullname) - obj_rtype = builder.node_type(expr.expr) - if (isinstance(obj_rtype, RInstance) - and obj_rtype.class_ir.is_ext_class - and obj_rtype.class_ir.has_attr(expr.name) - and not obj_rtype.class_ir.get_method(expr.name)): - # Direct attribute access -> can borrow object - can_borrow = True - else: - can_borrow = False + can_borrow = builder.is_native_attr_ref(expr) obj = builder.accept(expr.expr, can_borrow=can_borrow) - rtype = builder.node_type(expr) # Special case: for named tuples transform attribute access to faster index access. 
@@ -418,8 +410,12 @@ def transform_op_expr(builder: IRBuilder, expr: OpExpr) -> Value: def transform_index_expr(builder: IRBuilder, expr: IndexExpr) -> Value: - base = builder.accept(expr.base) index = expr.index + base_type = builder.node_type(expr.base) + is_list = is_list_rprimitive(base_type) + can_borrow_base = is_list and is_borrow_friendly_expr(builder, index) + + base = builder.accept(expr.base, can_borrow=can_borrow_base) if isinstance(base.type, RTuple) and isinstance(index, IntExpr): return builder.add(TupleGet(base, index.value, expr.line)) @@ -429,11 +425,31 @@ def transform_index_expr(builder: IRBuilder, expr: IndexExpr) -> Value: if value: return value - index_reg = builder.accept(expr.index) + index_reg = builder.accept(expr.index, can_borrow=is_list) return builder.gen_method_call( base, '__getitem__', [index_reg], builder.node_type(expr), expr.line) +def is_borrow_friendly_expr(builder: IRBuilder, expr: Expression) -> bool: + """Can the result of the expression borrowed temporarily? + + Borrowing means keeping a reference without incrementing the reference count. + """ + if isinstance(expr, (IntExpr, FloatExpr, StrExpr, BytesExpr)): + # Literals are immportal and can always be borrowed + return True + if isinstance(expr, (UnaryExpr, OpExpr)) and constant_fold_expr(builder, expr) is not None: + # Literal expressions are similar to literals + return True + if isinstance(expr, NameExpr): + if isinstance(expr.node, Var) and expr.kind == LDEF: + # Local variable reference can be borrowed + return True + if isinstance(expr, MemberExpr) and builder.is_native_attr_ref(expr): + return True + return False + + def try_constant_fold(builder: IRBuilder, expr: Expression) -> Optional[Value]: """Return the constant value of an expression if possible. @@ -513,7 +529,8 @@ def transform_conditional_expr(builder: IRBuilder, expr: ConditionalExpr) -> Val def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: # x in (...)/[...] # x not in (...)/[...] - if (e.operators[0] in ['in', 'not in'] + first_op = e.operators[0] + if (first_op in ['in', 'not in'] and len(e.operators) == 1 and isinstance(e.operands[1], (TupleExpr, ListExpr))): items = e.operands[1].items @@ -560,6 +577,12 @@ def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: else: return builder.true() + if first_op in ('is', 'is not') and len(e.operators) == 1: + right = e.operands[1] + if isinstance(right, NameExpr) and right.fullname == 'builtins.None': + # Special case 'is None' / 'is not None'. + return translate_is_none(builder, e.operands[0], negated=first_op != 'is') + # TODO: Don't produce an expression when used in conditional context # All of the trickiness here is due to support for chained conditionals # (`e1 < e2 > e3`, etc). 
`e1 < e2 > e3` is approximately equivalent to @@ -584,6 +607,11 @@ def go(i: int, prev: Value) -> Value: return go(0, builder.accept(e.operands[0])) +def translate_is_none(builder: IRBuilder, expr: Expression, negated: bool) -> Value: + v = builder.accept(expr, can_borrow=True) + return builder.binary_op(v, builder.none_object(), 'is not' if negated else 'is', expr.line) + + def transform_basic_comparison(builder: IRBuilder, op: str, left: Value, diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index bbc30c233039..c7d8dc7b3ab2 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -163,13 +163,17 @@ def box(self, src: Value) -> Value: else: return src - def unbox_or_cast(self, src: Value, target_type: RType, line: int) -> Value: + def unbox_or_cast(self, src: Value, target_type: RType, line: int, *, + can_borrow: bool = False) -> Value: if target_type.is_unboxed: return self.add(Unbox(src, target_type, line)) else: - return self.add(Cast(src, target_type, line)) + if can_borrow: + self.keep_alives.append(src) + return self.add(Cast(src, target_type, line, borrow=can_borrow)) - def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) -> Value: + def coerce(self, src: Value, target_type: RType, line: int, force: bool = False, *, + can_borrow: bool = False) -> Value: """Generate a coercion/cast from one type to other (only if needed). For example, int -> object boxes the source int; int -> int emits nothing; @@ -190,7 +194,7 @@ def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) return self.unbox_or_cast(tmp, target_type, line) if ((not src.type.is_unboxed and target_type.is_unboxed) or not is_subtype(src.type, target_type)): - return self.unbox_or_cast(src, target_type, line) + return self.unbox_or_cast(src, target_type, line, can_borrow=can_borrow) elif force: tmp = Register(target_type) self.add(Assign(tmp, src)) diff --git a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py index d35039ecc0bc..1b4aa5e8c8c0 100644 --- a/mypyc/irbuild/specialize.py +++ b/mypyc/irbuild/specialize.py @@ -25,7 +25,7 @@ ) from mypyc.ir.rtypes import ( RType, RTuple, str_rprimitive, list_rprimitive, dict_rprimitive, set_rprimitive, - bool_rprimitive, c_int_rprimitive, is_dict_rprimitive + bool_rprimitive, c_int_rprimitive, is_dict_rprimitive, is_list_rprimitive ) from mypyc.irbuild.format_str_tokenizer import ( tokenizer_format_call, join_formatted_strings, convert_format_expr_to_str, FormatOp @@ -113,14 +113,19 @@ def translate_len( builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: if (len(expr.args) == 1 and expr.arg_kinds == [ARG_POS]): - expr_rtype = builder.node_type(expr.args[0]) + arg = expr.args[0] + expr_rtype = builder.node_type(arg) if isinstance(expr_rtype, RTuple): # len() of fixed-length tuple can be trivially determined # statically, though we still need to evaluate it. 
- builder.accept(expr.args[0]) + builder.accept(arg) return Integer(len(expr_rtype.types)) else: - obj = builder.accept(expr.args[0]) + if is_list_rprimitive(builder.node_type(arg)): + borrow = True + else: + borrow = False + obj = builder.accept(arg, can_borrow=borrow) return builder.builtin_len(obj, expr.line) return None @@ -429,7 +434,12 @@ def translate_isinstance(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> irs = builder.flatten_classes(expr.args[1]) if irs is not None: - return builder.builder.isinstance_helper(builder.accept(expr.args[0]), irs, expr.line) + can_borrow = all(ir.is_ext_class + and not ir.inherits_python + and not ir.allow_interpreted_subclasses + for ir in irs) + obj = builder.accept(expr.args[0], can_borrow=can_borrow) + return builder.builder.isinstance_helper(obj, irs, expr.line) return None diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 9c9273b0cd76..142a77fbe946 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -58,8 +58,10 @@ def transform_expression_stmt(builder: IRBuilder, stmt: ExpressionStmt) -> None: if isinstance(stmt.expr, StrExpr): # Docstring. Ignore return - # ExpressionStmts do not need to be coerced like other Expressions. + # ExpressionStmts do not need to be coerced like other Expressions, so we shouldn't + # call builder.accept here. stmt.expr.accept(builder.visitor) + builder.flush_keep_alives() def transform_return_stmt(builder: IRBuilder, stmt: ReturnStmt) -> None: @@ -107,6 +109,7 @@ def transform_assignment_stmt(builder: IRBuilder, stmt: AssignmentStmt) -> None: for lvalue in lvalues: target = builder.get_assignment_target(lvalue) builder.assign(target, rvalue_reg, line) + builder.flush_keep_alives() def is_simple_lvalue(expr: Expression) -> bool: diff --git a/mypyc/test-data/exceptions.test b/mypyc/test-data/exceptions.test index 9d688a4c0651..8c576b49ce82 100644 --- a/mypyc/test-data/exceptions.test +++ b/mypyc/test-data/exceptions.test @@ -75,31 +75,28 @@ def f(x): r1 :: bit r2 :: __main__.A r3 :: object - r4, r5 :: bit - r6 :: int + r4 :: bit + r5 :: int L0: - r0 = box(None, 1) + r0 = load_address _Py_NoneStruct r1 = x == r0 if r1 goto L1 else goto L2 :: bool L1: return 2 L2: - inc_ref x - r2 = cast(__main__.A, x) + r2 = borrow cast(__main__.A, x) if is_error(r2) goto L6 (error at f:8) else goto L3 L3: - r3 = box(None, 1) - r4 = r2 == r3 - dec_ref r2 - r5 = r4 ^ 1 - if r5 goto L4 else goto L5 :: bool + r3 = load_address _Py_NoneStruct + r4 = r2 != r3 + if r4 goto L4 else goto L5 :: bool L4: return 4 L5: return 6 L6: - r6 = :: int - return r6 + r5 = :: int + return r5 [case testListSum] from typing import List @@ -518,4 +515,3 @@ L13: L14: dec_ref r9 goto L8 - diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index a5f360928abe..fcf6ef957435 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -116,22 +116,22 @@ def Node.length(self): self :: __main__.Node r0 :: union[__main__.Node, None] r1 :: object - r2, r3 :: bit - r4 :: union[__main__.Node, None] - r5 :: __main__.Node - r6, r7 :: int + r2 :: bit + r3 :: union[__main__.Node, None] + r4 :: __main__.Node + r5, r6 :: int L0: - r0 = self.next - r1 = box(None, 1) - r2 = r0 == r1 - r3 = r2 ^ 1 - if r3 goto L1 else goto L2 :: bool + r0 = borrow self.next + r1 = load_address _Py_NoneStruct + r2 = r0 != r1 + keep_alive self + if r2 goto L1 else goto L2 :: bool L1: - r4 = self.next - r5 = cast(__main__.Node, r4) - r6 = r5.length() - r7 = 
CPyTagged_Add(2, r6) - return r7 + r3 = self.next + r4 = cast(__main__.Node, r3) + r5 = r4.length() + r6 = CPyTagged_Add(2, r5) + return r6 L2: return 2 diff --git a/mypyc/test-data/irbuild-isinstance.test b/mypyc/test-data/irbuild-isinstance.test index b340ea302623..6bb92d0a947e 100644 --- a/mypyc/test-data/irbuild-isinstance.test +++ b/mypyc/test-data/irbuild-isinstance.test @@ -67,3 +67,43 @@ L2: r4 = r9 L3: return r4 + +[case testBorrowSpecialCaseWithIsinstance] +class C: + s: str + +def g() -> object: + pass + +def f() -> None: + x = g() + if isinstance(x, C): + x.s +[out] +def g(): + r0 :: object +L0: + r0 = box(None, 1) + return r0 +def f(): + r0, x, r1 :: object + r2 :: ptr + r3 :: object + r4 :: bit + r5 :: __main__.C + r6 :: str +L0: + r0 = g() + x = r0 + r1 = __main__.C :: type + r2 = get_element_ptr x ob_type :: PyObject + r3 = load_mem r2 :: builtins.object* + keep_alive x + r4 = r3 == r1 + if r4 goto L1 else goto L2 :: bool +L1: + r5 = borrow cast(__main__.C, x) + r6 = r5.s + keep_alive x +L2: + return 1 diff --git a/mypyc/test-data/irbuild-optional.test b/mypyc/test-data/irbuild-optional.test index cc8653ee3e82..4b1d3d1ffec2 100644 --- a/mypyc/test-data/irbuild-optional.test +++ b/mypyc/test-data/irbuild-optional.test @@ -13,7 +13,7 @@ def f(x): r0 :: object r1 :: bit L0: - r0 = box(None, 1) + r0 = load_address _Py_NoneStruct r1 = x == r0 if r1 goto L1 else goto L2 :: bool L1: @@ -34,12 +34,11 @@ def f(x: Optional[A]) -> int: def f(x): x :: union[__main__.A, None] r0 :: object - r1, r2 :: bit + r1 :: bit L0: - r0 = box(None, 1) - r1 = x == r0 - r2 = r1 ^ 1 - if r2 goto L1 else goto L2 :: bool + r0 = load_address _Py_NoneStruct + r1 = x != r0 + if r1 goto L1 else goto L2 :: bool L1: return 2 L2: @@ -188,20 +187,19 @@ def f(x): x :: union[__main__.A, None] r0, y :: __main__.A r1 :: object - r2, r3 :: bit - r4, r5 :: __main__.A + r2 :: bit + r3, r4 :: __main__.A L0: r0 = A() y = r0 - r1 = box(None, 1) - r2 = x == r1 - r3 = r2 ^ 1 - if r3 goto L1 else goto L2 :: bool + r1 = load_address _Py_NoneStruct + r2 = x != r1 + if r2 goto L1 else goto L2 :: bool L1: + r3 = cast(__main__.A, x) + y = r3 r4 = cast(__main__.A, x) - y = r4 - r5 = cast(__main__.A, x) - return r5 + return r4 L2: return y @@ -219,8 +217,8 @@ def f(y): x :: union[int, None] r1 :: bit r2, r3 :: object - r4, r5 :: bit - r6 :: int + r4 :: bit + r5 :: int L0: r0 = box(None, 1) x = r0 @@ -230,13 +228,12 @@ L1: r2 = box(int, y) x = r2 L2: - r3 = box(None, 1) - r4 = x == r3 - r5 = r4 ^ 1 - if r5 goto L3 else goto L4 :: bool + r3 = load_address _Py_NoneStruct + r4 = x != r3 + if r4 goto L3 else goto L4 :: bool L3: - r6 = unbox(int, x) - y = r6 + r5 = unbox(int, x) + y = r5 L4: return 1 @@ -272,8 +269,9 @@ L1: r5 = CPyTagged_Add(r4, 2) return r5 L2: - r6 = cast(__main__.A, x) + r6 = borrow cast(__main__.A, x) r7 = r6.a + keep_alive x return r7 L3: unreachable @@ -540,4 +538,3 @@ L0: r3 = r2 L1: return 1 - diff --git a/mypyc/test-data/irbuild-str.test b/mypyc/test-data/irbuild-str.test index bb296c53d224..63be7250ebd1 100644 --- a/mypyc/test-data/irbuild-str.test +++ b/mypyc/test-data/irbuild-str.test @@ -14,14 +14,14 @@ def do_split(s, sep, max_split): sep :: union[str, None] max_split :: union[int, None] r0, r1, r2 :: object - r3, r4 :: bit - r5 :: object - r6, r7 :: bit - r8 :: str - r9 :: int - r10 :: list - r11 :: str - r12, r13 :: list + r3 :: bit + r4 :: object + r5 :: bit + r6 :: str + r7 :: int + r8 :: list + r9 :: str + r10, r11 :: list L0: if is_error(sep) goto L1 else goto L2 L1: @@ -33,28 +33,27 @@ L3: r1 = 
box(None, 1) max_split = r1 L4: - r2 = box(None, 1) - r3 = sep == r2 - r4 = r3 ^ 1 - if r4 goto L5 else goto L9 :: bool + r2 = load_address _Py_NoneStruct + r3 = sep != r2 + if r3 goto L5 else goto L9 :: bool L5: - r5 = box(None, 1) - r6 = max_split == r5 - r7 = r6 ^ 1 - if r7 goto L6 else goto L7 :: bool + r4 = load_address _Py_NoneStruct + r5 = max_split != r4 + if r5 goto L6 else goto L7 :: bool L6: - r8 = cast(str, sep) - r9 = unbox(int, max_split) - r10 = CPyStr_Split(s, r8, r9) - return r10 + r6 = cast(str, sep) + r7 = unbox(int, max_split) + r8 = CPyStr_Split(s, r6, r7) + return r8 L7: - r11 = cast(str, sep) - r12 = PyUnicode_Split(s, r11, -1) - return r12 + r9 = cast(str, sep) + r10 = PyUnicode_Split(s, r9, -1) + return r10 L8: L9: - r13 = PyUnicode_Split(s, 0, -1) - return r13 + r11 = PyUnicode_Split(s, 0, -1) + return r11 + [case testStrEquality] def eq(x: str, y: str) -> bool: @@ -106,39 +105,38 @@ L3: [case testStrReplace] from typing import Optional -def do_replace(s: str, old_substr: str, new_substr: str, max_count: Optional[int] = None) -> str: - if max_count is not None: - return s.replace(old_substr, new_substr, max_count) - else: - return s.replace(old_substr, new_substr) +def do_replace(s: str, old_substr: str, new_substr: str, max_count: Optional[int] = None) -> str: + if max_count is not None: + return s.replace(old_substr, new_substr, max_count) + else: + return s.replace(old_substr, new_substr) [out] -def do_replace(s, old_substr, new_substr, max_count): - s, old_substr, new_substr :: str - max_count :: union[int, None] - r0, r1 :: object - r2, r3 :: bit - r4 :: int - r5, r6 :: str -L0: - if is_error(max_count) goto L1 else goto L2 -L1: - r0 = box(None, 1) - max_count = r0 -L2: - r1 = box(None, 1) - r2 = max_count == r1 - r3 = r2 ^ 1 - if r3 goto L3 else goto L4 :: bool -L3: - r4 = unbox(int, max_count) - r5 = CPyStr_Replace(s, old_substr, new_substr, r4) - return r5 -L4: - r6 = PyUnicode_Replace(s, old_substr, new_substr, -1) - return r6 -L5: - unreachable - +def do_replace(s, old_substr, new_substr, max_count): + s, old_substr, new_substr :: str + max_count :: union[int, None] + r0, r1 :: object + r2 :: bit + r3 :: int + r4, r5 :: str +L0: + if is_error(max_count) goto L1 else goto L2 +L1: + r0 = box(None, 1) + max_count = r0 +L2: + r1 = load_address _Py_NoneStruct + r2 = max_count != r1 + if r2 goto L3 else goto L4 :: bool +L3: + r3 = unbox(int, max_count) + r4 = CPyStr_Replace(s, old_substr, new_substr, r3) + return r4 +L4: + r5 = PyUnicode_Replace(s, old_substr, new_substr, -1) + return r5 +L5: + unreachable + [case testStrToBool] def is_true(x: str) -> bool: if x: @@ -314,4 +312,3 @@ L0: r4 = 'backslashreplace' r5 = CPy_Encode(s, r3, r4) return 1 - diff --git a/mypyc/test-data/refcount.test b/mypyc/test-data/refcount.test index 965d4066c0b5..a7ee390c8d74 100644 --- a/mypyc/test-data/refcount.test +++ b/mypyc/test-data/refcount.test @@ -950,3 +950,294 @@ L0: r0 = borrow d.c r1 = r0.x return r1 + +[case testBorrowAttributeTwice] +def f(e: E) -> int: + return e.d.c.x + +class C: + x: int +class D: + c: C +class E: + d: D +[out] +def f(e): + e :: __main__.E + r0 :: __main__.D + r1 :: __main__.C + r2 :: int +L0: + r0 = borrow e.d + r1 = borrow r0.c + r2 = r1.x + return r2 + +[case testBorrowAttributeIsNone] +from typing import Optional + +def f(c: C) -> bool: + return c.x is not None + +def g(c: C) -> bool: + return c.x is None + +class C: + x: Optional[str] +[out] +def f(c): + c :: __main__.C + r0 :: union[str, None] + r1 :: object + r2 :: bit +L0: + r0 = borrow 
c.x + r1 = load_address _Py_NoneStruct + r2 = r0 != r1 + return r2 +def g(c): + c :: __main__.C + r0 :: union[str, None] + r1 :: object + r2 :: bit +L0: + r0 = borrow c.x + r1 = load_address _Py_NoneStruct + r2 = r0 == r1 + return r2 + +[case testBorrowAttributeNarrowOptional] +from typing import Optional + +def f(c: C) -> bool: + if c.x is not None: + return c.x.b + return False + +class C: + x: Optional[D] + +class D: + b: bool +[out] +def f(c): + c :: __main__.C + r0 :: union[__main__.D, None] + r1 :: object + r2 :: bit + r3 :: union[__main__.D, None] + r4 :: __main__.D + r5 :: bool +L0: + r0 = borrow c.x + r1 = load_address _Py_NoneStruct + r2 = r0 != r1 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = borrow c.x + r4 = borrow cast(__main__.D, r3) + r5 = r4.b + return r5 +L2: + return 0 + +[case testBorrowLenArgument] +from typing import List + +def f(x: C) -> int: + return len(x.a) + +class C: + a: List[str] +[out] +def f(x): + x :: __main__.C + r0 :: list + r1 :: ptr + r2 :: native_int + r3 :: short_int +L0: + r0 = borrow x.a + r1 = get_element_ptr r0 ob_size :: PyVarObject + r2 = load_mem r1 :: native_int* + r3 = r2 << 1 + return r3 + +[case testBorrowIsinstanceArgument] +from typing import List + +def f(x: C) -> bool: + if isinstance(x.a, D): + return x.a.b + else: + return True + +class C: + a: object + +class D: + b: bool +[out] +def f(x): + x :: __main__.C + r0, r1 :: object + r2 :: ptr + r3 :: object + r4 :: bit + r5 :: object + r6 :: __main__.D + r7 :: bool +L0: + r0 = borrow x.a + r1 = __main__.D :: type + r2 = get_element_ptr r0 ob_type :: PyObject + r3 = load_mem r2 :: builtins.object* + r4 = r3 == r1 + if r4 goto L1 else goto L2 :: bool +L1: + r5 = borrow x.a + r6 = borrow cast(__main__.D, r5) + r7 = r6.b + return r7 +L2: + return 1 + +[case testBorrowListGetItem1] +from typing import List + +def literal_index(x: C) -> str: + return x.a[0] + +def negative_index(x: C) -> str: + return x.a[-1] + +def lvar_index(x: C, n: int) -> str: + return x.a[n] + +class C: + a: List[str] + +[out] +def literal_index(x): + x :: __main__.C + r0 :: list + r1 :: object + r2 :: str +L0: + r0 = borrow x.a + r1 = CPyList_GetItemShort(r0, 0) + r2 = cast(str, r1) + return r2 +def negative_index(x): + x :: __main__.C + r0 :: list + r1 :: object + r2 :: str +L0: + r0 = borrow x.a + r1 = CPyList_GetItemShort(r0, -2) + r2 = cast(str, r1) + return r2 +def lvar_index(x, n): + x :: __main__.C + n :: int + r0 :: list + r1 :: object + r2 :: str +L0: + r0 = borrow x.a + r1 = CPyList_GetItem(r0, n) + r2 = cast(str, r1) + return r2 + +[case testBorrowListGetItem2] +from typing import List + +def attr_index(x: C) -> str: + return x.a[x.n] + +class C: + a: List[str] + n: int +[out] +def attr_index(x): + x :: __main__.C + r0 :: list + r1 :: int + r2 :: object + r3 :: str +L0: + r0 = borrow x.a + r1 = borrow x.n + r2 = CPyList_GetItem(r0, r1) + r3 = cast(str, r2) + return r3 + +[case testCannotBorrowListGetItem] +from typing import List + +def func_index(x: C) -> str: + return x.a[f()] + +def f() -> int: return 0 + +class C: + a: List[str] +[out] +def func_index(x): + x :: __main__.C + r0 :: list + r1 :: int + r2 :: object + r3 :: str +L0: + r0 = x.a + r1 = f() + r2 = CPyList_GetItem(r0, r1) + dec_ref r0 + dec_ref r1 :: int + r3 = cast(str, r2) + return r3 +def f(): +L0: + return 0 + +[case testBorrowSetAttrObject] +from typing import Optional + +def f(x: Optional[C]) -> None: + if x is not None: + x.b = True + +def g(x: D) -> None: + x.c.b = False + +class C: + b: bool + +class D: + c: C +[out] +def f(x): + x :: 
union[__main__.C, None] + r0 :: object + r1 :: bit + r2 :: __main__.C + r3 :: bool +L0: + r0 = load_address _Py_NoneStruct + r1 = x != r0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = borrow cast(__main__.C, x) + r2.b = 1; r3 = is_error +L2: + return 1 +def g(x): + x :: __main__.D + r0 :: __main__.C + r1 :: bool +L0: + r0 = borrow x.c + r0.b = 0; r1 = is_error + return 1 diff --git a/mypyc/test-data/run-generators.test b/mypyc/test-data/run-generators.test index b2f244b90008..8aecce6564c8 100644 --- a/mypyc/test-data/run-generators.test +++ b/mypyc/test-data/run-generators.test @@ -581,3 +581,17 @@ except RuntimeError as e: assert str(e) == 'error' else: assert False + +[case testBorrowingInGeneratorNearYield] +from typing import Iterator + +class Foo: + flag: bool + +class C: + foo: Foo + + def genf(self) -> Iterator[None]: + self.foo.flag = True + yield + self.foo.flag = False From 7f4f5b87631dbf4746f81d81e3ccf86bac5901b3 Mon Sep 17 00:00:00 2001 From: Ethan Smith Date: Thu, 19 May 2022 08:02:09 -0700 Subject: [PATCH 06/80] Skip tests that depend on lxml if not installed (#12813) Detect if lxml is importable in the test suite, if it is not, then skip the report tests which depend on it. This is useful for enabling CI on new Python versions that may not have lxml wheels yet. Closes #11662. --- mypy/test/testcheck.py | 8 ++++++++ mypy/test/testcmdline.py | 9 +++++++++ mypy/test/testreports.py | 10 ++++++++++ 3 files changed, 27 insertions(+) diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index f0dc4bc6a671..cc0c5875f53b 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -21,6 +21,12 @@ from mypy.errors import CompileError from mypy.semanal_main import core_modules +try: + import lxml # type: ignore +except ImportError: + lxml = None + +import pytest # List of files that contain test case descriptions. typecheck_files = [ @@ -117,6 +123,8 @@ class TypeCheckSuite(DataSuite): files = typecheck_files def run_case(self, testcase: DataDrivenTestCase) -> None: + if lxml is None and os.path.basename(testcase.file) == 'check-reports.test': + pytest.skip("Cannot import lxml. Is it installed?") incremental = ('incremental' in testcase.name.lower() or 'incremental' in testcase.file or 'serialize' in testcase.file) diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index d58d10087c80..62e258677c7f 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -18,6 +18,13 @@ assert_string_arrays_equal, normalize_error_messages, check_test_output_files ) +try: + import lxml # type: ignore +except ImportError: + lxml = None + +import pytest + # Path to Python 3 interpreter python3_path = sys.executable @@ -35,6 +42,8 @@ class PythonCmdlineSuite(DataSuite): native_sep = True def run_case(self, testcase: DataDrivenTestCase) -> None: + if lxml is None and os.path.basename(testcase.file) == 'reports.test': + pytest.skip("Cannot import lxml. Is it installed?") for step in [1] + sorted(testcase.output2): test_python_cmdline(testcase, step) diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py index 75f24d514431..37dc16a107d5 100644 --- a/mypy/test/testreports.py +++ b/mypy/test/testreports.py @@ -5,11 +5,21 @@ from mypy.report import CoberturaPackage, get_line_rate +try: + import lxml # type: ignore +except ImportError: + lxml = None + +import pytest + + class CoberturaReportSuite(Suite): + @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. 
Is it installed?") def test_get_line_rate(self) -> None: assert_equal('1.0', get_line_rate(0, 0)) assert_equal('0.3333', get_line_rate(1, 3)) + @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?") def test_as_xml(self) -> None: import lxml.etree as etree # type: ignore From 6c2690e4af5e12d68a5b91f7c9117f783e528277 Mon Sep 17 00:00:00 2001 From: pranavrajpal <78008260+pranavrajpal@users.noreply.github.com> Date: Thu, 19 May 2022 08:04:06 -0700 Subject: [PATCH 07/80] Avoid crashing on invalid python executables (#12812) When an invalid python executable is passed to mypy, show an error message instead of crashing. --- mypy/modulefinder.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index bee99156a570..4ab95dd6564f 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -12,6 +12,8 @@ import sys from enum import Enum, unique +from mypy.errors import CompileError + if sys.version_info >= (3, 11): import tomllib else: @@ -649,9 +651,15 @@ def get_site_packages_dirs(python_executable: Optional[str]) -> Tuple[List[str], else: # Use subprocess to get the package directory of given Python # executable - site_packages = ast.literal_eval( - subprocess.check_output([python_executable, pyinfo.__file__, 'getsitepackages'], - stderr=subprocess.PIPE).decode()) + try: + site_packages = ast.literal_eval( + subprocess.check_output([python_executable, pyinfo.__file__, 'getsitepackages'], + stderr=subprocess.PIPE).decode()) + except OSError as err: + reason = os.strerror(err.errno) + raise CompileError( + [f"mypy: Invalid python executable '{python_executable}': {reason}"] + ) from err return expand_site_packages(site_packages) From e612e440103ec7abdc40d0e41115701362abad21 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 20 May 2022 14:27:54 +0100 Subject: [PATCH 08/80] Fix crash on type alias definition inside dataclass declaration (#12792) Skip processing a type alias node and generate an error. Fixes #12544. --- mypy/plugins/dataclasses.py | 16 ++++++++++++++- test-data/unit/check-dataclasses.test | 28 +++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 1 deletion(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 24077bb4a549..00c46e1417c5 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -5,7 +5,7 @@ from mypy.nodes import ( ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_POS, ARG_STAR, ARG_STAR2, MDEF, - Argument, AssignmentStmt, CallExpr, Context, Expression, JsonDict, + Argument, AssignmentStmt, CallExpr, TypeAlias, Context, Expression, JsonDict, NameExpr, RefExpr, SymbolTableNode, TempNode, TypeInfo, Var, TypeVarExpr, PlaceholderNode ) @@ -333,6 +333,20 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: node = sym.node assert not isinstance(node, PlaceholderNode) + + if isinstance(node, TypeAlias): + ctx.api.fail( + ( + 'Type aliases inside dataclass definitions ' + 'are not supported at runtime' + ), + node + ) + # Skip processing this node. This doesn't match the runtime behaviour, + # but the only alternative would be to modify the SymbolTable, + # and it's a little hairy to do that in a plugin. + continue + assert isinstance(node, Var) # x: ClassVar[int] is ignored by dataclasses. 
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index abfcb79c0cc5..972cc4d40a1e 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -526,6 +526,34 @@ Application.COUNTER = 1 [builtins fixtures/dataclasses.pyi] +[case testTypeAliasInDataclassDoesNotCrash] +# flags: --python-version 3.7 +from dataclasses import dataclass +from typing import Callable +from typing_extensions import TypeAlias + +@dataclass +class Foo: + x: int + +@dataclass +class One: + S: TypeAlias = Foo # E: Type aliases inside dataclass definitions are not supported at runtime + +a = One() +reveal_type(a.S) # N: Revealed type is "def (x: builtins.int) -> __main__.Foo" +a.S() # E: Missing positional argument "x" in call to "Foo" +reveal_type(a.S(5)) # N: Revealed type is "__main__.Foo" + +@dataclass +class Two: + S: TypeAlias = Callable[[int], str] # E: Type aliases inside dataclass definitions are not supported at runtime + +c = Two() +x = c.S # E: Member "S" is not assignable +reveal_type(x) # N: Revealed type is "Any" +[builtins fixtures/dataclasses.pyi] + [case testDataclassOrdering] # flags: --python-version 3.7 from dataclasses import dataclass From e93be734f4ca51f88f0b46e5c089014e712e1d11 Mon Sep 17 00:00:00 2001 From: Hugues Date: Fri, 20 May 2022 08:37:23 -0700 Subject: [PATCH 09/80] speedup typechecking of nested if expressions (#12700) Deeply nested if/else expressions have a worst-case exponential behavior. This will for instance manifest when returning literal values which cause repeated analysis of conditional branches with subtly different type context for each literal. This can be optimized by observing that a simple literal context will yield the same analysis as its fallback type, and likewise, two literals of the same fallback type will yield the same analysis. In those case we can avoid the repeated analysis and prevent the worst-case exponential behavior. Fixes #9591 --- mypy/checkexpr.py | 25 +++++++++++++++++++++---- mypy/typeops.py | 2 +- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index bfbe961adc7a..0149f1971477 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -69,7 +69,7 @@ try_expanding_sum_type_to_union, tuple_fallback, make_simplified_union, true_only, false_only, erase_to_union_or_bound, function_type, callable_type, try_getting_str_literals, custom_special_method, - is_literal_type_like, + is_literal_type_like, simple_literal_type, ) from mypy.message_registry import ErrorMessage import mypy.errorcodes as codes @@ -3874,26 +3874,43 @@ def visit_conditional_expr(self, e: ConditionalExpr, allow_none_return: bool = F if_type = self.analyze_cond_branch(if_map, e.if_expr, context=ctx, allow_none_return=allow_none_return) + # we want to keep the narrowest value of if_type for union'ing the branches + # however, it would be silly to pass a literal as a type context. Pass the + # underlying fallback type instead. + if_type_fallback = simple_literal_type(get_proper_type(if_type)) or if_type + # Analyze the right branch using full type context and store the type full_context_else_type = self.analyze_cond_branch(else_map, e.else_expr, context=ctx, allow_none_return=allow_none_return) + if not mypy.checker.is_valid_inferred_type(if_type): # Analyze the right branch disregarding the left branch. 
else_type = full_context_else_type + # we want to keep the narrowest value of else_type for union'ing the branches + # however, it would be silly to pass a literal as a type context. Pass the + # underlying fallback type instead. + else_type_fallback = simple_literal_type(get_proper_type(else_type)) or else_type # If it would make a difference, re-analyze the left # branch using the right branch's type as context. - if ctx is None or not is_equivalent(else_type, ctx): + if ctx is None or not is_equivalent(else_type_fallback, ctx): # TODO: If it's possible that the previous analysis of # the left branch produced errors that are avoided # using this context, suppress those errors. - if_type = self.analyze_cond_branch(if_map, e.if_expr, context=else_type, + if_type = self.analyze_cond_branch(if_map, e.if_expr, context=else_type_fallback, allow_none_return=allow_none_return) + elif if_type_fallback == ctx: + # There is no point re-running the analysis if if_type is equal to ctx. + # That would be an exact duplicate of the work we just did. + # This optimization is particularly important to avoid exponential blowup with nested + # if/else expressions: https://github.com/python/mypy/issues/9591 + # TODO: would checking for is_proper_subtype also work and cover more cases? + else_type = full_context_else_type else: # Analyze the right branch in the context of the left # branch's type. - else_type = self.analyze_cond_branch(else_map, e.else_expr, context=if_type, + else_type = self.analyze_cond_branch(else_map, e.else_expr, context=if_type_fallback, allow_none_return=allow_none_return) # Only create a union type if the type context is a union, to be mostly diff --git a/mypy/typeops.py b/mypy/typeops.py index e8171e2e85ab..22ca0b6ec2fe 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -318,7 +318,7 @@ def simple_literal_value_key(t: ProperType) -> Optional[Tuple[str, ...]]: return None -def simple_literal_type(t: ProperType) -> Optional[Instance]: +def simple_literal_type(t: Optional[ProperType]) -> Optional[Instance]: """Extract the underlying fallback Instance type for a simple Literal""" if isinstance(t, Instance) and t.last_known_value is not None: t = t.last_known_value From f7e94ee94c2917fb8e522f16bea226b9d8b8d844 Mon Sep 17 00:00:00 2001 From: Fabian Keller Date: Fri, 20 May 2022 17:44:27 +0200 Subject: [PATCH 10/80] Make pybind11 test fixture fully self-contained (#12722) Co-authored-by: Keller Fabian Rudolf (CC-AD/EYC3) --- misc/test-stubgenc.sh | 15 +- test-data/pybind11_mypy_demo/pyproject.toml | 10 ++ test-data/pybind11_mypy_demo/setup.py | 18 ++ test-data/pybind11_mypy_demo/src/main.cpp | 170 ++++++++++++++++++ .../stubgen/pybind11_mypy_demo/__init__.pyi | 0 .../stubgen/pybind11_mypy_demo/basics.pyi | 4 + 6 files changed, 212 insertions(+), 5 deletions(-) create mode 100644 test-data/pybind11_mypy_demo/pyproject.toml create mode 100644 test-data/pybind11_mypy_demo/setup.py create mode 100644 test-data/pybind11_mypy_demo/src/main.cpp rename test-data/{ => pybind11_mypy_demo}/stubgen/pybind11_mypy_demo/__init__.pyi (100%) rename test-data/{ => pybind11_mypy_demo}/stubgen/pybind11_mypy_demo/basics.pyi (95%) diff --git a/misc/test-stubgenc.sh b/misc/test-stubgenc.sh index 175c912e6712..7da135f0bf16 100755 --- a/misc/test-stubgenc.sh +++ b/misc/test-stubgenc.sh @@ -1,14 +1,19 @@ #!/bin/bash -# This script is expected to be run from root of the mypy repo + +set -e +set -x + +cd "$(dirname $0)/.." 
# Install dependencies, demo project and mypy python -m pip install -r test-requirements.txt -python -m pip install pybind11-mypy-demo==0.0.1 +python -m pip install ./test-data/pybind11_mypy_demo python -m pip install . # Remove expected stubs and generate new inplace -rm -rf test-data/stubgen/pybind11_mypy_demo -stubgen -p pybind11_mypy_demo -o test-data/stubgen/ +STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/stubgen +rm -rf $STUBGEN_OUTPUT_FOLDER/* +stubgen -p pybind11_mypy_demo -o $STUBGEN_OUTPUT_FOLDER # Compare generated stubs to expected ones -git diff --exit-code test-data/stubgen/pybind11_mypy_demo +git diff --exit-code $STUBGEN_OUTPUT_FOLDER diff --git a/test-data/pybind11_mypy_demo/pyproject.toml b/test-data/pybind11_mypy_demo/pyproject.toml new file mode 100644 index 000000000000..878abe731b1b --- /dev/null +++ b/test-data/pybind11_mypy_demo/pyproject.toml @@ -0,0 +1,10 @@ +[build-system] +requires = [ + "setuptools>=42", + "wheel", + # Officially supported pybind11 version. This is pinned to guarantee 100% reproducible CI. + # As a result, the version needs to be bumped manually at will. + "pybind11==2.9.2", +] + +build-backend = "setuptools.build_meta" \ No newline at end of file diff --git a/test-data/pybind11_mypy_demo/setup.py b/test-data/pybind11_mypy_demo/setup.py new file mode 100644 index 000000000000..0da1cfbcef19 --- /dev/null +++ b/test-data/pybind11_mypy_demo/setup.py @@ -0,0 +1,18 @@ +# pybind11 is available at setup time due to pyproject.toml +from pybind11.setup_helpers import Pybind11Extension +from setuptools import setup + +# Documentation: https://pybind11.readthedocs.io/en/stable/compiling.html +ext_modules = [ + Pybind11Extension( + "pybind11_mypy_demo", + ["src/main.cpp"], + cxx_std=17, + ), +] + +setup( + name="pybind11-mypy-demo", + version="0.0.1", + ext_modules=ext_modules, +) diff --git a/test-data/pybind11_mypy_demo/src/main.cpp b/test-data/pybind11_mypy_demo/src/main.cpp new file mode 100644 index 000000000000..5cedef391b2d --- /dev/null +++ b/test-data/pybind11_mypy_demo/src/main.cpp @@ -0,0 +1,170 @@ +/** + * This file contains the pybind11 reference implementation for the stugen tests, + * and was originally inspired by: + * + * https://github.com/sizmailov/pybind11-mypy-demo + * + * Copyright (c) 2016 The Pybind Development Team, All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * You are under no obligation whatsoever to provide any bug fixes, patches, or + * upgrades to the features, functionality or performance of the source code + * ("Enhancements") to anyone; however, if you choose to make your Enhancements + * available either publicly, or directly to the author of this software, without + * imposing a separate written license agreement for such Enhancements, then you + * hereby grant the following license: a non-exclusive, royalty-free perpetual + * license to install, use, modify, prepare derivative works, incorporate into + * other computer software, distribute, and sublicense such enhancements or + * derivative works thereof, in binary and source code form. + */ + +#include +#include + +namespace py = pybind11; + +namespace basics { + +int answer() { + return 42; +} + +int sum(int a, int b) { + return a + b; +} + +double midpoint(double left, double right){ + return left + (right - left)/2; +} + +double weighted_midpoint(double left, double right, double alpha=0.5) { + return left + (right - left) * alpha; +} + +struct Point { + + enum class LengthUnit { + mm=0, + pixel, + inch + }; + + enum class AngleUnit { + radian=0, + degree + }; + + Point() : Point(0, 0) {} + Point(double x, double y) : x(x), y(y) {} + + static const Point origin; + static const Point x_axis; + static const Point y_axis; + + static LengthUnit length_unit; + static AngleUnit angle_unit; + + double length() const { + return std::sqrt(x * x + y * y); + } + + double distance_to(double other_x, double other_y) const { + double dx = x - other_x; + double dy = y - other_y; + return std::sqrt(dx*dx + dy*dy); + } + + double distance_to(const Point& other) const { + return distance_to(other.x, other.y); + } + + double x, y; +}; + +const Point Point::origin = Point(0, 0); +const Point Point::x_axis = Point(1, 0); +const Point Point::y_axis = Point(0, 1); + +Point::LengthUnit Point::length_unit = Point::LengthUnit::mm; +Point::AngleUnit Point::angle_unit = Point::AngleUnit::radian; + +} // namespace: basics + +void bind_basics(py::module& basics) { + + using namespace basics; + + // Functions + basics.def("answer", &answer); + basics.def("sum", &sum); + basics.def("midpoint", &midpoint, py::arg("left"), py::arg("right")); + basics.def("weighted_midpoint", weighted_midpoint, py::arg("left"), py::arg("right"), py::arg("alpha")=0.5); + + // Classes + py::class_ pyPoint(basics, "Point"); + py::enum_ pyLengthUnit(pyPoint, "LengthUnit"); + py::enum_ pyAngleUnit(pyPoint, "AngleUnit"); + + pyPoint + .def(py::init<>()) + .def(py::init(), py::arg("x"), py::arg("y")) + .def("distance_to", py::overload_cast(&Point::distance_to, py::const_), py::arg("x"), py::arg("y")) + .def("distance_to", py::overload_cast(&Point::distance_to, py::const_), py::arg("other")) + .def_readwrite("x", &Point::x) + .def_property("y", + [](Point& self){ return self.y; }, + [](Point& self, double value){ self.y = value; } + ) + .def_property_readonly("length", &Point::length) + 
.def_property_readonly_static("x_axis", [](py::object cls){return Point::x_axis;}) + .def_property_readonly_static("y_axis", [](py::object cls){return Point::y_axis;}) + .def_readwrite_static("length_unit", &Point::length_unit) + .def_property_static("angle_unit", + [](py::object& /*cls*/){ return Point::angle_unit; }, + [](py::object& /*cls*/, Point::AngleUnit value){ Point::angle_unit = value; } + ); + + pyPoint.attr("origin") = Point::origin; + + pyLengthUnit + .value("mm", Point::LengthUnit::mm) + .value("pixel", Point::LengthUnit::pixel) + .value("inch", Point::LengthUnit::inch); + + pyAngleUnit + .value("radian", Point::AngleUnit::radian) + .value("degree", Point::AngleUnit::degree); + + // Module-level attributes + basics.attr("PI") = std::acos(-1); + basics.attr("__version__") = "0.0.1"; +} + +PYBIND11_MODULE(pybind11_mypy_demo, m) { + auto basics = m.def_submodule("basics"); + bind_basics(basics); +} \ No newline at end of file diff --git a/test-data/stubgen/pybind11_mypy_demo/__init__.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi similarity index 100% rename from test-data/stubgen/pybind11_mypy_demo/__init__.pyi rename to test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi diff --git a/test-data/stubgen/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi similarity index 95% rename from test-data/stubgen/pybind11_mypy_demo/basics.pyi rename to test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi index 99093fd6087a..226080ac9d57 100644 --- a/test-data/stubgen/pybind11_mypy_demo/basics.pyi +++ b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi @@ -18,6 +18,8 @@ class Point: def __setstate__(self, state: int) -> None: ... @property def name(self) -> str: ... + @property + def value(self) -> int: ... class LengthUnit: __entries: ClassVar[dict] = ... @@ -34,6 +36,8 @@ class Point: def __setstate__(self, state: int) -> None: ... @property def name(self) -> str: ... + @property + def value(self) -> int: ... angle_unit: ClassVar[Point.AngleUnit] = ... length_unit: ClassVar[Point.LengthUnit] = ... x_axis: ClassVar[Point] = ... # read-only From 1b7e33f7ef4105c7ae495085ec52e15f1fc52c5d Mon Sep 17 00:00:00 2001 From: Hugues Date: Fri, 20 May 2022 08:49:52 -0700 Subject: [PATCH 11/80] checkexpr: cache type of container literals when possible (#12707) When a container (list, set, tuple, or dict) literal expression is used as an argument to an overloaded function it will get repeatedly typechecked. This becomes particularly problematic when the expression is somewhat large, as seen in #9427 To avoid repeated work, add a new cache in ExprChecker, mapping the AST node to the resolved type of the expression. Right now the cache is only used in the fast path, although it could conceivably be leveraged for the slow path as well in a follow-up commit. To further reduce duplicate work, when the fast-path doesn't work, we use the cache to make a note of that, to avoid repeatedly attempting to take the fast path. 
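As a hypothetical illustration of the pattern this targets (not part of the patch; the names are invented), the dict literal below is matched against each overload variant, so without a cache its type is recomputed for every variant, which becomes expensive for large literals:

    from typing import Dict, List, Union, overload

    @overload
    def process(data: List[int]) -> int: ...
    @overload
    def process(data: Dict[str, int]) -> str: ...

    def process(data: Union[List[int], Dict[str, int]]) -> Union[int, str]:
        # Trivial implementation; only the call below matters for type checking.
        return len(data) if isinstance(data, list) else str(len(data))

    # With the new cache, the resolved type of this literal can be computed once
    # and reused while the overload variants are tried.
    process({"a": 1, "b": 2, "c": 3})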
Fixes #9427 --- mypy/checker.py | 1 + mypy/checkexpr.py | 49 ++++++++++++++++++++++++++++++++++------------- 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 109a3b1f15d2..e5abcfcf4541 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -293,6 +293,7 @@ def reset(self) -> None: self._type_maps[1:] = [] self._type_maps[0].clear() self.temp_type_map = None + self.expr_checker.reset() assert self.inferred_attribute_types is None assert self.partial_types == [] diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 0149f1971477..4cc91f9cc123 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -177,6 +177,9 @@ class ExpressionChecker(ExpressionVisitor[Type]): # Type context for type inference type_context: List[Optional[Type]] + # cache resolved types in some cases + resolved_type: Dict[Expression, ProperType] + strfrm_checker: StringFormatterChecker plugin: Plugin @@ -197,6 +200,11 @@ def __init__(self, self.type_overrides: Dict[Expression, Type] = {} self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg) + self.resolved_type = {} + + def reset(self) -> None: + self.resolved_type = {} + def visit_name_expr(self, e: NameExpr) -> Type: """Type check a name expression. @@ -3269,13 +3277,13 @@ def apply_type_arguments_to_callable( def visit_list_expr(self, e: ListExpr) -> Type: """Type check a list expression [...].""" - return self.check_lst_expr(e.items, 'builtins.list', '', e) + return self.check_lst_expr(e, 'builtins.list', '') def visit_set_expr(self, e: SetExpr) -> Type: - return self.check_lst_expr(e.items, 'builtins.set', '', e) + return self.check_lst_expr(e, 'builtins.set', '') def fast_container_type( - self, items: List[Expression], container_fullname: str + self, e: Union[ListExpr, SetExpr, TupleExpr], container_fullname: str ) -> Optional[Type]: """ Fast path to determine the type of a list or set literal, @@ -3290,21 +3298,28 @@ def fast_container_type( ctx = self.type_context[-1] if ctx: return None + rt = self.resolved_type.get(e, None) + if rt is not None: + return rt if isinstance(rt, Instance) else None values: List[Type] = [] - for item in items: + for item in e.items: if isinstance(item, StarExpr): # fallback to slow path + self.resolved_type[e] = NoneType() return None values.append(self.accept(item)) vt = join.join_type_list(values) if not allow_fast_container_literal(vt): + self.resolved_type[e] = NoneType() return None - return self.chk.named_generic_type(container_fullname, [vt]) + ct = self.chk.named_generic_type(container_fullname, [vt]) + self.resolved_type[e] = ct + return ct - def check_lst_expr(self, items: List[Expression], fullname: str, - tag: str, context: Context) -> Type: + def check_lst_expr(self, e: Union[ListExpr, SetExpr, TupleExpr], fullname: str, + tag: str) -> Type: # fast path - t = self.fast_container_type(items, fullname) + t = self.fast_container_type(e, fullname) if t: return t @@ -3323,10 +3338,10 @@ def check_lst_expr(self, items: List[Expression], fullname: str, variables=[tv]) out = self.check_call(constructor, [(i.expr if isinstance(i, StarExpr) else i) - for i in items], + for i in e.items], [(nodes.ARG_STAR if isinstance(i, StarExpr) else nodes.ARG_POS) - for i in items], - context)[0] + for i in e.items], + e)[0] return remove_instance_last_known_values(out) def visit_tuple_expr(self, e: TupleExpr) -> Type: @@ -3376,7 +3391,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: else: # A star expression that's not a Tuple. 
# Treat the whole thing as a variable-length tuple. - return self.check_lst_expr(e.items, 'builtins.tuple', '', e) + return self.check_lst_expr(e, 'builtins.tuple', '') else: if not type_context_items or j >= len(type_context_items): tt = self.accept(item) @@ -3402,6 +3417,9 @@ def fast_dict_type(self, e: DictExpr) -> Optional[Type]: ctx = self.type_context[-1] if ctx: return None + rt = self.resolved_type.get(e, None) + if rt is not None: + return rt if isinstance(rt, Instance) else None keys: List[Type] = [] values: List[Type] = [] stargs: Optional[Tuple[Type, Type]] = None @@ -3415,6 +3433,7 @@ def fast_dict_type(self, e: DictExpr) -> Optional[Type]: ): stargs = (st.args[0], st.args[1]) else: + self.resolved_type[e] = NoneType() return None else: keys.append(self.accept(key)) @@ -3422,10 +3441,14 @@ def fast_dict_type(self, e: DictExpr) -> Optional[Type]: kt = join.join_type_list(keys) vt = join.join_type_list(values) if not (allow_fast_container_literal(kt) and allow_fast_container_literal(vt)): + self.resolved_type[e] = NoneType() return None if stargs and (stargs[0] != kt or stargs[1] != vt): + self.resolved_type[e] = NoneType() return None - return self.chk.named_generic_type('builtins.dict', [kt, vt]) + dt = self.chk.named_generic_type('builtins.dict', [kt, vt]) + self.resolved_type[e] = dt + return dt def visit_dict_expr(self, e: DictExpr) -> Type: """Type check a dict expression. From 37fb21afe937be9da98446557244f3de5424ebf8 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 20 May 2022 16:50:51 +0100 Subject: [PATCH 12/80] Typeshed cherry-pick: Ignore mypy errors in Python 2 builtins and typing (#7894) (#12826) From python/typeshed#7894. --- mypy/typeshed/stdlib/@python2/__builtin__.pyi | 4 ++-- mypy/typeshed/stdlib/@python2/builtins.pyi | 4 ++-- mypy/typeshed/stdlib/@python2/typing.pyi | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mypy/typeshed/stdlib/@python2/__builtin__.pyi b/mypy/typeshed/stdlib/@python2/__builtin__.pyi index d936e08b8266..4ede9dc9d8bd 100644 --- a/mypy/typeshed/stdlib/@python2/__builtin__.pyi +++ b/mypy/typeshed/stdlib/@python2/__builtin__.pyi @@ -711,7 +711,7 @@ class set(MutableSet[_T], Generic[_T]): def __and__(self, s: AbstractSet[object]) -> set[_T]: ... def __iand__(self: Self, s: AbstractSet[object]) -> Self: ... def __or__(self, s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ior__(self: Self, s: AbstractSet[_T]) -> Self: ... + def __ior__(self: Self, s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] @overload def __sub__(self: set[str], s: AbstractSet[Text | None]) -> set[_T]: ... @overload @@ -721,7 +721,7 @@ class set(MutableSet[_T], Generic[_T]): @overload def __isub__(self, s: AbstractSet[_T | None]) -> set[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ixor__(self: Self, s: AbstractSet[_T]) -> Self: ... + def __ixor__(self: Self, s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... diff --git a/mypy/typeshed/stdlib/@python2/builtins.pyi b/mypy/typeshed/stdlib/@python2/builtins.pyi index d936e08b8266..4ede9dc9d8bd 100644 --- a/mypy/typeshed/stdlib/@python2/builtins.pyi +++ b/mypy/typeshed/stdlib/@python2/builtins.pyi @@ -711,7 +711,7 @@ class set(MutableSet[_T], Generic[_T]): def __and__(self, s: AbstractSet[object]) -> set[_T]: ... def __iand__(self: Self, s: AbstractSet[object]) -> Self: ... 
def __or__(self, s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ior__(self: Self, s: AbstractSet[_T]) -> Self: ... + def __ior__(self: Self, s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] @overload def __sub__(self: set[str], s: AbstractSet[Text | None]) -> set[_T]: ... @overload @@ -721,7 +721,7 @@ class set(MutableSet[_T], Generic[_T]): @overload def __isub__(self, s: AbstractSet[_T | None]) -> set[_T]: ... def __xor__(self, s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ixor__(self: Self, s: AbstractSet[_T]) -> Self: ... + def __ixor__(self: Self, s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __le__(self, s: AbstractSet[object]) -> bool: ... def __lt__(self, s: AbstractSet[object]) -> bool: ... def __ge__(self, s: AbstractSet[object]) -> bool: ... diff --git a/mypy/typeshed/stdlib/@python2/typing.pyi b/mypy/typeshed/stdlib/@python2/typing.pyi index affa82c825a2..d1c9ae574e98 100644 --- a/mypy/typeshed/stdlib/@python2/typing.pyi +++ b/mypy/typeshed/stdlib/@python2/typing.pyi @@ -239,9 +239,9 @@ class MutableSet(AbstractSet[_T], Generic[_T]): def clear(self) -> None: ... def pop(self) -> _T: ... def remove(self, element: _T) -> None: ... - def __ior__(self: Self, s: AbstractSet[_T]) -> Self: ... + def __ior__(self: Self, s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __iand__(self: Self, s: AbstractSet[Any]) -> Self: ... - def __ixor__(self: Self, s: AbstractSet[_T]) -> Self: ... + def __ixor__(self: Self, s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __isub__(self: Self, s: AbstractSet[Any]) -> Self: ... class MappingView(object): From a6166b2f7e8e4cf9d176107277da223e45c3a6a1 Mon Sep 17 00:00:00 2001 From: Hugues Date: Fri, 20 May 2022 08:56:30 -0700 Subject: [PATCH 13/80] FindModuleCache: optionally leverage BuildSourceSet (#12616) Given a large codebase with folder hierarchy of the form ``` foo/ company/ __init__.py foo/ bar/ company/ __init__.py bar/ baz/ company/ __init__.py baz/ ... ``` with >100 toplevel folders, the time spent in load_graph is dominated by find_module because this operation is itself O(n) where n is the number of input files, which ends up being O(n**2) because it is called for every import statement in the codebase and the way find_module work, it will always scan through each and every one of those toplevel directories for each and every import statement of company.* Introduce a fast path that leverages the fact that for imports within the code being typechecked, we already have a mapping of module import path to file path in BuildSourceSet Gated behind a command line flag (--fast-module-lookup) to assuage concerns about subtle issues in module lookup being introduced by this fast path. --- docs/source/command_line.rst | 23 +++++ docs/source/running_mypy.rst | 1 + mypy/build.py | 34 +------ mypy/main.py | 4 + mypy/modulefinder.py | 114 ++++++++++++++++++++- mypy/options.py | 2 + mypy/test/testcheck.py | 1 + test-data/unit/check-modules-fast.test | 136 +++++++++++++++++++++++++ 8 files changed, 283 insertions(+), 32 deletions(-) create mode 100644 test-data/unit/check-modules-fast.test diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 1a35d81a7ee9..908fa799da46 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -212,6 +212,29 @@ imports. By default, mypy will suppress any error messages generated within :pep:`561` compliant packages. Adding this flag will disable this behavior. +.. 
option:: --fast-module-lookup + + The default logic used to scan through search paths to resolve imports has a + quadratic worse-case behavior in some cases, which is for instance triggered + by a large number of folders sharing a top-level namespace as in: + + foo/ + company/ + foo/ + a.py + bar/ + company/ + bar/ + b.py + baz/ + company/ + baz/ + c.py + ... + + If you are in this situation, you can enable an experimental fast path by + setting the :option:`--fast-module-lookup` option. + .. _platform-configuration: diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index 46ad2c65c386..caf05dcdf258 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -516,6 +516,7 @@ same directory on the search path, only the stub file is used. (However, if the files are in different directories, the one found in the earlier directory is used.) + Other advice and best practices ******************************* diff --git a/mypy/build.py b/mypy/build.py index c0b9aff5ab32..1196356d5bb8 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -43,8 +43,8 @@ from mypy.report import Reports # Avoid unconditional slow import from mypy.fixup import fixup_module from mypy.modulefinder import ( - BuildSource, compute_search_paths, FindModuleCache, SearchPaths, ModuleSearchResult, - ModuleNotFoundReason + BuildSource, BuildSourceSet, compute_search_paths, FindModuleCache, SearchPaths, + ModuleSearchResult, ModuleNotFoundReason ) from mypy.nodes import Expression from mypy.options import Options @@ -107,33 +107,6 @@ def __init__(self, manager: 'BuildManager', graph: Graph) -> None: self.errors: List[str] = [] # Filled in by build if desired -class BuildSourceSet: - """Efficiently test a file's membership in the set of build sources.""" - - def __init__(self, sources: List[BuildSource]) -> None: - self.source_text_present = False - self.source_modules: Set[str] = set() - self.source_paths: Set[str] = set() - - for source in sources: - if source.text is not None: - self.source_text_present = True - elif source.path: - self.source_paths.add(source.path) - else: - self.source_modules.add(source.module) - - def is_source(self, file: MypyFile) -> bool: - if file.path and file.path in self.source_paths: - return True - elif file._fullname in self.source_modules: - return True - elif self.source_text_present: - return True - else: - return False - - def build(sources: List[BuildSource], options: Options, alt_lib_path: Optional[str] = None, @@ -630,7 +603,8 @@ def __init__(self, data_dir: str, or options.use_fine_grained_cache) and not has_reporters) self.fscache = fscache - self.find_module_cache = FindModuleCache(self.search_paths, self.fscache, self.options) + self.find_module_cache = FindModuleCache(self.search_paths, self.fscache, self.options, + source_set=self.source_set) self.metastore = create_metastore(options) # a mapping from source files to their corresponding shadow files diff --git a/mypy/main.py b/mypy/main.py index c735bb389a35..57727821274e 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -881,6 +881,10 @@ def add_invertible_flag(flag: str, '--explicit-package-bases', default=False, help="Use current directory and MYPYPATH to determine module names of files passed", group=code_group) + add_invertible_flag( + '--fast-module-lookup', default=False, + help=argparse.SUPPRESS, + group=code_group) code_group.add_argument( "--exclude", action="append", diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 4ab95dd6564f..43cc4fc0a6d3 100644 --- 
a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -23,6 +23,7 @@ from typing_extensions import Final, TypeAlias as _TypeAlias from mypy.fscache import FileSystemCache +from mypy.nodes import MypyFile from mypy.options import Options from mypy.stubinfo import is_legacy_bundled_package from mypy import pyinfo @@ -126,6 +127,33 @@ def __repr__(self) -> str: self.base_dir) +class BuildSourceSet: + """Helper to efficiently test a file's membership in a set of build sources.""" + + def __init__(self, sources: List[BuildSource]) -> None: + self.source_text_present = False + self.source_modules = {} # type: Dict[str, str] + self.source_paths = set() # type: Set[str] + + for source in sources: + if source.text is not None: + self.source_text_present = True + if source.path: + self.source_paths.add(source.path) + if source.module: + self.source_modules[source.module] = source.path or '' + + def is_source(self, file: MypyFile) -> bool: + if file.path and file.path in self.source_paths: + return True + elif file._fullname in self.source_modules: + return True + elif self.source_text_present: + return True + else: + return False + + class FindModuleCache: """Module finder with integrated cache. @@ -141,8 +169,10 @@ def __init__(self, search_paths: SearchPaths, fscache: Optional[FileSystemCache], options: Optional[Options], - stdlib_py_versions: Optional[StdlibVersions] = None) -> None: + stdlib_py_versions: Optional[StdlibVersions] = None, + source_set: Optional[BuildSourceSet] = None) -> None: self.search_paths = search_paths + self.source_set = source_set self.fscache = fscache or FileSystemCache() # Cache for get_toplevel_possibilities: # search_paths -> (toplevel_id -> list(package_dirs)) @@ -164,6 +194,53 @@ def clear(self) -> None: self.initial_components.clear() self.ns_ancestors.clear() + def find_module_via_source_set(self, id: str) -> Optional[ModuleSearchResult]: + """Fast path to find modules by looking through the input sources + + This is only used when --fast-module-lookup is passed on the command line.""" + if not self.source_set: + return None + + p = self.source_set.source_modules.get(id, None) + if p and self.fscache.isfile(p): + # We need to make sure we still have __init__.py all the way up + # otherwise we might have false positives compared to slow path + # in case of deletion of init files, which is covered by some tests. + # TODO: are there some combination of flags in which this check should be skipped? + d = os.path.dirname(p) + for _ in range(id.count('.')): + if not any(self.fscache.isfile(os.path.join(d, '__init__' + x)) + for x in PYTHON_EXTENSIONS): + return None + d = os.path.dirname(d) + return p + + idx = id.rfind('.') + if idx != -1: + # When we're looking for foo.bar.baz and can't find a matching module + # in the source set, look up for a foo.bar module. + parent = self.find_module_via_source_set(id[:idx]) + if parent is None or not isinstance(parent, str): + return None + + basename, ext = os.path.splitext(parent) + if (not any(parent.endswith('__init__' + x) for x in PYTHON_EXTENSIONS) + and (ext in PYTHON_EXTENSIONS and not self.fscache.isdir(basename))): + # If we do find such a *module* (and crucially, we don't want a package, + # hence the filtering out of __init__ files, and checking for the presence + # of a folder with a matching name), then we can be pretty confident that + # 'baz' will either be a top-level variable in foo.bar, or will not exist. 
+ # + # Either way, spelunking in other search paths for another 'foo.bar.baz' + # module should be avoided because: + # 1. in the unlikely event that one were found, it's highly likely that + # it would be unrelated to the source being typechecked and therefore + # more likely to lead to erroneous results + # 2. as described in _find_module, in some cases the search itself could + # potentially waste significant amounts of time + return ModuleNotFoundReason.NOT_FOUND + return None + def find_lib_path_dirs(self, id: str, lib_path: Tuple[str, ...]) -> PackageDirs: """Find which elements of a lib_path have the directory a module needs to exist. @@ -229,7 +306,7 @@ def find_module(self, id: str, *, fast_path: bool = False) -> ModuleSearchResult elif top_level in self.stdlib_py_versions: use_typeshed = self._typeshed_has_version(top_level) self.results[id] = self._find_module(id, use_typeshed) - if (not fast_path + if (not (fast_path or (self.options is not None and self.options.fast_module_lookup)) and self.results[id] is ModuleNotFoundReason.NOT_FOUND and self._can_find_module_in_parent_dir(id)): self.results[id] = ModuleNotFoundReason.WRONG_WORKING_DIRECTORY @@ -295,6 +372,39 @@ def _can_find_module_in_parent_dir(self, id: str) -> bool: def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: fscache = self.fscache + # Fast path for any modules in the current source set. + # This is particularly important when there are a large number of search + # paths which share the first (few) component(s) due to the use of namespace + # packages, for instance: + # foo/ + # company/ + # __init__.py + # foo/ + # bar/ + # company/ + # __init__.py + # bar/ + # baz/ + # company/ + # __init__.py + # baz/ + # + # mypy gets [foo/company/foo, bar/company/bar, baz/company/baz, ...] as input + # and computes [foo, bar, baz, ...] as the module search path. + # + # This would result in O(n) search for every import of company.*, leading to + # O(n**2) behavior in load_graph as such imports are unsurprisingly present + # at least once, and usually many more times than that, in each and every file + # being parsed. + # + # Thankfully, such cases are efficiently handled by looking up the module path + # via BuildSourceSet. + p = (self.find_module_via_source_set(id) + if (self.options is not None and self.options.fast_module_lookup) + else None) + if p: + return p + # If we're looking for a module like 'foo.bar.baz', it's likely that most of the # many elements of lib_path don't even have a subdirectory 'foo/bar'. Discover # that only once and cache it for when we look for modules like 'foo.bar.blah' diff --git a/mypy/options.py b/mypy/options.py index b8bc53feb89c..254af61a0645 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -293,6 +293,8 @@ def __init__(self) -> None: self.cache_map: Dict[str, Tuple[str, str]] = {} # Don't properly free objects on exit, just kill the current process. 
self.fast_exit = True + # fast path for finding modules from source set + self.fast_module_lookup = False # Used to transform source code before parsing if not None # TODO: Make the type precise (AnyStr -> AnyStr) self.transform_source: Optional[Callable[[Any], Any]] = None diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index cc0c5875f53b..279ecdb2d22d 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -46,6 +46,7 @@ 'check-multiple-inheritance.test', 'check-super.test', 'check-modules.test', + 'check-modules-fast.test', 'check-typevar-values.test', 'check-unsupported.test', 'check-unreachable-code.test', diff --git a/test-data/unit/check-modules-fast.test b/test-data/unit/check-modules-fast.test new file mode 100644 index 000000000000..875125c6532b --- /dev/null +++ b/test-data/unit/check-modules-fast.test @@ -0,0 +1,136 @@ +-- Type checker test cases dealing with module lookup edge cases +-- to ensure that --fast-module-lookup matches regular lookup behavior + +[case testModuleLookup] +# flags: --fast-module-lookup +import m +reveal_type(m.a) # N: Revealed type is "m.A" + +[file m.py] +class A: pass +a = A() + +[case testModuleLookupStub] +# flags: --fast-module-lookup +import m +reveal_type(m.a) # N: Revealed type is "m.A" + +[file m.pyi] +class A: pass +a = A() + +[case testModuleLookupFromImport] +# flags: --fast-module-lookup +from m import a +reveal_type(a) # N: Revealed type is "m.A" + +[file m.py] +class A: pass +a = A() + +[case testModuleLookupStubFromImport] +# flags: --fast-module-lookup +from m import a +reveal_type(a) # N: Revealed type is "m.A" + +[file m.pyi] +class A: pass +a = A() + + +[case testModuleLookupWeird] +# flags: --fast-module-lookup +from m import a +reveal_type(a) # N: Revealed type is "builtins.object" +reveal_type(a.b) # N: Revealed type is "m.a.B" + +[file m.py] +class A: pass +a = A() + +[file m/__init__.py] +[file m/a.py] +class B: pass +b = B() + + +[case testModuleLookupWeird2] +# flags: --fast-module-lookup +from m.a import b +reveal_type(b) # N: Revealed type is "m.a.B" + +[file m.py] +class A: pass +a = A() + +[file m/__init__.py] +[file m/a.py] +class B: pass +b = B() + + +[case testModuleLookupWeird3] +# flags: --fast-module-lookup +from m.a import b +reveal_type(b) # N: Revealed type is "m.a.B" + +[file m.py] +class A: pass +a = A() +[file m/__init__.py] +class B: pass +a = B() +[file m/a.py] +class B: pass +b = B() + + +[case testModuleLookupWeird4] +# flags: --fast-module-lookup +import m.a +m.a.b # E: "str" has no attribute "b" + +[file m.py] +class A: pass +a = A() +[file m/__init__.py] +class B: pass +a = 'foo' +b = B() +[file m/a.py] +class C: pass +b = C() + + +[case testModuleLookupWeird5] +# flags: --fast-module-lookup +import m.a as ma +reveal_type(ma.b) # N: Revealed type is "m.a.C" + +[file m.py] +class A: pass +a = A() +[file m/__init__.py] +class B: pass +a = 'foo' +b = B() +[file m/a.py] +class C: pass +b = C() + + +[case testModuleLookupWeird6] +# flags: --fast-module-lookup +from m.a import b +reveal_type(b) # N: Revealed type is "m.a.C" + +[file m.py] +class A: pass +a = A() +[file m/__init__.py] +class B: pass +a = 'foo' +b = B() +[file m/a.py] +class C: pass +b = C() From 927a9ba05d7e1ca495c62e555b937e74a01c4e57 Mon Sep 17 00:00:00 2001 From: Ramazan Elsunakev <35545693+relsunkaev@users.noreply.github.com> Date: Fri, 20 May 2022 19:35:31 -0700 Subject: [PATCH 14/80] fix: "variable arguments" error wording (#12827) Fixes #12508 --- mypy/messages.py | 2 +- test-data/unit/check-varargs.test | 8 
++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 70d79384c1a9..b5f6ca339d6a 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -944,7 +944,7 @@ def could_not_infer_type_arguments(self, callee_type: CallableType, n: int, self.fail('Cannot infer function type argument', context) def invalid_var_arg(self, typ: Type, context: Context) -> None: - self.fail('List or tuple expected as variable arguments', context) + self.fail('List or tuple expected as variadic arguments', context) def invalid_keyword_var_arg(self, typ: Type, is_mapping: bool, context: Context) -> None: typ = get_proper_type(typ) diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index d93618b85ba9..4dc10c9f7489 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -275,7 +275,7 @@ class CC(C): pass a = None # type: A f(*None) -f(*a) # E: List or tuple expected as variable arguments +f(*a) # E: List or tuple expected as variadic arguments f(*(a,)) def f(a: 'A') -> None: @@ -544,9 +544,9 @@ if int(): if int(): b, b = f(b, b, *aa) # E: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" if int(): - a, b = f(a, *a) # E: List or tuple expected as variable arguments + a, b = f(a, *a) # E: List or tuple expected as variadic arguments if int(): - a, b = f(*a) # E: List or tuple expected as variable arguments + a, b = f(*a) # E: List or tuple expected as variadic arguments if int(): a, a = f(*aa) @@ -758,5 +758,5 @@ bar(*good1) bar(*good2) bar(*good3) bar(*bad1) # E: Argument 1 to "bar" has incompatible type "*I[str]"; expected "float" -bar(*bad2) # E: List or tuple expected as variable arguments +bar(*bad2) # E: List or tuple expected as variadic arguments [builtins fixtures/dict.pyi] From f19a711eae747e2e5525bf0b0baaa3d3ae972fbd Mon Sep 17 00:00:00 2001 From: Fabian Keller Date: Sat, 21 May 2022 16:00:40 +0200 Subject: [PATCH 15/80] Bring back type annotation support of dunder methods in stub generator (#12828) Fixes #12717 --- mypy/stubgenc.py | 2 +- .../pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 682ed418ffc7..9f90c7aafe69 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -288,7 +288,7 @@ def infer_prop_type(docstr: Optional[str]) -> Optional[str]: return None # Ignore special properties/attributes. - if name.startswith('__') and name.endswith('__'): + if is_skipped_attribute(name): return inferred = infer_prop_type(getattr(obj, '__doc__', None)) diff --git a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi index 226080ac9d57..ab5a4f4e78d2 100644 --- a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi +++ b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi @@ -5,6 +5,7 @@ PI: float class Point: class AngleUnit: + __members__: ClassVar[dict] = ... # read-only __entries: ClassVar[dict] = ... degree: ClassVar[Point.AngleUnit] = ... radian: ClassVar[Point.AngleUnit] = ... @@ -22,6 +23,7 @@ class Point: def value(self) -> int: ... class LengthUnit: + __members__: ClassVar[dict] = ... # read-only __entries: ClassVar[dict] = ... inch: ClassVar[Point.LengthUnit] = ... mm: ClassVar[Point.LengthUnit] = ... 
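The stubgenc hunk above narrows the skip test in the property-inference path from "any `__dunder__` name" to the dedicated `is_skipped_attribute` helper, which is why read-only enum properties such as `__members__` reappear in the generated pybind11 stubs. A minimal sketch of the behavioural difference, using a purely illustrative skip list (the real helper lives in `mypy/stubgenc.py` and its exact contents may differ):

    # Illustrative only: stands in for mypy.stubgenc.is_skipped_attribute.
    ILLUSTRATIVE_SKIP_LIST = frozenset({"__doc__", "__dict__", "__module__", "__weakref__"})

    def is_skipped_attribute_sketch(name: str) -> bool:
        return name in ILLUSTRATIVE_SKIP_LIST

    def old_check(name: str) -> bool:
        # The condition removed by the patch: it dropped every dunder attribute.
        return name.startswith("__") and name.endswith("__")

    # __members__ was silently omitted before and now keeps its inferred annotation.
    assert old_check("__members__")
    assert not is_skipped_attribute_sketch("__members__")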
From 9d63fa52e335bca8322948bb2aedf6253e1df4bb Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 21 May 2022 15:15:17 -0700 Subject: [PATCH 16/80] Run Python 3.11 tests in CI (#12833) Co-authored-by: hauntsaninja <> --- .github/workflows/test.yml | 37 +++++++++++++++++-------------------- test-requirements.txt | 2 +- 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f62dba8cb9c7..71223846bc38 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -119,23 +119,20 @@ jobs: - name: Test run: tox -e ${{ matrix.toxenv }} --skip-pkg-install -- ${{ matrix.tox_extra_args }} -# TODO: re-enable when `typed-ast` will be fixed for `python==3.11` -# python-nightly: -# runs-on: ubuntu-latest -# name: Test suite with Python nightly -# steps: -# - uses: actions/checkout@v2 -# - uses: actions/setup-python@v2 -# with: -# python-version: '3.11-dev' -# - name: Install tox -# run: | -# pip install -U pip==21.2.3 setuptools -# pip install --upgrade 'setuptools!=50' virtualenv==20.4.7 tox==3.20.1 -# - name: Setup tox environment -# run: tox -e py --notest -# - name: Test -# run: tox -e py --skip-pkg-install -- "-n 2" -# continue-on-error: true -# - name: Mark as a success -# run: exit 0 + python-nightly: + runs-on: ubuntu-latest + name: Test suite with Python nightly + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.11-dev' + - name: Install tox + run: pip install --upgrade 'setuptools!=50' 'virtualenv>=20.6.0' tox==3.24.5 + - name: Setup tox environment + run: tox -e py --notest + - name: Test + run: tox -e py --skip-pkg-install -- "-n 2" + continue-on-error: true + - name: Mark as a success + run: exit 0 diff --git a/test-requirements.txt b/test-requirements.txt index a3d11872fd5c..3d7835e38f14 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -12,7 +12,7 @@ pytest-xdist>=1.34.0 pytest-forked>=1.3.0,<2.0.0 pytest-cov>=2.10.0,<3.0.0 py>=1.5.2 -typed_ast>=1.4.0,<2; python_version>='3.8' +typed_ast>=1.5.4,<2; python_version>='3.8' virtualenv>=20.6.0 setuptools!=50 importlib-metadata>=4.6.1,<5.0.0 From 19e9f2136fd46c662f1066a4894edf48173942f4 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 22 May 2022 06:43:07 -0700 Subject: [PATCH 17/80] stubtest: allow ellipsis as default argument (#12838) Resolves #12819 --- mypy/stubtest.py | 2 ++ mypy/test/teststubtest.py | 22 ++++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index b7aa6367ef2d..567f6f8c788a 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -419,6 +419,8 @@ def _verify_arg_default_value( and stub_type is not None # Avoid false positives for marker objects and type(runtime_arg.default) != object + # And ellipsis + and runtime_arg.default is not ... and not is_subtype_helper(runtime_type, stub_type) ): yield ( diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index de48c9ce2723..50b3f90c8fad 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -43,6 +43,7 @@ def __getitem__(self, typeargs: Any) -> object: ... Callable: _SpecialForm = ... Generic: _SpecialForm = ... +Protocol: _SpecialForm = ... class TypeVar: def __init__(self, name, covariant: bool = ..., contravariant: bool = ...) -> None: ... 
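The new `runtime_arg.default is not ...` guard makes stubtest treat a runtime default of `Ellipsis` as an opaque sentinel rather than type-checking it against the stub annotation, so a runtime signature like `def foo(self, x: int, y: bytes = ...)` is no longer reported as having an incompatible default. A hedged sketch of the combined exemption logic, with an invented helper name (not stubtest's real code):

    def should_compare_default(runtime_default: object) -> bool:
        # Simplified model of the exemptions in _verify_arg_default_value:
        # marker objects and the Ellipsis sentinel are never compared against
        # the stub's declared parameter type.
        return type(runtime_default) is not object and runtime_default is not ...

    assert not should_compare_default(...)       # the exemption added by this patch
    assert not should_compare_default(object())  # pre-existing marker-object exemption
    assert should_compare_default(b"data")       # ordinary defaults are still checked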
@@ -1020,6 +1021,27 @@ class _Options(TypedDict): error="opt3", ) + @collect_cases + def test_protocol(self) -> Iterator[Case]: + if sys.version_info < (3, 7): + return + yield Case( + stub=""" + from typing_extensions import Protocol + + class X(Protocol): + def foo(self, x: int, y: bytes = ...) -> str: ... + """, + runtime=""" + from typing_extensions import Protocol + + class X(Protocol): + def foo(self, x: int, y: bytes = ...) -> str: ... + """, + # TODO: this should not be an error, #12820 + error="X.__init__" + ) + def remove_color_code(s: str) -> str: return re.sub("\\x1b.*?m", "", s) # this works! From 205d256584f16013602c71a4696bbaa356942fb8 Mon Sep 17 00:00:00 2001 From: pranavrajpal <78008260+pranavrajpal@users.noreply.github.com> Date: Sun, 22 May 2022 15:17:45 -0700 Subject: [PATCH 18/80] [mypyc] Remove unnecessary max call for python version (#12848) We don't support running on Python 3.5 or lower anymore, so the max call is unnecessary because the current version will always be greater than or equal to (3, 6). --- mypyc/test/test_run.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index 852de8edcf69..1eafd2d4c803 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -174,11 +174,7 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> options.use_builtins_fixtures = True options.show_traceback = True options.strict_optional = True - # N.B: We try to (and ought to!) run with the current - # version of python, since we are going to link and run - # against the current version of python. - # But a lot of the tests use type annotations so we can't say it is 3.5. - options.python_version = max(sys.version_info[:2], (3, 6)) + options.python_version = sys.version_info[:2] options.export_types = True options.preserve_asts = True options.incremental = self.separate From 16d27a2628ef8837f8d180c1e9d248d4c3f2fe4c Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 22 May 2022 17:26:34 -0700 Subject: [PATCH 19/80] stubtest: add --version (#12852) Suggested in #12825 Co-authored-by: hauntsaninja <> --- mypy/stubtest.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 567f6f8c788a..c1bdcb3437a4 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -23,6 +23,7 @@ import mypy.modulefinder import mypy.state import mypy.types +import mypy.version from mypy import nodes from mypy.config_parser import parse_config_file from mypy.options import Options @@ -1471,6 +1472,9 @@ def parse_options(args: List[str]) -> argparse.Namespace: parser.add_argument( "--check-typeshed", action="store_true", help="Check all stdlib modules in typeshed" ) + parser.add_argument( + "--version", action="version", version="%(prog)s " + mypy.version.__version__ + ) return parser.parse_args(args) From c986e54124b967587fbc674029ebe503c75d3a60 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 22 May 2022 17:40:11 -0700 Subject: [PATCH 20/80] Use async def in pythoneval tests (#12834) Co-authored-by: hauntsaninja <> --- test-data/unit/pythoneval-asyncio.test | 221 +++++++++++-------------- 1 file changed, 97 insertions(+), 124 deletions(-) diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test index b3400fe6010e..11a61756a824 100644 --- a/test-data/unit/pythoneval-asyncio.test +++ b/test-data/unit/pythoneval-asyncio.test @@ -4,7 
+4,7 @@ -- These are mostly regression tests -- no attempt is made to make these -- complete. -- --- This test file check Asyncio and yield from interaction +-- This test file checks Asyncio and await interaction [case testImportAsyncio] import asyncio @@ -17,12 +17,11 @@ from typing import Any, Generator import asyncio from asyncio import Future -@asyncio.coroutine -def greet_every_two_seconds() -> 'Generator[Any, None, None]': +async def greet_every_two_seconds() -> None: n = 0 while n < 5: print('Prev', n) - yield from asyncio.sleep(0.1) + await asyncio.sleep(0.01) print('After', n) n += 1 @@ -44,19 +43,17 @@ Prev 4 After 4 [case testCoroutineCallingOtherCoroutine] -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def compute(x: int, y: int) -> 'Generator[Any, None, int]': +async def compute(x: int, y: int) -> int: print("Compute %s + %s ..." % (x, y)) - yield from asyncio.sleep(0.1) + await asyncio.sleep(0.01) return x + y # Here the int is wrapped in Future[int] -@asyncio.coroutine -def print_sum(x: int, y: int) -> 'Generator[Any, None, None]': - result = yield from compute(x, y) # The type of result will be int (is extracted from Future[int] +async def print_sum(x: int, y: int) -> None: + result = await compute(x, y) # The type of result will be int (is extracted from Future[int] print("%s + %s = %s" % (x, y, result)) loop = asyncio.get_event_loop() @@ -67,13 +64,12 @@ Compute 1 + 2 ... 1 + 2 = 3 [case testCoroutineChangingFuture] -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': - yield from asyncio.sleep(0.1) +async def slow_operation(future: 'Future[str]') -> None: + await asyncio.sleep(0.01) future.set_result('Future is done!') loop = asyncio.get_event_loop() @@ -87,13 +83,12 @@ Future is done! [case testFunctionAssignedAsCallback] import typing -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future, AbstractEventLoop -@asyncio.coroutine -def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': - yield from asyncio.sleep(1) +async def slow_operation(future: 'Future[str]') -> None: + await asyncio.sleep(1) future.set_result('Callback works!') def got_result(future: 'Future[str]') -> None: @@ -113,15 +108,14 @@ Callback works! [case testMultipleTasks] import typing -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Task, Future -@asyncio.coroutine -def factorial(name, number) -> 'Generator[Any, None, None]': +async def factorial(name, number) -> None: f = 1 for i in range(2, number+1): print("Task %s: Compute factorial(%s)..." 
% (name, i)) - yield from asyncio.sleep(0.1) + await asyncio.sleep(0.01) f *= i print("Task %s: factorial(%s) = %s" % (name, number, f)) @@ -146,30 +140,26 @@ Task C: factorial(4) = 24 [case testConcatenatedCoroutines] import typing -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def h4() -> 'Generator[Any, None, int]': - x = yield from future +async def h4() -> int: + x = await future return x -@asyncio.coroutine -def h3() -> 'Generator[Any, None, int]': - x = yield from h4() +async def h3() -> int: + x = await h4() print("h3: %s" % x) return x -@asyncio.coroutine -def h2() -> 'Generator[Any, None, int]': - x = yield from h3() +async def h2() -> int: + x = await h3() print("h2: %s" % x) return x -@asyncio.coroutine -def h() -> 'Generator[Any, None, None]': - x = yield from h2() +async def h() -> None: + x = await h2() print("h: %s" % x) loop = asyncio.get_event_loop() @@ -186,30 +176,27 @@ Outside 42 [case testConcatenatedCoroutinesReturningFutures] import typing -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def h4() -> 'Generator[Any, None, Future[int]]': - yield from asyncio.sleep(0.1) +async def h4() -> "Future[int]": + await asyncio.sleep(0.01) f = asyncio.Future() #type: Future[int] return f -@asyncio.coroutine -def h3() -> 'Generator[Any, None, Future[Future[int]]]': - x = yield from h4() +async def h3() -> "Future[Future[int]]": + x = await h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f -@asyncio.coroutine -def h() -> 'Generator[Any, None, None]': +async def h() -> None: print("Before") - x = yield from h3() - y = yield from x - z = yield from y + x = await h3() + y = await x + z = await y print(z) def normalize(future): # The str conversion seems inconsistent; not sure exactly why. Normalize @@ -230,7 +217,7 @@ Future> [case testCoroutineWithOwnClass] import typing -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future @@ -238,9 +225,8 @@ class A: def __init__(self, x: int) -> None: self.x = x -@asyncio.coroutine -def h() -> 'Generator[Any, None, None]': - x = yield from future +async def h() -> None: + x = await future print("h: %s" % x.x) loop = asyncio.get_event_loop() @@ -256,20 +242,19 @@ Outside 42 -- Errors -[case testErrorAssigningCoroutineThatDontReturn] -from typing import Generator, Any +[case testErrorAssigningCoroutineThatDontReturn-xfail] +# https://github.com/python/mypy/issues/12837 +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def greet() -> 'Generator[Any, None, None]': - yield from asyncio.sleep(0.2) +async def greet() -> None: + await asyncio.sleep(0.2) print('Hello World') -@asyncio.coroutine -def test() -> 'Generator[Any, None, None]': - yield from greet() - x = yield from greet() # Error +async def test() -> None: + await greet() + x = await greet() # Error loop = asyncio.get_event_loop() try: @@ -280,19 +265,17 @@ finally: _program.py:13: error: Function does not return a value [case testErrorReturnIsNotTheSameType] -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def compute(x: int, y: int) -> 'Generator[Any, None, int]': +async def compute(x: int, y: int) -> int: print("Compute %s + %s ..." 
% (x, y)) - yield from asyncio.sleep(0.1) + await asyncio.sleep(0.01) return str(x + y) # Error -@asyncio.coroutine -def print_sum(x: int, y: int) -> 'Generator[Any, None, None]': - result = yield from compute(x, y) +async def print_sum(x: int, y: int) -> None: + result = await compute(x, y) print("%s + %s = %s" % (x, y, result)) loop = asyncio.get_event_loop() @@ -300,16 +283,15 @@ loop.run_until_complete(print_sum(1, 2)) loop.close() [out] -_program.py:9: error: Incompatible return value type (got "str", expected "int") +_program.py:8: error: Incompatible return value type (got "str", expected "int") [case testErrorSetFutureDifferentInternalType] -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': - yield from asyncio.sleep(1) +async def slow_operation(future: 'Future[str]') -> None: + await asyncio.sleep(1) future.set_result(42) # Error loop = asyncio.get_event_loop() @@ -319,17 +301,16 @@ loop.run_until_complete(future) print(future.result()) loop.close() [out] -_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "int"; expected "str" +_program.py:7: error: Argument 1 to "set_result" of "Future" has incompatible type "int"; expected "str" [case testErrorUsingDifferentFutureType] -from typing import Any, Generator +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]': - yield from asyncio.sleep(1) +async def slow_operation(future: 'Future[int]') -> None: + await asyncio.sleep(1) future.set_result(42) loop = asyncio.get_event_loop() @@ -339,16 +320,15 @@ loop.run_until_complete(future) print(future.result()) loop.close() [out] -_program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" +_program.py:11: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" [case testErrorUsingDifferentFutureTypeAndSetFutureDifferentInternalType] -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def slow_operation(future: 'Future[int]') -> 'Generator[Any, None, None]': - yield from asyncio.sleep(1) +async def slow_operation(future: 'Future[int]') -> None: + await asyncio.sleep(1) future.set_result('42') #Try to set an str as result to a Future[int] loop = asyncio.get_event_loop() @@ -358,18 +338,17 @@ loop.run_until_complete(future) print(future.result()) loop.close() [out] -_program.py:8: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int" -_program.py:12: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" +_program.py:7: error: Argument 1 to "set_result" of "Future" has incompatible type "str"; expected "int" +_program.py:11: error: Argument 1 to "slow_operation" has incompatible type "Future[str]"; expected "Future[int]" [case testErrorSettingCallbackWithDifferentFutureType] import typing -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future, AbstractEventLoop -@asyncio.coroutine -def slow_operation(future: 'Future[str]') -> 'Generator[Any, None, None]': - yield from asyncio.sleep(1) +async def slow_operation(future: 'Future[str]') -> None: + await asyncio.sleep(1) future.set_result('Future is done!') def got_result(future: 
'Future[int]') -> None: @@ -386,7 +365,7 @@ try: finally: loop.close() [out] -_program.py:18: error: Argument 1 to "add_done_callback" of "Future" has incompatible type "Callable[[Future[int]], None]"; expected "Callable[[Future[str]], Any]" +_program.py:17: error: Argument 1 to "add_done_callback" of "Future" has incompatible type "Callable[[Future[int]], None]"; expected "Callable[[Future[str]], Any]" [case testErrorOneMoreFutureInReturnType] import typing @@ -394,26 +373,23 @@ from typing import Any, Generator import asyncio from asyncio import Future -@asyncio.coroutine -def h4() -> 'Generator[Any, None, Future[int]]': - yield from asyncio.sleep(1) +async def h4() -> Future[int]: + await asyncio.sleep(1) f = asyncio.Future() #type: Future[int] return f -@asyncio.coroutine -def h3() -> 'Generator[Any, None, Future[Future[Future[int]]]]': - x = yield from h4() +async def h3() -> Future[Future[Future[int]]]: + x = await h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f -@asyncio.coroutine -def h() -> 'Generator[Any, None, None]': +async def h() -> None: print("Before") - x = yield from h3() - y = yield from x - z = yield from y + x = await h3() + y = await x + z = await y print(z) print(y) print(x) @@ -422,33 +398,30 @@ loop = asyncio.get_event_loop() loop.run_until_complete(h()) loop.close() [out] -_program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[Future[Future[int]]]") +_program.py:16: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[Future[Future[int]]]") [case testErrorOneLessFutureInReturnType] import typing -from typing import Any, Generator +from typing import Any import asyncio from asyncio import Future -@asyncio.coroutine -def h4() -> 'Generator[Any, None, Future[int]]': - yield from asyncio.sleep(1) +async def h4() -> Future[int]: + await asyncio.sleep(1) f = asyncio.Future() #type: Future[int] return f -@asyncio.coroutine -def h3() -> 'Generator[Any, None, Future[int]]': - x = yield from h4() +async def h3() -> Future[int]: + x = await h4() x.set_result(42) f = asyncio.Future() #type: Future[Future[int]] f.set_result(x) return f -@asyncio.coroutine -def h() -> 'Generator[Any, None, None]': +async def h() -> None: print("Before") - x = yield from h3() - y = yield from x + x = await h3() + y = await x print(y) print(x) @@ -456,11 +429,11 @@ loop = asyncio.get_event_loop() loop.run_until_complete(h()) loop.close() [out] -_program.py:18: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[int]") +_program.py:16: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[int]") [case testErrorAssignmentDifferentType] import typing -from typing import Generator, Any +from typing import Any import asyncio from asyncio import Future @@ -472,9 +445,8 @@ class B: def __init__(self, x: int) -> None: self.x = x -@asyncio.coroutine -def h() -> 'Generator[Any, None, None]': - x = yield from future # type: B # Error +async def h() -> None: + x = await future # type: B # Error print("h: %s" % x.x) loop = asyncio.get_event_loop() @@ -483,7 +455,7 @@ future.set_result(A(42)) loop.run_until_complete(h()) loop.close() [out] -_program.py:16: error: Incompatible types in assignment (expression has type "A", variable has type "B") +_program.py:15: error: Incompatible types in assignment (expression has type "A", variable has type "B") [case testForwardRefToBadAsyncShouldNotCrash_newsemanal] from typing import TypeVar @@ 
-496,10 +468,11 @@ def test() -> None: reveal_type(bad) bad(0) -@asyncio.coroutine -def bad(arg: P) -> T: +async def bad(arg: P) -> T: pass [out] -_program.py:8: note: Revealed type is "def [T] (arg: P?) -> T`-1" -_program.py:12: error: Variable "_testForwardRefToBadAsyncShouldNotCrash_newsemanal.P" is not valid as a type -_program.py:12: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +_program.py:8: note: Revealed type is "def [T] (arg: P?) -> typing.Coroutine[Any, Any, T`-1]" +_program.py:9: error: Value of type "Coroutine[Any, Any, ]" must be used +_program.py:9: note: Are you missing an await? +_program.py:11: error: Variable "_testForwardRefToBadAsyncShouldNotCrash_newsemanal.P" is not valid as a type +_program.py:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases From 3c0409203a59eccb0e04caca7bef97c3e1e83ba7 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 May 2022 13:15:18 +0100 Subject: [PATCH 21/80] [mypyc] Borrow even more things (#12817) Borrow operands of tagged integer operations to reduce the number of incref/decref operations (when it's safe to do so). Borrow the results in list get item operations, similar to what we've been doing with get attribute operations. --- mypyc/irbuild/ast_helpers.py | 94 ++++++++++ mypyc/irbuild/builder.py | 80 ++------ mypyc/irbuild/expression.py | 55 +++--- mypyc/irbuild/ll_builder.py | 32 +++- mypyc/irbuild/statement.py | 20 +- mypyc/irbuild/targets.py | 3 +- mypyc/lib-rt/CPy.h | 2 + mypyc/lib-rt/int_ops.c | 7 +- mypyc/lib-rt/list_ops.c | 41 +++++ mypyc/primitives/list_ops.py | 22 ++- mypyc/test-data/exceptions.test | 3 +- mypyc/test-data/irbuild-basic.test | 16 +- mypyc/test-data/irbuild-classes.test | 49 ++++- mypyc/test-data/irbuild-generics.test | 3 +- mypyc/test-data/irbuild-lists.test | 5 +- mypyc/test-data/refcount.test | 253 +++++++++++++++++++++++++- mypyc/test-data/run-lists.test | 25 +++ 17 files changed, 587 insertions(+), 123 deletions(-) create mode 100644 mypyc/irbuild/ast_helpers.py diff --git a/mypyc/irbuild/ast_helpers.py b/mypyc/irbuild/ast_helpers.py new file mode 100644 index 000000000000..8c9ca186e46a --- /dev/null +++ b/mypyc/irbuild/ast_helpers.py @@ -0,0 +1,94 @@ +"""IRBuilder AST transform helpers shared between expressions and statements. + +Shared code that is tightly coupled to mypy ASTs can be put here instead of +making mypyc.irbuild.builder larger. +""" + +from mypy.nodes import ( + Expression, MemberExpr, Var, IntExpr, FloatExpr, StrExpr, BytesExpr, NameExpr, OpExpr, + UnaryExpr, ComparisonExpr, LDEF +) +from mypyc.ir.ops import BasicBlock +from mypyc.ir.rtypes import is_tagged +from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.constant_fold import constant_fold_expr + + +def process_conditional(self: IRBuilder, e: Expression, true: BasicBlock, + false: BasicBlock) -> None: + if isinstance(e, OpExpr) and e.op in ['and', 'or']: + if e.op == 'and': + # Short circuit 'and' in a conditional context. + new = BasicBlock() + process_conditional(self, e.left, new, false) + self.activate_block(new) + process_conditional(self, e.right, true, false) + else: + # Short circuit 'or' in a conditional context. 
+ new = BasicBlock() + process_conditional(self, e.left, true, new) + self.activate_block(new) + process_conditional(self, e.right, true, false) + elif isinstance(e, UnaryExpr) and e.op == 'not': + process_conditional(self, e.expr, false, true) + else: + res = maybe_process_conditional_comparison(self, e, true, false) + if res: + return + # Catch-all for arbitrary expressions. + reg = self.accept(e) + self.add_bool_branch(reg, true, false) + + +def maybe_process_conditional_comparison(self: IRBuilder, + e: Expression, + true: BasicBlock, + false: BasicBlock) -> bool: + """Transform simple tagged integer comparisons in a conditional context. + + Return True if the operation is supported (and was transformed). Otherwise, + do nothing and return False. + + Args: + e: Arbitrary expression + true: Branch target if comparison is true + false: Branch target if comparison is false + """ + if not isinstance(e, ComparisonExpr) or len(e.operands) != 2: + return False + ltype = self.node_type(e.operands[0]) + rtype = self.node_type(e.operands[1]) + if not is_tagged(ltype) or not is_tagged(rtype): + return False + op = e.operators[0] + if op not in ('==', '!=', '<', '<=', '>', '>='): + return False + left_expr = e.operands[0] + right_expr = e.operands[1] + borrow_left = is_borrow_friendly_expr(self, right_expr) + left = self.accept(left_expr, can_borrow=borrow_left) + right = self.accept(right_expr, can_borrow=True) + # "left op right" for two tagged integers + self.builder.compare_tagged_condition(left, right, op, true, false, e.line) + return True + + +def is_borrow_friendly_expr(self: IRBuilder, expr: Expression) -> bool: + """Can the result of the expression borrowed temporarily? + + Borrowing means keeping a reference without incrementing the reference count. 
+ """ + if isinstance(expr, (IntExpr, FloatExpr, StrExpr, BytesExpr)): + # Literals are immortal and can always be borrowed + return True + if (isinstance(expr, (UnaryExpr, OpExpr, NameExpr, MemberExpr)) and + constant_fold_expr(self, expr) is not None): + # Literal expressions are similar to literals + return True + if isinstance(expr, NameExpr): + if isinstance(expr.node, Var) and expr.kind == LDEF: + # Local variable reference can be borrowed + return True + if isinstance(expr, MemberExpr) and self.is_native_attr_ref(expr): + return True + return False diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index c7ef400236b3..c1662d2fdac2 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -14,15 +14,15 @@ from mypyc.irbuild.prepare import RegisterImplInfo from typing import Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, Any, Iterator -from typing_extensions import overload +from typing_extensions import overload, Final from mypy.backports import OrderedDict from mypy.build import Graph from mypy.nodes import ( MypyFile, SymbolNode, Statement, OpExpr, IntExpr, NameExpr, LDEF, Var, UnaryExpr, CallExpr, IndexExpr, Expression, MemberExpr, RefExpr, Lvalue, TupleExpr, - TypeInfo, Decorator, OverloadedFuncDef, StarExpr, ComparisonExpr, GDEF, - ArgKind, ARG_POS, ARG_NAMED, FuncDef, + TypeInfo, Decorator, OverloadedFuncDef, StarExpr, + GDEF, ArgKind, ARG_POS, ARG_NAMED, FuncDef, ) from mypy.types import ( Type, Instance, TupleType, UninhabitedType, get_proper_type @@ -40,7 +40,7 @@ from mypyc.ir.rtypes import ( RType, RTuple, RInstance, c_int_rprimitive, int_rprimitive, dict_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, is_object_rprimitive, - str_rprimitive, is_tagged, is_list_rprimitive, is_tuple_rprimitive, c_pyssize_t_rprimitive + str_rprimitive, is_list_rprimitive, is_tuple_rprimitive, c_pyssize_t_rprimitive ) from mypyc.ir.func_ir import FuncIR, INVALID_FUNC_DEF, RuntimeArg, FuncSignature, FuncDecl from mypyc.ir.class_ir import ClassIR, NonExtClassInfo @@ -67,6 +67,11 @@ from mypyc.irbuild.util import is_constant +# These int binary operations can borrow their operands safely, since the +# primitives take this into consideration. 
+int_borrow_friendly_op: Final = {'+', '-', '==', '!=', '<', '<=', '>', '>='} + + class IRVisitor(ExpressionVisitor[Value], StatementVisitor[None]): pass @@ -287,7 +292,7 @@ def gen_method_call(self, arg_kinds: Optional[List[ArgKind]] = None, arg_names: Optional[List[Optional[str]]] = None) -> Value: return self.builder.gen_method_call( - base, name, arg_values, result_type, line, arg_kinds, arg_names + base, name, arg_values, result_type, line, arg_kinds, arg_names, self.can_borrow ) def load_module(self, name: str) -> Value: @@ -515,7 +520,7 @@ def get_assignment_target(self, lvalue: Lvalue, # Attribute assignment x.y = e can_borrow = self.is_native_attr_ref(lvalue) obj = self.accept(lvalue.expr, can_borrow=can_borrow) - return AssignmentTargetAttr(obj, lvalue.name) + return AssignmentTargetAttr(obj, lvalue.name, can_borrow=can_borrow) elif isinstance(lvalue, TupleExpr): # Multiple assignment a, ..., b = e star_idx: Optional[int] = None @@ -535,7 +540,10 @@ def get_assignment_target(self, lvalue: Lvalue, assert False, 'Unsupported lvalue: %r' % lvalue - def read(self, target: Union[Value, AssignmentTarget], line: int = -1) -> Value: + def read(self, + target: Union[Value, AssignmentTarget], + line: int = -1, + can_borrow: bool = False) -> Value: if isinstance(target, Value): return target if isinstance(target, AssignmentTargetRegister): @@ -548,7 +556,8 @@ def read(self, target: Union[Value, AssignmentTarget], line: int = -1) -> Value: assert False, target.base.type if isinstance(target, AssignmentTargetAttr): if isinstance(target.obj.type, RInstance) and target.obj.type.class_ir.is_ext_class: - return self.add(GetAttr(target.obj, target.attr, line)) + borrow = can_borrow and target.can_borrow + return self.add(GetAttr(target.obj, target.attr, line, borrow=borrow)) else: return self.py_get_attr(target.obj, target.attr, line) @@ -915,61 +924,6 @@ def shortcircuit_expr(self, expr: OpExpr) -> Value: expr.line ) - # Conditional expressions - - def process_conditional(self, e: Expression, true: BasicBlock, false: BasicBlock) -> None: - if isinstance(e, OpExpr) and e.op in ['and', 'or']: - if e.op == 'and': - # Short circuit 'and' in a conditional context. - new = BasicBlock() - self.process_conditional(e.left, new, false) - self.activate_block(new) - self.process_conditional(e.right, true, false) - else: - # Short circuit 'or' in a conditional context. - new = BasicBlock() - self.process_conditional(e.left, true, new) - self.activate_block(new) - self.process_conditional(e.right, true, false) - elif isinstance(e, UnaryExpr) and e.op == 'not': - self.process_conditional(e.expr, false, true) - else: - res = self.maybe_process_conditional_comparison(e, true, false) - if res: - return - # Catch-all for arbitrary expressions. - reg = self.accept(e) - self.add_bool_branch(reg, true, false) - - def maybe_process_conditional_comparison(self, - e: Expression, - true: BasicBlock, - false: BasicBlock) -> bool: - """Transform simple tagged integer comparisons in a conditional context. - - Return True if the operation is supported (and was transformed). Otherwise, - do nothing and return False. 
- - Args: - e: Arbitrary expression - true: Branch target if comparison is true - false: Branch target if comparison is false - """ - if not isinstance(e, ComparisonExpr) or len(e.operands) != 2: - return False - ltype = self.node_type(e.operands[0]) - rtype = self.node_type(e.operands[1]) - if not is_tagged(ltype) or not is_tagged(rtype): - return False - op = e.operators[0] - if op not in ('==', '!=', '<', '<=', '>', '>='): - return False - left = self.accept(e.operands[0]) - right = self.accept(e.operands[1]) - # "left op right" for two tagged integers - self.builder.compare_tagged_condition(left, right, op, true, false, e.line) - return True - # Basic helpers def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[ClassIR]]: diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index e1feabb0a4f3..76e4db62a465 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -40,12 +40,13 @@ from mypyc.primitives.str_ops import str_slice_op from mypyc.primitives.int_ops import int_comparison_op_mapping from mypyc.irbuild.specialize import apply_function_specialization, apply_method_specialization -from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.builder import IRBuilder, int_borrow_friendly_op from mypyc.irbuild.for_helpers import ( translate_list_comprehension, translate_set_comprehension, comprehension_helper ) from mypyc.irbuild.constant_fold import constant_fold_expr +from mypyc.irbuild.ast_helpers import is_borrow_friendly_expr, process_conditional # Name and attribute references @@ -404,6 +405,15 @@ def transform_op_expr(builder: IRBuilder, expr: OpExpr) -> Value: if folded: return folded + # Special case some int ops to allow borrowing operands. + if (is_int_rprimitive(builder.node_type(expr.left)) + and is_int_rprimitive(builder.node_type(expr.right))): + if expr.op in int_borrow_friendly_op: + borrow_left = is_borrow_friendly_expr(builder, expr.right) + left = builder.accept(expr.left, can_borrow=borrow_left) + right = builder.accept(expr.right, can_borrow=True) + return builder.binary_op(left, right, expr.op, expr.line) + return builder.binary_op( builder.accept(expr.left), builder.accept(expr.right), expr.op, expr.line ) @@ -430,26 +440,6 @@ def transform_index_expr(builder: IRBuilder, expr: IndexExpr) -> Value: base, '__getitem__', [index_reg], builder.node_type(expr), expr.line) -def is_borrow_friendly_expr(builder: IRBuilder, expr: Expression) -> bool: - """Can the result of the expression borrowed temporarily? - - Borrowing means keeping a reference without incrementing the reference count. - """ - if isinstance(expr, (IntExpr, FloatExpr, StrExpr, BytesExpr)): - # Literals are immportal and can always be borrowed - return True - if isinstance(expr, (UnaryExpr, OpExpr)) and constant_fold_expr(builder, expr) is not None: - # Literal expressions are similar to literals - return True - if isinstance(expr, NameExpr): - if isinstance(expr.node, Var) and expr.kind == LDEF: - # Local variable reference can be borrowed - return True - if isinstance(expr, MemberExpr) and builder.is_native_attr_ref(expr): - return True - return False - - def try_constant_fold(builder: IRBuilder, expr: Expression) -> Optional[Value]: """Return the constant value of an expression if possible. 
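The expression-level changes here, together with the statement-level ones later in the patch, let the IR builder borrow the operands of tagged-int arithmetic and comparisons (and the results of native attribute and list `__getitem__` reads), with `keep_alive` ops ensuring a borrowed value cannot outlive its owner. A minimal sketch of the kind of code that benefits; this is plain Python for illustration, and the resulting IR differences appear in the test-data updates later in the diff:

    class Counter:
        def __init__(self) -> None:
            self.n = 0

    def bump(c: Counter, limit: int) -> None:
        # Both comparison operands are borrow-friendly: 'c.n' is a native
        # attribute reference and 'limit' is a local variable, so the
        # conditional comparison borrows them instead of increfing.
        if c.n < limit:
            # Tagged-int '+' with a literal right operand also borrows both
            # operands; a keep_alive keeps 'c' alive while the borrowed
            # attribute value is in use.
            c.n = c.n + 1

The updated `sum` example in `exceptions.test` shows the same idea for list indexing: `CPyList_GetItemBorrow` replaces `CPyList_GetItem` and the matching `dec_ref` disappears.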
@@ -504,7 +494,7 @@ def try_gen_slice_op(builder: IRBuilder, base: Value, index: SliceExpr) -> Optio def transform_conditional_expr(builder: IRBuilder, expr: ConditionalExpr) -> Value: if_body, else_body, next_block = BasicBlock(), BasicBlock(), BasicBlock() - builder.process_conditional(expr.cond, if_body, else_body) + process_conditional(builder, expr.cond, if_body, else_body) expr_type = builder.node_type(expr) # Having actual Phi nodes would be really nice here! target = Register(expr_type) @@ -577,11 +567,22 @@ def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: else: return builder.true() - if first_op in ('is', 'is not') and len(e.operators) == 1: - right = e.operands[1] - if isinstance(right, NameExpr) and right.fullname == 'builtins.None': - # Special case 'is None' / 'is not None'. - return translate_is_none(builder, e.operands[0], negated=first_op != 'is') + if len(e.operators) == 1: + # Special some common simple cases + if first_op in ('is', 'is not'): + right_expr = e.operands[1] + if isinstance(right_expr, NameExpr) and right_expr.fullname == 'builtins.None': + # Special case 'is None' / 'is not None'. + return translate_is_none(builder, e.operands[0], negated=first_op != 'is') + left_expr = e.operands[0] + if is_int_rprimitive(builder.node_type(left_expr)): + right_expr = e.operands[1] + if is_int_rprimitive(builder.node_type(right_expr)): + if first_op in int_borrow_friendly_op: + borrow_left = is_borrow_friendly_expr(builder, right_expr) + left = builder.accept(left_expr, can_borrow=borrow_left) + right = builder.accept(right_expr, can_borrow=True) + return builder.compare_tagged(left, right, first_op, e.line) # TODO: Don't produce an expression when used in conditional context # All of the trickiness here is due to support for chained conditionals diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index c7d8dc7b3ab2..d5154707538b 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -725,7 +725,8 @@ def gen_method_call(self, result_type: Optional[RType], line: int, arg_kinds: Optional[List[ArgKind]] = None, - arg_names: Optional[List[Optional[str]]] = None) -> Value: + arg_names: Optional[List[Optional[str]]] = None, + can_borrow: bool = False) -> Value: """Generate either a native or Python method call.""" # If we have *args, then fallback to Python method call. 
if arg_kinds is not None and any(kind.is_star() for kind in arg_kinds): @@ -759,7 +760,8 @@ def gen_method_call(self, # Try to do a special-cased method call if not arg_kinds or arg_kinds == [ARG_POS] * len(arg_values): - target = self.translate_special_method_call(base, name, arg_values, result_type, line) + target = self.translate_special_method_call( + base, name, arg_values, result_type, line, can_borrow=can_borrow) if target: return target @@ -968,12 +970,14 @@ def compare_tagged_condition(self, is_short_int_rprimitive(rhs.type)))): # We can skip the tag check check = self.comparison_op(lhs, rhs, int_comparison_op_mapping[op][0], line) + self.flush_keep_alives() self.add(Branch(check, true, false, Branch.BOOL)) return op_type, c_func_desc, negate_result, swap_op = int_comparison_op_mapping[op] int_block, short_int_block = BasicBlock(), BasicBlock() check_lhs = self.check_tagged_short_int(lhs, line, negated=True) if is_eq or is_short_int_rprimitive(rhs.type): + self.flush_keep_alives() self.add(Branch(check_lhs, int_block, short_int_block, Branch.BOOL)) else: # For non-equality logical ops (less/greater than, etc.), need to check both sides @@ -981,6 +985,7 @@ def compare_tagged_condition(self, self.add(Branch(check_lhs, int_block, rhs_block, Branch.BOOL)) self.activate_block(rhs_block) check_rhs = self.check_tagged_short_int(rhs, line, negated=True) + self.flush_keep_alives() self.add(Branch(check_rhs, int_block, short_int_block, Branch.BOOL)) # Arbitrary integers (slow path) self.activate_block(int_block) @@ -992,6 +997,7 @@ def compare_tagged_condition(self, if negate_result: self.add(Branch(call, false, true, Branch.BOOL)) else: + self.flush_keep_alives() self.add(Branch(call, true, false, Branch.BOOL)) # Short integers (fast path) self.activate_block(short_int_block) @@ -1313,6 +1319,13 @@ def call_c(self, error_kind = ERR_NEVER target = self.add(CallC(desc.c_function_name, coerced, desc.return_type, desc.steals, desc.is_borrowed, error_kind, line, var_arg_idx)) + if desc.is_borrowed: + # If the result is borrowed, force the arguments to be + # kept alive afterwards, as otherwise the result might be + # immediately freed, at the risk of a dangling pointer. + for arg in coerced: + if not isinstance(arg, (Integer, LoadLiteral)): + self.keep_alives.append(arg) if desc.error_kind == ERR_NEG_INT: comp = ComparisonOp(target, Integer(0, desc.return_type, line), @@ -1332,20 +1345,22 @@ def call_c(self, # and so we can't just coerce it. 
result = self.none() else: - result = self.coerce(target, result_type, line) + result = self.coerce(target, result_type, line, can_borrow=desc.is_borrowed) return result def matching_call_c(self, candidates: List[CFunctionDescription], args: List[Value], line: int, - result_type: Optional[RType] = None) -> Optional[Value]: + result_type: Optional[RType] = None, + can_borrow: bool = False) -> Optional[Value]: matching: Optional[CFunctionDescription] = None for desc in candidates: if len(desc.arg_types) != len(args): continue - if all(is_subtype(actual.type, formal) - for actual, formal in zip(args, desc.arg_types)): + if (all(is_subtype(actual.type, formal) + for actual, formal in zip(args, desc.arg_types)) and + (not desc.is_borrowed or can_borrow)): if matching: assert matching.priority != desc.priority, 'Ambiguous:\n1) {}\n2) {}'.format( matching, desc) @@ -1500,7 +1515,8 @@ def translate_special_method_call(self, name: str, args: List[Value], result_type: Optional[RType], - line: int) -> Optional[Value]: + line: int, + can_borrow: bool = False) -> Optional[Value]: """Translate a method call which is handled nongenerically. These are special in the sense that we have code generated specifically for them. @@ -1511,7 +1527,7 @@ def translate_special_method_call(self, """ call_c_ops_candidates = method_call_ops.get(name, []) call_c_op = self.matching_call_c(call_c_ops_candidates, [base_reg] + args, - line, result_type) + line, result_type, can_borrow=can_borrow) return call_c_op def translate_eq_cmp(self, diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 142a77fbe946..93dc5f24158f 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -20,7 +20,7 @@ Assign, Unreachable, RaiseStandardError, LoadErrorValue, BasicBlock, TupleGet, Value, Register, Branch, NO_TRACEBACK_LINE_NO ) -from mypyc.ir.rtypes import RInstance, exc_rtuple +from mypyc.ir.rtypes import RInstance, exc_rtuple, is_tagged from mypyc.primitives.generic_ops import py_delattr_op from mypyc.primitives.misc_ops import type_op, import_from_op from mypyc.primitives.exc_ops import ( @@ -35,7 +35,8 @@ ExceptNonlocalControl, FinallyNonlocalControl, TryFinallyNonlocalControl ) from mypyc.irbuild.for_helpers import for_loop_helper -from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.builder import IRBuilder, int_borrow_friendly_op +from mypyc.irbuild.ast_helpers import process_conditional, is_borrow_friendly_expr GenFunc = Callable[[], None] @@ -119,9 +120,16 @@ def is_simple_lvalue(expr: Expression) -> bool: def transform_operator_assignment_stmt(builder: IRBuilder, stmt: OperatorAssignmentStmt) -> None: """Operator assignment statement such as x += 1""" builder.disallow_class_assignments([stmt.lvalue], stmt.line) + if (is_tagged(builder.node_type(stmt.lvalue)) + and is_tagged(builder.node_type(stmt.rvalue)) + and stmt.op in int_borrow_friendly_op): + can_borrow = (is_borrow_friendly_expr(builder, stmt.rvalue) + and is_borrow_friendly_expr(builder, stmt.lvalue)) + else: + can_borrow = False target = builder.get_assignment_target(stmt.lvalue) - target_value = builder.read(target, stmt.line) - rreg = builder.accept(stmt.rvalue) + target_value = builder.read(target, stmt.line, can_borrow=can_borrow) + rreg = builder.accept(stmt.rvalue, can_borrow=can_borrow) # the Python parser strips the '=' from operator assignment statements, so re-add it op = stmt.op + '=' res = builder.binary_op(target_value, rreg, op, stmt.line) @@ -207,7 +215,7 @@ def transform_if_stmt(builder: IRBuilder, stmt: 
IfStmt) -> None: # If statements are normalized assert len(stmt.expr) == 1 - builder.process_conditional(stmt.expr[0], if_body, else_body) + process_conditional(builder, stmt.expr[0], if_body, else_body) builder.activate_block(if_body) builder.accept(stmt.body[0]) builder.goto(next) @@ -226,7 +234,7 @@ def transform_while_stmt(builder: IRBuilder, s: WhileStmt) -> None: # Split block so that we get a handle to the top of the loop. builder.goto_and_activate(top) - builder.process_conditional(s.expr, body, normal_loop_exit) + process_conditional(builder, s.expr, body, normal_loop_exit) builder.activate_block(body) builder.accept(s.body) diff --git a/mypyc/irbuild/targets.py b/mypyc/irbuild/targets.py index f6346d4fa7e7..f2daa701f7e8 100644 --- a/mypyc/irbuild/targets.py +++ b/mypyc/irbuild/targets.py @@ -35,9 +35,10 @@ def __init__(self, base: Value, index: Value) -> None: class AssignmentTargetAttr(AssignmentTarget): """obj.attr as assignment target""" - def __init__(self, obj: Value, attr: str) -> None: + def __init__(self, obj: Value, attr: str, can_borrow: bool = False) -> None: self.obj = obj self.attr = attr + self.can_borrow = can_borrow if isinstance(obj.type, RInstance) and obj.type.class_ir.has_attr(attr): # Native attribute reference self.obj_type: RType = obj.type diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 4c0f91a5707e..0fdd6b0a27cc 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -340,6 +340,8 @@ PyObject *CPyList_Build(Py_ssize_t len, ...); PyObject *CPyList_GetItem(PyObject *list, CPyTagged index); PyObject *CPyList_GetItemUnsafe(PyObject *list, CPyTagged index); PyObject *CPyList_GetItemShort(PyObject *list, CPyTagged index); +PyObject *CPyList_GetItemBorrow(PyObject *list, CPyTagged index); +PyObject *CPyList_GetItemShortBorrow(PyObject *list, CPyTagged index); bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value); bool CPyList_SetItemUnsafe(PyObject *list, CPyTagged index, PyObject *value); PyObject *CPyList_PopLast(PyObject *obj); diff --git a/mypyc/lib-rt/int_ops.c b/mypyc/lib-rt/int_ops.c index edf063141619..caf0fe0b5391 100644 --- a/mypyc/lib-rt/int_ops.c +++ b/mypyc/lib-rt/int_ops.c @@ -250,8 +250,11 @@ bool CPyTagged_IsEq_(CPyTagged left, CPyTagged right) { if (CPyTagged_CheckShort(right)) { return false; } else { - int result = PyObject_RichCompareBool(CPyTagged_LongAsObject(left), - CPyTagged_LongAsObject(right), Py_EQ); + PyObject *left_obj = CPyTagged_AsObject(left); + PyObject *right_obj = CPyTagged_AsObject(right); + int result = PyObject_RichCompareBool(left_obj, right_obj, Py_EQ); + Py_DECREF(left_obj); + Py_DECREF(right_obj); if (result == -1) { CPyError_OutOfMemory(); } diff --git a/mypyc/lib-rt/list_ops.c b/mypyc/lib-rt/list_ops.c index 28547cfd7b60..885c1a3366f3 100644 --- a/mypyc/lib-rt/list_ops.c +++ b/mypyc/lib-rt/list_ops.c @@ -52,6 +52,24 @@ PyObject *CPyList_GetItemShort(PyObject *list, CPyTagged index) { return result; } +PyObject *CPyList_GetItemShortBorrow(PyObject *list, CPyTagged index) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyList_GET_SIZE(list); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } + return PyList_GET_ITEM(list, n); +} + PyObject *CPyList_GetItem(PyObject *list, CPyTagged index) { if (CPyTagged_CheckShort(index)) { Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); @@ 
-77,6 +95,29 @@ PyObject *CPyList_GetItem(PyObject *list, CPyTagged index) { } } +PyObject *CPyList_GetItemBorrow(PyObject *list, CPyTagged index) { + if (CPyTagged_CheckShort(index)) { + Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); + Py_ssize_t size = PyList_GET_SIZE(list); + if (n >= 0) { + if (n >= size) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } else { + n += size; + if (n < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + } + return PyList_GET_ITEM(list, n); + } else { + PyErr_SetString(PyExc_OverflowError, CPYTHON_LARGE_INT_ERRMSG); + return NULL; + } +} + bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value) { if (CPyTagged_CheckShort(index)) { Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); diff --git a/mypyc/primitives/list_ops.py b/mypyc/primitives/list_ops.py index 3988511c9772..78955f70f164 100644 --- a/mypyc/primitives/list_ops.py +++ b/mypyc/primitives/list_ops.py @@ -55,7 +55,7 @@ c_function_name='CPyList_GetItem', error_kind=ERR_MAGIC) -# Version with no int bounds check for when it is known to be short +# list[index] version with no int bounds check for when it is known to be short method_op( name='__getitem__', arg_types=[list_rprimitive, short_int_rprimitive], @@ -64,6 +64,26 @@ error_kind=ERR_MAGIC, priority=2) +# list[index] that produces a borrowed result +method_op( + name='__getitem__', + arg_types=[list_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name='CPyList_GetItemBorrow', + error_kind=ERR_MAGIC, + is_borrowed=True, + priority=3) + +# list[index] that produces a borrowed result and index is known to be short +method_op( + name='__getitem__', + arg_types=[list_rprimitive, short_int_rprimitive], + return_type=object_rprimitive, + c_function_name='CPyList_GetItemShortBorrow', + error_kind=ERR_MAGIC, + is_borrowed=True, + priority=4) + # This is unsafe because it assumes that the index is a non-negative short integer # that is in-bounds for the list. 
list_get_item_unsafe_op = custom_op( diff --git a/mypyc/test-data/exceptions.test b/mypyc/test-data/exceptions.test index 8c576b49ce82..8b186e234c5e 100644 --- a/mypyc/test-data/exceptions.test +++ b/mypyc/test-data/exceptions.test @@ -135,11 +135,10 @@ L4: r5 = i < l :: signed if r5 goto L5 else goto L10 :: bool L5: - r6 = CPyList_GetItem(a, i) + r6 = CPyList_GetItemBorrow(a, i) if is_error(r6) goto L11 (error at sum:6) else goto L6 L6: r7 = unbox(int, r6) - dec_ref r6 if is_error(r7) goto L11 (error at sum:6) else goto L7 L7: r8 = CPyTagged_Add(sum, r7) diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index 077abcf2939b..8e54b25b673b 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -2197,15 +2197,17 @@ L0: r0 = self.is_add if r0 goto L1 else goto L2 :: bool L1: - r1 = self.left - r2 = self.right + r1 = borrow self.left + r2 = borrow self.right r3 = CPyTagged_Add(r1, r2) + keep_alive self, self r4 = r3 goto L3 L2: - r5 = self.left - r6 = self.right + r5 = borrow self.left + r6 = borrow self.right r7 = CPyTagged_Subtract(r5, r6) + keep_alive self, self r4 = r7 L3: return r4 @@ -2292,8 +2294,9 @@ def BaseProperty.next(self): r0, r1 :: int r2 :: __main__.BaseProperty L0: - r0 = self._incrementer + r0 = borrow self._incrementer r1 = CPyTagged_Add(r0, 2) + keep_alive self r2 = BaseProperty(r1) return r2 def BaseProperty.__init__(self, value): @@ -2483,9 +2486,10 @@ def g(a, b, c): r2, r3 :: int L0: r0 = a.__getitem__(c) - r1 = CPyList_GetItem(b, c) + r1 = CPyList_GetItemBorrow(b, c) r2 = unbox(int, r1) r3 = CPyTagged_Add(r0, r2) + keep_alive b, c return r3 [case testTypeAlias_toplevel] diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index fcf6ef957435..5a574ac44354 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -59,8 +59,9 @@ L0: r5 = CPyList_GetItemShort(a, 0) r6 = cast(__main__.C, r5) d = r6 - r7 = d.x + r7 = borrow d.x r8 = CPyTagged_Add(r7, 2) + keep_alive d return r8 [case testMethodCall] @@ -175,7 +176,7 @@ def increment(o): r0, r1 :: int r2 :: bool L0: - r0 = o.x + r0 = borrow o.x r1 = CPyTagged_Add(r0, 2) o.x = r1; r2 = is_error return o @@ -1288,3 +1289,47 @@ L0: r1 = r0.e r2 = r1.x return r2 + +[case testBorrowResultOfCustomGetItemInIfStatement] +from typing import List + +class C: + def __getitem__(self, x: int) -> List[int]: + return [] + +def f(x: C) -> None: + # In this case the keep_alive must come before the branch, as otherwise + # reference count transform will get confused. 
+ if x[1][0] == 2: + y = 1 + else: + y = 2 +[out] +def C.__getitem__(self, x): + self :: __main__.C + x :: int + r0 :: list +L0: + r0 = PyList_New(0) + return r0 +def f(x): + x :: __main__.C + r0 :: list + r1 :: object + r2 :: int + r3 :: bit + y :: int +L0: + r0 = x.__getitem__(2) + r1 = CPyList_GetItemShortBorrow(r0, 0) + r2 = unbox(int, r1) + r3 = r2 == 4 + keep_alive r0 + if r3 goto L1 else goto L2 :: bool +L1: + y = 2 + goto L3 +L2: + y = 4 +L3: + return 1 diff --git a/mypyc/test-data/irbuild-generics.test b/mypyc/test-data/irbuild-generics.test index 10f8e737d639..fe4a94992717 100644 --- a/mypyc/test-data/irbuild-generics.test +++ b/mypyc/test-data/irbuild-generics.test @@ -62,9 +62,10 @@ L0: c = r0 r1 = object 1 c.x = r1; r2 = is_error - r3 = c.x + r3 = borrow c.x r4 = unbox(int, r3) r5 = CPyTagged_Add(4, r4) + keep_alive c return 1 [case testGenericMethod] diff --git a/mypyc/test-data/irbuild-lists.test b/mypyc/test-data/irbuild-lists.test index 3173469c8db6..47f7ada709e3 100644 --- a/mypyc/test-data/irbuild-lists.test +++ b/mypyc/test-data/irbuild-lists.test @@ -38,10 +38,11 @@ def f(x): r2 :: object r3 :: int L0: - r0 = CPyList_GetItemShort(x, 0) - r1 = cast(list, r0) + r0 = CPyList_GetItemShortBorrow(x, 0) + r1 = borrow cast(list, r0) r2 = CPyList_GetItemShort(r1, 2) r3 = unbox(int, r2) + keep_alive x, r0 return r3 [case testListSet] diff --git a/mypyc/test-data/refcount.test b/mypyc/test-data/refcount.test index a7ee390c8d74..ce365fc50e7e 100644 --- a/mypyc/test-data/refcount.test +++ b/mypyc/test-data/refcount.test @@ -1153,14 +1153,20 @@ L0: [case testBorrowListGetItem2] from typing import List -def attr_index(x: C) -> str: +def attr_before_index(x: C) -> str: return x.a[x.n] +def attr_after_index(a: List[C], i: int) -> int: + return a[i].n + +def attr_after_index_literal(a: List[C]) -> int: + return a[0].n + class C: a: List[str] n: int [out] -def attr_index(x): +def attr_before_index(x): x :: __main__.C r0 :: list r1 :: int @@ -1172,6 +1178,27 @@ L0: r2 = CPyList_GetItem(r0, r1) r3 = cast(str, r2) return r3 +def attr_after_index(a, i): + a :: list + i :: int + r0 :: object + r1 :: __main__.C + r2 :: int +L0: + r0 = CPyList_GetItemBorrow(a, i) + r1 = borrow cast(__main__.C, r0) + r2 = r1.n + return r2 +def attr_after_index_literal(a): + a :: list + r0 :: object + r1 :: __main__.C + r2 :: int +L0: + r0 = CPyList_GetItemShortBorrow(a, 0) + r1 = borrow cast(__main__.C, r0) + r2 = r1.n + return r2 [case testCannotBorrowListGetItem] from typing import List @@ -1202,6 +1229,37 @@ def f(): L0: return 0 +[case testBorrowListGetItemKeepAlive] +from typing import List + +def f() -> str: + a = [C()] + return a[0].s + +class C: + s: str +[out] +def f(): + r0 :: __main__.C + r1 :: list + r2, r3 :: ptr + a :: list + r4 :: object + r5 :: __main__.C + r6 :: str +L0: + r0 = C() + r1 = PyList_New(1) + r2 = get_element_ptr r1 ob_item :: PyListObject + r3 = load_mem r2 :: ptr* + set_mem r3, r0 :: builtins.object* + a = r1 + r4 = CPyList_GetItemShortBorrow(a, 0) + r5 = borrow cast(__main__.C, r4) + r6 = r5.s + dec_ref a + return r6 + [case testBorrowSetAttrObject] from typing import Optional @@ -1241,3 +1299,194 @@ L0: r0 = borrow x.c r0.b = 0; r1 = is_error return 1 + +[case testBorrowIntEquality] +def add(c: C) -> bool: + return c.x == c.y + +class C: + x: int + y: int +[out] +def add(c): + c :: __main__.C + r0, r1 :: int + r2 :: native_int + r3, r4 :: bit + r5 :: bool + r6 :: bit +L0: + r0 = borrow c.x + r1 = borrow c.y + r2 = r0 & 1 + r3 = r2 == 0 + if r3 goto L1 else goto L2 :: bool +L1: + 
r4 = r0 == r1 + r5 = r4 + goto L3 +L2: + r6 = CPyTagged_IsEq_(r0, r1) + r5 = r6 +L3: + return r5 + +[case testBorrowIntLessThan] +def add(c: C) -> bool: + return c.x < c.y + +class C: + x: int + y: int +[out] +def add(c): + c :: __main__.C + r0, r1 :: int + r2 :: native_int + r3 :: bit + r4 :: native_int + r5, r6, r7 :: bit + r8 :: bool + r9 :: bit +L0: + r0 = borrow c.x + r1 = borrow c.y + r2 = r0 & 1 + r3 = r2 == 0 + r4 = r1 & 1 + r5 = r4 == 0 + r6 = r3 & r5 + if r6 goto L1 else goto L2 :: bool +L1: + r7 = r0 < r1 :: signed + r8 = r7 + goto L3 +L2: + r9 = CPyTagged_IsLt_(r0, r1) + r8 = r9 +L3: + return r8 + +[case testBorrowIntCompareFinal] +from typing_extensions import Final + +X: Final = 10 + +def add(c: C) -> bool: + return c.x == X + +class C: + x: int +[out] +def add(c): + c :: __main__.C + r0 :: int + r1 :: native_int + r2, r3 :: bit + r4 :: bool + r5 :: bit +L0: + r0 = borrow c.x + r1 = r0 & 1 + r2 = r1 == 0 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = r0 == 20 + r4 = r3 + goto L3 +L2: + r5 = CPyTagged_IsEq_(r0, 20) + r4 = r5 +L3: + return r4 + +[case testBorrowIntArithmetic] +def add(c: C) -> int: + return c.x + c.y + +def sub(c: C) -> int: + return c.x - c.y + +class C: + x: int + y: int +[out] +def add(c): + c :: __main__.C + r0, r1, r2 :: int +L0: + r0 = borrow c.x + r1 = borrow c.y + r2 = CPyTagged_Add(r0, r1) + return r2 +def sub(c): + c :: __main__.C + r0, r1, r2 :: int +L0: + r0 = borrow c.x + r1 = borrow c.y + r2 = CPyTagged_Subtract(r0, r1) + return r2 + +[case testBorrowIntComparisonInIf] +def add(c: C, n: int) -> bool: + if c.x == c.y: + return True + return False + +class C: + x: int + y: int +[out] +def add(c, n): + c :: __main__.C + n, r0, r1 :: int + r2 :: native_int + r3, r4, r5 :: bit +L0: + r0 = borrow c.x + r1 = borrow c.y + r2 = r0 & 1 + r3 = r2 != 0 + if r3 goto L1 else goto L2 :: bool +L1: + r4 = CPyTagged_IsEq_(r0, r1) + if r4 goto L3 else goto L4 :: bool +L2: + r5 = r0 == r1 + if r5 goto L3 else goto L4 :: bool +L3: + return 1 +L4: + return 0 + +[case testBorrowIntInPlaceOp] +def add(c: C, n: int) -> None: + c.x += n + +def sub(c: C, n: int) -> None: + c.x -= c.y + +class C: + x: int + y: int +[out] +def add(c, n): + c :: __main__.C + n, r0, r1 :: int + r2 :: bool +L0: + r0 = borrow c.x + r1 = CPyTagged_Add(r0, n) + c.x = r1; r2 = is_error + return 1 +def sub(c, n): + c :: __main__.C + n, r0, r1, r2 :: int + r3 :: bool +L0: + r0 = borrow c.x + r1 = borrow c.y + r2 = CPyTagged_Subtract(r0, r1) + c.x = r2; r3 = is_error + return 1 diff --git a/mypyc/test-data/run-lists.test b/mypyc/test-data/run-lists.test index c98ab9171123..84d5ee121a20 100644 --- a/mypyc/test-data/run-lists.test +++ b/mypyc/test-data/run-lists.test @@ -379,8 +379,33 @@ def test() -> None: source_str = "abcd" f = list("str:" + x for x in source_str) assert f == ["str:a", "str:b", "str:c", "str:d"] + [case testNextBug] from typing import List, Optional def test(x: List[int]) -> None: res = next((i for i in x), None) + +[case testListGetItemWithBorrow] +from typing import List + +class D: + def __init__(self, n: int) -> None: + self.n = n + +class C: + def __init__(self, d: D) -> None: + self.d = d + +def test_index_with_literal() -> None: + d1 = D(1) + d2 = D(2) + a = [C(d1), C(d2)] + d = a[0].d + assert d is d1 + d = a[1].d + assert d is d2 + d = a[-1].d + assert d is d2 + d = a[-2].d + assert d is d1 From c8efeed44d4c90cc05cf196f6315410fda468d70 Mon Sep 17 00:00:00 2001 From: 97littleleaf11 <11172084+97littleleaf11@users.noreply.github.com> Date: Mon, 23 May 2022 22:17:37 +0800 
Subject: [PATCH 22/80] [mypyc] Fix Python 3.11 C API errors (#12850) Closes mypyc/mypyc#931 Closes mypyc/mypyc#923 Related CPython modification: python/cpython#89874 python/cpython#31530 --- mypyc/lib-rt/exc_ops.c | 9 ++++++++- mypyc/lib-rt/misc_ops.c | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/mypyc/lib-rt/exc_ops.c b/mypyc/lib-rt/exc_ops.c index 2e1f4fb66863..1b679199bc0f 100644 --- a/mypyc/lib-rt/exc_ops.c +++ b/mypyc/lib-rt/exc_ops.c @@ -75,7 +75,7 @@ void CPy_RestoreExcInfo(tuple_T3OOO info) { } bool CPy_ExceptionMatches(PyObject *type) { - return PyErr_GivenExceptionMatches(CPy_ExcState()->exc_type, type); + return PyErr_GivenExceptionMatches((PyObject *)Py_TYPE(CPy_ExcState()->exc_value), type); } PyObject *CPy_GetExcValue(void) { @@ -189,6 +189,13 @@ void CPy_TypeError(const char *expected, PyObject *value) { } } +// The PyFrameObject type definition (struct _frame) has been moved +// to the internal C API: to the pycore_frame.h header file. +// https://github.com/python/cpython/pull/31530 +#if PY_VERSION_HEX >= 0x030b00a6 +#include "internal/pycore_frame.h" +#endif + // This function is basically exactly the same with _PyTraceback_Add // which is available in all the versions we support. // We're continuing to use this because we'll probably optimize this later. diff --git a/mypyc/lib-rt/misc_ops.c b/mypyc/lib-rt/misc_ops.c index dcce89d90720..fea38eca9d90 100644 --- a/mypyc/lib-rt/misc_ops.c +++ b/mypyc/lib-rt/misc_ops.c @@ -45,7 +45,7 @@ int CPy_YieldFromErrorHandle(PyObject *iter, PyObject **outp) { _Py_IDENTIFIER(close); _Py_IDENTIFIER(throw); - PyObject *exc_type = CPy_ExcState()->exc_type; + PyObject *exc_type = (PyObject *)Py_TYPE(CPy_ExcState()->exc_value); PyObject *type, *value, *traceback; PyObject *_m; PyObject *res; From 040f3ab64a86efcf7093ada1b13842e56715f243 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 May 2022 15:18:58 +0100 Subject: [PATCH 23/80] [mypyc] Generate smaller code for casts (#12839) Merge a cast op followed by a branch that does an error check and adds a traceback entry. Since casts are very common, this reduces the size of the generated code a fair amount. Old code generated for a cast: ``` if (likely(PyUnicode_Check(cpy_r_x))) cpy_r_r0 = cpy_r_x; else { CPy_TypeError("str", cpy_r_x); cpy_r_r0 = NULL; } if (unlikely(cpy_r_r0 == NULL)) { CPy_AddTraceback("t/t.py", "foo", 2, CPyStatic_globals); goto CPyL2; } ``` New code: ``` if (likely(PyUnicode_Check(cpy_r_x))) cpy_r_r0 = cpy_r_x; else { CPy_TypeErrorTraceback("t/t.py", "foo", 2, CPyStatic_globals, "str", cpy_r_x); goto CPyL2; } ``` --- mypyc/codegen/emit.py | 156 ++++++++++++++++++++++------- mypyc/codegen/emitfunc.py | 35 ++++--- mypyc/lib-rt/CPy.h | 2 + mypyc/lib-rt/exc_ops.c | 7 ++ mypyc/test-data/run-functions.test | 15 +++ mypyc/test/test_emitfunc.py | 109 +++++++++++++++++++- mypyc/test/test_emitwrapper.py | 3 +- 7 files changed, 270 insertions(+), 57 deletions(-) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 94ea940ca3e6..0815dd3c3bd0 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -2,6 +2,8 @@ from mypy.backports import OrderedDict from typing import List, Set, Dict, Optional, Callable, Union, Tuple +from typing_extensions import Final + import sys from mypyc.common import ( @@ -23,6 +25,10 @@ from mypyc.sametype import is_same_type from mypyc.codegen.literals import Literals +# Whether to insert debug asserts for all error handling, to quickly +# catch errors propagating without exceptions set. 
+DEBUG_ERRORS: Final = False + class HeaderDeclaration: """A representation of a declaration in C. @@ -104,6 +110,20 @@ def __init__(self, label: str) -> None: self.label = label +class TracebackAndGotoHandler(ErrorHandler): + """Add traceback item and goto label on error.""" + + def __init__(self, + label: str, + source_path: str, + module_name: str, + traceback_entry: Tuple[str, int]) -> None: + self.label = label + self.source_path = source_path + self.module_name = module_name + self.traceback_entry = traceback_entry + + class ReturnHandler(ErrorHandler): """Return a constant value on error.""" @@ -439,18 +459,6 @@ def emit_cast(self, likely: If the cast is likely to succeed (can be False for unions) """ error = error or AssignHandler() - if isinstance(error, AssignHandler): - handle_error = '%s = NULL;' % dest - elif isinstance(error, GotoHandler): - handle_error = 'goto %s;' % error.label - else: - assert isinstance(error, ReturnHandler) - handle_error = 'return %s;' % error.value - if raise_exception: - raise_exc = f'CPy_TypeError("{self.pretty_name(typ)}", {src}); ' - err = raise_exc + handle_error - else: - err = handle_error # Special case casting *from* optional if src_type and is_optional_type(src_type) and not is_object_rprimitive(typ): @@ -465,9 +473,9 @@ def emit_cast(self, self.emit_arg_check(src, dest, typ, check.format(src), optional) self.emit_lines( f' {dest} = {src};', - 'else {', - err, - '}') + 'else {') + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line('}') return # TODO: Verify refcount handling. @@ -500,9 +508,9 @@ def emit_cast(self, self.emit_arg_check(src, dest, typ, check.format(prefix, src), optional) self.emit_lines( f' {dest} = {src};', - 'else {', - err, - '}') + 'else {') + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line('}') elif is_bytes_rprimitive(typ): if declare_dest: self.emit_line(f'PyObject *{dest};') @@ -512,9 +520,9 @@ def emit_cast(self, self.emit_arg_check(src, dest, typ, check.format(src, src), optional) self.emit_lines( f' {dest} = {src};', - 'else {', - err, - '}') + 'else {') + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line('}') elif is_tuple_rprimitive(typ): if declare_dest: self.emit_line(f'{self.ctype(typ)} {dest};') @@ -525,9 +533,9 @@ def emit_cast(self, check.format(src), optional) self.emit_lines( f' {dest} = {src};', - 'else {', - err, - '}') + 'else {') + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line('}') elif isinstance(typ, RInstance): if declare_dest: self.emit_line(f'PyObject *{dest};') @@ -551,10 +559,10 @@ def emit_cast(self, check = f'(likely{check})' self.emit_arg_check(src, dest, typ, check, optional) self.emit_lines( - f' {dest} = {src};', - 'else {', - err, - '}') + f' {dest} = {src};'.format(dest, src), + 'else {') + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line('}') elif is_none_rprimitive(typ): if declare_dest: self.emit_line(f'PyObject *{dest};') @@ -565,9 +573,9 @@ def emit_cast(self, check.format(src), optional) self.emit_lines( f' {dest} = {src};', - 'else {', - err, - '}') + 'else {') + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) + self.emit_line('}') elif is_object_rprimitive(typ): if declare_dest: self.emit_line(f'PyObject *{dest};') @@ -576,21 +584,51 @@ def emit_cast(self, if optional: self.emit_line('}') elif isinstance(typ, RUnion): - self.emit_union_cast(src, dest, typ, declare_dest, err, 
optional, src_type) + self.emit_union_cast(src, dest, typ, declare_dest, error, optional, src_type, + raise_exception) elif isinstance(typ, RTuple): assert not optional - self.emit_tuple_cast(src, dest, typ, declare_dest, err, src_type) + self.emit_tuple_cast(src, dest, typ, declare_dest, error, src_type) else: assert False, 'Cast not implemented: %s' % typ + def emit_cast_error_handler(self, + error: ErrorHandler, + src: str, + dest: str, + typ: RType, + raise_exception: bool) -> None: + if raise_exception: + if isinstance(error, TracebackAndGotoHandler): + # Merge raising and emitting traceback entry into a single call. + self.emit_type_error_traceback( + error.source_path, error.module_name, error.traceback_entry, + typ=typ, + src=src) + self.emit_line('goto %s;' % error.label) + return + self.emit_line('CPy_TypeError("{}", {}); '.format(self.pretty_name(typ), src)) + if isinstance(error, AssignHandler): + self.emit_line('%s = NULL;' % dest) + elif isinstance(error, GotoHandler): + self.emit_line('goto %s;' % error.label) + elif isinstance(error, TracebackAndGotoHandler): + self.emit_line('%s = NULL;' % dest) + self.emit_traceback(error.source_path, error.module_name, error.traceback_entry) + self.emit_line('goto %s;' % error.label) + else: + assert isinstance(error, ReturnHandler) + self.emit_line('return %s;' % error.value) + def emit_union_cast(self, src: str, dest: str, typ: RUnion, declare_dest: bool, - err: str, + error: ErrorHandler, optional: bool, - src_type: Optional[RType]) -> None: + src_type: Optional[RType], + raise_exception: bool) -> None: """Emit cast to a union type. The arguments are similar to emit_cast. @@ -613,11 +651,11 @@ def emit_union_cast(self, likely=False) self.emit_line(f'if ({dest} != NULL) goto {good_label};') # Handle cast failure. - self.emit_line(err) + self.emit_cast_error_handler(error, src, dest, typ, raise_exception) self.emit_label(good_label) def emit_tuple_cast(self, src: str, dest: str, typ: RTuple, declare_dest: bool, - err: str, src_type: Optional[RType]) -> None: + error: ErrorHandler, src_type: Optional[RType]) -> None: """Emit cast to a tuple type. The arguments are similar to emit_cast. 
@@ -740,7 +778,8 @@ def emit_unbox(self, self.emit_line('} else {') cast_temp = self.temp_name() - self.emit_tuple_cast(src, cast_temp, typ, declare_dest=True, err='', src_type=None) + self.emit_tuple_cast(src, cast_temp, typ, declare_dest=True, error=error, + src_type=None) self.emit_line(f'if (unlikely({cast_temp} == NULL)) {{') # self.emit_arg_check(src, dest, typ, @@ -886,3 +925,44 @@ def emit_gc_clear(self, target: str, rtype: RType) -> None: self.emit_line(f'Py_CLEAR({target});') else: assert False, 'emit_gc_clear() not implemented for %s' % repr(rtype) + + def emit_traceback(self, + source_path: str, + module_name: str, + traceback_entry: Tuple[str, int]) -> None: + return self._emit_traceback('CPy_AddTraceback', source_path, module_name, traceback_entry) + + def emit_type_error_traceback( + self, + source_path: str, + module_name: str, + traceback_entry: Tuple[str, int], + *, + typ: RType, + src: str) -> None: + func = 'CPy_TypeErrorTraceback' + type_str = f'"{self.pretty_name(typ)}"' + return self._emit_traceback( + func, source_path, module_name, traceback_entry, type_str=type_str, src=src) + + def _emit_traceback(self, + func: str, + source_path: str, + module_name: str, + traceback_entry: Tuple[str, int], + type_str: str = '', + src: str = '') -> None: + globals_static = self.static_name('globals', module_name) + line = '%s("%s", "%s", %d, %s' % ( + func, + source_path.replace("\\", "\\\\"), + traceback_entry[0], + traceback_entry[1], + globals_static) + if type_str: + assert src + line += f', {type_str}, {src}' + line += ');' + self.emit_line(line) + if DEBUG_ERRORS: + self.emit_line('assert(PyErr_Occurred() != NULL && "failure w/o err!");') diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 540c6b646496..7b44b22d6cc1 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -6,7 +6,7 @@ from mypyc.common import ( REG_PREFIX, NATIVE_PREFIX, STATIC_PREFIX, TYPE_PREFIX, MODULE_PREFIX, ) -from mypyc.codegen.emit import Emitter +from mypyc.codegen.emit import Emitter, TracebackAndGotoHandler, DEBUG_ERRORS from mypyc.ir.ops import ( Op, OpVisitor, Goto, Branch, Return, Assign, Integer, LoadErrorValue, GetAttr, SetAttr, LoadStatic, InitStatic, TupleGet, TupleSet, Call, IncRef, DecRef, Box, Cast, Unbox, @@ -23,10 +23,6 @@ from mypyc.ir.pprint import generate_names_for_ir from mypyc.analysis.blockfreq import frequently_executed_blocks -# Whether to insert debug asserts for all error handling, to quickly -# catch errors propagating without exceptions set. -DEBUG_ERRORS = False - def native_function_type(fn: FuncIR, emitter: Emitter) -> str: args = ', '.join(emitter.ctype(arg.type) for arg in fn.args) or 'void' @@ -322,7 +318,7 @@ def visit_get_attr(self, op: GetAttr) -> None: and branch.traceback_entry is not None and not branch.negated): # Generate code for the following branch here to avoid - # redundant branches in the generate code. + # redundant branches in the generated code. 
self.emit_attribute_error(branch, cl.name, op.attr) self.emit_line('goto %s;' % self.label(branch.true)) merged_branch = branch @@ -485,8 +481,24 @@ def visit_box(self, op: Box) -> None: self.emitter.emit_box(self.reg(op.src), self.reg(op), op.src.type, can_borrow=True) def visit_cast(self, op: Cast) -> None: + branch = self.next_branch() + handler = None + if branch is not None: + if (branch.value is op + and branch.op == Branch.IS_ERROR + and branch.traceback_entry is not None + and not branch.negated + and branch.false is self.next_block): + # Generate code also for the following branch here to avoid + # redundant branches in the generated code. + handler = TracebackAndGotoHandler(self.label(branch.true), + self.source_path, + self.module_name, + branch.traceback_entry) + self.op_index += 1 + self.emitter.emit_cast(self.reg(op.src), self.reg(op), op.type, - src_type=op.src.type) + src_type=op.src.type, error=handler) def visit_unbox(self, op: Unbox) -> None: self.emitter.emit_unbox(self.reg(op.src), self.reg(op), op.type) @@ -647,14 +659,7 @@ def emit_declaration(self, line: str) -> None: def emit_traceback(self, op: Branch) -> None: if op.traceback_entry is not None: - globals_static = self.emitter.static_name('globals', self.module_name) - self.emit_line('CPy_AddTraceback("%s", "%s", %d, %s);' % ( - self.source_path.replace("\\", "\\\\"), - op.traceback_entry[0], - op.traceback_entry[1], - globals_static)) - if DEBUG_ERRORS: - self.emit_line('assert(PyErr_Occurred() != NULL && "failure w/o err!");') + self.emitter.emit_traceback(self.source_path, self.module_name, op.traceback_entry) def emit_attribute_error(self, op: Branch, class_name: str, attr: str) -> None: assert op.traceback_entry is not None diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 0fdd6b0a27cc..f482e09cbe79 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -500,6 +500,8 @@ void _CPy_GetExcInfo(PyObject **p_type, PyObject **p_value, PyObject **p_traceba void CPyError_OutOfMemory(void); void CPy_TypeError(const char *expected, PyObject *value); void CPy_AddTraceback(const char *filename, const char *funcname, int line, PyObject *globals); +void CPy_TypeErrorTraceback(const char *filename, const char *funcname, int line, + PyObject *globals, const char *expected, PyObject *value); void CPy_AttributeError(const char *filename, const char *funcname, const char *classname, const char *attrname, int line, PyObject *globals); diff --git a/mypyc/lib-rt/exc_ops.c b/mypyc/lib-rt/exc_ops.c index 1b679199bc0f..b23a04b26657 100644 --- a/mypyc/lib-rt/exc_ops.c +++ b/mypyc/lib-rt/exc_ops.c @@ -233,6 +233,13 @@ void CPy_AddTraceback(const char *filename, const char *funcname, int line, PyOb _PyErr_ChainExceptions(exc, val, tb); } +CPy_NOINLINE +void CPy_TypeErrorTraceback(const char *filename, const char *funcname, int line, + PyObject *globals, const char *expected, PyObject *value) { + CPy_TypeError(expected, value); + CPy_AddTraceback(filename, funcname, line, globals); +} + void CPy_AttributeError(const char *filename, const char *funcname, const char *classname, const char *attrname, int line, PyObject *globals) { char buf[500]; diff --git a/mypyc/test-data/run-functions.test b/mypyc/test-data/run-functions.test index 77e9c9ed32f7..b6277c9e8ec4 100644 --- a/mypyc/test-data/run-functions.test +++ b/mypyc/test-data/run-functions.test @@ -1220,3 +1220,18 @@ def sub(s: str, f: Callable[[str], str]) -> str: ... def sub(s: bytes, f: Callable[[bytes], bytes]) -> bytes: ... 
def sub(s, f): return f(s) + +[case testContextManagerSpecialCase] +from typing import Generator, Callable, Iterator +from contextlib import contextmanager + +@contextmanager +def f() -> Iterator[None]: + yield + +def g() -> None: + a = [''] + with f(): + a.pop() + +g() diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index 0bb4dc68e8af..fe47af2300d7 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -9,7 +9,7 @@ from mypyc.ir.ops import ( BasicBlock, Goto, Return, Integer, Assign, AssignMulti, IncRef, DecRef, Branch, Call, Unbox, Box, TupleGet, GetAttr, SetAttr, Op, Value, CallC, IntOp, LoadMem, - GetElementPtr, LoadAddress, ComparisonOp, SetMem, Register, Unreachable + GetElementPtr, LoadAddress, ComparisonOp, SetMem, Register, Unreachable, Cast ) from mypyc.ir.rtypes import ( RTuple, RInstance, RType, RArray, int_rprimitive, bool_rprimitive, list_rprimitive, @@ -312,7 +312,9 @@ def test_get_attr_merged(self) -> None: CPyTagged_INCREF(cpy_r_r0); goto CPyL9; """, - next_branch=branch) + next_branch=branch, + skip_next=True, + ) def test_set_attr(self) -> None: self.assert_emit( @@ -440,13 +442,110 @@ def test_long_signed(self) -> None: self.assert_emit(Assign(a, Integer(-(1 << 31), int64_rprimitive)), """cpy_r_a = -2147483648LL;""") + def test_cast_and_branch_merge(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.IS_ERROR) + branch.traceback_entry = ('foobar', 123) + self.assert_emit( + op, + """\ +if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; +else { + CPy_TypeErrorTraceback("prog.py", "foobar", 123, CPyStatic_prog___globals, "dict", cpy_r_r); + goto CPyL8; +} +""", + next_block=next_block, + next_branch=branch, + skip_next=True, + ) + + def test_cast_and_branch_no_merge_1(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + branch = Branch(op, BasicBlock(8), BasicBlock(9), Branch.IS_ERROR) + branch.traceback_entry = ('foobar', 123) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + next_block=BasicBlock(10), + next_branch=branch, + skip_next=False, + ) + + def test_cast_and_branch_no_merge_2(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.IS_ERROR) + branch.negated = True + branch.traceback_entry = ('foobar', 123) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + next_block=next_block, + next_branch=branch, + ) + + def test_cast_and_branch_no_merge_3(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.BOOL) + branch.traceback_entry = ('foobar', 123) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + next_block=next_block, + next_branch=branch, + ) + + def test_cast_and_branch_no_merge_4(self) -> None: + op = Cast(self.r, dict_rprimitive, 1) + next_block = BasicBlock(9) + branch = Branch(op, BasicBlock(8), next_block, Branch.IS_ERROR) + self.assert_emit( + op, + """\ + if (likely(PyDict_Check(cpy_r_r))) + cpy_r_r0 = cpy_r_r; + else { + CPy_TypeError("dict", cpy_r_r); + cpy_r_r0 = NULL; + } + """, + 
next_block=next_block, + next_branch=branch, + ) + def assert_emit(self, op: Op, expected: str, next_block: Optional[BasicBlock] = None, *, rare: bool = False, - next_branch: Optional[Branch] = None) -> None: + next_branch: Optional[Branch] = None, + skip_next: bool = False) -> None: block = BasicBlock(0) block.ops.append(op) value_names = generate_names_for_ir(self.registers, [block]) @@ -476,6 +575,10 @@ def assert_emit(self, expected_lines = [line.strip(' ') for line in expected_lines] assert_string_arrays_equal(expected_lines, actual_lines, msg='Generated code unexpected') + if skip_next: + assert visitor.op_index == 1 + else: + assert visitor.op_index == 0 def assert_emit_binary_op(self, op: str, diff --git a/mypyc/test/test_emitwrapper.py b/mypyc/test/test_emitwrapper.py index de04123b7e55..3eb1be37bfb6 100644 --- a/mypyc/test/test_emitwrapper.py +++ b/mypyc/test/test_emitwrapper.py @@ -22,7 +22,8 @@ def test_check_list(self) -> None: 'if (likely(PyList_Check(obj_x)))', ' arg_x = obj_x;', 'else {', - ' CPy_TypeError("list", obj_x); return NULL;', + ' CPy_TypeError("list", obj_x);', + ' return NULL;', '}', ], lines) From 291c39d5587d96b8f42fcebb3a6aa65e9eff1276 Mon Sep 17 00:00:00 2001 From: 97littleleaf11 <11172084+97littleleaf11@users.noreply.github.com> Date: Mon, 23 May 2022 23:53:39 +0800 Subject: [PATCH 24/80] Sync pythoncapi_compat.h (#12859) --- mypyc/lib-rt/pythoncapi_compat.h | 226 +++++++++++++++++++++---------- 1 file changed, 158 insertions(+), 68 deletions(-) diff --git a/mypyc/lib-rt/pythoncapi_compat.h b/mypyc/lib-rt/pythoncapi_compat.h index c9191a1d7a57..f22e92f7358f 100644 --- a/mypyc/lib-rt/pythoncapi_compat.h +++ b/mypyc/lib-rt/pythoncapi_compat.h @@ -4,10 +4,10 @@ // Copyright Contributors to the pythoncapi_compat project. // // Homepage: -// https://github.com/pythoncapi/pythoncapi_compat +// https://github.com/python/pythoncapi_compat // // Latest version: -// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h +// https://raw.githubusercontent.com/python/pythoncapi_compat/master/pythoncapi_compat.h // // SPDX-License-Identifier: 0BSD @@ -26,27 +26,32 @@ extern "C" { // the inline keyword in C (only in C++): use __inline instead. #if (defined(_MSC_VER) && _MSC_VER < 1900 \ && !defined(__cplusplus) && !defined(inline)) -# define PYCAPI_COMPAT_INLINE(TYPE static __inline TYPE +# define PYCAPI_COMPAT_STATIC_INLINE(TYPE) static __inline TYPE #else # define PYCAPI_COMPAT_STATIC_INLINE(TYPE) static inline TYPE #endif -// C++ compatibility -#ifdef __cplusplus -# define PYCAPI_COMPAT_CAST(TYPE, EXPR) reinterpret_cast(EXPR) -# define PYCAPI_COMPAT_NULL nullptr -#else -# define PYCAPI_COMPAT_CAST(TYPE, EXPR) (TYPE)(EXPR) -# define PYCAPI_COMPAT_NULL NULL +// C++ compatibility: _Py_CAST() and _Py_NULL +#ifndef _Py_CAST +# ifdef __cplusplus +# define _Py_CAST(type, expr) \ + const_cast(reinterpret_cast(expr)) +# else +# define _Py_CAST(type, expr) ((type)(expr)) +# endif +#endif +#ifndef _Py_NULL +# ifdef __cplusplus +# define _Py_NULL nullptr +# else +# define _Py_NULL NULL +# endif #endif // Cast argument to PyObject* type. 
#ifndef _PyObject_CAST -# define _PyObject_CAST(op) PYCAPI_COMPAT_CAST(PyObject*, op) -#endif -#ifndef _PyObject_CAST_CONST -# define _PyObject_CAST_CONST(op) PYCAPI_COMPAT_CAST(const PyObject*, op) +# define _PyObject_CAST(op) _Py_CAST(PyObject*, op) #endif @@ -74,30 +79,6 @@ _Py_XNewRef(PyObject *obj) #endif -// See https://bugs.python.org/issue42522 -#if !defined(_Py_StealRef) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -__Py_StealRef(PyObject *obj) -{ - Py_DECREF(obj); - return obj; -} -#define _Py_StealRef(obj) __Py_StealRef(_PyObject_CAST(obj)) -#endif - - -// See https://bugs.python.org/issue42522 -#if !defined(_Py_XStealRef) -PYCAPI_COMPAT_STATIC_INLINE(PyObject*) -__Py_XStealRef(PyObject *obj) -{ - Py_XDECREF(obj); - return obj; -} -#define _Py_XStealRef(obj) __Py_XStealRef(_PyObject_CAST(obj)) -#endif - - // bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) PYCAPI_COMPAT_STATIC_INLINE(void) @@ -171,27 +152,28 @@ _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) PYCAPI_COMPAT_STATIC_INLINE(PyCodeObject*) PyFrame_GetCode(PyFrameObject *frame) { - assert(frame != PYCAPI_COMPAT_NULL); - assert(frame->f_code != PYCAPI_COMPAT_NULL); - return PYCAPI_COMPAT_CAST(PyCodeObject*, Py_NewRef(frame->f_code)); + assert(frame != _Py_NULL); + assert(frame->f_code != _Py_NULL); + return _Py_CAST(PyCodeObject*, Py_NewRef(frame->f_code)); } #endif PYCAPI_COMPAT_STATIC_INLINE(PyCodeObject*) _PyFrame_GetCodeBorrow(PyFrameObject *frame) { - return PYCAPI_COMPAT_CAST(PyCodeObject *, - _Py_StealRef(PyFrame_GetCode(frame))); + PyCodeObject *code = PyFrame_GetCode(frame); + Py_DECREF(code); + return code; } -// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 +// bpo-40421 added PyFrame_GetBack() to Python 3.9.0b1 #if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) PyFrame_GetBack(PyFrameObject *frame) { - assert(frame != PYCAPI_COMPAT_NULL); - return PYCAPI_COMPAT_CAST(PyFrameObject*, Py_XNewRef(frame->f_back)); + assert(frame != _Py_NULL); + return _Py_CAST(PyFrameObject*, Py_XNewRef(frame->f_back)); } #endif @@ -199,8 +181,66 @@ PyFrame_GetBack(PyFrameObject *frame) PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) _PyFrame_GetBackBorrow(PyFrameObject *frame) { - return PYCAPI_COMPAT_CAST(PyFrameObject *, - _Py_XStealRef(PyFrame_GetBack(frame))); + PyFrameObject *back = PyFrame_GetBack(frame); + Py_XDECREF(back); + return back; +} +#endif + + +// bpo-40421 added PyFrame_GetLocals() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +PYCAPI_COMPAT_STATIC_INLINE(PyObject*) +PyFrame_GetLocals(PyFrameObject *frame) +{ +#if PY_VERSION_HEX >= 0x030400B1 + if (PyFrame_FastToLocalsWithError(frame) < 0) { + return NULL; + } +#else + PyFrame_FastToLocals(frame); +#endif + return Py_NewRef(frame->f_locals); +} +#endif + + +// bpo-40421 added PyFrame_GetGlobals() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +PYCAPI_COMPAT_STATIC_INLINE(PyObject*) +PyFrame_GetGlobals(PyFrameObject *frame) +{ + return Py_NewRef(frame->f_globals); +} +#endif + + +// bpo-40421 added PyFrame_GetBuiltins() to Python 3.11.0a7 +#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION) +PYCAPI_COMPAT_STATIC_INLINE(PyObject*) +PyFrame_GetBuiltins(PyFrameObject *frame) +{ + return Py_NewRef(frame->f_builtins); +} +#endif + + +// bpo-40421 added PyFrame_GetLasti() to Python 3.11.0b1 +#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) +PYCAPI_COMPAT_STATIC_INLINE(int) 
+PyFrame_GetLasti(PyFrameObject *frame) +{ +#if PY_VERSION_HEX >= 0x030A00A7 + // bpo-27129: Since Python 3.10.0a7, f_lasti is an instruction offset, + // not a bytes offset anymore. Python uses 16-bit "wordcode" (2 bytes) + // instructions. + if (frame->f_lasti < 0) { + return -1; + } + return frame->f_lasti * 2; +#else + return frame->f_lasti; +#endif } #endif @@ -210,7 +250,7 @@ _PyFrame_GetBackBorrow(PyFrameObject *frame) PYCAPI_COMPAT_STATIC_INLINE(PyInterpreterState *) PyThreadState_GetInterpreter(PyThreadState *tstate) { - assert(tstate != PYCAPI_COMPAT_NULL); + assert(tstate != _Py_NULL); return tstate->interp; } #endif @@ -221,8 +261,8 @@ PyThreadState_GetInterpreter(PyThreadState *tstate) PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) PyThreadState_GetFrame(PyThreadState *tstate) { - assert(tstate != PYCAPI_COMPAT_NULL); - return PYCAPI_COMPAT_CAST(PyFrameObject *, Py_XNewRef(tstate->frame)); + assert(tstate != _Py_NULL); + return _Py_CAST(PyFrameObject *, Py_XNewRef(tstate->frame)); } #endif @@ -230,8 +270,9 @@ PyThreadState_GetFrame(PyThreadState *tstate) PYCAPI_COMPAT_STATIC_INLINE(PyFrameObject*) _PyThreadState_GetFrameBorrow(PyThreadState *tstate) { - return PYCAPI_COMPAT_CAST(PyFrameObject*, - _Py_XStealRef(PyThreadState_GetFrame(tstate))); + PyFrameObject *frame = PyThreadState_GetFrame(tstate); + Py_XDECREF(frame); + return frame; } #endif @@ -245,11 +286,11 @@ PyInterpreterState_Get(void) PyInterpreterState *interp; tstate = PyThreadState_GET(); - if (tstate == PYCAPI_COMPAT_NULL) { + if (tstate == _Py_NULL) { Py_FatalError("GIL released (tstate is NULL)"); } interp = tstate->interp; - if (interp == PYCAPI_COMPAT_NULL) { + if (interp == _Py_NULL) { Py_FatalError("no current interpreter"); } return interp; @@ -262,7 +303,7 @@ PyInterpreterState_Get(void) PYCAPI_COMPAT_STATIC_INLINE(uint64_t) PyThreadState_GetID(PyThreadState *tstate) { - assert(tstate != PYCAPI_COMPAT_NULL); + assert(tstate != _Py_NULL); return tstate->id; } #endif @@ -286,8 +327,8 @@ PyThreadState_EnterTracing(PyThreadState *tstate) PYCAPI_COMPAT_STATIC_INLINE(void) PyThreadState_LeaveTracing(PyThreadState *tstate) { - int use_tracing = (tstate->c_tracefunc != PYCAPI_COMPAT_NULL - || tstate->c_profilefunc != PYCAPI_COMPAT_NULL); + int use_tracing = (tstate->c_tracefunc != _Py_NULL + || tstate->c_profilefunc != _Py_NULL); tstate->tracing--; #if PY_VERSION_HEX >= 0x030A00A1 tstate->cframe->use_tracing = use_tracing; @@ -322,11 +363,11 @@ PyObject_CallOneArg(PyObject *func, PyObject *arg) // bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3 #if PY_VERSION_HEX < 0x030A00A3 PYCAPI_COMPAT_STATIC_INLINE(int) -PyModule_AddObjectRef(PyObject *mod, const char *name, PyObject *value) +PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) { int res; Py_XINCREF(value); - res = PyModule_AddObject(mod, name, value); + res = PyModule_AddObject(module, name, value); if (res < 0) { Py_XDECREF(value); } @@ -338,7 +379,7 @@ PyModule_AddObjectRef(PyObject *mod, const char *name, PyObject *value) // bpo-40024 added PyModule_AddType() to Python 3.9.0a5 #if PY_VERSION_HEX < 0x030900A5 PYCAPI_COMPAT_STATIC_INLINE(int) -PyModule_AddType(PyObject *mod, PyTypeObject *type) +PyModule_AddType(PyObject *module, PyTypeObject *type) { const char *name, *dot; @@ -348,13 +389,13 @@ PyModule_AddType(PyObject *mod, PyTypeObject *type) // inline _PyType_Name() name = type->tp_name; - assert(name != PYCAPI_COMPAT_NULL); + assert(name != _Py_NULL); dot = strrchr(name, '.'); - if (dot != PYCAPI_COMPAT_NULL) { + if 
(dot != _Py_NULL) { name = dot + 1; } - return PyModule_AddObjectRef(mod, name, _PyObject_CAST(type)); + return PyModule_AddObjectRef(module, name, _PyObject_CAST(type)); } #endif @@ -375,7 +416,7 @@ PyObject_GC_IsTracked(PyObject* obj) PYCAPI_COMPAT_STATIC_INLINE(int) PyObject_GC_IsFinalized(PyObject *obj) { - PyGC_Head *gc = PYCAPI_COMPAT_CAST(PyGC_Head *, obj) - 1; + PyGC_Head *gc = _Py_CAST(PyGC_Head*, obj) - 1; return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED(gc)); } #endif @@ -384,10 +425,59 @@ PyObject_GC_IsFinalized(PyObject *obj) // bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 #if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) PYCAPI_COMPAT_STATIC_INLINE(int) -_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { - return ob->ob_type == type; +_Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { + return Py_TYPE(ob) == type; +} +#define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST(ob), type) +#endif + + +// bpo-46906 added PyFloat_Pack2() and PyFloat_Unpack2() to Python 3.11a7. +// bpo-11734 added _PyFloat_Pack2() and _PyFloat_Unpack2() to Python 3.6.0b1. +// Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal +// C API: Python 3.11a2-3.11a6 versions are not supported. +#if 0x030600B1 <= PY_VERSION_HEX && PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) +PYCAPI_COMPAT_STATIC_INLINE(int) +PyFloat_Pack2(double x, char *p, int le) +{ return _PyFloat_Pack2(x, (unsigned char*)p, le); } + +PYCAPI_COMPAT_STATIC_INLINE(double) +PyFloat_Unpack2(const char *p, int le) +{ return _PyFloat_Unpack2((const unsigned char *)p, le); } +#endif + + +// bpo-46906 added PyFloat_Pack4(), PyFloat_Pack8(), PyFloat_Unpack4() and +// PyFloat_Unpack8() to Python 3.11a7. +// Python 3.11a2 moved _PyFloat_Pack4(), _PyFloat_Pack8(), _PyFloat_Unpack4() +// and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions +// are not supported. +#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION) +PYCAPI_COMPAT_STATIC_INLINE(int) +PyFloat_Pack4(double x, char *p, int le) +{ return _PyFloat_Pack4(x, (unsigned char*)p, le); } + +PYCAPI_COMPAT_STATIC_INLINE(int) +PyFloat_Pack8(double x, char *p, int le) +{ return _PyFloat_Pack8(x, (unsigned char*)p, le); } + +PYCAPI_COMPAT_STATIC_INLINE(double) +PyFloat_Unpack4(const char *p, int le) +{ return _PyFloat_Unpack4((const unsigned char *)p, le); } + +PYCAPI_COMPAT_STATIC_INLINE(double) +PyFloat_Unpack8(const char *p, int le) +{ return _PyFloat_Unpack8((const unsigned char *)p, le); } +#endif + + +// gh-92154 added PyCode_GetCode() to Python 3.11.0b1 +#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION) +PYCAPI_COMPAT_STATIC_INLINE(PyObject*) +PyCode_GetCode(PyCodeObject *code) +{ + return Py_NewRef(code->co_code); } -#define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST_CONST(ob), type) #endif From 24ef8d06a1edad9074990624dd6d955a9e0c0561 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 23 May 2022 09:18:55 -0700 Subject: [PATCH 25/80] Friendlier errors for PEP 612 (#12832) Co-authored-by: hauntsaninja <> --- mypy/typeanal.py | 14 +++++++++++--- test-data/unit/check-parameter-specification.test | 3 ++- test-data/unit/semanal-errors.test | 3 ++- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 98e37bd0aa40..bd0f684653b2 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -944,9 +944,13 @@ def analyze_callable_type(self, t: UnboundType) -> Type: ) if maybe_ret is None: # Callable[?, RET] (where ? 
is something invalid) - # TODO(PEP612): change error to mention paramspec, once we actually have some - # support for it - self.fail('The first argument to Callable must be a list of types or "..."', t) + self.fail( + 'The first argument to Callable must be a ' + 'list of types, parameter specification, or "..."', t) + self.note( + 'See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas', # noqa: E501 + t + ) return AnyType(TypeOfAny.from_error) ret = maybe_ret else: @@ -1180,6 +1184,10 @@ def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = Fa and analyzed.flavor == ParamSpecFlavor.BARE): if analyzed.prefix.arg_types: self.fail('Invalid location for Concatenate', t) + self.note( + 'You can use Concatenate as the first argument to Callable', + t + ) else: self.fail(f'Invalid location for ParamSpec "{analyzed.name}"', t) self.note( diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 28b08aa7122f..4dae32978263 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -25,7 +25,8 @@ def foo1(x: Callable[P, int]) -> Callable[P, str]: ... def foo2(x: P) -> P: ... # E: Invalid location for ParamSpec "P" \ # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' -def foo3(x: Concatenate[int, P]) -> int: ... # E: Invalid location for Concatenate +def foo3(x: Concatenate[int, P]) -> int: ... # E: Invalid location for Concatenate \ + # N: You can use Concatenate as the first argument to Callable def foo4(x: List[P]) -> None: ... # E: Invalid location for ParamSpec "P" \ # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 14bc24f03a7b..a1ff4ec1c3e7 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -867,7 +867,8 @@ x = None # type: Callable[int, str] y = None # type: Callable[int] z = None # type: Callable[int, int, int] [out] -main:2: error: The first argument to Callable must be a list of types or "..." +main:2: error: The first argument to Callable must be a list of types, parameter specification, or "..." +main:2: note: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas main:3: error: Please use "Callable[[], ]" or "Callable" main:4: error: Please use "Callable[[], ]" or "Callable" From 405efd23e483c214cb1293700ef2008aacf8e2ca Mon Sep 17 00:00:00 2001 From: 97littleleaf11 <11172084+97littleleaf11@users.noreply.github.com> Date: Wed, 25 May 2022 20:37:58 +0800 Subject: [PATCH 26/80] [mypyc] Use PyObject_CallNoArgs and PyObject_CallOneArg (#12862) Use PyObject_CallNoArgs and PyObject_CallOneArg to replace PyObject_CallFunctionObjArgs in lib-rt, since the new API costs less memory according to python/cpython#13890 (comment) Also remove the macro in pythonsupport.h since the two API are already covered by pythoncapi_compat.h. 
--- mypyc/lib-rt/exc_ops.c | 2 +- mypyc/lib-rt/misc_ops.c | 4 ++-- mypyc/lib-rt/pythonsupport.h | 8 ++------ 3 files changed, 5 insertions(+), 9 deletions(-) diff --git a/mypyc/lib-rt/exc_ops.c b/mypyc/lib-rt/exc_ops.c index b23a04b26657..8c69664ae878 100644 --- a/mypyc/lib-rt/exc_ops.c +++ b/mypyc/lib-rt/exc_ops.c @@ -7,7 +7,7 @@ void CPy_Raise(PyObject *exc) { if (PyObject_IsInstance(exc, (PyObject *)&PyType_Type)) { - PyObject *obj = PyObject_CallFunctionObjArgs(exc, NULL); + PyObject *obj = PyObject_CallNoArgs(exc); if (!obj) return; PyErr_SetObject(exc, obj); diff --git a/mypyc/lib-rt/misc_ops.c b/mypyc/lib-rt/misc_ops.c index fea38eca9d90..b0a40bbd4931 100644 --- a/mypyc/lib-rt/misc_ops.c +++ b/mypyc/lib-rt/misc_ops.c @@ -54,7 +54,7 @@ int CPy_YieldFromErrorHandle(PyObject *iter, PyObject **outp) if (PyErr_GivenExceptionMatches(exc_type, PyExc_GeneratorExit)) { _m = _PyObject_GetAttrId(iter, &PyId_close); if (_m) { - res = PyObject_CallFunctionObjArgs(_m, NULL); + res = PyObject_CallNoArgs(_m); Py_DECREF(_m); if (!res) return 2; @@ -360,7 +360,7 @@ CPyDataclass_SleightOfHand(PyObject *dataclass_dec, PyObject *tp, } /* Run the @dataclass descriptor */ - res = PyObject_CallFunctionObjArgs(dataclass_dec, tp, NULL); + res = PyObject_CallOneArg(dataclass_dec, tp); if (!res) { goto fail; } diff --git a/mypyc/lib-rt/pythonsupport.h b/mypyc/lib-rt/pythonsupport.h index c4091dada7fd..09ef9757dd76 100644 --- a/mypyc/lib-rt/pythonsupport.h +++ b/mypyc/lib-rt/pythonsupport.h @@ -350,7 +350,7 @@ CPyGen_SetStopIterationValue(PyObject *value) return 0; } /* Construct an exception instance manually with - * PyObject_CallFunctionObjArgs and pass it to PyErr_SetObject. + * PyObject_CallOneArg and pass it to PyErr_SetObject. * * We do this to handle a situation when "value" is a tuple, in which * case PyErr_SetObject would set the value of StopIteration to @@ -358,7 +358,7 @@ CPyGen_SetStopIterationValue(PyObject *value) * * (See PyErr_SetObject/_PyErr_CreateException code for details.) 
*/ - e = PyObject_CallFunctionObjArgs(PyExc_StopIteration, value, NULL); + e = PyObject_CallOneArg(PyExc_StopIteration, value); if (e == NULL) { return -1; } @@ -410,10 +410,6 @@ _CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { _PyObject_CallMethodIdObjArgs((self), (name), NULL) #define _PyObject_CallMethodIdOneArg(self, name, arg) \ _PyObject_CallMethodIdObjArgs((self), (name), (arg), NULL) -#define PyObject_CallNoArgs(callable) \ - PyObject_CallFunctionObjArgs((callable), NULL) -#define PyObject_CallOneArg(callable, arg) \ - PyObject_CallFunctionObjArgs((callable), (arg), NULL) #endif #endif From 87e73feb0ac85d9d0d55ad4e367876d0c7fbe0a2 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 25 May 2022 10:11:42 -0700 Subject: [PATCH 27/80] Disallow assignments to awaited coroutines that do not return (#12853) Resolves #12837 Co-authored-by: hauntsaninja <> --- mypy/checkexpr.py | 11 ++++++++--- test-data/unit/pythoneval-asyncio.test | 5 ++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 4cc91f9cc123..b9c657e83e63 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3987,6 +3987,8 @@ def accept(self, typ = self.visit_yield_from_expr(node, allow_none_return=True) elif allow_none_return and isinstance(node, ConditionalExpr): typ = self.visit_conditional_expr(node, allow_none_return=True) + elif allow_none_return and isinstance(node, AwaitExpr): + typ = self.visit_await_expr(node, allow_none_return=True) else: typ = node.accept(self) except Exception as err: @@ -4099,15 +4101,18 @@ def visit_yield_expr(self, e: YieldExpr) -> Type: 'actual type', 'expected type') return self.chk.get_generator_receive_type(return_type, False) - def visit_await_expr(self, e: AwaitExpr) -> Type: + def visit_await_expr(self, e: AwaitExpr, allow_none_return: bool = False) -> Type: expected_type = self.type_context[-1] if expected_type is not None: expected_type = self.chk.named_generic_type('typing.Awaitable', [expected_type]) actual_type = get_proper_type(self.accept(e.expr, expected_type)) if isinstance(actual_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=actual_type) - return self.check_awaitable_expr(actual_type, e, - message_registry.INCOMPATIBLE_TYPES_IN_AWAIT) + ret = self.check_awaitable_expr(actual_type, e, + message_registry.INCOMPATIBLE_TYPES_IN_AWAIT) + if not allow_none_return and isinstance(get_proper_type(ret), NoneType): + self.chk.msg.does_not_return_value(None, e) + return ret def check_awaitable_expr(self, t: Type, ctx: Context, msg: Union[str, ErrorMessage]) -> Type: """Check the argument to `await` and extract the type of value. 
diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test index 11a61756a824..72e4bc9cc9dd 100644 --- a/test-data/unit/pythoneval-asyncio.test +++ b/test-data/unit/pythoneval-asyncio.test @@ -242,8 +242,7 @@ Outside 42 -- Errors -[case testErrorAssigningCoroutineThatDontReturn-xfail] -# https://github.com/python/mypy/issues/12837 +[case testErrorAssigningCoroutineThatDontReturn] from typing import Any import asyncio from asyncio import Future @@ -262,7 +261,7 @@ try: finally: loop.close() [out] -_program.py:13: error: Function does not return a value +_program.py:11: error: Function does not return a value [case testErrorReturnIsNotTheSameType] from typing import Any From 4024748bff83237dbbe7da4750fe4c6e993af235 Mon Sep 17 00:00:00 2001 From: pranavrajpal <78008260+pranavrajpal@users.noreply.github.com> Date: Wed, 25 May 2022 10:14:36 -0700 Subject: [PATCH 28/80] [mypyc] Clean up support for debugging mypyc runtests (#12849) Add a --mypyc-debug option to pytest that compiles a test normally and then runs the resulting binary in the debugger specified. The support for debugging runtests was already somewhat there. This just cleans it up and adds a pytest option to use it. --- mypy/test/data.py | 13 +++++++++++++ mypyc/test/test_run.py | 22 +++++++++++----------- 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/mypy/test/data.py b/mypy/test/data.py index de7e38693f23..18d25fc74c04 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -11,11 +11,17 @@ import pytest from typing import List, Tuple, Set, Optional, Iterator, Any, Dict, NamedTuple, Union, Pattern +from typing_extensions import Final from mypy.test.config import test_data_prefix, test_temp_dir, PREFIX root_dir = os.path.normpath(PREFIX) +# Debuggers that we support for debugging mypyc run tests +# implementation of using each of these debuggers is in test_run.py +# TODO: support more debuggers +SUPPORTED_DEBUGGERS: Final = ["gdb", "lldb"] + # File modify/create operation: copy module contents from source_path. class UpdateFile(NamedTuple): @@ -549,6 +555,13 @@ def pytest_addoption(parser: Any) -> None: help='Set the verbose flag when creating mypy Options') group.addoption('--mypyc-showc', action='store_true', default=False, help='Display C code on mypyc test failures') + group.addoption( + "--mypyc-debug", + default=None, + dest="debugger", + choices=SUPPORTED_DEBUGGERS, + help="Run the first mypyc run test with the specified debugger", + ) # This function name is special to pytest. See diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index 1eafd2d4c803..4013c30c8bc8 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -3,7 +3,6 @@ import ast import glob import os.path -import platform import re import subprocess import contextlib @@ -272,19 +271,20 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> env = os.environ.copy() env['MYPYC_RUN_BENCH'] = '1' if bench else '0' - # XXX: This is an ugly hack. - if 'MYPYC_RUN_GDB' in os.environ: - if platform.system() == 'Darwin': + debugger = testcase.config.getoption('debugger') + if debugger: + if debugger == 'lldb': subprocess.check_call(['lldb', '--', sys.executable, driver_path], env=env) - assert False, ("Test can't pass in lldb mode. (And remember to pass -s to " - "pytest)") - elif platform.system() == 'Linux': + elif debugger == 'gdb': subprocess.check_call(['gdb', '--args', sys.executable, driver_path], env=env) - assert False, ("Test can't pass in gdb mode. 
(And remember to pass -s to " - "pytest)") else: - assert False, 'Unsupported OS' - + assert False, 'Unsupported debugger' + # TODO: find a way to automatically disable capturing + # stdin/stdout when in debugging mode + assert False, ( + "Test can't pass in debugging mode. " + "(Make sure to pass -s to pytest to interact with the debugger)" + ) proc = subprocess.Popen([sys.executable, driver_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) output = proc.communicate()[0].decode('utf8') From b55dcf534c07eb4d25bbbf7f5ba67f5edf5d0a92 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 25 May 2022 22:43:13 -0700 Subject: [PATCH 29/80] stubtest: check typevar and paramspec (#12851) Came up in #12825 --- mypy/stubtest.py | 30 ++++++++++++++++++++++++++++-- mypy/test/teststubtest.py | 34 ++++++++++++++++++++++++++++++++-- 2 files changed, 60 insertions(+), 4 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index c1bdcb3437a4..6406921fbb8a 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -12,6 +12,8 @@ import re import sys import types +import typing +import typing_extensions import warnings from functools import singledispatch from pathlib import Path @@ -867,8 +869,32 @@ def verify_overloadedfuncdef( def verify_typevarexpr( stub: nodes.TypeVarExpr, runtime: MaybeMissing[Any], object_path: List[str] ) -> Iterator[Error]: - if False: - yield None + if isinstance(runtime, Missing): + # We seem to insert these typevars into NamedTuple stubs, but they + # don't exist at runtime. Just ignore! + if stub.name == "_NT": + return + yield Error(object_path, "is not present at runtime", stub, runtime) + return + if not isinstance(runtime, TypeVar): + yield Error(object_path, "is not a TypeVar", stub, runtime) + return + + +@verify.register(nodes.ParamSpecExpr) +def verify_paramspecexpr( + stub: nodes.ParamSpecExpr, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + maybe_paramspec_types = ( + getattr(typing, "ParamSpec", None), getattr(typing_extensions, "ParamSpec", None) + ) + paramspec_types = tuple([t for t in maybe_paramspec_types if t is not None]) + if not paramspec_types or not isinstance(runtime, paramspec_types): + yield Error(object_path, "is not a ParamSpec", stub, runtime) + return def _verify_readonly_property(stub: nodes.Decorator, runtime: Any) -> Iterator[str]: diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 50b3f90c8fad..6ba07fc50cfd 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -48,6 +48,9 @@ def __getitem__(self, typeargs: Any) -> object: ... class TypeVar: def __init__(self, name, covariant: bool = ..., contravariant: bool = ...) -> None: ... +class ParamSpec: + def __init__(self, name: str) -> None: ... + _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _K = TypeVar("_K") @@ -329,8 +332,8 @@ def test_default_value(self) -> Iterator[Case]: yield Case( stub=""" from typing import TypeVar - T = TypeVar("T", bound=str) - def f6(text: T = ...) -> None: ... + _T = TypeVar("_T", bound=str) + def f6(text: _T = ...) -> None: ... """, runtime="def f6(text = None): pass", error="f6", @@ -1042,6 +1045,33 @@ def foo(self, x: int, y: bytes = ...) -> str: ... 
error="X.__init__" ) + @collect_cases + def test_type_var(self) -> Iterator[Case]: + yield Case( + stub="from typing import TypeVar", runtime="from typing import TypeVar", error=None + ) + yield Case( + stub="A = TypeVar('A')", + runtime="A = TypeVar('A')", + error=None, + ) + yield Case( + stub="B = TypeVar('B')", + runtime="B = 5", + error="B", + ) + if sys.version_info >= (3, 10): + yield Case( + stub="from typing import ParamSpec", + runtime="from typing import ParamSpec", + error=None + ) + yield Case( + stub="C = ParamSpec('C')", + runtime="C = ParamSpec('C')", + error=None, + ) + def remove_color_code(s: str) -> str: return re.sub("\\x1b.*?m", "", s) # this works! From 0054046c8c147a5190769bde21eb08615c16038e Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 26 May 2022 14:43:40 +0100 Subject: [PATCH 30/80] Fix crash with nested attrs class (#12872) Fixes #12868. --- mypy/semanal.py | 4 +++- test-data/unit/check-attr.test | 15 +++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index a49e7c23edf5..e00913a8cde4 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -4505,7 +4505,9 @@ class C: """ # TODO: Forward reference to name imported in class body is not # caught. - assert self.statement # we are at class scope + if self.statement is None: + # Assume it's fine -- don't have enough context to check + return True return (node is None or self.is_textually_before_statement(node) or not self.is_defined_in_current_module(node.fullname) diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index fdb0da7e0fce..021be93bdd21 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -1734,3 +1734,18 @@ class C: # E: Unsupported converter, only named functions and types are currently supported ) [builtins fixtures/dict.pyi] + +[case testAttrsNestedClass] +from typing import List +import attr + +@attr.s +class C: + @attr.s + class D: + pass + x = attr.ib(type=List[D]) + +c = C(x=[C.D()]) +reveal_type(c.x) # N: Revealed type is "builtins.list[__main__.C.D]" +[builtins fixtures/list.pyi] From d3ef642a0c2cc5b4784c2c632cee9bdcfa10e8b2 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 27 May 2022 10:12:56 +0100 Subject: [PATCH 31/80] [mypyc] Replace integer floor division by a power of two with a shift (#12870) In a microbenchmark right shift was a bit faster. --- mypyc/irbuild/expression.py | 13 +++++++++++ mypyc/test-data/irbuild-int.test | 38 +++++++++++++++++++++++++++++++ mypyc/test-data/run-integers.test | 18 +++++++++++++++ 3 files changed, 69 insertions(+) diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index 76e4db62a465..49a5dd38089a 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -408,6 +408,8 @@ def transform_op_expr(builder: IRBuilder, expr: OpExpr) -> Value: # Special case some int ops to allow borrowing operands. 
if (is_int_rprimitive(builder.node_type(expr.left)) and is_int_rprimitive(builder.node_type(expr.right))): + if expr.op == '//': + expr = try_optimize_int_floor_divide(expr) if expr.op in int_borrow_friendly_op: borrow_left = is_borrow_friendly_expr(builder, expr.right) left = builder.accept(expr.left, can_borrow=borrow_left) @@ -419,6 +421,17 @@ def transform_op_expr(builder: IRBuilder, expr: OpExpr) -> Value: ) +def try_optimize_int_floor_divide(expr: OpExpr) -> OpExpr: + """Replace // with a power of two with a right shift, if possible.""" + if not isinstance(expr.right, IntExpr): + return expr + divisor = expr.right.value + shift = divisor.bit_length() - 1 + if 0 < shift < 28 and divisor == (1 << shift): + return OpExpr('>>', expr.left, IntExpr(shift)) + return expr + + def transform_index_expr(builder: IRBuilder, expr: IndexExpr) -> Value: index = expr.index base_type = builder.node_type(expr.base) diff --git a/mypyc/test-data/irbuild-int.test b/mypyc/test-data/irbuild-int.test index ea943eef2c69..8bf43cfa4923 100644 --- a/mypyc/test-data/irbuild-int.test +++ b/mypyc/test-data/irbuild-int.test @@ -117,3 +117,41 @@ L5: r8 = x L6: return r8 + +[case testIntFloorDivideByPowerOfTwo] +def divby1(x: int) -> int: + return x // 1 +def divby2(x: int) -> int: + return x // 2 +def divby3(x: int) -> int: + return x // 3 +def divby4(x: int) -> int: + return x // 4 +def divby8(x: int) -> int: + return x // 8 +[out] +def divby1(x): + x, r0 :: int +L0: + r0 = CPyTagged_FloorDivide(x, 2) + return r0 +def divby2(x): + x, r0 :: int +L0: + r0 = CPyTagged_Rshift(x, 2) + return r0 +def divby3(x): + x, r0 :: int +L0: + r0 = CPyTagged_FloorDivide(x, 6) + return r0 +def divby4(x): + x, r0 :: int +L0: + r0 = CPyTagged_Rshift(x, 4) + return r0 +def divby8(x): + x, r0 :: int +L0: + r0 = CPyTagged_Rshift(x, 6) + return r0 diff --git a/mypyc/test-data/run-integers.test b/mypyc/test-data/run-integers.test index b9668d6dec9f..74e7cd6b8fb7 100644 --- a/mypyc/test-data/run-integers.test +++ b/mypyc/test-data/run-integers.test @@ -172,6 +172,8 @@ assert test_isinstance_int_and_not_bool(True) == False assert test_isinstance_int_and_not_bool(1) == True [case testIntOps] +from typing import Any + def check_and(x: int, y: int) -> None: # eval() can be trusted to calculate expected result expected = eval('{} & {}'.format(x, y)) @@ -428,6 +430,22 @@ def test_constant_fold() -> None: n64 = -(1 << 64) + int() assert n64 == -(1 << 64) +def div_by_2(x: int) -> int: + return x // 2 + +def div_by_3(x: int) -> int: + return x // 3 + +def div_by_4(x: int) -> int: + return x // 4 + +def test_floor_divide_by_literal() -> None: + for i in range(-100, 100): + i_boxed: Any = i + assert div_by_2(i) == i_boxed // int('2') + assert div_by_3(i) == i_boxed // int('3') + assert div_by_4(i) == i_boxed // int('4') + [case testIntMinMax] def test_int_min_max() -> None: x: int = 200 From 07edc92b71918f8975357b98c2df96a7a2f2aead Mon Sep 17 00:00:00 2001 From: Erik Kemperman Date: Fri, 27 May 2022 11:22:14 +0200 Subject: [PATCH 32/80] Put filelock around PEP 561 tests (fixes #12615) (#12857) Fixes #12615. As discussed in the linked issue, this PR would put a filesystem-based lock around the pip install steps of the suite of PEP561 testcases, to address race conditions. It introduces a test-dependency on tox-dev/py-filelock. I used 3.0.0 as a lower bound, simply because that's what the tox version mentioned in tox.ini specifies as its lower bound. 
However from the release history it seems that Python 3.6 support was dropped in 3.4.2, hence the second line in the requirements. I ended up just adding the location of the lock file to .gitignore -- I guess it would be possible to try and remove it after the suite has run, e.g. with atexit.register, but I didn't want to make this complicated. --- .gitignore | 1 + mypy/test/config.py | 9 +++++++++ mypy/test/testpep561.py | 17 +++++++++++------ runtests.py | 6 +++++- test-requirements.txt | 2 ++ 5 files changed, 28 insertions(+), 7 deletions(-) diff --git a/.gitignore b/.gitignore index 3c0f60cfae4f..b2306b96036f 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,7 @@ venv/ .mypy_cache/ .incremental_checker_cache.json .cache +test-data/packages/.pip_lock dmypy.json .dmypy.json diff --git a/mypy/test/config.py b/mypy/test/config.py index d76eadd72ed8..0c2dfc9a21a9 100644 --- a/mypy/test/config.py +++ b/mypy/test/config.py @@ -15,3 +15,12 @@ # This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase. # It is also hard-coded in numerous places, so don't change it. test_temp_dir = 'tmp' + +# The PEP 561 tests do a bunch of pip installs which, even though they operate +# on distinct temporary virtual environments, run into race conditions on shared +# file-system state. To make this work reliably in parallel mode, we'll use a +# FileLock courtesy of the tox-dev/py-filelock package. +# Ref. https://github.com/python/mypy/issues/12615 +# Ref. mypy/test/testpep561.py +pip_lock = os.path.join(package_path, '.pip_lock') +pip_timeout = 60 diff --git a/mypy/test/testpep561.py b/mypy/test/testpep561.py index a49c7e8e5874..e5c79762d2c2 100644 --- a/mypy/test/testpep561.py +++ b/mypy/test/testpep561.py @@ -1,4 +1,5 @@ from contextlib import contextmanager +import filelock import os import pytest import re @@ -9,7 +10,7 @@ from typing import Tuple, List, Generator import mypy.api -from mypy.test.config import package_path +from mypy.test.config import package_path, pip_lock, pip_timeout from mypy.util import try_find_python2_interpreter from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.config import test_temp_dir @@ -77,11 +78,15 @@ def install_package(pkg: str, env = {'PIP_BUILD': dir} # Inherit environment for Windows env.update(os.environ) - proc = subprocess.run(install_cmd, - cwd=working_dir, - stdout=PIPE, - stderr=PIPE, - env=env) + try: + with filelock.FileLock(pip_lock, timeout=pip_timeout): + proc = subprocess.run(install_cmd, + cwd=working_dir, + stdout=PIPE, + stderr=PIPE, + env=env) + except filelock.Timeout as err: + raise Exception("Failed to acquire {}".format(pip_lock)) from err if proc.returncode != 0: raise Exception(proc.stdout.decode('utf-8') + proc.stderr.decode('utf-8')) diff --git a/runtests.py b/runtests.py index 871a214ef0c1..1f4167f2bd34 100755 --- a/runtests.py +++ b/runtests.py @@ -58,11 +58,15 @@ 'pytest-slow': ['pytest', '-q', '-k', ' or '.join( [SAMPLES, TYPESHED, - PEP561, DAEMON, MYPYC_EXTERNAL, MYPYC_COMMAND_LINE, ERROR_STREAM])], + + # Test cases that might take minutes to run + 'pytest-slower': ['pytest', '-q', '-k', ' or '.join( + [PEP561])], + # Test cases to run in typeshed CI 'typeshed-ci': ['pytest', '-q', '-k', ' or '.join([CMDLINE, EVALUATION, diff --git a/test-requirements.txt b/test-requirements.txt index 3d7835e38f14..4b6c1751cacf 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,8 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 +filelock>=3.0.0,<3.4.2; 
python_version<'3.7' +filelock>=3.0.0; python_version>='3.7' flake8==3.9.2 flake8-bugbear==22.3.20 flake8-pyi>=20.5 From e661890c5bbf2dbfa72d59f7a6c182531b2c2ae6 Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Sat, 28 May 2022 18:16:49 +1000 Subject: [PATCH 33/80] stubtest: add error summary, other output nits (#12855) Co-authored-by: KotlinIsland Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Co-authored-by: hauntsaninja <> --- mypy/messages.py | 10 +--- mypy/stubtest.py | 120 ++++++++++++++++++++++++++------------ mypy/test/teststubtest.py | 55 ++++++++++++----- mypy/util.py | 21 ++++--- 4 files changed, 137 insertions(+), 69 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index b5f6ca339d6a..b60f40bce561 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -42,7 +42,7 @@ ) from mypy.sametypes import is_same_type from mypy.typeops import separate_union_literals -from mypy.util import unmangle +from mypy.util import unmangle, plural_s from mypy.errorcodes import ErrorCode from mypy import message_registry, errorcodes as codes @@ -2110,14 +2110,6 @@ def strip_quotes(s: str) -> str: return s -def plural_s(s: Union[int, Sequence[Any]]) -> str: - count = s if isinstance(s, int) else len(s) - if count > 1: - return 's' - else: - return '' - - def format_string_list(lst: List[str]) -> str: assert len(lst) > 0 if len(lst) == 1: diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 6406921fbb8a..a85e9335a60d 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -9,12 +9,14 @@ import enum import importlib import inspect +import os import re import sys import types import typing import typing_extensions import warnings +from contextlib import redirect_stdout, redirect_stderr from functools import singledispatch from pathlib import Path from typing import Any, Dict, Generic, Iterator, List, Optional, Tuple, TypeVar, Union, cast @@ -29,7 +31,7 @@ from mypy import nodes from mypy.config_parser import parse_config_file from mypy.options import Options -from mypy.util import FancyFormatter, bytes_to_human_readable_repr, is_dunder +from mypy.util import FancyFormatter, bytes_to_human_readable_repr, plural_s, is_dunder class Missing: @@ -53,6 +55,10 @@ def _style(message: str, **kwargs: Any) -> str: return _formatter.style(message, **kwargs) +class StubtestFailure(Exception): + pass + + class Error: def __init__( self, @@ -163,19 +169,20 @@ def test_module(module_name: str) -> Iterator[Error]: """ stub = get_stub(module_name) if stub is None: - yield Error([module_name], "failed to find stubs", MISSING, None) + yield Error([module_name], "failed to find stubs", MISSING, None, runtime_desc="N/A") return try: - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - runtime = importlib.import_module(module_name) - # Also run the equivalent of `from module import *` - # This could have the additional effect of loading not-yet-loaded submodules - # mentioned in __all__ - __import__(module_name, fromlist=["*"]) + with open(os.devnull, "w") as devnull: + with warnings.catch_warnings(), redirect_stdout(devnull), redirect_stderr(devnull): + warnings.simplefilter("ignore") + runtime = importlib.import_module(module_name) + # Also run the equivalent of `from module import *` + # This could have the additional effect of loading not-yet-loaded submodules + # mentioned in __all__ + __import__(module_name, fromlist=["*"]) except Exception as e: - yield Error([module_name], f"failed to import: {e}", stub, 
MISSING) + yield Error([module_name], f"failed to import, {type(e).__name__}: {e}", stub, MISSING) return with warnings.catch_warnings(): @@ -944,7 +951,11 @@ def apply_decorator_to_funcitem( ) or decorator.fullname in mypy.types.OVERLOAD_NAMES: return func if decorator.fullname == "builtins.classmethod": - assert func.arguments[0].variable.name in ("cls", "metacls") + if func.arguments[0].variable.name not in ("cls", "mcs", "metacls"): + raise StubtestFailure( + f"unexpected class argument name {func.arguments[0].variable.name!r} " + f"in {dec.fullname}" + ) # FuncItem is written so that copy.copy() actually works, even when compiled ret = copy.copy(func) # Remove the cls argument, since it's not present in inspect.signature of classmethods @@ -1274,26 +1285,16 @@ def build_stubs(modules: List[str], options: Options, find_submodules: bool = Fa sources.extend(found_sources) all_modules.extend(s.module for s in found_sources if s.module not in all_modules) - try: - res = mypy.build.build(sources=sources, options=options) - except mypy.errors.CompileError as e: - output = [ - _style("error: ", color="red", bold=True), - "not checking stubs due to failed mypy compile:\n", - str(e), - ] - print("".join(output)) - raise RuntimeError from e - if res.errors: - output = [ - _style("error: ", color="red", bold=True), - "not checking stubs due to mypy build errors:\n", - ] - print("".join(output) + "\n".join(res.errors)) - raise RuntimeError + if sources: + try: + res = mypy.build.build(sources=sources, options=options) + except mypy.errors.CompileError as e: + raise StubtestFailure(f"failed mypy compile:\n{e}") from e + if res.errors: + raise StubtestFailure("mypy build errors:\n" + "\n".join(res.errors)) - global _all_stubs - _all_stubs = res.files + global _all_stubs + _all_stubs = res.files return all_modules @@ -1355,7 +1356,21 @@ def strip_comments(s: str) -> str: yield entry -def test_stubs(args: argparse.Namespace, use_builtins_fixtures: bool = False) -> int: +class _Arguments: + modules: List[str] + concise: bool + ignore_missing_stub: bool + ignore_positional_only: bool + allowlist: List[str] + generate_allowlist: bool + ignore_unused_allowlist: bool + mypy_config_file: str + custom_typeshed_dir: str + check_typeshed: bool + version: str + + +def test_stubs(args: _Arguments, use_builtins_fixtures: bool = False) -> int: """This is stubtest! It's time to test the stubs!""" # Load the allowlist. This is a series of strings corresponding to Error.object_desc # Values in the dict will store whether we used the allowlist entry or not. 
@@ -1371,13 +1386,23 @@ def test_stubs(args: argparse.Namespace, use_builtins_fixtures: bool = False) -> modules = args.modules if args.check_typeshed: - assert not args.modules, "Cannot pass both --check-typeshed and a list of modules" + if args.modules: + print( + _style("error:", color="red", bold=True), + "cannot pass both --check-typeshed and a list of modules", + ) + return 1 modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir) # typeshed added a stub for __main__, but that causes stubtest to check itself annoying_modules = {"antigravity", "this", "__main__"} modules = [m for m in modules if m not in annoying_modules] - assert modules, "No modules to check" + if not modules: + print( + _style("error:", color="red", bold=True), + "no modules to check", + ) + return 1 options = Options() options.incremental = False @@ -1392,10 +1417,15 @@ def set_strict_flags() -> None: # not needed yet try: modules = build_stubs(modules, options, find_submodules=not args.check_typeshed) - except RuntimeError: + except StubtestFailure as stubtest_failure: + print( + _style("error:", color="red", bold=True), + f"not checking stubs due to {stubtest_failure}", + ) return 1 exit_code = 0 + error_count = 0 for module in modules: for error in test_module(module): # Filter errors @@ -1421,6 +1451,7 @@ def set_strict_flags() -> None: # not needed yet generated_allowlist.add(error.object_desc) continue print(error.get_description(concise=args.concise)) + error_count += 1 # Print unused allowlist entries if not args.ignore_unused_allowlist: @@ -1429,6 +1460,7 @@ def set_strict_flags() -> None: # not needed yet # This lets us allowlist errors that don't manifest at all on some systems if not allowlist[w] and not allowlist_regexes[w].fullmatch(""): exit_code = 1 + error_count += 1 print(f"note: unused allowlist entry {w}") # Print the generated allowlist @@ -1436,11 +1468,27 @@ def set_strict_flags() -> None: # not needed yet for e in sorted(generated_allowlist): print(e) exit_code = 0 + elif not args.concise: + if error_count: + print( + _style( + f"Found {error_count} error{plural_s(error_count)}" + f" (checked {len(modules)} module{plural_s(modules)})", + color="red", bold=True + ) + ) + else: + print( + _style( + f"Success: no issues found in {len(modules)} module{plural_s(modules)}", + color="green", bold=True + ) + ) return exit_code -def parse_options(args: List[str]) -> argparse.Namespace: +def parse_options(args: List[str]) -> _Arguments: parser = argparse.ArgumentParser( description="Compares stubs to objects introspected from the runtime." 
) @@ -1502,7 +1550,7 @@ def parse_options(args: List[str]) -> argparse.Namespace: "--version", action="version", version="%(prog)s " + mypy.version.__version__ ) - return parser.parse_args(args) + return parser.parse_args(args, namespace=_Arguments()) def main() -> int: diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 6ba07fc50cfd..727bbac83cf1 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -809,7 +809,7 @@ def test_missing_no_runtime_all(self) -> Iterator[Case]: @collect_cases def test_non_public_1(self) -> Iterator[Case]: yield Case( - stub="__all__: list[str]", runtime="", error="test_module.__all__" + stub="__all__: list[str]", runtime="", error=f"{TEST_MODULE_NAME}.__all__" ) # dummy case yield Case(stub="_f: int", runtime="def _f(): ...", error="_f") @@ -1085,9 +1085,11 @@ def test_output(self) -> None: options=[], ) expected = ( - 'error: {0}.bad is inconsistent, stub argument "number" differs from runtime ' - 'argument "num"\nStub: at line 1\ndef (number: builtins.int, text: builtins.str)\n' - "Runtime: at line 1 in file {0}.py\ndef (num, text)\n\n".format(TEST_MODULE_NAME) + f'error: {TEST_MODULE_NAME}.bad is inconsistent, stub argument "number" differs ' + 'from runtime argument "num"\n' + 'Stub: at line 1\ndef (number: builtins.int, text: builtins.str)\n' + f"Runtime: at line 1 in file {TEST_MODULE_NAME}.py\ndef (num, text)\n\n" + 'Found 1 error (checked 1 module)\n' ) assert remove_color_code(output) == expected @@ -1106,17 +1108,17 @@ def test_ignore_flags(self) -> None: output = run_stubtest( stub="", runtime="__all__ = ['f']\ndef f(): pass", options=["--ignore-missing-stub"] ) - assert not output + assert output == 'Success: no issues found in 1 module\n' output = run_stubtest( stub="", runtime="def f(): pass", options=["--ignore-missing-stub"] ) - assert not output + assert output == 'Success: no issues found in 1 module\n' output = run_stubtest( stub="def f(__a): ...", runtime="def f(a): pass", options=["--ignore-positional-only"] ) - assert not output + assert output == 'Success: no issues found in 1 module\n' def test_allowlist(self) -> None: # Can't use this as a context because Windows @@ -1130,18 +1132,21 @@ def test_allowlist(self) -> None: runtime="def bad(asdf, text): pass", options=["--allowlist", allowlist.name], ) - assert not output + assert output == 'Success: no issues found in 1 module\n' # test unused entry detection output = run_stubtest(stub="", runtime="", options=["--allowlist", allowlist.name]) - assert output == f"note: unused allowlist entry {TEST_MODULE_NAME}.bad\n" + assert output == ( + f"note: unused allowlist entry {TEST_MODULE_NAME}.bad\n" + "Found 1 error (checked 1 module)\n" + ) output = run_stubtest( stub="", runtime="", options=["--allowlist", allowlist.name, "--ignore-unused-allowlist"], ) - assert not output + assert output == 'Success: no issues found in 1 module\n' # test regex matching with open(allowlist.name, mode="w+") as f: @@ -1166,8 +1171,9 @@ def also_bad(asdf): pass ), options=["--allowlist", allowlist.name, "--generate-allowlist"], ) - assert output == "note: unused allowlist entry unused.*\n{}.also_bad\n".format( - TEST_MODULE_NAME + assert output == ( + f"note: unused allowlist entry unused.*\n" + f"{TEST_MODULE_NAME}.also_bad\n" ) finally: os.unlink(allowlist.name) @@ -1189,7 +1195,11 @@ def test_missing_stubs(self) -> None: output = io.StringIO() with contextlib.redirect_stdout(output): test_stubs(parse_options(["not_a_module"])) - assert "error: not_a_module failed to 
find stubs" in remove_color_code(output.getvalue()) + assert remove_color_code(output.getvalue()) == ( + "error: not_a_module failed to find stubs\n" + "Stub:\nMISSING\nRuntime:\nN/A\n\n" + "Found 1 error (checked 1 module)\n" + ) def test_get_typeshed_stdlib_modules(self) -> None: stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 6)) @@ -1223,8 +1233,23 @@ def test_config_file(self) -> None: ) output = run_stubtest(stub=stub, runtime=runtime, options=[]) assert remove_color_code(output) == ( - "error: test_module.temp variable differs from runtime type Literal[5]\n" + f"error: {TEST_MODULE_NAME}.temp variable differs from runtime type Literal[5]\n" "Stub: at line 2\n_decimal.Decimal\nRuntime:\n5\n\n" + "Found 1 error (checked 1 module)\n" ) output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) - assert output == "" + assert output == "Success: no issues found in 1 module\n" + + def test_no_modules(self) -> None: + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options([])) + assert remove_color_code(output.getvalue()) == "error: no modules to check\n" + + def test_module_and_typeshed(self) -> None: + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options(["--check-typeshed", "some_module"])) + assert remove_color_code(output.getvalue()) == ( + "error: cannot pass both --check-typeshed and a list of modules\n" + ) diff --git a/mypy/util.py b/mypy/util.py index c207fb7e18cd..03db281ef615 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -11,7 +11,7 @@ import time from typing import ( - TypeVar, List, Tuple, Optional, Dict, Sequence, Iterable, Container, IO, Callable + TypeVar, List, Tuple, Optional, Dict, Sequence, Iterable, Container, IO, Callable, Union, Sized ) from typing_extensions import Final, Type, Literal @@ -724,8 +724,7 @@ def format_success(self, n_sources: int, use_color: bool = True) -> str: n_sources is total number of files passed directly on command line, i.e. excluding stubs and followed imports. 
""" - msg = 'Success: no issues found in {}' \ - ' source file{}'.format(n_sources, 's' if n_sources != 1 else '') + msg = f'Success: no issues found in {n_sources} source file{plural_s(n_sources)}' if not use_color: return msg return self.style(msg, 'green', bold=True) @@ -735,15 +734,11 @@ def format_error( blockers: bool = False, use_color: bool = True ) -> str: """Format a short summary in case of errors.""" - - msg = 'Found {} error{} in {} file{}'.format( - n_errors, 's' if n_errors != 1 else '', - n_files, 's' if n_files != 1 else '' - ) + msg = f'Found {n_errors} error{plural_s(n_errors)} in {n_files} file{plural_s(n_files)}' if blockers: msg += ' (errors prevented further checking)' else: - msg += f" (checked {n_sources} source file{'s' if n_sources != 1 else ''})" + msg += f" (checked {n_sources} source file{plural_s(n_sources)})" if not use_color: return msg return self.style(msg, 'red', bold=True) @@ -773,3 +768,11 @@ def unnamed_function(name: Optional[str]) -> bool: def time_spent_us(t0: float) -> int: return int((time.perf_counter() - t0) * 1e6) + + +def plural_s(s: Union[int, Sized]) -> str: + count = s if isinstance(s, int) else len(s) + if count > 1: + return 's' + else: + return '' From f08c57eb1cf16bb3fb999323bd3f139bd178fd10 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 28 May 2022 01:35:19 -0700 Subject: [PATCH 34/80] stubtest: do not treat py files as source for mypy definitions (#12889) Authored by @KotlinIsland Co-authored-by: KotlinIsland --- mypy/stubtest.py | 3 ++- mypy/test/teststubtest.py | 12 ++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index a85e9335a60d..e87ba447f27a 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1301,7 +1301,8 @@ def build_stubs(modules: List[str], options: Options, find_submodules: bool = Fa def get_stub(module: str) -> Optional[nodes.MypyFile]: """Returns a stub object for the given module, if we've built one.""" - return _all_stubs.get(module) + stub = _all_stubs.get(module) + return stub if stub and stub.is_stub else None def get_typeshed_stdlib_modules( diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 727bbac83cf1..1121afc2fcd2 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -1201,6 +1201,18 @@ def test_missing_stubs(self) -> None: "Found 1 error (checked 1 module)\n" ) + def test_missing_only_stubs(self) -> None: + with use_tmp_dir(TEST_MODULE_NAME): + with open(f"{TEST_MODULE_NAME}.py", "w") as f: + f.write("a = 1") + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options([TEST_MODULE_NAME])) + assert ( + f"error: {TEST_MODULE_NAME} failed to find stubs" + in remove_color_code(output.getvalue()) + ) + def test_get_typeshed_stdlib_modules(self) -> None: stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 6)) assert "builtins" in stdlib From 74b35bef35ad5a7b1aa635f52ffcfd850eadc589 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 28 May 2022 03:10:22 -0700 Subject: [PATCH 35/80] stubtest: revert to allow mixed stubs and inline types, add test (#12896) Reverts #12889 Co-authored-by: hauntsaninja <> --- mypy/stubtest.py | 3 +-- mypy/test/teststubtest.py | 10 +++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index e87ba447f27a..a85e9335a60d 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1301,8 
+1301,7 @@ def build_stubs(modules: List[str], options: Options, find_submodules: bool = Fa def get_stub(module: str) -> Optional[nodes.MypyFile]: """Returns a stub object for the given module, if we've built one.""" - stub = _all_stubs.get(module) - return stub if stub and stub.is_stub else None + return _all_stubs.get(module) def get_typeshed_stdlib_modules( diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 1121afc2fcd2..72944f44414c 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -1201,17 +1201,17 @@ def test_missing_stubs(self) -> None: "Found 1 error (checked 1 module)\n" ) - def test_missing_only_stubs(self) -> None: + def test_only_py(self) -> None: + # in this case, stubtest will check the py against itself + # this is useful to support packages with a mix of stubs and inline types with use_tmp_dir(TEST_MODULE_NAME): with open(f"{TEST_MODULE_NAME}.py", "w") as f: f.write("a = 1") output = io.StringIO() with contextlib.redirect_stdout(output): test_stubs(parse_options([TEST_MODULE_NAME])) - assert ( - f"error: {TEST_MODULE_NAME} failed to find stubs" - in remove_color_code(output.getvalue()) - ) + output_str = remove_color_code(output.getvalue()) + assert output_str == 'Success: no issues found in 1 module\n' def test_get_typeshed_stdlib_modules(self) -> None: stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 6)) From 568b98a6f08205cd364a2511f8c8873f67e8b879 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sun, 29 May 2022 01:00:33 +0100 Subject: [PATCH 36/80] CONTRIBUTING.md: improve setup docs, especially on Windows (#12898) --- CONTRIBUTING.md | 52 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eafefe346d01..c51e812c6492 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -17,48 +17,64 @@ articulated in the [Python Community Code of Conduct](https://www.python.org/psf ### Setup -Run the following: +#### (1) Clone the mypy repository and enter into it ``` -# Clone the mypy repository git clone https://github.com/python/mypy.git - -# Enter the repository cd mypy +``` -# Create then activate a virtual environment +#### (2) Create then activate a virtual environment +``` +# On Windows, the commands may be slightly different. For more details, see +# https://docs.python.org/3/library/venv.html#creating-virtual-environments python3 -m venv venv source venv/bin/activate +``` -# Install the test requirements and the project +#### (3) Install the test requirements and the project +``` python3 -m pip install -r test-requirements.txt python3 -m pip install -e . -hash -r +hash -r # This resets shell PATH cache, not necessary on Windows ``` ### Running tests -Once setup, you should be able to run tests: -``` -python3 runtests.py -``` +Running the full test suite can take a while, and usually isn't necessary when +preparing a PR. Once you file a PR, the full test suite will run on GitHub. +You'll then be able to see any test failures, and make any necessary changes to +your PR. 
-To use mypy to check mypy's own code, run: +However, if you wish to do so, you can run the full test suite +like this: ``` -python3 runtests.py self -# or equivalently: -python3 -m mypy --config-file mypy_self_check.ini -p mypy +python3 runtests.py ``` -You can also use `tox` to run tests, for instance: +You can also use `tox` to run tests (`tox` handles setting up the test environment for you): ``` tox -e py ``` -The easiest way to run a single test is: +Some useful commands for running specific tests include: ``` +# Use mypy to check mypy's own code +python3 runtests.py self +# or equivalently: +python3 -m mypy --config-file mypy_self_check.ini -p mypy + +# Run a single test from the test suite pytest -n0 -k 'test_name' + +# Run all test cases in the "test-data/unit/check-dataclasses.test" file +pytest mypy/test/testcheck.py::TypeCheckSuite::check-dataclasses.test + +# Run the linter +flake8 ``` -There's more useful information on writing and running tests [here](test-data/unit/README.md) + +For an in-depth guide on running and writing tests, +see [the README in the test-data directory](test-data/unit/README.md). ## First time contributors From 2051024c07cd361352e081884db0a061221a9aab Mon Sep 17 00:00:00 2001 From: Erik Kemperman Date: Sun, 29 May 2022 21:44:59 +0200 Subject: [PATCH 37/80] Make PEP561 tests opt-in (fix #12879) (#12883) --- runtests.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/runtests.py b/runtests.py index 1f4167f2bd34..57542f7d458d 100755 --- a/runtests.py +++ b/runtests.py @@ -36,9 +36,14 @@ ] +# This must be enabled by explicitly including 'pytest-extra' on the command line +PYTEST_OPT_IN = [PEP561] + + # These must be enabled by explicitly including 'mypyc-extra' on the command line. MYPYC_OPT_IN = [MYPYC_RUN, MYPYC_RUN_MULTI] + # We split the pytest run into three parts to improve test # parallelization. Each run should have tests that each take a roughly similar # time to run. @@ -64,14 +69,14 @@ ERROR_STREAM])], # Test cases that might take minutes to run - 'pytest-slower': ['pytest', '-q', '-k', ' or '.join( - [PEP561])], + 'pytest-extra': ['pytest', '-q', '-k', ' or '.join(PYTEST_OPT_IN)], # Test cases to run in typeshed CI 'typeshed-ci': ['pytest', '-q', '-k', ' or '.join([CMDLINE, EVALUATION, SAMPLES, TYPESHED])], + # Mypyc tests that aren't run by default, since they are slow and rarely # fail for commits that don't touch mypyc 'mypyc-extra': ['pytest', '-q', '-k', ' or '.join(MYPYC_OPT_IN)], @@ -80,7 +85,8 @@ # Stop run immediately if these commands fail FAST_FAIL = ['self', 'lint'] -DEFAULT_COMMANDS = [cmd for cmd in cmds if cmd not in ('mypyc-extra', 'typeshed-ci')] +EXTRA_COMMANDS = ('pytest-extra', 'mypyc-extra', 'typeshed-ci') +DEFAULT_COMMANDS = [cmd for cmd in cmds if cmd not in EXTRA_COMMANDS] assert all(cmd in cmds for cmd in FAST_FAIL) @@ -124,7 +130,8 @@ def main() -> None: if not set(args).issubset(cmds): print("usage:", prog, " ".join('[%s]' % k for k in cmds)) print() - print('Run the given tests. If given no arguments, run everything except mypyc-extra.') + print('Run the given tests. 
If given no arguments, run everything except' + + ' pytest-extra and mypyc-extra.') exit(1) if not args: From 35b4d8a0279e91ef6cc0db126844ddcf263b161f Mon Sep 17 00:00:00 2001 From: Erik Kemperman Date: Mon, 30 May 2022 08:46:17 +0200 Subject: [PATCH 38/80] Fix docs CI, formatting in command_line.rst (#12905) --- .github/workflows/docs.yml | 2 +- docs/source/command_line.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 79560db2d09b..c4f36d609e74 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: - python-version: '3.10' + python-version: '3.7' - name: Install tox run: pip install --upgrade 'setuptools!=50' 'virtualenv>=20.6.0' tox==3.24.5 - name: Setup tox environment diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 908fa799da46..10416766f261 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -216,7 +216,7 @@ imports. The default logic used to scan through search paths to resolve imports has a quadratic worse-case behavior in some cases, which is for instance triggered - by a large number of folders sharing a top-level namespace as in: + by a large number of folders sharing a top-level namespace as in:: foo/ company/ @@ -230,7 +230,7 @@ imports. company/ baz/ c.py - ... + ... If you are in this situation, you can enable an experimental fast path by setting the :option:`--fast-module-lookup` option. From b07018cd024cc9d15d7fc977148c2adb5c3e5527 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 30 May 2022 13:08:01 +0100 Subject: [PATCH 39/80] Run dataclass plugin before checking type var bounds (#12908) The plugin may add attributes that are needed to perform the bound check. Fixes #12876. --- mypy/semanal_main.py | 7 +++---- test-data/unit/check-dataclasses.test | 18 +++++++++++++++++ test-data/unit/fine-grained.test | 28 +++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 4 deletions(-) diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index bb0af8edc46f..305d1a058d76 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -82,10 +82,10 @@ def semantic_analysis_for_scc(graph: 'Graph', scc: List[str], errors: Errors) -> # We use patch callbacks to fix up things when we expect relatively few # callbacks to be required. apply_semantic_analyzer_patches(patches) - # This pass might need fallbacks calculated above. - check_type_arguments(graph, scc, errors) # Run class decorator hooks (they requite complete MROs and no placeholders). apply_class_plugin_hooks(graph, scc, errors) + # This pass might need fallbacks calculated above and the results of hooks. + check_type_arguments(graph, scc, errors) calculate_class_properties(graph, scc, errors) check_blockers(graph, scc) # Clean-up builtins, so that TypeVar etc. are not accessible without importing. 
@@ -133,10 +133,9 @@ def semantic_analysis_for_targets( process_top_level_function(analyzer, state, state.id, n.node.fullname, n.node, n.active_typeinfo, patches) apply_semantic_analyzer_patches(patches) - + apply_class_plugin_hooks(graph, [state.id], state.manager.errors) check_type_arguments_in_targets(nodes, state, state.manager.errors) calculate_class_properties(graph, [state.id], state.manager.errors) - apply_class_plugin_hooks(graph, [state.id], state.manager.errors) def restore_saved_attrs(saved_attrs: SavedAttributes) -> None: diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 972cc4d40a1e..fb1b4a1e8b46 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1772,3 +1772,21 @@ c = C() c2 = C(x=1) c.x # E: "C" has no attribute "x" [builtins fixtures/dataclasses.pyi] + +[case testDataclassCheckTypeVarBounds] +# flags: --python-version 3.7 +from dataclasses import dataclass +from typing import Protocol, Dict, TypeVar, Generic + +class DataclassProtocol(Protocol): + __dataclass_fields__: Dict + +T = TypeVar("T", bound=DataclassProtocol) + +@dataclass +class MyDataclass: + x: int = 1 + +class MyGeneric(Generic[T]): ... +class MyClass(MyGeneric[MyDataclass]): ... +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index fa6dc52262dd..c2bd67320f3f 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -9734,6 +9734,7 @@ class C: [out] == main:5: error: Unsupported left operand type for + ("str") + [case testNoneAttribute] from typing import Generic, TypeVar @@ -9759,3 +9760,30 @@ class ExampleClass(Generic[T]): self.example_attribute = None [out] == + +[case testDataclassCheckTypeVarBoundsInReprocess] +# flags: --python-version 3.7 +from dataclasses import dataclass +from typing import Protocol, Dict, TypeVar, Generic +from m import x + +class DataclassProtocol(Protocol): + __dataclass_fields__: Dict + +T = TypeVar("T", bound=DataclassProtocol) + +@dataclass +class MyDataclass: + x: int = 1 + +class MyGeneric(Generic[T]): ... +class MyClass(MyGeneric[MyDataclass]): ... + +[file m.py] +x: int +[file m.py.2] +x: str + +[builtins fixtures/dataclasses.pyi] +[out] +== From 2004ae023b9d3628d9f09886cbbc20868aee8554 Mon Sep 17 00:00:00 2001 From: Ashley Whetter Date: Mon, 30 May 2022 12:42:24 -0700 Subject: [PATCH 40/80] Search sys.path for PEP-561 compliant packages (#11143) Closes #5701 This replaces the old hand crafted search code that was more fragile. 
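
For orientation, a rough sketch of the new lookup strategy (an illustration only, not the exact code added in mypy/pyinfo.py): ask the target interpreter for its sys.path and drop the standard-library locations, since stdlib modules should keep coming from typeshed rather than be found as PEP 561 packages. The helper name below is made up for the sketch; the real implementation is pyinfo.getsearchdirs() in the diff that follows.

    # Simplified sketch of the sys.path-based search.
    import os
    import sys
    import sysconfig

    def sketch_getsearchdirs():
        stdlib = sysconfig.get_path("stdlib")
        # Exclude the current directory and standard-library locations so that
        # stdlib modules are still typed via typeshed, not the runtime copies.
        excludes = {
            os.path.abspath(os.getcwd()),
            stdlib,
            os.path.join(stdlib, "lib-dynload"),
        }
        return [os.path.abspath(p) for p in sys.path
                if os.path.abspath(p) not in excludes]
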
--- mypy/main.py | 6 +- mypy/modulefinder.py | 110 ++++-------------- mypy/pyinfo.py | 42 ++++--- mypy/test/testcmdline.py | 3 + mypy/test/testmodulefinder.py | 10 +- .../modulefinder-site-packages/baz.pth | 1 - .../modulefinder-site-packages/dne.pth | 1 - .../modulefinder-site-packages/ignored.pth | 3 - .../modulefinder-site-packages/neighbor.pth | 1 - test-data/unit/cmdline.test | 26 +++++ 10 files changed, 80 insertions(+), 123 deletions(-) delete mode 100644 test-data/packages/modulefinder-site-packages/baz.pth delete mode 100644 test-data/packages/modulefinder-site-packages/dne.pth delete mode 100644 test-data/packages/modulefinder-site-packages/ignored.pth delete mode 100644 test-data/packages/modulefinder-site-packages/neighbor.pth diff --git a/mypy/main.py b/mypy/main.py index 57727821274e..14b318ead3e7 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -16,7 +16,7 @@ from mypy import util from mypy.modulefinder import ( BuildSource, FindModuleCache, SearchPaths, - get_site_packages_dirs, mypy_path, + get_search_dirs, mypy_path, ) from mypy.find_sources import create_source_list, InvalidSourceList from mypy.fscache import FileSystemCache @@ -1043,10 +1043,10 @@ def set_strict_flags() -> None: # Set target. if special_opts.modules + special_opts.packages: options.build_type = BuildType.MODULE - egg_dirs, site_packages = get_site_packages_dirs(options.python_executable) + search_dirs = get_search_dirs(options.python_executable) search_paths = SearchPaths((os.getcwd(),), tuple(mypy_path() + options.mypy_path), - tuple(egg_dirs + site_packages), + tuple(search_dirs), ()) targets = [] # TODO: use the same cache that the BuildManager will diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 43cc4fc0a6d3..8b3dc2e72084 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -19,7 +19,7 @@ else: import tomli as tomllib -from typing import Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Union +from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union from typing_extensions import Final, TypeAlias as _TypeAlias from mypy.fscache import FileSystemCache @@ -330,6 +330,9 @@ def _find_module_non_stub_helper(self, components: List[str], elif not plausible_match and (self.fscache.isdir(dir_path) or self.fscache.isfile(dir_path + ".py")): plausible_match = True + # If this is not a directory then we can't traverse further into it + if not self.fscache.isdir(dir_path): + break if is_legacy_bundled_package(components[0], self.python_major_ver): if (len(components) == 1 or (self.find_module(components[0]) is @@ -724,97 +727,32 @@ def default_lib_path(data_dir: str, @functools.lru_cache(maxsize=None) -def get_prefixes(python_executable: Optional[str]) -> Tuple[str, str]: - """Get the sys.base_prefix and sys.prefix for the given python. - - This runs a subprocess call to get the prefix paths of the given Python executable. - To avoid repeatedly calling a subprocess (which can be slow!) we - lru_cache the results. 
- """ - if python_executable is None: - return '', '' - elif python_executable == sys.executable: - # Use running Python's package dirs - return pyinfo.getprefixes() - else: - # Use subprocess to get the package directory of given Python - # executable - return ast.literal_eval( - subprocess.check_output([python_executable, pyinfo.__file__, 'getprefixes'], - stderr=subprocess.PIPE).decode()) - - -@functools.lru_cache(maxsize=None) -def get_site_packages_dirs(python_executable: Optional[str]) -> Tuple[List[str], List[str]]: +def get_search_dirs(python_executable: Optional[str]) -> List[str]: """Find package directories for given python. - This runs a subprocess call, which generates a list of the egg directories, and the site - package directories. To avoid repeatedly calling a subprocess (which can be slow!) we + This runs a subprocess call, which generates a list of the directories in sys.path. + To avoid repeatedly calling a subprocess (which can be slow!) we lru_cache the results. """ if python_executable is None: - return [], [] + return [] elif python_executable == sys.executable: # Use running Python's package dirs - site_packages = pyinfo.getsitepackages() + sys_path = pyinfo.getsearchdirs() else: # Use subprocess to get the package directory of given Python # executable try: - site_packages = ast.literal_eval( - subprocess.check_output([python_executable, pyinfo.__file__, 'getsitepackages'], + sys_path = ast.literal_eval( + subprocess.check_output([python_executable, pyinfo.__file__, 'getsearchdirs'], stderr=subprocess.PIPE).decode()) except OSError as err: reason = os.strerror(err.errno) raise CompileError( [f"mypy: Invalid python executable '{python_executable}': {reason}"] ) from err - return expand_site_packages(site_packages) - - -def expand_site_packages(site_packages: List[str]) -> Tuple[List[str], List[str]]: - """Expands .pth imports in site-packages directories""" - egg_dirs: List[str] = [] - for dir in site_packages: - if not os.path.isdir(dir): - continue - pth_filenames = sorted(name for name in os.listdir(dir) if name.endswith(".pth")) - for pth_filename in pth_filenames: - egg_dirs.extend(_parse_pth_file(dir, pth_filename)) - - return egg_dirs, site_packages - - -def _parse_pth_file(dir: str, pth_filename: str) -> Iterator[str]: - """ - Mimics a subset of .pth import hook from Lib/site.py - See https://github.com/python/cpython/blob/3.5/Lib/site.py#L146-L185 - """ - - pth_file = os.path.join(dir, pth_filename) - try: - f = open(pth_file) - except OSError: - return - with f: - for line in f.readlines(): - if line.startswith("#"): - # Skip comment lines - continue - if line.startswith(("import ", "import\t")): - # import statements in .pth files are not supported - continue - - yield _make_abspath(line.rstrip(), dir) - - -def _make_abspath(path: str, root: str) -> str: - """Take a path and make it absolute relative to root if not already absolute.""" - if os.path.isabs(path): - return os.path.normpath(path) - else: - return os.path.join(root, os.path.normpath(path)) + return sys_path def add_py2_mypypath_entries(mypypath: List[str]) -> List[str]: @@ -903,27 +841,21 @@ def compute_search_paths(sources: List[BuildSource], if options.python_version[0] == 2: mypypath = add_py2_mypypath_entries(mypypath) - egg_dirs, site_packages = get_site_packages_dirs(options.python_executable) - base_prefix, prefix = get_prefixes(options.python_executable) - is_venv = base_prefix != prefix - for site_dir in site_packages: - assert site_dir not in lib_path - if (site_dir in mypypath or - 
any(p.startswith(site_dir + os.path.sep) for p in mypypath) or - os.path.altsep and any(p.startswith(site_dir + os.path.altsep) for p in mypypath)): - print(f"{site_dir} is in the MYPYPATH. Please remove it.", file=sys.stderr) + search_dirs = get_search_dirs(options.python_executable) + for search_dir in search_dirs: + assert search_dir not in lib_path + if (search_dir in mypypath or + any(p.startswith(search_dir + os.path.sep) for p in mypypath) or + (os.path.altsep + and any(p.startswith(search_dir + os.path.altsep) for p in mypypath))): + print(f"{search_dir} is in the MYPYPATH. Please remove it.", file=sys.stderr) print("See https://mypy.readthedocs.io/en/stable/running_mypy.html" "#how-mypy-handles-imports for more info", file=sys.stderr) sys.exit(1) - elif site_dir in python_path and (is_venv and not site_dir.startswith(prefix)): - print("{} is in the PYTHONPATH. Please change directory" - " so it is not.".format(site_dir), - file=sys.stderr) - sys.exit(1) return SearchPaths(python_path=tuple(reversed(python_path)), mypy_path=tuple(mypypath), - package_path=tuple(egg_dirs + site_packages), + package_path=tuple(search_dirs), typeshed_path=tuple(lib_path)) diff --git a/mypy/pyinfo.py b/mypy/pyinfo.py index ab2d3286bd5c..c129063a01a4 100644 --- a/mypy/pyinfo.py +++ b/mypy/pyinfo.py @@ -6,41 +6,39 @@ library found in Python 2. This file is run each mypy run, so it should be kept as fast as possible. """ -import site +import os import sys +import sysconfig if __name__ == '__main__': sys.path = sys.path[1:] # we don't want to pick up mypy.types MYPY = False if MYPY: - from typing import List, Tuple + from typing import List -def getprefixes(): - # type: () -> Tuple[str, str] - return getattr(sys, "base_prefix", sys.prefix), sys.prefix - - -def getsitepackages(): +def getsearchdirs(): # type: () -> List[str] - res = [] - if hasattr(site, 'getsitepackages'): - res.extend(site.getsitepackages()) - - if hasattr(site, 'getusersitepackages') and site.ENABLE_USER_SITE: - res.insert(0, site.getusersitepackages()) - else: - from distutils.sysconfig import get_python_lib - res = [get_python_lib()] - return res + # Do not include things from the standard library + # because those should come from typeshed. 
+ stdlib_zip = os.path.join( + sys.base_exec_prefix, + getattr(sys, "platlibdir", "lib"), + "python{}{}.zip".format(sys.version_info.major, sys.version_info.minor) + ) + stdlib = sysconfig.get_path("stdlib") + stdlib_ext = os.path.join(stdlib, "lib-dynload") + cwd = os.path.abspath(os.getcwd()) + excludes = set([cwd, stdlib_zip, stdlib, stdlib_ext]) + + abs_sys_path = (os.path.abspath(p) for p in sys.path) + return [p for p in abs_sys_path if p not in excludes] if __name__ == '__main__': - if sys.argv[-1] == 'getsitepackages': - print(repr(getsitepackages())) - elif sys.argv[-1] == 'getprefixes': - print(repr(getprefixes())) + if sys.argv[-1] == 'getsearchdirs': + print(repr(getsearchdirs())) else: print("ERROR: incorrect argument to pyinfo.py.", file=sys.stderr) sys.exit(1) diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 62e258677c7f..9983dc554323 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -65,7 +65,10 @@ def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: fixed = [python3_path, '-m', 'mypy'] env = os.environ.copy() env.pop('COLUMNS', None) + extra_path = os.path.join(os.path.abspath(test_temp_dir), 'pypath') env['PYTHONPATH'] = PREFIX + if os.path.isdir(extra_path): + env['PYTHONPATH'] += os.pathsep + extra_path process = subprocess.Popen(fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, diff --git a/mypy/test/testmodulefinder.py b/mypy/test/testmodulefinder.py index d26e7c1efe0c..fc80893659c2 100644 --- a/mypy/test/testmodulefinder.py +++ b/mypy/test/testmodulefinder.py @@ -5,7 +5,6 @@ FindModuleCache, SearchPaths, ModuleNotFoundReason, - expand_site_packages ) from mypy.test.helpers import Suite, assert_equal @@ -149,12 +148,17 @@ def setUp(self) -> None: "modulefinder-site-packages", )) - egg_dirs, site_packages = expand_site_packages([self.package_dir]) + package_paths = ( + os.path.join(self.package_dir, "baz"), + os.path.join(self.package_dir, "..", "not-a-directory"), + os.path.join(self.package_dir, "..", "modulefinder-src"), + self.package_dir, + ) self.search_paths = SearchPaths( python_path=(), mypy_path=(os.path.join(data_path, "pkg1"),), - package_path=tuple(egg_dirs + site_packages), + package_path=tuple(package_paths), typeshed_path=(), ) options = Options() diff --git a/test-data/packages/modulefinder-site-packages/baz.pth b/test-data/packages/modulefinder-site-packages/baz.pth deleted file mode 100644 index 76018072e09c..000000000000 --- a/test-data/packages/modulefinder-site-packages/baz.pth +++ /dev/null @@ -1 +0,0 @@ -baz diff --git a/test-data/packages/modulefinder-site-packages/dne.pth b/test-data/packages/modulefinder-site-packages/dne.pth deleted file mode 100644 index 1d88f1e3c6f1..000000000000 --- a/test-data/packages/modulefinder-site-packages/dne.pth +++ /dev/null @@ -1 +0,0 @@ -../does_not_exist diff --git a/test-data/packages/modulefinder-site-packages/ignored.pth b/test-data/packages/modulefinder-site-packages/ignored.pth deleted file mode 100644 index 0aa17eb504c1..000000000000 --- a/test-data/packages/modulefinder-site-packages/ignored.pth +++ /dev/null @@ -1,3 +0,0 @@ -# Includes comment lines and -import statements -# That are ignored by the .pth parser diff --git a/test-data/packages/modulefinder-site-packages/neighbor.pth b/test-data/packages/modulefinder-site-packages/neighbor.pth deleted file mode 100644 index a39c0061648c..000000000000 --- a/test-data/packages/modulefinder-site-packages/neighbor.pth +++ /dev/null @@ -1 +0,0 @@ -../modulefinder-src diff --git 
a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 86a975fc4949..016d215027ae 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -365,6 +365,32 @@ main.py:6: error: Unsupported operand types for + ("int" and "str") main.py:7: error: Module has no attribute "y" main.py:8: error: Unsupported operand types for + (Module and "int") +[case testConfigFollowImportsSysPath] +# cmd: mypy main.py +[file main.py] +from a import x +x + 0 +x + '' # E +import a +a.x + 0 +a.x + '' # E +a.y # E +a + 0 # E +[file mypy.ini] +\[mypy] +follow_imports = normal +no_silence_site_packages = True +[file pypath/a/__init__.py] +x = 0 +x += '' # Error reported here +[file pypath/a/py.typed] +[out] +pypath/a/__init__.py:2: error: Unsupported operand types for + ("int" and "str") +main.py:3: error: Unsupported operand types for + ("int" and "str") +main.py:6: error: Unsupported operand types for + ("int" and "str") +main.py:7: error: Module has no attribute "y" +main.py:8: error: Unsupported operand types for + (Module and "int") + [case testConfigFollowImportsSilent] # cmd: mypy main.py [file main.py] From 0628e09ede4c81b913e859c750c581fbb85fa481 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 30 May 2022 17:04:49 -0700 Subject: [PATCH 41/80] mypy_primer: report lines truncated (#12910) Co-authored-by: hauntsaninja <> --- .github/workflows/mypy_primer_comment.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml index 36977862eebe..3c208d5990a1 100644 --- a/.github/workflows/mypy_primer_comment.yml +++ b/.github/workflows/mypy_primer_comment.yml @@ -51,7 +51,9 @@ jobs: let data = fs.readFileSync('fulldiff.txt', { encoding: 'utf8' }) // posting comment fails if too long, so truncate if (data.length > 30000) { - data = data.substring(0, 30000) + `\n\n... (truncated ${data.length - 30000} chars) ...\n` + let truncated_data = data.substring(0, 30000) + let lines_truncated = data.split('\n').length - truncated_data.split('\n').length + data = truncated_data + `\n\n... (truncated ${lines_truncated} lines) ...\n` } console.log("Diff from mypy_primer:") From c2055280ab8f41382a584dc186099755b3cf014e Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 31 May 2022 16:21:22 -0700 Subject: [PATCH 42/80] Remove failing test on Python 3.11 (#12917) asyncio.coroutine has been removed. I don't think this test case is particularly useful. Co-authored-by: hauntsaninja <> --- test-data/unit/stubgen.test | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 927cc5617c75..a7c2ae6d21fd 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -1364,23 +1364,6 @@ class F: @t.coroutine def g(): ... -[case testCoroutineSpecialCase_import] -import asyncio - -__all__ = ['C'] - -@asyncio.coroutine -def f(): - pass - -class C: - def f(self): - pass -[out] -import asyncio - -class C: - def f(self) -> None: ... -- Tests for stub generation from semantically analyzed trees. -- These tests are much slower, so use the `_semanal` suffix only when needed. 
From 1636a0549670e1b75e2987c16ef26ebf91dfbde9 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 1 Jun 2022 12:48:17 +0100 Subject: [PATCH 43/80] [mypyc] Simplify code generated for SetAttr and non-pointer type (#12916) Previously we sometimes generated an if statement with an empty body. --- mypyc/codegen/emitfunc.py | 7 ++++--- mypyc/test/test_emitfunc.py | 7 +++++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 7b44b22d6cc1..ce428daaee71 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -373,12 +373,13 @@ def visit_set_attr(self, op: SetAttr) -> None: else: # ...and struct access for normal attributes. attr_expr = self.get_attr_expr(obj, op, decl_cl) - if not op.is_init: + if not op.is_init and attr_rtype.is_refcounted: + # This is not an initalization (where we know that the attribute was + # previously undefined), so decref the old value. always_defined = cl.is_always_defined(op.attr) if not always_defined: self.emitter.emit_undefined_attr_check(attr_rtype, attr_expr, '!=') - if attr_rtype.is_refcounted: - self.emitter.emit_dec_ref(attr_expr, attr_rtype) + self.emitter.emit_dec_ref(attr_expr, attr_rtype) if not always_defined: self.emitter.emit_line('}') # This steals the reference to src, so we don't need to increment the arg diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index fe47af2300d7..96d9155214b3 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -326,6 +326,13 @@ def test_set_attr(self) -> None: cpy_r_r0 = 1; """) + def test_set_attr_non_refcounted(self) -> None: + self.assert_emit( + SetAttr(self.r, 'x', self.b, 1), + """((mod___AObject *)cpy_r_r)->_x = cpy_r_b; + cpy_r_r0 = 1; + """) + def test_dict_get_item(self) -> None: self.assert_emit(CallC(dict_get_item_op.c_function_name, [self.d, self.o2], dict_get_item_op.return_type, dict_get_item_op.steals, From d21c5abcfba7cbae4398bdc075471e2ecc565e1d Mon Sep 17 00:00:00 2001 From: jhance Date: Thu, 2 Jun 2022 13:34:06 -0700 Subject: [PATCH 44/80] Treat generators with await as async. (#12925) Treat generators with await as async. --- mypy/checkexpr.py | 5 +++-- mypy/traverser.py | 16 ++++++++++++++++ test-data/unit/check-async-await.test | 11 +++++++++++ 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index b9c657e83e63..67c7ada55c1a 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -14,6 +14,7 @@ make_optional_type, ) from mypy.semanal_enum import ENUM_BASES +from mypy.traverser import has_await_expression from mypy.types import ( Type, AnyType, CallableType, Overloaded, NoneType, TypeVarType, TupleType, TypedDictType, Instance, ErasedType, UnionType, @@ -3798,8 +3799,8 @@ def visit_set_comprehension(self, e: SetComprehension) -> Type: def visit_generator_expr(self, e: GeneratorExpr) -> Type: # If any of the comprehensions use async for, the expression will return an async generator - # object - if any(e.is_async): + # object, or if the left-side expression uses await. 
+ if any(e.is_async) or has_await_expression(e.left_expr): typ = 'typing.AsyncGenerator' # received type is always None in async generator expressions additional_args: List[Type] = [NoneType()] diff --git a/mypy/traverser.py b/mypy/traverser.py index d9681bdd81ba..d4e87b820dfb 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -18,6 +18,7 @@ ConditionalExpr, TypeApplication, ExecStmt, Import, ImportFrom, LambdaExpr, ComparisonExpr, OverloadedFuncDef, YieldFromExpr, YieldExpr, StarExpr, BackquoteExpr, AwaitExpr, PrintStmt, SuperExpr, Node, REVEAL_TYPE, + Expression, ) @@ -397,6 +398,21 @@ def has_yield_expression(fdef: FuncBase) -> bool: return seeker.found +class AwaitSeeker(TraverserVisitor): + def __init__(self) -> None: + super().__init__() + self.found = False + + def visit_await_expr(self, o: AwaitExpr) -> None: + self.found = True + + +def has_await_expression(expr: Expression) -> bool: + seeker = AwaitSeeker() + expr.accept(seeker) + return seeker.found + + class ReturnCollector(FuncCollectorBase): def __init__(self) -> None: super().__init__() diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index a9c6507bceef..4d856db869a7 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -864,3 +864,14 @@ async with C() as x: # E: "async with" outside async function [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case testAsyncGeneratorExpressionAwait] +from typing import AsyncGenerator + +async def f() -> AsyncGenerator[int, None]: + async def g(x: int) -> int: + return x + + return (await g(x) for x in [1, 2, 3]) + +[typing fixtures/typing-async.pyi] From 9a35fdc94c71f6c427624b9b4d51f7495cbb97f9 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 3 Jun 2022 11:27:56 +0100 Subject: [PATCH 45/80] [mypyc] Support type checking native ints (#12881) Add minimal support for type checking native int types. Currently `mypy_extensions.i32` and `mypy_extensions.i64` are supported, but adding additional types will be easy. Summary of key type checking properties: 1. Coercion both *from* `int` and *to* `int` is supported. 2. Native int types are "sticky" -- operations involving both a native int and a regular int result in a native int. 3. Different native int types can't be mixed without explicit conversions. Work on mypyc/mypyc#837. See the issue for a more detailed description of the feature. I'll add support for compiling native integers in separate PRs. Native ints only work in tests without using hacks, since `mypy_extensions` doesn't include them yet. 
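
As a rough illustration of the three rules above (a minimal sketch distilled from the new test cases; it assumes `i32`/`i64` are importable from `mypy_extensions` as described, and the exact diagnostics may differ):

    from mypy_extensions import i32, i64

    def f(x: i64) -> None: ...

    n = 0
    f(i64(n))            # explicit conversion from int
    f(n)                 # implicit coercion from int is also accepted
    m: int = i64(3)      # and coercion back to int

    reveal_type(i64(1) + n)   # i64 -- native ints are "sticky" in mixed arithmetic
    i64(1) + i32(2)           # error: different native int types can't be mixed
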
--- mypy/checkexpr.py | 6 +- mypy/fixup.py | 3 +- mypy/join.py | 22 +-- mypy/meet.py | 13 +- mypy/nodes.py | 22 ++- mypy/semanal_classprop.py | 22 ++- mypy/semanal_main.py | 5 +- mypy/server/astdiff.py | 2 +- mypy/server/astmerge.py | 3 +- mypy/subtypes.py | 11 +- mypy/test/testcheck.py | 1 + test-data/unit/check-native-int.test | 151 ++++++++++++++++++++ test-data/unit/fixtures/dict.pyi | 1 + test-data/unit/lib-stub/mypy_extensions.pyi | 66 ++++++++- test-data/unit/semanal-types.test | 2 +- 15 files changed, 299 insertions(+), 31 deletions(-) create mode 100644 test-data/unit/check-native-int.test diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 67c7ada55c1a..193e56b6002f 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2586,13 +2586,17 @@ def lookup_definer(typ: Instance, attr_name: str) -> Optional[str]: elif (is_subtype(right_type, left_type) and isinstance(left_type, Instance) and isinstance(right_type, Instance) + and left_type.type.alt_promote is not right_type.type and lookup_definer(left_type, op_name) != lookup_definer(right_type, rev_op_name)): - # When we do "A() + B()" where B is a subclass of B, we'll actually try calling + # When we do "A() + B()" where B is a subclass of A, we'll actually try calling # B's __radd__ method first, but ONLY if B explicitly defines or overrides the # __radd__ method. # # This mechanism lets subclasses "refine" the expected outcome of the operation, even # if they're located on the RHS. + # + # As a special case, the alt_promote check makes sure that we don't use the + # __radd__ method of int if the LHS is a native int type. variants_raw = [ (right_op, right_type, left_expr), diff --git a/mypy/fixup.py b/mypy/fixup.py index 1f04c2b181fa..85c1df079a5a 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -48,7 +48,8 @@ def visit_type_info(self, info: TypeInfo) -> None: for base in info.bases: base.accept(self.type_fixer) if info._promote: - info._promote.accept(self.type_fixer) + for p in info._promote: + p.accept(self.type_fixer) if info.tuple_type: info.tuple_type.accept(self.type_fixer) if info.typeddict_type: diff --git a/mypy/join.py b/mypy/join.py index 94d0afc434f9..70c250a7703c 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -87,10 +87,13 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType: def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: # Give preference to joins via duck typing relationship, so that # join(int, float) == float, for example. - if t.type._promote and is_subtype(t.type._promote, s): - return join_types(t.type._promote, s, self) - elif s.type._promote and is_subtype(s.type._promote, t): - return join_types(t, s.type._promote, self) + for p in t.type._promote: + if is_subtype(p, s): + return join_types(p, s, self) + for p in s.type._promote: + if is_subtype(p, t): + return join_types(t, p, self) + # Compute the "best" supertype of t when joined with s. # The definition of "best" may evolve; for now it is the one with # the longest MRO. Ties are broken by using the earlier base. 
@@ -101,11 +104,12 @@ def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: if best is None or is_better(res, best): best = res assert best is not None - promote = get_proper_type(t.type._promote) - if isinstance(promote, Instance): - res = self.join_instances(promote, s) - if is_better(res, best): - best = res + for promote in t.type._promote: + promote = get_proper_type(promote) + if isinstance(promote, Instance): + res = self.join_instances(promote, s) + if is_better(res, best): + best = res return best diff --git a/mypy/meet.py b/mypy/meet.py index 2602f0c1abd8..583503bdf614 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -86,7 +86,12 @@ def narrow_declared_type(declared: Type, narrowed: Type) -> Type: and narrowed.type.is_metaclass()): # We'd need intersection types, so give up. return declared - elif isinstance(declared, (Instance, TupleType, TypeType, LiteralType)): + elif isinstance(declared, Instance): + if declared.type.alt_promote: + # Special case: low-level integer type can't be narrowed + return declared + return meet_types(declared, narrowed) + elif isinstance(declared, (TupleType, TypeType, LiteralType)): return meet_types(declared, narrowed) elif isinstance(declared, TypedDictType) and isinstance(narrowed, Instance): # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). @@ -574,6 +579,12 @@ def visit_instance(self, t: Instance) -> ProperType: else: return NoneType() else: + alt_promote = t.type.alt_promote + if alt_promote and alt_promote is self.s.type: + return t + alt_promote = self.s.type.alt_promote + if alt_promote and alt_promote is t.type: + return self.s if is_subtype(t, self.s): return t elif is_subtype(self.s, t): diff --git a/mypy/nodes.py b/mypy/nodes.py index d510cbeeec62..abc8666e390d 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2566,7 +2566,7 @@ class is generic then it will be a type constructor of higher kind. 'deletable_attributes', 'slots', 'assuming', 'assuming_proper', 'inferring', 'is_enum', 'fallback_to_any', 'type_vars', 'has_param_spec_type', 'bases', '_promote', 'tuple_type', 'is_named_tuple', 'typeddict_type', - 'is_newtype', 'is_intersection', 'metadata', + 'is_newtype', 'is_intersection', 'metadata', 'alt_promote', ) _fullname: Bogus[str] # Fully qualified name @@ -2658,7 +2658,17 @@ class is generic then it will be a type constructor of higher kind. # even though it's not a subclass in Python. The non-standard # `@_promote` decorator introduces this, and there are also # several builtin examples, in particular `int` -> `float`. - _promote: Optional["mypy.types.Type"] + _promote: List["mypy.types.Type"] + + # This is used for promoting native integer types such as 'i64' to + # 'int'. (_promote is used for the other direction.) This only + # supports one-step promotions (e.g., i64 -> int, not + # i64 -> int -> float, and this isn't used to promote in joins. + # + # This results in some unintuitive results, such as that even + # though i64 is compatible with int and int is compatible with + # float, i64 is *not* compatible with float. + alt_promote: Optional["TypeInfo"] # Representation of a Tuple[...] base class, if the class has any # (e.g., for named tuples). 
If this is not None, the actual Type @@ -2718,7 +2728,8 @@ def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> No self.is_final = False self.is_enum = False self.fallback_to_any = False - self._promote = None + self._promote = [] + self.alt_promote = None self.tuple_type = None self.is_named_tuple = False self.typeddict_type = None @@ -2897,7 +2908,7 @@ def serialize(self) -> JsonDict: 'has_param_spec_type': self.has_param_spec_type, 'bases': [b.serialize() for b in self.bases], 'mro': [c.fullname for c in self.mro], - '_promote': None if self._promote is None else self._promote.serialize(), + '_promote': [p.serialize() for p in self._promote], 'declared_metaclass': (None if self.declared_metaclass is None else self.declared_metaclass.serialize()), 'metaclass_type': @@ -2924,8 +2935,7 @@ def deserialize(cls, data: JsonDict) -> 'TypeInfo': ti.type_vars = data['type_vars'] ti.has_param_spec_type = data['has_param_spec_type'] ti.bases = [mypy.types.Instance.deserialize(b) for b in data['bases']] - ti._promote = (None if data['_promote'] is None - else mypy.types.deserialize_type(data['_promote'])) + ti._promote = [mypy.types.deserialize_type(p) for p in data['_promote']] ti.declared_metaclass = (None if data['declared_metaclass'] is None else mypy.types.Instance.deserialize(data['declared_metaclass'])) ti.metaclass_type = (None if data['metaclass_type'] is None diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index e985b55a20d1..5344f321420f 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -146,20 +146,21 @@ def calculate_class_vars(info: TypeInfo) -> None: node.is_classvar = True -def add_type_promotion(info: TypeInfo, module_names: SymbolTable, options: Options) -> None: +def add_type_promotion(info: TypeInfo, module_names: SymbolTable, options: Options, + builtin_names: SymbolTable) -> None: """Setup extra, ad-hoc subtyping relationships between classes (promotion). This includes things like 'int' being compatible with 'float'. """ defn = info.defn - promote_target: Optional[Type] = None + promote_targets: List[Type] = [] for decorator in defn.decorators: if isinstance(decorator, CallExpr): analyzed = decorator.analyzed if isinstance(analyzed, PromoteExpr): # _promote class decorator (undocumented feature). - promote_target = analyzed.type - if not promote_target: + promote_targets.append(analyzed.type) + if not promote_targets: promotions = (TYPE_PROMOTIONS_PYTHON3 if options.python_version[0] >= 3 else TYPE_PROMOTIONS_PYTHON2) if defn.fullname in promotions: @@ -168,5 +169,14 @@ def add_type_promotion(info: TypeInfo, module_names: SymbolTable, options: Optio if target_sym: target_info = target_sym.node assert isinstance(target_info, TypeInfo) - promote_target = Instance(target_info, []) - defn.info._promote = promote_target + promote_targets.append(Instance(target_info, [])) + # Special case the promotions between 'int' and native integer types. + # These have promotions going both ways, such as from 'int' to 'i64' + # and 'i64' to 'int', for convenience. 
+ if defn.fullname == 'mypy_extensions.i64' or defn.fullname == 'mypy_extensions.i32': + int_sym = builtin_names['int'] + assert isinstance(int_sym.node, TypeInfo) + int_sym.node._promote.append(Instance(defn.info, [])) + defn.info.alt_promote = int_sym.node + if promote_targets: + defn.info._promote.extend(promote_targets) diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 305d1a058d76..b25aa0e225a6 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -435,6 +435,8 @@ def apply_hooks_to_class(self: SemanticAnalyzer, def calculate_class_properties(graph: 'Graph', scc: List[str], errors: Errors) -> None: + builtins = graph['builtins'].tree + assert builtins for module in scc: state = graph[module] tree = state.tree @@ -445,7 +447,8 @@ def calculate_class_properties(graph: 'Graph', scc: List[str], errors: Errors) - calculate_class_abstract_status(node.node, tree.is_stub, errors) check_protocol_status(node.node, errors) calculate_class_vars(node.node) - add_type_promotion(node.node, tree.names, graph[module].options) + add_type_promotion(node.node, tree.names, graph[module].options, + builtins.names) def check_blockers(graph: 'Graph', scc: List[str]) -> None: diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 1f3b68fbde1b..1f1c6b65f385 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -220,7 +220,7 @@ def snapshot_definition(node: Optional[SymbolNode], # but this currently seems a bit ad hoc. tuple(snapshot_type(tdef) for tdef in node.defn.type_vars), [snapshot_type(base) for base in node.bases], - snapshot_optional_type(node._promote)) + [snapshot_type(p) for p in node._promote]) prefix = node.fullname symbol_table = snapshot_symbol_table(prefix, node.names) # Special dependency for abstract attribute handling. diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 4d684e226b21..be69b3c00d97 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -307,7 +307,8 @@ def process_type_info(self, info: Optional[TypeInfo]) -> None: return self.fixup_type(info.declared_metaclass) self.fixup_type(info.metaclass_type) - self.fixup_type(info._promote) + for target in info._promote: + self.fixup_type(target) self.fixup_type(info.tuple_type) self.fixup_type(info.typeddict_type) info.defn.info = self.fixup(info) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index bbde38c5f92f..8b7b3153ecaf 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -270,9 +270,15 @@ def visit_instance(self, left: Instance) -> bool: return True if not self.ignore_promotions: for base in left.type.mro: - if base._promote and self._is_subtype(base._promote, self.right): + if base._promote and any(self._is_subtype(p, self.right) + for p in base._promote): TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) return True + # Special case: Low-level integer types are compatible with 'int'. We can't + # use promotions, since 'int' is already promoted to low-level integer types, + # and we can't have circular promotions. + if left.type.alt_promote is right.type: + return True rname = right.type.fullname # Always try a nominal check if possible, # there might be errors that a user wants to silence *once*. 
@@ -1415,7 +1421,8 @@ def visit_instance(self, left: Instance) -> bool: return True if not self.ignore_promotions: for base in left.type.mro: - if base._promote and self._is_proper_subtype(base._promote, right): + if base._promote and any(self._is_proper_subtype(p, right) + for p in base._promote): TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) return True diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 279ecdb2d22d..ddcb78df8100 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -105,6 +105,7 @@ 'check-singledispatch.test', 'check-slots.test', 'check-formatting.test', + 'check-native-int.test', ] # Tests that use Python 3.8-only AST features (like expression-scoped ignores): diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test new file mode 100644 index 000000000000..14bea5d715c3 --- /dev/null +++ b/test-data/unit/check-native-int.test @@ -0,0 +1,151 @@ +[case testNativeIntBasics] +from mypy_extensions import i32, i64 + +def f(x: int) -> i32: + return i32(x) + +def g(x: i32) -> None: + pass + +reveal_type(i32(1) + i32(2)) # N: Revealed type is "mypy_extensions.i32" +reveal_type(i64(1) + i64(2)) # N: Revealed type is "mypy_extensions.i64" +i32(1) + i64(2) # E: Unsupported operand types for + ("i32" and "i64") +i64(1) + i32(2) # E: Unsupported operand types for + ("i64" and "i32") +g(i32(2)) +g(i64(2)) # E: Argument 1 to "g" has incompatible type "i64"; expected "i32" +[builtins fixtures/dict.pyi] + +[case testNativeIntCoercions] +from mypy_extensions import i32, i64 + +def f1(x: int) -> None: pass +def f2(x: i32) -> None: pass + +a: i32 = 1 +b: i64 = 2 +c: i64 = a # E: Incompatible types in assignment (expression has type "i32", variable has type "i64") +d: i64 = i64(a) +e: i32 = b # E: Incompatible types in assignment (expression has type "i64", variable has type "i32") +f: i32 = i32(b) +g: int = a +h: int = b + +f1(1) +f1(a) +f1(b) +f2(1) +f2(g) +f2(h) +f2(a) +f2(b) # E: Argument 1 to "f2" has incompatible type "i64"; expected "i32" +[builtins fixtures/dict.pyi] + +[case testNativeIntJoins] +from typing import TypeVar, Any +from mypy_extensions import i32, i64 + +T = TypeVar('T') + +def join(x: T, y: T) -> T: return x + +n32: i32 = 0 +n64: i64 = 1 +n = 2 + +reveal_type(join(n32, n)) # N: Revealed type is "mypy_extensions.i32" +reveal_type(join(n, n32)) # N: Revealed type is "mypy_extensions.i32" +reveal_type(join(n64, n)) # N: Revealed type is "mypy_extensions.i64" +reveal_type(join(n, n64)) # N: Revealed type is "mypy_extensions.i64" +# i32 and i64 aren't treated as compatible +reveal_type(join(n32, n64)) # N: Revealed type is "builtins.object" +reveal_type(join(n64, n32)) # N: Revealed type is "builtins.object" + +a: Any +reveal_type(join(n, a)) # N: Revealed type is "Any" +reveal_type(join(n32, a)) # N: Revealed type is "Any" +reveal_type(join(a, n64)) # N: Revealed type is "Any" +reveal_type(join(n64, a)) # N: Revealed type is "Any" +reveal_type(join(a, n64)) # N: Revealed type is "Any" +[builtins fixtures/dict.pyi] + +[case testNativeIntMeets] +# flags: --strict-optional +from typing import TypeVar, Callable, Any +from mypy_extensions import i32, i64 + +T = TypeVar('T') + +def f32(x: i32) -> None: pass +def f64(x: i64) -> None: pass +def f(x: int) -> None: pass +def fa(x: Any) -> None: pass + +def meet(c1: Callable[[T], None], c2: Callable[[T], None]) -> T: + pass + +reveal_type(meet(f32, f)) # N: Revealed type is "mypy_extensions.i32" +reveal_type(meet(f, f32)) # N: Revealed type is 
"mypy_extensions.i32" +reveal_type(meet(f64, f)) # N: Revealed type is "mypy_extensions.i64" +reveal_type(meet(f, f64)) # N: Revealed type is "mypy_extensions.i64" +reveal_type(meet(f32, f64)) # N: Revealed type is "" +reveal_type(meet(f64, f32)) # N: Revealed type is "" + +reveal_type(meet(f, fa)) # N: Revealed type is "builtins.int" +reveal_type(meet(f32, fa)) # N: Revealed type is "mypy_extensions.i32" +reveal_type(meet(fa, f32)) # N: Revealed type is "mypy_extensions.i32" +reveal_type(meet(f64, fa)) # N: Revealed type is "mypy_extensions.i64" +reveal_type(meet(fa, f64)) # N: Revealed type is "mypy_extensions.i64" +[builtins fixtures/dict.pyi] + +[case testNativeIntCoerceInArithmetic] +from mypy_extensions import i32, i64 + +reveal_type(i32(1) + 1) # N: Revealed type is "mypy_extensions.i32" +reveal_type(1 + i32(1)) # N: Revealed type is "mypy_extensions.i32" +reveal_type(i64(1) + 1) # N: Revealed type is "mypy_extensions.i64" +reveal_type(1 + i64(1)) # N: Revealed type is "mypy_extensions.i64" +n = int() +reveal_type(i32(1) + n) # N: Revealed type is "mypy_extensions.i32" +reveal_type(n + i32(1)) # N: Revealed type is "mypy_extensions.i32" +[builtins fixtures/dict.pyi] + +[case testNativeIntNoNarrowing] +from mypy_extensions import i32 + +x: i32 = 1 +if int(): + x = 2 + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" +reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + +y = 1 +if int(): + y = i32(1) + reveal_type(y) # N: Revealed type is "mypy_extensions.i32" +reveal_type(y) # N: Revealed type is "builtins.int" +[builtins fixtures/dict.pyi] + +[case testNativeIntFloatConversion] +# flags: --strict-optional +from typing import TypeVar, Callable +from mypy_extensions import i32 + +x: i32 = 1.1 # E: Incompatible types in assignment (expression has type "float", variable has type "i32") +y: float = i32(1) # E: Incompatible types in assignment (expression has type "i32", variable has type "float") + +T = TypeVar('T') + +def join(x: T, y: T) -> T: return x + +reveal_type(join(x, y)) # N: Revealed type is "builtins.object" +reveal_type(join(y, x)) # N: Revealed type is "builtins.object" + +def meet(c1: Callable[[T], None], c2: Callable[[T], None]) -> T: + pass + +def ff(x: float) -> None: pass +def fi32(x: i32) -> None: pass + +reveal_type(meet(ff, fi32)) # N: Revealed type is "" +reveal_type(meet(fi32, ff)) # N: Revealed type is "" +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index 9e7cb6f8c70d..48c16f262f3e 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -33,6 +33,7 @@ class dict(Mapping[KT, VT]): class int: # for convenience def __add__(self, x: Union[int, complex]) -> int: pass + def __radd__(self, x: int) -> int: pass def __sub__(self, x: Union[int, complex]) -> int: pass def __neg__(self) -> int: pass real: int diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi index 306d217f478e..6274163c497d 100644 --- a/test-data/unit/lib-stub/mypy_extensions.pyi +++ b/test-data/unit/lib-stub/mypy_extensions.pyi @@ -1,6 +1,7 @@ # NOTE: Requires fixtures/dict.pyi from typing import ( - Any, Dict, Type, TypeVar, Optional, Any, Generic, Mapping, NoReturn as NoReturn, Iterator + Any, Dict, Type, TypeVar, Optional, Any, Generic, Mapping, NoReturn as NoReturn, Iterator, + Union ) import sys @@ -48,3 +49,66 @@ def trait(cls: Any) -> Any: ... mypyc_attr: Any class FlexibleAlias(Generic[_T, _U]): ... 
+ +if sys.version_info >= (3, 0): + _Int = Union[int, i32, i64] + + class i32: + def __init__(self, x: _Int) -> None: ... + def __add__(self, x: i32) -> i32: ... + def __radd__(self, x: i32) -> i32: ... + def __sub__(self, x: i32) -> i32: ... + def __rsub__(self, x: i32) -> i32: ... + def __mul__(self, x: i32) -> i32: ... + def __rmul__(self, x: i32) -> i32: ... + def __floordiv__(self, x: i32) -> i32: ... + def __rfloordiv__(self, x: i32) -> i32: ... + def __mod__(self, x: i32) -> i32: ... + def __rmod__(self, x: i32) -> i32: ... + def __and__(self, x: i32) -> i32: ... + def __rand__(self, x: i32) -> i32: ... + def __or__(self, x: i32) -> i32: ... + def __ror__(self, x: i32) -> i32: ... + def __xor__(self, x: i32) -> i32: ... + def __rxor__(self, x: i32) -> i32: ... + def __lshift__(self, x: i32) -> i32: ... + def __rlshift__(self, x: i32) -> i32: ... + def __rshift__(self, x: i32) -> i32: ... + def __rrshift__(self, x: i32) -> i32: ... + def __neg__(self) -> i32: ... + def __invert__(self) -> i32: ... + def __pos__(self) -> i32: ... + def __lt__(self, x: i32) -> bool: ... + def __le__(self, x: i32) -> bool: ... + def __ge__(self, x: i32) -> bool: ... + def __gt__(self, x: i32) -> bool: ... + + class i64: + def __init__(self, x: _Int) -> None: ... + def __add__(self, x: i64) -> i64: ... + def __radd__(self, x: i64) -> i64: ... + def __sub__(self, x: i64) -> i64: ... + def __rsub__(self, x: i64) -> i64: ... + def __mul__(self, x: i64) -> i64: ... + def __rmul__(self, x: i64) -> i64: ... + def __floordiv__(self, x: i64) -> i64: ... + def __rfloordiv__(self, x: i64) -> i64: ... + def __mod__(self, x: i64) -> i64: ... + def __rmod__(self, x: i64) -> i64: ... + def __and__(self, x: i64) -> i64: ... + def __rand__(self, x: i64) -> i64: ... + def __or__(self, x: i64) -> i64: ... + def __ror__(self, x: i64) -> i64: ... + def __xor__(self, x: i64) -> i64: ... + def __rxor__(self, x: i64) -> i64: ... + def __lshift__(self, x: i64) -> i64: ... + def __rlshift__(self, x: i64) -> i64: ... + def __rshift__(self, x: i64) -> i64: ... + def __rrshift__(self, x: i64) -> i64: ... + def __neg__(self) -> i64: ... + def __invert__(self) -> i64: ... + def __pos__(self) -> i64: ... + def __lt__(self, x: i64) -> bool: ... + def __le__(self, x: i64) -> bool: ... + def __ge__(self, x: i64) -> bool: ... + def __gt__(self, x: i64) -> bool: ... 
diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index f2cd737b1a6c..d832772f5f81 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -1421,7 +1421,7 @@ MypyFile:1( ImportFrom:1(typing, [_promote]) ClassDef:3( S - Promote(builtins.str) + Promote([builtins.str]) Decorators( PromoteExpr:2(builtins.str)) PassStmt:3())) From 8153e47b45247c81ba9792ab46ab788513613f6b Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sat, 4 Jun 2022 16:27:57 +0100 Subject: [PATCH 46/80] Fix bug in constraints solver regarding `ParamSpec` upper bounds (#12938) Fixes #12930 --- mypy/constraints.py | 2 ++ test-data/unit/check-parameter-specification.test | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 9a6d87575bdc..4d9527733375 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -571,6 +571,8 @@ def visit_instance(self, template: Instance) -> List[Constraint]: if not actual.values: return infer_constraints(template, actual.upper_bound, self.direction) return [] + elif isinstance(actual, ParamSpecType): + return infer_constraints(template, actual.upper_bound, self.direction) else: return [] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 4dae32978263..682ce93cb7ea 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1066,16 +1066,21 @@ def run_job(job: Job[...]) -> T: ... [builtins fixtures/tuple.pyi] [case testTupleAndDictOperationsOnParamSpecArgsAndKwargs] -from typing import Callable +from typing import Callable, Iterator, Iterable, TypeVar, Tuple from typing_extensions import ParamSpec P = ParamSpec('P') +T = TypeVar('T') +def enumerate(x: Iterable[T]) -> Iterator[Tuple[int, T]]: ... def func(callback: Callable[P, str]) -> Callable[P, str]: def inner(*args: P.args, **kwargs: P.kwargs) -> str: reveal_type(args[5]) # N: Revealed type is "builtins.object" for a in args: reveal_type(a) # N: Revealed type is "builtins.object" + for idx, a in enumerate(args): + reveal_type(idx) # N: Revealed type is "builtins.int" + reveal_type(a) # N: Revealed type is "builtins.object" b = 'foo' in args reveal_type(b) # N: Revealed type is "builtins.bool" reveal_type(args.count(42)) # N: Revealed type is "builtins.int" From 9611e2d0b1d9130ca1591febdd60a3523cf739eb Mon Sep 17 00:00:00 2001 From: Hal Blackburn Date: Sun, 5 Jun 2022 07:57:38 +0100 Subject: [PATCH 47/80] Fix sidebar logo not loading in HTML docs (#12939) The mypy logo wasn't loading on https://mypy.readthedocs.io/ because it's referenced via an http:// (not https) URL. The logo's URL is http://mypy-lang.org/static/mypy_light.svg; that server is not accessible via https, so I've moved mypy_light.svg into docs/source and referenced it from there. The project README also referenced the logo via the same URL, so for consistency, the README now points at the same logo file in docs/source. (Although the README's logo did load because GitHub proxies external resources referenced in markdown files). 
--- README.md | 2 +- docs/source/conf.py | 2 +- docs/source/mypy_light.svg | 99 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 101 insertions(+), 2 deletions(-) create mode 100644 docs/source/mypy_light.svg diff --git a/README.md b/README.md index c40a224ea042..e9f11833d0d1 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -mypy logo +mypy logo Mypy: Static Typing for Python ======================================= diff --git a/docs/source/conf.py b/docs/source/conf.py index 5c3bf94c2f8c..6f6b8b276d60 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -122,7 +122,7 @@ # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = "http://mypy-lang.org/static/mypy_light.svg" +html_logo = "mypy_light.svg" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 diff --git a/docs/source/mypy_light.svg b/docs/source/mypy_light.svg new file mode 100644 index 000000000000..4eaf65dbf344 --- /dev/null +++ b/docs/source/mypy_light.svg @@ -0,0 +1,99 @@ + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + From 7e38877750c04abfd436e6ad98da23d6deca4e8f Mon Sep 17 00:00:00 2001 From: Jingchen Ye <11172084+97littleleaf11@users.noreply.github.com> Date: Mon, 6 Jun 2022 08:16:52 +0800 Subject: [PATCH 48/80] Update badge for build status (#12941) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e9f11833d0d1..9a63090e95a7 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ Mypy: Static Typing for Python [![Stable Version](https://img.shields.io/pypi/v/mypy?color=blue)](https://pypi.org/project/mypy/) [![Downloads](https://img.shields.io/pypi/dm/mypy)](https://pypistats.org/packages/mypy) -[![Build Status](https://api.travis-ci.com/python/mypy.svg?branch=master)](https://travis-ci.com/python/mypy) +[![Build Status](https://github.com/python/mypy/actions/workflows/test.yml/badge.svg)](https://github.com/python/mypy/actions) [![Documentation Status](https://readthedocs.org/projects/mypy/badge/?version=latest)](https://mypy.readthedocs.io/en/latest/?badge=latest) [![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/) From c3e32e33b37b13d08182b71dedba5c184e4ee216 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 7 Jun 2022 18:16:37 +0100 Subject: [PATCH 49/80] Fix crash on redefined class variable annotated with `Final[]` (#12951) --- mypy/checker.py | 15 ++++++++------- test-data/unit/check-final.test | 8 ++++++++ 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index e5abcfcf4541..688fbd28739d 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2715,13 +2715,14 @@ def check_final(self, if is_final_decl and self.scope.active_class(): lv = lvs[0] assert isinstance(lv, RefExpr) - assert isinstance(lv.node, Var) - if (lv.node.final_unset_in_class and not lv.node.final_set_in_init and - not self.is_stub and # It is OK to skip initializer in stub files. - # Avoid extra error messages, if there is no type in Final[...], - # then we already reported the error about missing r.h.s. 
- isinstance(s, AssignmentStmt) and s.type is not None): - self.msg.final_without_value(s) + if lv.node is not None: + assert isinstance(lv.node, Var) + if (lv.node.final_unset_in_class and not lv.node.final_set_in_init and + not self.is_stub and # It is OK to skip initializer in stub files. + # Avoid extra error messages, if there is no type in Final[...], + # then we already reported the error about missing r.h.s. + isinstance(s, AssignmentStmt) and s.type is not None): + self.msg.final_without_value(s) for lv in lvs: if isinstance(lv, RefExpr) and isinstance(lv.node, Var): name = lv.node.name diff --git a/test-data/unit/check-final.test b/test-data/unit/check-final.test index 2f298ad1be3b..da034caced76 100644 --- a/test-data/unit/check-final.test +++ b/test-data/unit/check-final.test @@ -1109,3 +1109,11 @@ class A(ABC): @final # E: Method B is both abstract and final @abstractmethod def B(self) -> None: ... + +[case testFinalClassVariableRedefinitionDoesNotCrash] +# This used to crash -- see #12950 +from typing import Final + +class MyClass: + a: None + a: Final[int] = 1 # E: Cannot redefine an existing name as final # E: Name "a" already defined on line 5 From 9b4bce9065cbef6185fbc77f2849b63dc9e5e293 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Wed, 8 Jun 2022 00:49:11 +0100 Subject: [PATCH 50/80] Improve handling of overloads with ParamSpec (#12953) --- mypy/meet.py | 26 ++++++++++++----------- test-data/unit/check-overloading.test | 30 +++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 12 deletions(-) diff --git a/mypy/meet.py b/mypy/meet.py index 583503bdf614..ebaf0f675ef1 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -6,7 +6,7 @@ TupleType, TypedDictType, ErasedType, UnionType, PartialType, DeletedType, UninhabitedType, TypeType, TypeOfAny, Overloaded, FunctionLike, LiteralType, ProperType, get_proper_type, get_proper_types, TypeAliasType, TypeGuardedType, - ParamSpecType, Parameters, UnpackType, TypeVarTupleType, + ParamSpecType, Parameters, UnpackType, TypeVarTupleType, TypeVarLikeType ) from mypy.subtypes import is_equivalent, is_subtype, is_callable_compatible, is_proper_subtype from mypy.erasetype import erase_type @@ -117,8 +117,8 @@ def get_possible_variants(typ: Type) -> List[Type]: If this function receives any other type, we return a list containing just that original type. (E.g. pretend the type was contained within a singleton union). - The only exception is regular TypeVars: we return a list containing that TypeVar's - upper bound. + The only current exceptions are regular TypeVars and ParamSpecs. For these "TypeVarLike"s, + we return a list containing that TypeVarLike's upper bound. This function is useful primarily when checking to see if two types are overlapping: the algorithm to check if two unions are overlapping is fundamentally the same as @@ -134,6 +134,8 @@ def get_possible_variants(typ: Type) -> List[Type]: return typ.values else: return [typ.upper_bound] + elif isinstance(typ, ParamSpecType): + return [typ.upper_bound] elif isinstance(typ, UnionType): return list(typ.items) elif isinstance(typ, Overloaded): @@ -244,36 +246,36 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: right_possible = get_possible_variants(right) # We start by checking multi-variant types like Unions first. We also perform - # the same logic if either type happens to be a TypeVar. + # the same logic if either type happens to be a TypeVar/ParamSpec/TypeVarTuple. 
# - # Handling the TypeVars now lets us simulate having them bind to the corresponding + # Handling the TypeVarLikes now lets us simulate having them bind to the corresponding # type -- if we deferred these checks, the "return-early" logic of the other # checks will prevent us from detecting certain overlaps. # - # If both types are singleton variants (and are not TypeVars), we've hit the base case: + # If both types are singleton variants (and are not TypeVarLikes), we've hit the base case: # we skip these checks to avoid infinitely recursing. - def is_none_typevar_overlap(t1: Type, t2: Type) -> bool: + def is_none_typevarlike_overlap(t1: Type, t2: Type) -> bool: t1, t2 = get_proper_types((t1, t2)) - return isinstance(t1, NoneType) and isinstance(t2, TypeVarType) + return isinstance(t1, NoneType) and isinstance(t2, TypeVarLikeType) if prohibit_none_typevar_overlap: - if is_none_typevar_overlap(left, right) or is_none_typevar_overlap(right, left): + if is_none_typevarlike_overlap(left, right) or is_none_typevarlike_overlap(right, left): return False if (len(left_possible) > 1 or len(right_possible) > 1 - or isinstance(left, TypeVarType) or isinstance(right, TypeVarType)): + or isinstance(left, TypeVarLikeType) or isinstance(right, TypeVarLikeType)): for l in left_possible: for r in right_possible: if _is_overlapping_types(l, r): return True return False - # Now that we've finished handling TypeVars, we're free to end early + # Now that we've finished handling TypeVarLikes, we're free to end early # if one one of the types is None and we're running in strict-optional mode. # (None only overlaps with None in strict-optional mode). # - # We must perform this check after the TypeVar checks because + # We must perform this check after the TypeVarLike checks because # a TypeVar could be bound to None, for example. if state.strict_optional and isinstance(left, NoneType) != isinstance(right, NoneType): diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 8259f2754bce..312d7a6cc7ae 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6506,3 +6506,33 @@ if True: @overload def f3(g: D) -> D: ... def f3(g): ... # E: Name "f3" already defined on line 32 + +[case testOverloadingWithParamSpec] +from typing import TypeVar, Callable, Any, overload +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") +R = TypeVar("R") + +@overload +def func(x: Callable[Concatenate[Any, P], R]) -> Callable[P, R]: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types +@overload +def func(x: Callable[P, R]) -> Callable[Concatenate[str, P], R]: ... +def func(x: Callable[..., R]) -> Callable[..., R]: ... + +def foo(arg1: str, arg2: int) -> bytes: ... +reveal_type(func(foo)) # N: Revealed type is "def (arg2: builtins.int) -> builtins.bytes" + +def bar() -> int: ... 
+reveal_type(func(bar)) # N: Revealed type is "def (builtins.str) -> builtins.int" + +baz: Callable[[str, str], str] = lambda x, y: 'baz' +reveal_type(func(baz)) # N: Revealed type is "def (builtins.str) -> builtins.str" + +eggs = lambda: 'eggs' +reveal_type(func(eggs)) # N: Revealed type is "def (builtins.str) -> builtins.str" + +spam: Callable[..., str] = lambda x, y: 'baz' +reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> Any" + +[builtins fixtures/paramspec.pyi] From 3833277fc292c9d4a514337b244d146228f1776e Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 9 Jun 2022 13:40:30 -0700 Subject: [PATCH 51/80] Raise minimum filelock version (#12960) This is the version of filelock that includes py.typed Co-authored-by: hauntsaninja <> --- test-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 4b6c1751cacf..c50705dff739 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,8 +1,8 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 -filelock>=3.0.0,<3.4.2; python_version<'3.7' -filelock>=3.0.0; python_version>='3.7' +filelock>=3.3.0,<3.4.2; python_version<'3.7' +filelock>=3.3.0; python_version>='3.7' flake8==3.9.2 flake8-bugbear==22.3.20 flake8-pyi>=20.5 From 9ccd081550010700341a46f0e08b8954e5beef70 Mon Sep 17 00:00:00 2001 From: jhance Date: Fri, 10 Jun 2022 09:10:11 -0700 Subject: [PATCH 52/80] Support inferring Unpack mixed with other items (#12769) The main substance here modifies mypy/constraints.py to not assume that template.items has length 1 in the case that there is an unpack. We instead assume that that there is only a singular unpack, and do a former pass to find what index it is in, and then resolve the unpack to the corresponding subset of whatever tuple we are matching against. --- mypy/constraints.py | 69 ++++++++++++++++++++----- mypy/expandtype.py | 2 + mypy/type_visitor.py | 2 +- mypy/typeops.py | 20 ++++++- test-data/unit/check-typevar-tuple.test | 67 ++++++++++++++++++++++++ 5 files changed, 143 insertions(+), 17 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 4d9527733375..2f071e13a002 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -702,20 +702,46 @@ def visit_tuple_type(self, template: TupleType) -> List[Constraint]: isinstance(actual, Instance) and actual.type.fullname == "builtins.tuple" ) - if len(template.items) == 1: - item = get_proper_type(template.items[0]) - if isinstance(item, UnpackType): - unpacked_type = get_proper_type(item.type) - if isinstance(unpacked_type, TypeVarTupleType): - if ( - isinstance(actual, (TupleType, AnyType)) - or is_varlength_tuple - ): - return [Constraint( - type_var=unpacked_type.id, - op=self.direction, - target=actual, - )] + unpack_index = find_unpack_in_tuple(template) + + if unpack_index is not None: + unpack_item = get_proper_type(template.items[unpack_index]) + assert isinstance(unpack_item, UnpackType) + + unpacked_type = get_proper_type(unpack_item.type) + if isinstance(unpacked_type, TypeVarTupleType): + if is_varlength_tuple: + # This case is only valid when the unpack is the only + # item in the tuple. + # + # TODO: We should support this in the case that all the items + # in the tuple besides the unpack have the same type as the + # varlength tuple's type. E.g. Tuple[int, ...] should be valid + # where we expect Tuple[int, Unpack[Ts]], but not for Tuple[str, Unpack[Ts]]. 
+ assert len(template.items) == 1 + + if ( + isinstance(actual, (TupleType, AnyType)) + or is_varlength_tuple + ): + modified_actual = actual + if isinstance(actual, TupleType): + # Exclude the items from before and after the unpack index. + head = unpack_index + tail = len(template.items) - unpack_index - 1 + if tail: + modified_actual = actual.copy_modified( + items=actual.items[head:-tail], + ) + else: + modified_actual = actual.copy_modified( + items=actual.items[head:], + ) + return [Constraint( + type_var=unpacked_type.id, + op=self.direction, + target=modified_actual, + )] if isinstance(actual, TupleType) and len(actual.items) == len(template.items): res: List[Constraint] = [] @@ -828,3 +854,18 @@ def find_matching_overload_items(overloaded: Overloaded, # it maintains backward compatibility. res = items[:] return res + + +def find_unpack_in_tuple(t: TupleType) -> Optional[int]: + unpack_index: Optional[int] = None + for i, item in enumerate(t.items): + proper_item = get_proper_type(item) + if isinstance(proper_item, UnpackType): + # We cannot fail here, so we must check this in an earlier + # semanal phase. + # Funky code here avoids mypyc narrowing the type of unpack_index. + old_index = unpack_index + assert old_index is None + # Don't return so that we can also sanity check there is only one. + unpack_index = i + return unpack_index diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 985114a53051..ce43aeaeb6e5 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -161,6 +161,8 @@ def expand_unpack(self, t: UnpackType) -> Optional[Union[List[Type], Instance, A return repl elif isinstance(repl, TypeVarTupleType): return [UnpackType(typ=repl)] + elif isinstance(repl, UnpackType): + return [repl] elif isinstance(repl, UninhabitedType): return None else: diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index 85701a51f128..79b4cb12d512 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -209,7 +209,7 @@ def visit_partial_type(self, t: PartialType) -> Type: return t def visit_unpack_type(self, t: UnpackType) -> Type: - return t.type.accept(self) + return UnpackType(t.type.accept(self)) def visit_callable_type(self, t: CallableType) -> Type: return t.copy_modified(arg_types=self.translate_types(t.arg_types), diff --git a/mypy/typeops.py b/mypy/typeops.py index 22ca0b6ec2fe..835c8f0a7229 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -14,7 +14,8 @@ TupleType, Instance, FunctionLike, Type, CallableType, TypeVarLikeType, Overloaded, TypeVarType, UninhabitedType, FormalArgument, UnionType, NoneType, AnyType, TypeOfAny, TypeType, ProperType, LiteralType, get_proper_type, get_proper_types, - TypeAliasType, TypeQuery, ParamSpecType, Parameters, ENUM_REMOVED_PROPS + TypeAliasType, TypeQuery, ParamSpecType, Parameters, UnpackType, TypeVarTupleType, + ENUM_REMOVED_PROPS, ) from mypy.nodes import ( FuncBase, FuncItem, FuncDef, OverloadedFuncDef, TypeInfo, ARG_STAR, ARG_STAR2, ARG_POS, @@ -42,7 +43,22 @@ def tuple_fallback(typ: TupleType) -> Instance: info = typ.partial_fallback.type if info.fullname != 'builtins.tuple': return typ.partial_fallback - return Instance(info, [join_type_list(typ.items)]) + items = [] + for item in typ.items: + proper_type = get_proper_type(item) + if isinstance(proper_type, UnpackType): + unpacked_type = get_proper_type(proper_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + items.append(unpacked_type.upper_bound) + elif isinstance(unpacked_type, TupleType): + # TODO: might make sense to do recursion here to support 
nested unpacks + # of tuple constants + items.extend(unpacked_type.items) + else: + raise NotImplementedError + else: + items.append(item) + return Instance(info, [join_type_list(items)]) def type_object_type_from_function(signature: FunctionLike, diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index ed11e5b53263..e98f5a69001e 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -27,3 +27,70 @@ reveal_type(g(args, args2)) # N: Revealed type is "Tuple[builtins.int, builtins reveal_type(g(args, args3)) # N: Revealed type is "builtins.tuple[builtins.object, ...]" reveal_type(g(any, any)) # N: Revealed type is "Any" [builtins fixtures/tuple.pyi] + +[case testTypeVarTupleMixed] +from typing import Tuple +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def to_str(i: int) -> str: + ... + +def f(a: Tuple[int, Unpack[Ts]]) -> Tuple[str, Unpack[Ts]]: + return (to_str(a[0]),) + a[1:] + +def g(a: Tuple[Unpack[Ts], int]) -> Tuple[Unpack[Ts], str]: + return a[:-1] + (to_str(a[-1]),) + +def h(a: Tuple[bool, int, Unpack[Ts], str, object]) -> Tuple[Unpack[Ts]]: + return a[2:-2] + +empty = () +bad_args: Tuple[str, str] +var_len_tuple: Tuple[int, ...] + +f_args: Tuple[int, str] +f_args2: Tuple[int] +f_args3: Tuple[int, str, bool] + +reveal_type(f(f_args)) # N: Revealed type is "Tuple[builtins.str, builtins.str]" +reveal_type(f(f_args2)) # N: Revealed type is "Tuple[builtins.str]" +reveal_type(f(f_args3)) # N: Revealed type is "Tuple[builtins.str, builtins.str, builtins.bool]" +f(empty) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Tuple[int]" +f(bad_args) # E: Argument 1 to "f" has incompatible type "Tuple[str, str]"; expected "Tuple[int, str]" +# TODO: This hits a crash where we assert len(templates.items) == 1. See visit_tuple_type +# in mypy/constraints.py. +#f(var_len_tuple) + +g_args: Tuple[str, int] +reveal_type(g(g_args)) # N: Revealed type is "Tuple[builtins.str, builtins.str]" + +h_args: Tuple[bool, int, str, int, str, object] +reveal_type(h(h_args)) # N: Revealed type is "Tuple[builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleChaining] +from typing import Tuple +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def to_str(i: int) -> str: + ... + +def f(a: Tuple[int, Unpack[Ts]]) -> Tuple[str, Unpack[Ts]]: + return (to_str(a[0]),) + a[1:] + +def g(a: Tuple[bool, int, Unpack[Ts], str, object]) -> Tuple[str, Unpack[Ts]]: + return f(a[1:-2]) + +def h(a: Tuple[bool, int, Unpack[Ts], str, object]) -> Tuple[str, Unpack[Ts]]: + x = f(a[1:-2]) + return x + +args: Tuple[bool, int, str, int, str, object] +reveal_type(g(args)) # N: Revealed type is "Tuple[builtins.str, builtins.str, builtins.int]" +reveal_type(h(args)) # N: Revealed type is "Tuple[builtins.str, builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + From ddbea6988c0913c70ed16cd2fda6064e301b4b63 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 11 Jun 2022 19:19:19 +0100 Subject: [PATCH 53/80] [mypyc] Foundational work to help support native ints (#12884) Some IR and codegen changes that help with native int support. This was split off from a branch with a working implementation of native ints to make reviewing easier. Some tests and primitives are missing here and I will include them in follow-up PRs. Summary of major changes below. 1) Allow ambiguous error returns from functions. 
Since all values of `i64` values are valid return values, none can be reserved for errors. The approach here is to have the error value overlap a valid value, and use `PyErr_Occurred()` as a secondary check to make sure it actually was an error. 2) Add `Extend` op which extends a value to a larger integer type with either zero or sign extension. 3) Improve subtype checking with native int types. 4) Fill in other minor gaps in IR and codegen support for native ints. Work on mypyc/mypyc#837. --- mypyc/analysis/dataflow.py | 5 +- mypyc/analysis/ircheck.py | 5 +- mypyc/analysis/selfleaks.py | 6 +- mypyc/codegen/emit.py | 35 ++++++++++-- mypyc/codegen/emitfunc.py | 25 ++++++++- mypyc/ir/ops.py | 101 +++++++++++++++++++++++++++------ mypyc/ir/pprint.py | 9 ++- mypyc/ir/rtypes.py | 103 +++++++++++++++++++++++++++++----- mypyc/irbuild/ll_builder.py | 2 +- mypyc/subtype.py | 9 ++- mypyc/test/test_emitfunc.py | 38 +++++++++++-- mypyc/test/test_subtype.py | 21 ++++++- mypyc/transform/exceptions.py | 33 ++++++++++- 13 files changed, 335 insertions(+), 57 deletions(-) diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py index 053efc733845..528c04af546f 100644 --- a/mypyc/analysis/dataflow.py +++ b/mypyc/analysis/dataflow.py @@ -9,7 +9,7 @@ BasicBlock, OpVisitor, Assign, AssignMulti, Integer, LoadErrorValue, RegisterOp, Goto, Branch, Return, Call, Box, Unbox, Cast, Op, Unreachable, TupleGet, TupleSet, GetAttr, SetAttr, LoadLiteral, LoadStatic, InitStatic, MethodCall, RaiseStandardError, CallC, LoadGlobal, - Truncate, IntOp, LoadMem, GetElementPtr, LoadAddress, ComparisonOp, SetMem, KeepAlive + Truncate, IntOp, LoadMem, GetElementPtr, LoadAddress, ComparisonOp, SetMem, KeepAlive, Extend ) from mypyc.ir.func_ir import all_values @@ -199,6 +199,9 @@ def visit_call_c(self, op: CallC) -> GenAndKill[T]: def visit_truncate(self, op: Truncate) -> GenAndKill[T]: return self.visit_register_op(op) + def visit_extend(self, op: Extend) -> GenAndKill[T]: + return self.visit_register_op(op) + def visit_load_global(self, op: LoadGlobal) -> GenAndKill[T]: return self.visit_register_op(op) diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py index 6c8e8d7f18e5..8217d9865c4b 100644 --- a/mypyc/analysis/ircheck.py +++ b/mypyc/analysis/ircheck.py @@ -7,7 +7,7 @@ InitStatic, TupleGet, TupleSet, IncRef, DecRef, Call, MethodCall, Cast, Box, Unbox, RaiseStandardError, CallC, Truncate, LoadGlobal, IntOp, ComparisonOp, LoadMem, SetMem, GetElementPtr, LoadAddress, KeepAlive, Register, Integer, - BaseAssign + BaseAssign, Extend ) from mypyc.ir.rtypes import ( RType, RPrimitive, RUnion, is_object_rprimitive, RInstance, RArray, @@ -326,6 +326,9 @@ def visit_call_c(self, op: CallC) -> None: def visit_truncate(self, op: Truncate) -> None: pass + def visit_extend(self, op: Extend) -> None: + pass + def visit_load_global(self, op: LoadGlobal) -> None: pass diff --git a/mypyc/analysis/selfleaks.py b/mypyc/analysis/selfleaks.py index ae3731a40ac3..4ba6cfb28eb3 100644 --- a/mypyc/analysis/selfleaks.py +++ b/mypyc/analysis/selfleaks.py @@ -4,7 +4,8 @@ OpVisitor, Register, Goto, Assign, AssignMulti, SetMem, Call, MethodCall, LoadErrorValue, LoadLiteral, GetAttr, SetAttr, LoadStatic, InitStatic, TupleGet, TupleSet, Box, Unbox, Cast, RaiseStandardError, CallC, Truncate, LoadGlobal, IntOp, ComparisonOp, LoadMem, - GetElementPtr, LoadAddress, KeepAlive, Branch, Return, Unreachable, RegisterOp, BasicBlock + GetElementPtr, LoadAddress, KeepAlive, Branch, Return, Unreachable, RegisterOp, BasicBlock, + Extend ) from 
mypyc.ir.rtypes import RInstance from mypyc.analysis.dataflow import MAYBE_ANALYSIS, run_analysis, AnalysisResult, CFG @@ -115,6 +116,9 @@ def visit_call_c(self, op: CallC) -> GenAndKill: def visit_truncate(self, op: Truncate) -> GenAndKill: return CLEAN + def visit_extend(self, op: Extend) -> GenAndKill: + return CLEAN + def visit_load_global(self, op: LoadGlobal) -> GenAndKill: return CLEAN diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 0815dd3c3bd0..b1f886ee3f5f 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -17,7 +17,8 @@ is_list_rprimitive, is_dict_rprimitive, is_set_rprimitive, is_tuple_rprimitive, is_none_rprimitive, is_object_rprimitive, object_rprimitive, is_str_rprimitive, int_rprimitive, is_optional_type, optional_value_type, is_int32_rprimitive, - is_int64_rprimitive, is_bit_rprimitive, is_range_rprimitive, is_bytes_rprimitive + is_int64_rprimitive, is_bit_rprimitive, is_range_rprimitive, is_bytes_rprimitive, + is_fixed_width_rtype ) from mypyc.ir.func_ir import FuncDecl from mypyc.ir.class_ir import ClassIR, all_concrete_classes @@ -479,9 +480,16 @@ def emit_cast(self, return # TODO: Verify refcount handling. - if (is_list_rprimitive(typ) or is_dict_rprimitive(typ) or is_set_rprimitive(typ) - or is_str_rprimitive(typ) or is_range_rprimitive(typ) or is_float_rprimitive(typ) - or is_int_rprimitive(typ) or is_bool_rprimitive(typ) or is_bit_rprimitive(typ)): + if (is_list_rprimitive(typ) + or is_dict_rprimitive(typ) + or is_set_rprimitive(typ) + or is_str_rprimitive(typ) + or is_range_rprimitive(typ) + or is_float_rprimitive(typ) + or is_int_rprimitive(typ) + or is_bool_rprimitive(typ) + or is_bit_rprimitive(typ) + or is_fixed_width_rtype(typ)): if declare_dest: self.emit_line(f'PyObject *{dest};') if is_list_rprimitive(typ): @@ -496,12 +504,13 @@ def emit_cast(self, prefix = 'PyRange' elif is_float_rprimitive(typ): prefix = 'CPyFloat' - elif is_int_rprimitive(typ): + elif is_int_rprimitive(typ) or is_fixed_width_rtype(typ): + # TODO: Range check for fixed-width types? prefix = 'PyLong' elif is_bool_rprimitive(typ) or is_bit_rprimitive(typ): prefix = 'PyBool' else: - assert False, 'unexpected primitive type' + assert False, f'unexpected primitive type: {typ}' check = '({}_Check({}))' if likely: check = f'(likely{check})' @@ -765,6 +774,20 @@ def emit_unbox(self, self.emit_line(failure) self.emit_line('} else') self.emit_line(f' {dest} = 1;') + elif is_int64_rprimitive(typ): + # Whether we are borrowing or not makes no difference. + if declare_dest: + self.emit_line(f'int64_t {dest};') + self.emit_line(f'{dest} = CPyLong_AsInt64({src});') + # TODO: Handle 'optional' + # TODO: Handle 'failure' + elif is_int32_rprimitive(typ): + # Whether we are borrowing or not makes no difference. 
+ if declare_dest: + self.emit_line('int32_t {};'.format(dest)) + self.emit_line('{} = CPyLong_AsInt32({});'.format(dest, src)) + # TODO: Handle 'optional' + # TODO: Handle 'failure' elif isinstance(typ, RTuple): self.declare_tuple_struct(typ) if declare_dest: diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index ce428daaee71..683bf3e7a034 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -12,7 +12,8 @@ LoadStatic, InitStatic, TupleGet, TupleSet, Call, IncRef, DecRef, Box, Cast, Unbox, BasicBlock, Value, MethodCall, Unreachable, NAMESPACE_STATIC, NAMESPACE_TYPE, NAMESPACE_MODULE, RaiseStandardError, CallC, LoadGlobal, Truncate, IntOp, LoadMem, GetElementPtr, - LoadAddress, ComparisonOp, SetMem, Register, LoadLiteral, AssignMulti, KeepAlive, ERR_FALSE + LoadAddress, ComparisonOp, SetMem, Register, LoadLiteral, AssignMulti, KeepAlive, Extend, + ERR_FALSE ) from mypyc.ir.rtypes import ( RType, RTuple, RArray, is_tagged, is_int32_rprimitive, is_int64_rprimitive, RStruct, @@ -210,6 +211,10 @@ def visit_assign(self, op: Assign) -> None: # clang whines about self assignment (which we might generate # for some casts), so don't emit it. if dest != src: + # We sometimes assign from an integer prepresentation of a pointer + # to a real pointer, and C compilers insist on a cast. + if op.src.type.is_unboxed and not op.dest.type.is_unboxed: + src = f'(void *){src}' self.emit_line(f'{dest} = {src};') def visit_assign_multi(self, op: AssignMulti) -> None: @@ -538,6 +543,15 @@ def visit_truncate(self, op: Truncate) -> None: # for C backend the generated code are straight assignments self.emit_line(f"{dest} = {value};") + def visit_extend(self, op: Extend) -> None: + dest = self.reg(op) + value = self.reg(op.src) + if op.signed: + src_cast = self.emit_signed_int_cast(op.src.type) + else: + src_cast = self.emit_unsigned_int_cast(op.src.type) + self.emit_line("{} = {}{};".format(dest, src_cast, value)) + def visit_load_global(self, op: LoadGlobal) -> None: dest = self.reg(op) ann = '' @@ -551,6 +565,10 @@ def visit_int_op(self, op: IntOp) -> None: dest = self.reg(op) lhs = self.reg(op.lhs) rhs = self.reg(op.rhs) + if op.op == IntOp.RIGHT_SHIFT: + # Signed right shift + lhs = self.emit_signed_int_cast(op.lhs.type) + lhs + rhs = self.emit_signed_int_cast(op.rhs.type) + rhs self.emit_line(f'{dest} = {lhs} {op.op_str[op.op]} {rhs};') def visit_comparison_op(self, op: ComparisonOp) -> None: @@ -624,7 +642,10 @@ def reg(self, reg: Value) -> str: s = str(val) if val >= (1 << 31): # Avoid overflowing signed 32-bit int - s += 'ULL' + if val >= (1 << 63): + s += 'ULL' + else: + s += 'LL' elif val == -(1 << 63): # Avoid overflowing C integer literal s = '(-9223372036854775807LL - 1)' diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index d36fcfb9e7eb..8474b5ab58e2 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -21,7 +21,7 @@ RType, RInstance, RTuple, RArray, RVoid, is_bool_rprimitive, is_int_rprimitive, is_short_int_rprimitive, is_none_rprimitive, object_rprimitive, bool_rprimitive, short_int_rprimitive, int_rprimitive, void_rtype, pointer_rprimitive, is_pointer_rprimitive, - bit_rprimitive, is_bit_rprimitive + bit_rprimitive, is_bit_rprimitive, is_fixed_width_rtype ) if TYPE_CHECKING: @@ -90,6 +90,9 @@ def terminator(self) -> 'ControlOp': ERR_FALSE: Final = 2 # Always fails ERR_ALWAYS: Final = 3 +# Like ERR_MAGIC, but the magic return overlaps with a possible return value, and +# an extra PyErr_Occurred() check is also required +ERR_MAGIC_OVERLAPPING: Final = 4 # Hack: 
using this line number for an op will suppress it in tracebacks NO_TRACEBACK_LINE_NO = -10000 @@ -489,14 +492,17 @@ class Call(RegisterOp): The call target can be a module-level function or a class. """ - error_kind = ERR_MAGIC - def __init__(self, fn: 'FuncDecl', args: Sequence[Value], line: int) -> None: - super().__init__(line) self.fn = fn self.args = list(args) assert len(self.args) == len(fn.sig.args) self.type = fn.sig.ret_type + ret_type = fn.sig.ret_type + if not ret_type.error_overlap: + self.error_kind = ERR_MAGIC + else: + self.error_kind = ERR_MAGIC_OVERLAPPING + super().__init__(line) def sources(self) -> List[Value]: return list(self.args[:]) @@ -508,14 +514,11 @@ def accept(self, visitor: 'OpVisitor[T]') -> T: class MethodCall(RegisterOp): """Native method call obj.method(arg, ...)""" - error_kind = ERR_MAGIC - def __init__(self, obj: Value, method: str, args: List[Value], line: int = -1) -> None: - super().__init__(line) self.obj = obj self.method = method self.args = args @@ -524,7 +527,13 @@ def __init__(self, method_ir = self.receiver_type.class_ir.method_sig(method) assert method_ir is not None, "{} doesn't have method {}".format( self.receiver_type.name, method) - self.type = method_ir.ret_type + ret_type = method_ir.ret_type + self.type = ret_type + if not ret_type.error_overlap: + self.error_kind = ERR_MAGIC + else: + self.error_kind = ERR_MAGIC_OVERLAPPING + super().__init__(line) def sources(self) -> List[Value]: return self.args[:] + [self.obj] @@ -605,8 +614,11 @@ def __init__(self, obj: Value, attr: str, line: int, *, borrow: bool = False) -> self.attr = attr assert isinstance(obj.type, RInstance), 'Attribute access not supported: %s' % obj.type self.class_type = obj.type - self.type = obj.type.attr_type(attr) - self.is_borrowed = borrow + attr_type = obj.type.attr_type(attr) + self.type = attr_type + if is_fixed_width_rtype(attr_type): + self.error_kind = ERR_NEVER + self.is_borrowed = borrow and attr_type.is_refcounted def sources(self) -> List[Value]: return [self.obj] @@ -829,12 +841,14 @@ class Unbox(RegisterOp): representation. Only supported for types with an unboxed representation. """ - error_kind = ERR_MAGIC - def __init__(self, src: Value, typ: RType, line: int) -> None: - super().__init__(line) self.src = src self.type = typ + if not typ.error_overlap: + self.error_kind = ERR_MAGIC + else: + self.error_kind = ERR_MAGIC_OVERLAPPING + super().__init__(line) def sources(self) -> List[Value]: return [self.src] @@ -924,22 +938,20 @@ class Truncate(RegisterOp): Truncate a value from type with more bits to type with less bits. - Both src_type and dst_type should be non-reference counted integer - types or bool. Note that int_rprimitive is reference counted so - it should never be used here. + dst_type and src_type can be native integer types, bools or tagged + integers. Tagged integers should have the tag bit unset. """ error_kind = ERR_NEVER def __init__(self, src: Value, - src_type: RType, dst_type: RType, line: int = -1) -> None: super().__init__(line) self.src = src - self.src_type = src_type self.type = dst_type + self.src_type = src.type def sources(self) -> List[Value]: return [self.src] @@ -951,6 +963,41 @@ def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_truncate(self) +class Extend(RegisterOp): + """result = extend src from src_type to dst_type + + Extend a value from a type with fewer bits to a type with more bits. + + dst_type and src_type can be native integer types, bools or tagged + integers. 
Tagged integers should have the tag bit unset. + + If 'signed' is true, perform sign extension. Otherwise, the result will be + zero extended. + """ + + error_kind = ERR_NEVER + + def __init__(self, + src: Value, + dst_type: RType, + signed: bool, + line: int = -1) -> None: + super().__init__(line) + self.src = src + self.type = dst_type + self.src_type = src.type + self.signed = signed + + def sources(self) -> List[Value]: + return [self.src] + + def stolen(self) -> List[Value]: + return [] + + def accept(self, visitor: 'OpVisitor[T]') -> T: + return visitor.visit_extend(self) + + class LoadGlobal(RegisterOp): """Load a low-level global variable/pointer. @@ -1035,6 +1082,11 @@ def accept(self, visitor: 'OpVisitor[T]') -> T: return visitor.visit_int_op(self) +# We can't have this in the IntOp class body, because of +# https://github.com/mypyc/mypyc/issues/932. +int_op_to_id: Final = {op: op_id for op_id, op in IntOp.op_str.items()} + + class ComparisonOp(RegisterOp): """Low-level comparison op for integers and pointers. @@ -1076,6 +1128,15 @@ class ComparisonOp(RegisterOp): UGE: '>=', } + signed_ops: Final = { + '==': EQ, + '!=': NEQ, + '<': SLT, + '>': SGT, + '<=': SLE, + '>=': SGE, + } + def __init__(self, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: super().__init__(line) self.type = bit_rprimitive @@ -1327,6 +1388,10 @@ def visit_call_c(self, op: CallC) -> T: def visit_truncate(self, op: Truncate) -> T: raise NotImplementedError + @abstractmethod + def visit_extend(self, op: Extend) -> T: + raise NotImplementedError + @abstractmethod def visit_load_global(self, op: LoadGlobal) -> T: raise NotImplementedError diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index 40243dac96e9..e6cd721e4c27 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -11,7 +11,7 @@ LoadStatic, InitStatic, TupleGet, TupleSet, IncRef, DecRef, Call, MethodCall, Cast, Box, Unbox, RaiseStandardError, CallC, Truncate, LoadGlobal, IntOp, ComparisonOp, LoadMem, SetMem, GetElementPtr, LoadAddress, Register, Value, OpVisitor, BasicBlock, ControlOp, LoadLiteral, - AssignMulti, KeepAlive, Op, ERR_NEVER + AssignMulti, KeepAlive, Op, Extend, ERR_NEVER ) from mypyc.ir.func_ir import FuncIR, all_values_full from mypyc.ir.module_ir import ModuleIRs @@ -172,6 +172,13 @@ def visit_call_c(self, op: CallC) -> str: def visit_truncate(self, op: Truncate) -> str: return self.format("%r = truncate %r: %t to %t", op, op.src, op.src_type, op.type) + def visit_extend(self, op: Extend) -> str: + if op.signed: + extra = ' signed' + else: + extra = '' + return self.format("%r = extend%s %r: %t to %t", op, extra, op.src, op.src_type, op.type) + def visit_load_global(self, op: LoadGlobal) -> str: ann = f' ({repr(op.ann)})' if op.ann else '' return self.format('%r = load_global %s :: static%s', op, op.identifier, ann) diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py index 2c875d7c8f01..010e25976f1c 100644 --- a/mypyc/ir/rtypes.py +++ b/mypyc/ir/rtypes.py @@ -45,11 +45,21 @@ class RType: is_unboxed = False # This is the C undefined value for this type. It's used for initialization # if there's no value yet, and for function return value on error/exception. + # + # TODO: This shouldn't be specific to C or a string c_undefined: str # If unboxed: does the unboxed version use reference counting? is_refcounted = True # C type; use Emitter.ctype() to access _ctype: str + # If True, error/undefined value overlaps with a valid value. 
To + # detect an exception, PyErr_Occurred() must be used in addition + # to checking for error value as the return value of a function. + # + # For example, no i64 value can be reserved for error value, so we + # pick an arbitrary value (e.g. -113) to signal error, but this is + # also a valid non-error value. + error_overlap = False @abstractmethod def accept(self, visitor: 'RTypeVisitor[T]') -> T: @@ -173,29 +183,40 @@ class RPrimitive(RType): def __init__(self, name: str, + *, is_unboxed: bool, is_refcounted: bool, + is_native_int: bool = False, + is_signed: bool = False, ctype: str = 'PyObject *', - size: int = PLATFORM_SIZE) -> None: + size: int = PLATFORM_SIZE, + error_overlap: bool = False) -> None: RPrimitive.primitive_map[name] = self self.name = name self.is_unboxed = is_unboxed - self._ctype = ctype self.is_refcounted = is_refcounted + self.is_native_int = is_native_int + self.is_signed = is_signed + self._ctype = ctype self.size = size - # TODO: For low-level integers, they actually don't have undefined values - # we need to figure out some way to represent here. + self.error_overlap = error_overlap if ctype == 'CPyTagged': self.c_undefined = 'CPY_INT_TAG' - elif ctype in ('int32_t', 'int64_t', 'CPyPtr', 'uint32_t', 'uint64_t'): + elif ctype in ('int32_t', 'int64_t'): + # This is basically an arbitrary value that is pretty + # unlikely to overlap with a real value. + self.c_undefined = '-113' + elif ctype in ('CPyPtr', 'uint32_t', 'uint64_t'): + # TODO: For low-level integers, we need to invent an overlapping + # error value, similar to int64_t above. self.c_undefined = '0' elif ctype == 'PyObject *': # Boxed types use the null pointer as the error value. self.c_undefined = 'NULL' elif ctype == 'char': self.c_undefined = '2' - elif ctype == 'PyObject **': + elif ctype in ('PyObject **', 'void *'): self.c_undefined = 'NULL' else: assert False, 'Unrecognized ctype: %r' % ctype @@ -265,16 +286,42 @@ def __hash__(self) -> int: # Low level integer types (correspond to C integer types) int32_rprimitive: Final = RPrimitive( - "int32", is_unboxed=True, is_refcounted=False, ctype="int32_t", size=4 + "int32", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype="int32_t", + size=4, + error_overlap=True, ) int64_rprimitive: Final = RPrimitive( - "int64", is_unboxed=True, is_refcounted=False, ctype="int64_t", size=8 + "int64", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype="int64_t", + size=8, + error_overlap=True, ) uint32_rprimitive: Final = RPrimitive( - "uint32", is_unboxed=True, is_refcounted=False, ctype="uint32_t", size=4 + "uint32", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=False, + ctype="uint32_t", + size=4, ) uint64_rprimitive: Final = RPrimitive( - "uint64", is_unboxed=True, is_refcounted=False, ctype="uint64_t", size=8 + "uint64", + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=False, + ctype="uint64_t", + size=8, ) # The C 'int' type @@ -282,16 +329,34 @@ def __hash__(self) -> int: if IS_32_BIT_PLATFORM: c_size_t_rprimitive = uint32_rprimitive - c_pyssize_t_rprimitive = RPrimitive('native_int', is_unboxed=True, is_refcounted=False, - ctype='int32_t', size=4) + c_pyssize_t_rprimitive = RPrimitive( + 'native_int', + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype='int32_t', + size=4, + ) else: c_size_t_rprimitive = uint64_rprimitive - c_pyssize_t_rprimitive = RPrimitive('native_int', 
is_unboxed=True, is_refcounted=False, - ctype='int64_t', size=8) + c_pyssize_t_rprimitive = RPrimitive( + 'native_int', + is_unboxed=True, + is_refcounted=False, + is_native_int=True, + is_signed=True, + ctype='int64_t', + size=8, + ) -# Low level pointer, represented as integer in C backends +# Untyped pointer, represented as integer in the C backend pointer_rprimitive: Final = RPrimitive("ptr", is_unboxed=True, is_refcounted=False, ctype="CPyPtr") +# Untyped pointer, represented as void * in the C backend +c_pointer_rprimitive: Final = RPrimitive("c_ptr", is_unboxed=False, is_refcounted=False, + ctype="void *") + # Floats are represent as 'float' PyObject * values. (In the future # we'll likely switch to a more efficient, unboxed representation.) float_rprimitive: Final = RPrimitive("builtins.float", is_unboxed=False, is_refcounted=True) @@ -361,6 +426,10 @@ def is_int64_rprimitive(rtype: RType) -> bool: (rtype is c_pyssize_t_rprimitive and rtype._ctype == 'int64_t')) +def is_fixed_width_rtype(rtype: RType) -> bool: + return is_int32_rprimitive(rtype) or is_int64_rprimitive(rtype) + + def is_uint32_rprimitive(rtype: RType) -> bool: return rtype is uint32_rprimitive @@ -445,6 +514,10 @@ def visit_rprimitive(self, t: 'RPrimitive') -> str: return 'I' elif t._ctype == 'char': return 'C' + elif t._ctype == 'int64_t': + return '8' # "8 byte integer" + elif t._ctype == 'int32_t': + return '4' # "4 byte integer" assert not t.is_unboxed, f"{t} unexpected unboxed type" return 'O' diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index d5154707538b..20c8e3a80acf 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -1337,7 +1337,7 @@ def call_c(self, if desc.truncated_type is None: result = target else: - truncate = self.add(Truncate(target, desc.return_type, desc.truncated_type)) + truncate = self.add(Truncate(target, desc.truncated_type)) result = truncate if result_type and not is_runtime_subtype(result.type, result_type): if is_none_rprimitive(result_type): diff --git a/mypyc/subtype.py b/mypyc/subtype.py index 7e852f33bf4a..4ba8f6301c63 100644 --- a/mypyc/subtype.py +++ b/mypyc/subtype.py @@ -3,7 +3,7 @@ from mypyc.ir.rtypes import ( RType, RInstance, RPrimitive, RTuple, RVoid, RTypeVisitor, RUnion, RStruct, RArray, is_bool_rprimitive, is_int_rprimitive, is_tuple_rprimitive, is_short_int_rprimitive, - is_object_rprimitive, is_bit_rprimitive + is_object_rprimitive, is_bit_rprimitive, is_tagged, is_fixed_width_rtype ) @@ -43,14 +43,17 @@ def visit_runion(self, left: RUnion) -> bool: def visit_rprimitive(self, left: RPrimitive) -> bool: right = self.right if is_bool_rprimitive(left): - if is_int_rprimitive(right): + if is_tagged(right) or is_fixed_width_rtype(right): return True elif is_bit_rprimitive(left): - if is_bool_rprimitive(right) or is_int_rprimitive(right): + if is_bool_rprimitive(right) or is_tagged(right) or is_fixed_width_rtype(right): return True elif is_short_int_rprimitive(left): if is_int_rprimitive(right): return True + elif is_fixed_width_rtype(left): + if is_int_rprimitive(right): + return True return left is right def visit_rtuple(self, left: RTuple) -> bool: diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index 96d9155214b3..8ea0906aec61 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -9,7 +9,7 @@ from mypyc.ir.ops import ( BasicBlock, Goto, Return, Integer, Assign, AssignMulti, IncRef, DecRef, Branch, Call, Unbox, Box, TupleGet, GetAttr, SetAttr, Op, Value, CallC, IntOp, 
LoadMem, - GetElementPtr, LoadAddress, ComparisonOp, SetMem, Register, Unreachable, Cast + GetElementPtr, LoadAddress, ComparisonOp, SetMem, Register, Unreachable, Cast, Extend ) from mypyc.ir.rtypes import ( RTuple, RInstance, RType, RArray, int_rprimitive, bool_rprimitive, list_rprimitive, @@ -31,6 +31,7 @@ from mypyc.primitives.int_ops import int_neg_op from mypyc.subtype import is_subtype from mypyc.namegen import NameGenerator +from mypyc.common import PLATFORM_SIZE class TestFunctionEmitterVisitor(unittest.TestCase): @@ -258,11 +259,11 @@ def test_list_set_item(self) -> None: list_set_item_op.is_borrowed, list_set_item_op.error_kind, 55), """cpy_r_r0 = CPyList_SetItem(cpy_r_l, cpy_r_n, cpy_r_o);""") - def test_box(self) -> None: + def test_box_int(self) -> None: self.assert_emit(Box(self.n), """cpy_r_r0 = CPyTagged_StealAsObject(cpy_r_n);""") - def test_unbox(self) -> None: + def test_unbox_int(self) -> None: self.assert_emit(Unbox(self.m, int_rprimitive, 55), """if (likely(PyLong_Check(cpy_r_m))) cpy_r_r0 = CPyTagged_FromObject(cpy_r_m); @@ -271,6 +272,14 @@ def test_unbox(self) -> None: } """) + def test_box_i64(self) -> None: + self.assert_emit(Box(self.i64), + """cpy_r_r0 = PyLong_FromLongLong(cpy_r_i64);""") + + def test_unbox_i64(self) -> None: + self.assert_emit(Unbox(self.o, int64_rprimitive, 55), + """cpy_r_r0 = CPyLong_AsInt64(cpy_r_o);""") + def test_list_append(self) -> None: self.assert_emit(CallC(list_append_op.c_function_name, [self.l, self.o], list_append_op.return_type, list_append_op.steals, @@ -382,7 +391,9 @@ def test_int_op(self) -> None: self.assert_emit(IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.LEFT_SHIFT, 1), """cpy_r_r0 = cpy_r_s1 << cpy_r_s2;""") self.assert_emit(IntOp(short_int_rprimitive, self.s1, self.s2, IntOp.RIGHT_SHIFT, 1), - """cpy_r_r0 = cpy_r_s1 >> cpy_r_s2;""") + """cpy_r_r0 = (Py_ssize_t)cpy_r_s1 >> (Py_ssize_t)cpy_r_s2;""") + self.assert_emit(IntOp(short_int_rprimitive, self.i64, self.i64_1, IntOp.RIGHT_SHIFT, 1), + """cpy_r_r0 = cpy_r_i64 >> cpy_r_i64_1;""") def test_comparison_op(self) -> None: # signed @@ -438,7 +449,7 @@ def test_assign_multi(self) -> None: def test_long_unsigned(self) -> None: a = Register(int64_rprimitive, 'a') self.assert_emit(Assign(a, Integer(1 << 31, int64_rprimitive)), - """cpy_r_a = 2147483648ULL;""") + """cpy_r_a = 2147483648LL;""") self.assert_emit(Assign(a, Integer((1 << 31) - 1, int64_rprimitive)), """cpy_r_a = 2147483647;""") @@ -545,6 +556,23 @@ def test_cast_and_branch_no_merge_4(self) -> None: next_branch=branch, ) + def test_extend(self) -> None: + a = Register(int32_rprimitive, 'a') + self.assert_emit(Extend(a, int64_rprimitive, signed=True), + """cpy_r_r0 = cpy_r_a;""") + self.assert_emit(Extend(a, int64_rprimitive, signed=False), + """cpy_r_r0 = (uint32_t)cpy_r_a;""") + if PLATFORM_SIZE == 4: + self.assert_emit(Extend(self.n, int64_rprimitive, signed=True), + """cpy_r_r0 = (Py_ssize_t)cpy_r_n;""") + self.assert_emit(Extend(self.n, int64_rprimitive, signed=False), + """cpy_r_r0 = cpy_r_n;""") + if PLATFORM_SIZE == 8: + self.assert_emit(Extend(a, int_rprimitive, signed=True), + """cpy_r_r0 = cpy_r_a;""") + self.assert_emit(Extend(a, int_rprimitive, signed=False), + """cpy_r_r0 = (uint32_t)cpy_r_a;""") + def assert_emit(self, op: Op, expected: str, diff --git a/mypyc/test/test_subtype.py b/mypyc/test/test_subtype.py index e106a1eaa4b7..e006e5425174 100644 --- a/mypyc/test/test_subtype.py +++ b/mypyc/test/test_subtype.py @@ -2,7 +2,10 @@ import unittest -from mypyc.ir.rtypes import bit_rprimitive, 
bool_rprimitive, int_rprimitive +from mypyc.ir.rtypes import ( + bit_rprimitive, bool_rprimitive, int_rprimitive, int64_rprimitive, int32_rprimitive, + short_int_rprimitive +) from mypyc.subtype import is_subtype from mypyc.rt_subtype import is_runtime_subtype @@ -11,10 +14,26 @@ class TestSubtype(unittest.TestCase): def test_bit(self) -> None: assert is_subtype(bit_rprimitive, bool_rprimitive) assert is_subtype(bit_rprimitive, int_rprimitive) + assert is_subtype(bit_rprimitive, short_int_rprimitive) + assert is_subtype(bit_rprimitive, int64_rprimitive) + assert is_subtype(bit_rprimitive, int32_rprimitive) def test_bool(self) -> None: assert not is_subtype(bool_rprimitive, bit_rprimitive) assert is_subtype(bool_rprimitive, int_rprimitive) + assert is_subtype(bool_rprimitive, short_int_rprimitive) + assert is_subtype(bool_rprimitive, int64_rprimitive) + assert is_subtype(bool_rprimitive, int32_rprimitive) + + def test_int64(self) -> None: + assert is_subtype(int64_rprimitive, int_rprimitive) + assert not is_subtype(int64_rprimitive, short_int_rprimitive) + assert not is_subtype(int64_rprimitive, int32_rprimitive) + + def test_int32(self) -> None: + assert is_subtype(int32_rprimitive, int_rprimitive) + assert not is_subtype(int32_rprimitive, short_int_rprimitive) + assert not is_subtype(int32_rprimitive, int64_rprimitive) class TestRuntimeSubtype(unittest.TestCase): diff --git a/mypyc/transform/exceptions.py b/mypyc/transform/exceptions.py index 52b25aceffe3..e845de1fcf19 100644 --- a/mypyc/transform/exceptions.py +++ b/mypyc/transform/exceptions.py @@ -12,11 +12,14 @@ from typing import List, Optional from mypyc.ir.ops import ( - Value, BasicBlock, LoadErrorValue, Return, Branch, RegisterOp, Integer, ERR_NEVER, ERR_MAGIC, - ERR_FALSE, ERR_ALWAYS, NO_TRACEBACK_LINE_NO + Value, BasicBlock, LoadErrorValue, Return, Branch, RegisterOp, ComparisonOp, CallC, + Integer, ERR_NEVER, ERR_MAGIC, ERR_FALSE, ERR_ALWAYS, ERR_MAGIC_OVERLAPPING, + NO_TRACEBACK_LINE_NO ) from mypyc.ir.func_ir import FuncIR from mypyc.ir.rtypes import bool_rprimitive +from mypyc.primitives.registry import CFunctionDescription +from mypyc.primitives.exc_ops import err_occurred_op def insert_exception_handling(ir: FuncIR) -> None: @@ -81,6 +84,20 @@ def split_blocks_at_errors(blocks: List[BasicBlock], # this is a hack to represent the always fail # semantics, using a temporary bool with value false target = Integer(0, bool_rprimitive) + elif op.error_kind == ERR_MAGIC_OVERLAPPING: + errvalue = Integer(int(target.type.c_undefined), rtype=op.type) + comp = ComparisonOp(target, errvalue, ComparisonOp.EQ) + cur_block.ops.append(comp) + new_block2 = BasicBlock() + new_blocks.append(new_block2) + branch = Branch(comp, true_label=new_block2, false_label=new_block, + op=Branch.BOOL) + cur_block.ops.append(branch) + cur_block = new_block2 + target = primitive_call(err_occurred_op, [], target.line) + cur_block.ops.append(target) + variant = Branch.IS_ERROR + negated = True else: assert False, 'unknown error kind %d' % op.error_kind @@ -101,3 +118,15 @@ def split_blocks_at_errors(blocks: List[BasicBlock], cur_block = new_block return new_blocks + + +def primitive_call(desc: CFunctionDescription, args: List[Value], line: int) -> CallC: + return CallC( + desc.c_function_name, + [], + desc.return_type, + desc.steals, + desc.is_borrowed, + desc.error_kind, + line, + ) From 132b8e527730b654a3f3d04684bebc9daff6cabd Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 14 Jun 2022 13:19:08 -0700 
Subject: [PATCH 54/80] Update stubinfo for py.typed and removed packages (#12959) https://github.com/python/typeshed/pull/8042 https://github.com/python/typeshed/pull/8043 https://github.com/python/typeshed/pull/8044 https://github.com/python/typeshed/pull/8045 https://github.com/python/typeshed/pull/8046 https://github.com/python/typeshed/pull/8040 Co-authored-by: hauntsaninja <> --- mypy/stubinfo.py | 5 ----- mypy/test/teststubinfo.py | 4 ++-- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index d1bcb4a6c157..fb034162c7dc 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -29,8 +29,6 @@ def is_legacy_bundled_package(prefix: str, py_version: int) -> bool: 'bleach': StubInfo('types-bleach'), 'boto': StubInfo('types-boto'), 'cachetools': StubInfo('types-cachetools'), - 'certifi': StubInfo('types-certifi'), - 'characteristic': StubInfo('types-characteristic'), 'chardet': StubInfo('types-chardet'), 'click_spinner': StubInfo('types-click-spinner'), 'concurrent': StubInfo('types-futures', py_version=2), @@ -46,10 +44,7 @@ def is_legacy_bundled_package(prefix: str, py_version: int) -> bool: 'emoji': StubInfo('types-emoji'), 'enum': StubInfo('types-enum34', py_version=2), 'fb303': StubInfo('types-fb303', py_version=2), - 'filelock': StubInfo('types-filelock', py_version=3), 'first': StubInfo('types-first'), - 'freezegun': StubInfo('types-freezegun', py_version=3), - 'frozendict': StubInfo('types-frozendict', py_version=3), 'geoip2': StubInfo('types-geoip2'), 'gflags': StubInfo('types-python-gflags'), 'google.protobuf': StubInfo('types-protobuf'), diff --git a/mypy/test/teststubinfo.py b/mypy/test/teststubinfo.py index 62346ce2bc9f..e00a68a24df0 100644 --- a/mypy/test/teststubinfo.py +++ b/mypy/test/teststubinfo.py @@ -8,8 +8,8 @@ def test_is_legacy_bundled_packages(self) -> None: assert not is_legacy_bundled_package('foobar_asdf', 2) assert not is_legacy_bundled_package('foobar_asdf', 3) - assert is_legacy_bundled_package('certifi', 2) - assert is_legacy_bundled_package('certifi', 3) + assert is_legacy_bundled_package('pycurl', 2) + assert is_legacy_bundled_package('pycurl', 3) assert is_legacy_bundled_package('scribe', 2) assert not is_legacy_bundled_package('scribe', 3) From 145d8a41b17ab1ba8707589be9cb5d56bbebd0ea Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 16 Jun 2022 10:51:45 +0100 Subject: [PATCH 55/80] Add a short note when an error may be fixed by adding an await (#12958) I only cover few most common situations. If this will show to be useful, we can expand the scope of this note. 
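As a concrete illustration of the situations covered (a made-up snippet, not taken from the test data; the new testAwaitMissingNote cases in check-async-await.test exercise the same patterns, and the diagnostics in the comments are approximate renderings of what mypy reports):

    async def fetch() -> int:
        return 1

    def use(n: int) -> None: ...

    async def main() -> None:
        use(fetch())            # error: Argument 1 to "use" has incompatible type
                                #        "Coroutine[Any, Any, int]"; expected "int"
                                # note: Maybe you forgot to use "await"?
        use(await fetch())      # no error
        x: int = fetch()        # error: Incompatible types in assignment
                                # note: Maybe you forgot to use "await"?
        y: int = await fetch()  # no error

The note is only attached when awaiting would actually make the code type-check: per the new checks in checker.py and checkmember.py, Awaitable[Any] values and Type[...] receivers do not get it.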
--- mypy/checker.py | 54 ++++++++++++++++++++++++- mypy/checkexpr.py | 11 +++++- mypy/checkmember.py | 41 +++++++++++++++---- mypy/messages.py | 3 ++ test-data/unit/check-async-await.test | 55 ++++++++++++++++++++++++++ test-data/unit/deps.test | 5 ++- test-data/unit/pythoneval-asyncio.test | 1 + 7 files changed, 158 insertions(+), 12 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 688fbd28739d..39f3fa42942d 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -12,7 +12,7 @@ from typing_extensions import Final, TypeAlias as _TypeAlias from mypy.backports import nullcontext -from mypy.errors import Errors, report_internal_error +from mypy.errors import Errors, report_internal_error, ErrorWatcher from mypy.nodes import ( SymbolTable, Statement, MypyFile, Var, Expression, Lvalue, Node, OverloadedFuncDef, FuncDef, FuncItem, FuncBase, TypeInfo, @@ -38,7 +38,7 @@ is_named_instance, union_items, TypeQuery, LiteralType, is_optional, remove_optional, TypeTranslator, StarType, get_proper_type, ProperType, get_proper_types, is_literal_type, TypeAliasType, TypeGuardedType, ParamSpecType, - OVERLOAD_NAMES, + OVERLOAD_NAMES, UnboundType ) from mypy.sametypes import is_same_type from mypy.messages import ( @@ -276,6 +276,10 @@ def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Option # argument through various `checker` and `checkmember` functions. self._is_final_def = False + # This flag is set when we run type-check or attribute access check for the purpose + # of giving a note on possibly missing "await". It is used to avoid infinite recursion. + self.checking_missing_await = False + @property def type_context(self) -> List[Optional[Type]]: return self.expr_checker.type_context @@ -5285,8 +5289,54 @@ def check_subtype(self, call = find_member('__call__', supertype, subtype, is_operator=True) assert call is not None self.msg.note_call(supertype, call, context, code=code) + self.check_possible_missing_await(subtype, supertype, context) return False + def get_precise_awaitable_type(self, typ: Type, local_errors: ErrorWatcher) -> Optional[Type]: + """If type implements Awaitable[X] with non-Any X, return X. + + In all other cases return None. This method must be called in context + of local_errors. + """ + if isinstance(get_proper_type(typ), PartialType): + # Partial types are special, ignore them here. + return None + try: + aw_type = self.expr_checker.check_awaitable_expr( + typ, Context(), '', ignore_binder=True + ) + except KeyError: + # This is a hack to speed up tests by not including Awaitable in all typing stubs. + return None + if local_errors.has_new_errors(): + return None + if isinstance(get_proper_type(aw_type), (AnyType, UnboundType)): + return None + return aw_type + + @contextmanager + def checking_await_set(self) -> Iterator[None]: + self.checking_missing_await = True + try: + yield + finally: + self.checking_missing_await = False + + def check_possible_missing_await( + self, subtype: Type, supertype: Type, context: Context + ) -> None: + """Check if the given type becomes a subtype when awaited.""" + if self.checking_missing_await: + # Avoid infinite recursion. 
+ return + with self.checking_await_set(), self.msg.filter_errors() as local_errors: + aw_type = self.get_precise_awaitable_type(subtype, local_errors) + if aw_type is None: + return + if not self.check_subtype(aw_type, supertype, context): + return + self.msg.possible_missing_await(context) + def contains_none(self, t: Type) -> bool: t = get_proper_type(t) return ( diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 193e56b6002f..055aba8de08b 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1573,6 +1573,7 @@ def check_arg(self, outer_context=outer_context) self.msg.incompatible_argument_note(original_caller_type, callee_type, context, code=code) + self.chk.check_possible_missing_await(caller_type, callee_type, context) def check_overload_call(self, callee: Overloaded, @@ -4119,7 +4120,9 @@ def visit_await_expr(self, e: AwaitExpr, allow_none_return: bool = False) -> Typ self.chk.msg.does_not_return_value(None, e) return ret - def check_awaitable_expr(self, t: Type, ctx: Context, msg: Union[str, ErrorMessage]) -> Type: + def check_awaitable_expr( + self, t: Type, ctx: Context, msg: Union[str, ErrorMessage], ignore_binder: bool = False + ) -> Type: """Check the argument to `await` and extract the type of value. Also used by `async for` and `async with`. @@ -4131,7 +4134,11 @@ def check_awaitable_expr(self, t: Type, ctx: Context, msg: Union[str, ErrorMessa generator = self.check_method_call_by_name('__await__', t, [], [], ctx)[0] ret_type = self.chk.get_generator_return_type(generator, False) ret_type = get_proper_type(ret_type) - if isinstance(ret_type, UninhabitedType) and not ret_type.ambiguous: + if ( + not ignore_binder + and isinstance(ret_type, UninhabitedType) + and not ret_type.ambiguous + ): self.chk.binder.unreachable() return ret_type diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 964ab301d171..2172361ea2f0 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -171,7 +171,38 @@ def _analyze_member_access(name: str, return AnyType(TypeOfAny.from_error) if mx.chk.should_suppress_optional_error([typ]): return AnyType(TypeOfAny.from_error) - return mx.msg.has_no_attr(mx.original_type, typ, name, mx.context, mx.module_symbol_table) + return report_missing_attribute(mx.original_type, typ, name, mx) + + +def may_be_awaitable_attribute( + name: str, + typ: Type, + mx: MemberContext, + override_info: Optional[TypeInfo] = None +) -> bool: + """Check if the given type has the attribute when awaited.""" + if mx.chk.checking_missing_await: + # Avoid infinite recursion. 
+ return False + with mx.chk.checking_await_set(), mx.msg.filter_errors() as local_errors: + aw_type = mx.chk.get_precise_awaitable_type(typ, local_errors) + if aw_type is None: + return False + _ = _analyze_member_access(name, aw_type, mx, override_info) + return not local_errors.has_new_errors() + + +def report_missing_attribute( + original_type: Type, + typ: Type, + name: str, + mx: MemberContext, + override_info: Optional[TypeInfo] = None +) -> Type: + res_type = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table) + if may_be_awaitable_attribute(name, typ, mx, override_info): + mx.msg.possible_missing_await(mx.context) + return res_type # The several functions that follow implement analyze_member_access for various @@ -438,9 +469,7 @@ def analyze_member_var_access(name: str, else: if mx.chk and mx.chk.should_suppress_optional_error([itype]): return AnyType(TypeOfAny.from_error) - return mx.msg.has_no_attr( - mx.original_type, itype, name, mx.context, mx.module_symbol_table - ) + return report_missing_attribute(mx.original_type, itype, name, mx) def check_final_member(name: str, info: TypeInfo, msg: MessageBuilder, ctx: Context) -> None: @@ -851,9 +880,7 @@ def analyze_enum_class_attribute_access(itype: Instance, ) -> Optional[Type]: # Skip these since Enum will remove it if name in ENUM_REMOVED_PROPS: - return mx.msg.has_no_attr( - mx.original_type, itype, name, mx.context, mx.module_symbol_table - ) + return report_missing_attribute(mx.original_type, itype, name, mx) # For other names surrendered by underscores, we don't make them Enum members if name.startswith('__') and name.endswith("__") and name.replace('_', '') != '': return None diff --git a/mypy/messages.py b/mypy/messages.py index b60f40bce561..1d6641c00a61 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -806,6 +806,9 @@ def unpacking_strings_disallowed(self, context: Context) -> None: def type_not_iterable(self, type: Type, context: Context) -> None: self.fail(f'{format_type(type)} object is not iterable', context) + def possible_missing_await(self, context: Context) -> None: + self.note('Maybe you forgot to use "await"?', context) + def incompatible_operator_assignment(self, op: str, context: Context) -> None: self.fail(f'Result type of {op} incompatible in assignment', diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 4d856db869a7..950c64098cf0 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -865,6 +865,60 @@ async with C() as x: # E: "async with" outside async function [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] +[case testAwaitMissingNote] +# flags: --python-version 3.7 +from typing import Generic, TypeVar, Generator, Any, Awaitable, Type + +class C: + x: int +class D(C): ... + +async def foo() -> D: ... +def g(x: C) -> None: ... + +T = TypeVar("T") +class Custom(Generic[T]): + def __await__(self) -> Generator[Any, Any, T]: ... + +class Sub(Custom[T]): ... + +async def test(x: Sub[D], tx: Type[Sub[D]]) -> None: + foo().x # E: "Coroutine[Any, Any, D]" has no attribute "x" \ + # N: Maybe you forgot to use "await"? + (await foo()).x + foo().bad # E: "Coroutine[Any, Any, D]" has no attribute "bad" + + g(foo()) # E: Argument 1 to "g" has incompatible type "Coroutine[Any, Any, D]"; expected "C" \ + # N: Maybe you forgot to use "await"? 
+ g(await foo()) + unknown: Awaitable[Any] + g(unknown) # E: Argument 1 to "g" has incompatible type "Awaitable[Any]"; expected "C" + + x.x # E: "Sub[D]" has no attribute "x" \ + # N: Maybe you forgot to use "await"? + (await x).x + x.bad # E: "Sub[D]" has no attribute "bad" + + a: C = x # E: Incompatible types in assignment (expression has type "Sub[D]", variable has type "C") \ + # N: Maybe you forgot to use "await"? + b: C = await x + unknown2: Awaitable[Any] + d: C = unknown2 # E: Incompatible types in assignment (expression has type "Awaitable[Any]", variable has type "C") + + # The notes are not show for Type[...] (because awaiting them will not work) + tx.x # E: "Type[Sub[D]]" has no attribute "x" + a2: C = tx # E: Incompatible types in assignment (expression has type "Type[Sub[D]]", variable has type "C") + +class F: + def __await__(self: T) -> Generator[Any, Any, T]: ... +class G(F): ... + +# This should not crash. +x: int = G() # E: Incompatible types in assignment (expression has type "G", variable has type "int") + +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + [case testAsyncGeneratorExpressionAwait] from typing import AsyncGenerator @@ -874,4 +928,5 @@ async def f() -> AsyncGenerator[int, None]: return (await g(x) for x in [1, 2, 3]) +[builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index fd593a975ca0..53156b6f4f48 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -872,6 +872,8 @@ c.y # type: ignore -> m -> m -> m + -> + -> typing.Awaitable [case testIgnoredMissingInstanceAttribute] from a import C @@ -879,10 +881,11 @@ C().x # type: ignore [file a.py] class C: pass [out] + -> -> m -> m -> m - -> m + -> m, typing.Awaitable -> m [case testIgnoredMissingClassAttribute] diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test index 72e4bc9cc9dd..97dd9d4f0a55 100644 --- a/test-data/unit/pythoneval-asyncio.test +++ b/test-data/unit/pythoneval-asyncio.test @@ -429,6 +429,7 @@ loop.run_until_complete(h()) loop.close() [out] _program.py:16: error: Incompatible return value type (got "Future[Future[int]]", expected "Future[int]") +_program.py:16: note: Maybe you forgot to use "await"? [case testErrorAssignmentDifferentType] import typing From eb1b1e007e7fd1e976e6dd0f49a71b662a30a2d6 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 16 Jun 2022 18:42:20 +0100 Subject: [PATCH 56/80] [mypyc] Native int primitives (#12973) Add various C primitives that will be used to support native ints. The primitives aren't used for anything yet. I'll prepare follow-up PRs that use the primitives and include tests. I'm splitting these into a separate PR to make this easier to review. All of these are tested in my local branch, at least to a basic level. Most of these are fairly straightforward, but we need to jump through some hoops to make the semantics of // and % operators compatible with Python semantics when using negative operands. Work on mypyc/mypyc#837. 
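To make the point about // and % concrete: C integer division truncates toward zero, while Python's floor division rounds toward negative infinity, so the results differ whenever exactly one operand is negative. The new CPyInt64_Divide/CPyInt64_Remainder helpers (and their int32 variants) start from the C result and then fix it up. A rough pure-Python sketch of that fix-up (trunc_divmod and floor_divmod are illustrative names, not part of the patch):

    from typing import Tuple

    def trunc_divmod(x: int, y: int) -> Tuple[int, int]:
        # C-style division: quotient truncated toward zero,
        # remainder takes the sign of the dividend.
        q = abs(x) // abs(y)
        if (x < 0) != (y < 0):
            q = -q
        return q, x - q * y

    def floor_divmod(x: int, y: int) -> Tuple[int, int]:
        # The adjustment the C helpers apply: when the operand signs
        # differ and the division is not exact, shift the quotient down
        # by one and the remainder by y, matching Python's // and %.
        q, r = trunc_divmod(x, y)
        if (x < 0) != (y < 0) and r != 0:
            q -= 1
            r += y
        return q, r

    assert floor_divmod(-7, 2) == (-7 // 2, -7 % 2) == (-4, 1)
    assert floor_divmod(7, -2) == (7 // -2, 7 % -2) == (-4, -1)

The C implementations additionally handle division by zero and the INT_MIN / -1 overflow case explicitly, since those raise exceptions in Python but are undefined behaviour in C.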
--- mypyc/lib-rt/CPy.h | 17 +++++ mypyc/lib-rt/int_ops.c | 135 +++++++++++++++++++++++++++++++++++ mypyc/lib-rt/list_ops.c | 58 +++++++++++++++ mypyc/lib-rt/mypyc_util.h | 3 + mypyc/primitives/int_ops.py | 61 +++++++++++++++- mypyc/primitives/list_ops.py | 33 ++++++++- 6 files changed, 303 insertions(+), 4 deletions(-) diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index f482e09cbe79..ca8bc31140af 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -122,6 +122,7 @@ static inline size_t CPy_FindAttrOffset(PyTypeObject *trait, CPyVTableItem *vtab CPyTagged CPyTagged_FromSsize_t(Py_ssize_t value); CPyTagged CPyTagged_FromVoidPtr(void *ptr); +CPyTagged CPyTagged_FromInt64(int64_t value); CPyTagged CPyTagged_FromObject(PyObject *object); CPyTagged CPyTagged_StealFromObject(PyObject *object); CPyTagged CPyTagged_BorrowFromObject(PyObject *object); @@ -150,6 +151,13 @@ PyObject *CPyLong_FromStrWithBase(PyObject *o, CPyTagged base); PyObject *CPyLong_FromStr(PyObject *o); PyObject *CPyLong_FromFloat(PyObject *o); PyObject *CPyBool_Str(bool b); +int64_t CPyLong_AsInt64(PyObject *o); +int64_t CPyInt64_Divide(int64_t x, int64_t y); +int64_t CPyInt64_Remainder(int64_t x, int64_t y); +int32_t CPyLong_AsInt32(PyObject *o); +int32_t CPyInt32_Divide(int32_t x, int32_t y); +int32_t CPyInt32_Remainder(int32_t x, int32_t y); +void CPyInt32_Overflow(void); static inline int CPyTagged_CheckLong(CPyTagged x) { return x & CPY_INT_TAG; @@ -193,6 +201,12 @@ static inline bool CPyTagged_TooBig(Py_ssize_t value) { && (value >= 0 || value < CPY_TAGGED_MIN); } +static inline bool CPyTagged_TooBigInt64(int64_t value) { + // Micro-optimized for the common case where it fits. + return (uint64_t)value > CPY_TAGGED_MAX + && (value >= 0 || value < CPY_TAGGED_MIN); +} + static inline bool CPyTagged_IsAddOverflow(CPyTagged sum, CPyTagged left, CPyTagged right) { // This check was copied from some of my old code I believe that it works :-) return (Py_ssize_t)(sum ^ left) < 0 && (Py_ssize_t)(sum ^ right) < 0; @@ -342,8 +356,11 @@ PyObject *CPyList_GetItemUnsafe(PyObject *list, CPyTagged index); PyObject *CPyList_GetItemShort(PyObject *list, CPyTagged index); PyObject *CPyList_GetItemBorrow(PyObject *list, CPyTagged index); PyObject *CPyList_GetItemShortBorrow(PyObject *list, CPyTagged index); +PyObject *CPyList_GetItemInt64(PyObject *list, int64_t index); +PyObject *CPyList_GetItemInt64Borrow(PyObject *list, int64_t index); bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value); bool CPyList_SetItemUnsafe(PyObject *list, CPyTagged index, PyObject *value); +bool CPyList_SetItemInt64(PyObject *list, int64_t index, PyObject *value); PyObject *CPyList_PopLast(PyObject *obj); PyObject *CPyList_Pop(PyObject *obj, CPyTagged index); CPyTagged CPyList_Count(PyObject *obj, PyObject *value); diff --git a/mypyc/lib-rt/int_ops.c b/mypyc/lib-rt/int_ops.c index caf0fe0b5391..42e6908384f6 100644 --- a/mypyc/lib-rt/int_ops.c +++ b/mypyc/lib-rt/int_ops.c @@ -35,6 +35,15 @@ CPyTagged CPyTagged_FromVoidPtr(void *ptr) { } } +CPyTagged CPyTagged_FromInt64(int64_t value) { + if (unlikely(CPyTagged_TooBigInt64(value))) { + PyObject *object = PyLong_FromLongLong(value); + return ((CPyTagged)object) | CPY_INT_TAG; + } else { + return value << 1; + } +} + CPyTagged CPyTagged_FromObject(PyObject *object) { int overflow; // The overflow check knows about CPyTagged's width @@ -504,3 +513,129 @@ CPyTagged CPyTagged_Lshift(CPyTagged left, CPyTagged right) { } return CPyTagged_StealFromObject(result); } + +int64_t 
CPyLong_AsInt64(PyObject *o) { + if (likely(PyLong_Check(o))) { + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = Py_SIZE(lobj); + if (likely(size == 1)) { + // Fast path + return lobj->ob_digit[0]; + } else if (likely(size == 0)) { + return 0; + } + } + // Slow path + int overflow; + int64_t result = PyLong_AsLongLongAndOverflow(o, &overflow); + if (result == -1) { + if (PyErr_Occurred()) { + return CPY_LL_INT_ERROR; + } else if (overflow) { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i64"); + return CPY_LL_INT_ERROR; + } + } + return result; +} + +int64_t CPyInt64_Divide(int64_t x, int64_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + if (y == -1 && x == -1LL << 63) { + PyErr_SetString(PyExc_OverflowError, "integer division overflow"); + return CPY_LL_INT_ERROR; + } + int64_t d = x / y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d * y != x) { + d--; + } + return d; +} + +int64_t CPyInt64_Remainder(int64_t x, int64_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + // Edge case: avoid core dump + if (y == -1 && x == -1LL << 63) { + return 0; + } + int64_t d = x % y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d != 0) { + d += y; + } + return d; +} + +int32_t CPyLong_AsInt32(PyObject *o) { + if (likely(PyLong_Check(o))) { + PyLongObject *lobj = (PyLongObject *)o; + Py_ssize_t size = lobj->ob_base.ob_size; + if (likely(size == 1)) { + // Fast path + return lobj->ob_digit[0]; + } else if (likely(size == 0)) { + return 0; + } + } + // Slow path + int overflow; + long result = PyLong_AsLongAndOverflow(o, &overflow); + if (result > 0x7fffffffLL || result < -0x80000000LL) { + overflow = 1; + result = -1; + } + if (result == -1) { + if (PyErr_Occurred()) { + return CPY_LL_INT_ERROR; + } else if (overflow) { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i32"); + return CPY_LL_INT_ERROR; + } + } + return result; +} + +int32_t CPyInt32_Divide(int32_t x, int32_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + if (y == -1 && x == -1LL << 31) { + PyErr_SetString(PyExc_OverflowError, "integer division overflow"); + return CPY_LL_INT_ERROR; + } + int32_t d = x / y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d * y != x) { + d--; + } + return d; +} + +int32_t CPyInt32_Remainder(int32_t x, int32_t y) { + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); + return CPY_LL_INT_ERROR; + } + // Edge case: avoid core dump + if (y == -1 && x == -1LL << 31) { + return 0; + } + int32_t d = x % y; + // Adjust for Python semantics + if (((x < 0) != (y < 0)) && d != 0) { + d += y; + } + return d; +} + +void CPyInt32_Overflow() { + PyErr_SetString(PyExc_OverflowError, "int too large to convert to i32"); +} diff --git a/mypyc/lib-rt/list_ops.c b/mypyc/lib-rt/list_ops.c index 885c1a3366f3..cb72662e22ee 100644 --- a/mypyc/lib-rt/list_ops.c +++ b/mypyc/lib-rt/list_ops.c @@ -118,6 +118,44 @@ PyObject *CPyList_GetItemBorrow(PyObject *list, CPyTagged index) { } } +PyObject *CPyList_GetItemInt64(PyObject *list, int64_t index) { + size_t size = PyList_GET_SIZE(list); + if (likely((uint64_t)index < size)) { + PyObject *result = PyList_GET_ITEM(list, index); + Py_INCREF(result); + return result; + } + if (index 
>= 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + index += size; + if (index < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + PyObject *result = PyList_GET_ITEM(list, index); + Py_INCREF(result); + return result; +} + +PyObject *CPyList_GetItemInt64Borrow(PyObject *list, int64_t index) { + size_t size = PyList_GET_SIZE(list); + if (likely((uint64_t)index < size)) { + return PyList_GET_ITEM(list, index); + } + if (index >= 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + index += size; + if (index < 0) { + PyErr_SetString(PyExc_IndexError, "list index out of range"); + return NULL; + } + return PyList_GET_ITEM(list, index); +} + bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value) { if (CPyTagged_CheckShort(index)) { Py_ssize_t n = CPyTagged_ShortAsSsize_t(index); @@ -145,6 +183,26 @@ bool CPyList_SetItem(PyObject *list, CPyTagged index, PyObject *value) { } } +bool CPyList_SetItemInt64(PyObject *list, int64_t index, PyObject *value) { + size_t size = PyList_GET_SIZE(list); + if (unlikely((uint64_t)index >= size)) { + if (index > 0) { + PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); + return false; + } + index += size; + if (index < 0) { + PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); + return false; + } + } + // PyList_SET_ITEM doesn't decref the old element, so we do + Py_DECREF(PyList_GET_ITEM(list, index)); + // N.B: Steals reference + PyList_SET_ITEM(list, index, value); + return true; +} + // This function should only be used to fill in brand new lists. bool CPyList_SetItemUnsafe(PyObject *list, CPyTagged index, PyObject *value) { if (CPyTagged_CheckShort(index)) { diff --git a/mypyc/lib-rt/mypyc_util.h b/mypyc/lib-rt/mypyc_util.h index 6c4a94f8811c..0fae239cbb9e 100644 --- a/mypyc/lib-rt/mypyc_util.h +++ b/mypyc/lib-rt/mypyc_util.h @@ -53,6 +53,9 @@ typedef PyObject CPyModule; // Tag bit used for long integers #define CPY_INT_TAG 1 +// Error value for fixed-width (low-level) integers +#define CPY_LL_INT_ERROR -113 + typedef void (*CPyVTableItem)(void); static inline CPyTagged CPyTagged_ShortFromInt(int x) { diff --git a/mypyc/primitives/int_ops.py b/mypyc/primitives/int_ops.py index 44703528976c..ad33de059f02 100644 --- a/mypyc/primitives/int_ops.py +++ b/mypyc/primitives/int_ops.py @@ -9,10 +9,11 @@ """ from typing import Dict, NamedTuple -from mypyc.ir.ops import ERR_NEVER, ERR_MAGIC, ComparisonOp +from mypyc.ir.ops import ERR_NEVER, ERR_MAGIC, ERR_MAGIC_OVERLAPPING, ERR_ALWAYS, ComparisonOp from mypyc.ir.rtypes import ( int_rprimitive, bool_rprimitive, float_rprimitive, object_rprimitive, - str_rprimitive, bit_rprimitive, RType + str_rprimitive, bit_rprimitive, int64_rprimitive, int32_rprimitive, void_rtype, RType, + c_pyssize_t_rprimitive ) from mypyc.primitives.registry import ( load_address_op, unary_op, CFunctionDescription, function_op, binary_op, custom_op @@ -165,3 +166,59 @@ def int_unary_op(name: str, c_function_name: str) -> CFunctionDescription: '>': IntComparisonOpDescription(ComparisonOp.SGT, int_less_than_, False, True), '>=': IntComparisonOpDescription(ComparisonOp.SGE, int_less_than_, True, False), } + +int64_divide_op = custom_op( + arg_types=[int64_rprimitive, int64_rprimitive], + return_type=int64_rprimitive, + c_function_name='CPyInt64_Divide', + error_kind=ERR_MAGIC_OVERLAPPING) + +int64_mod_op = custom_op( + arg_types=[int64_rprimitive, int64_rprimitive], + 
return_type=int64_rprimitive, + c_function_name='CPyInt64_Remainder', + error_kind=ERR_MAGIC_OVERLAPPING) + +int32_divide_op = custom_op( + arg_types=[int32_rprimitive, int32_rprimitive], + return_type=int32_rprimitive, + c_function_name='CPyInt32_Divide', + error_kind=ERR_MAGIC_OVERLAPPING) + +int32_mod_op = custom_op( + arg_types=[int32_rprimitive, int32_rprimitive], + return_type=int32_rprimitive, + c_function_name='CPyInt32_Remainder', + error_kind=ERR_MAGIC_OVERLAPPING) + +# Convert tagged int (as PyObject *) to i64 +int_to_int64_op = custom_op( + arg_types=[object_rprimitive], + return_type=int64_rprimitive, + c_function_name='CPyLong_AsInt64', + error_kind=ERR_MAGIC_OVERLAPPING) + +ssize_t_to_int_op = custom_op( + arg_types=[c_pyssize_t_rprimitive], + return_type=int_rprimitive, + c_function_name='CPyTagged_FromSsize_t', + error_kind=ERR_MAGIC) + +int64_to_int_op = custom_op( + arg_types=[int64_rprimitive], + return_type=int_rprimitive, + c_function_name='CPyTagged_FromInt64', + error_kind=ERR_MAGIC) + +# Convert tagged int (as PyObject *) to i32 +int_to_int32_op = custom_op( + arg_types=[object_rprimitive], + return_type=int32_rprimitive, + c_function_name='CPyLong_AsInt32', + error_kind=ERR_MAGIC_OVERLAPPING) + +int32_overflow = custom_op( + arg_types=[], + return_type=void_rtype, + c_function_name='CPyInt32_Overflow', + error_kind=ERR_ALWAYS) diff --git a/mypyc/primitives/list_ops.py b/mypyc/primitives/list_ops.py index 78955f70f164..2bba4207cd27 100644 --- a/mypyc/primitives/list_ops.py +++ b/mypyc/primitives/list_ops.py @@ -3,7 +3,7 @@ from mypyc.ir.ops import ERR_MAGIC, ERR_NEVER, ERR_FALSE from mypyc.ir.rtypes import ( int_rprimitive, short_int_rprimitive, list_rprimitive, object_rprimitive, c_int_rprimitive, - c_pyssize_t_rprimitive, bit_rprimitive + c_pyssize_t_rprimitive, bit_rprimitive, int64_rprimitive ) from mypyc.primitives.registry import ( load_address_op, function_op, binary_op, method_op, custom_op, ERR_NEG_INT @@ -55,7 +55,7 @@ c_function_name='CPyList_GetItem', error_kind=ERR_MAGIC) -# list[index] version with no int bounds check for when it is known to be short +# list[index] version with no int tag check for when it is known to be short method_op( name='__getitem__', arg_types=[list_rprimitive, short_int_rprimitive], @@ -84,6 +84,25 @@ is_borrowed=True, priority=4) +# Version with native int index +method_op( + name='__getitem__', + arg_types=[list_rprimitive, int64_rprimitive], + return_type=object_rprimitive, + c_function_name='CPyList_GetItemInt64', + error_kind=ERR_MAGIC, + priority=5) + +# Version with native int index +method_op( + name='__getitem__', + arg_types=[list_rprimitive, int64_rprimitive], + return_type=object_rprimitive, + c_function_name='CPyList_GetItemInt64Borrow', + is_borrowed=True, + error_kind=ERR_MAGIC, + priority=6) + # This is unsafe because it assumes that the index is a non-negative short integer # that is in-bounds for the list. list_get_item_unsafe_op = custom_op( @@ -101,6 +120,16 @@ error_kind=ERR_FALSE, steals=[False, False, True]) +# list[index_i64] = obj +method_op( + name='__setitem__', + arg_types=[list_rprimitive, int64_rprimitive, object_rprimitive], + return_type=bit_rprimitive, + c_function_name='CPyList_SetItemInt64', + error_kind=ERR_FALSE, + steals=[False, False, True], + priority=2) + # PyList_SET_ITEM does no error checking, # and should only be used to fill in brand new lists. 
new_list_set_item_op = custom_op( From 5039c0fe44a7592b2fcb07cead49d921ecea211f Mon Sep 17 00:00:00 2001 From: t4lz <12370607+t4lz@users.noreply.github.com> Date: Thu, 23 Jun 2022 04:40:53 +0200 Subject: [PATCH 57/80] Support unannotated converters for attr.ib (#12815) ### Description Fixes #6172 If an unannotated converter function or a lambda expression is passed as a converter to `attr.ib()`, instead of giving an error, just take the type of the respective argument of the generated `__init__()` function to be `Any`, as suggested by @JelleZijlstra and @cgebbe. ## Test Plan Add two tests: one that tests the example from the issue of an unannotated function, and one that tests an example with a lambda expression as a converter. Co-authored-by: t4lz Co-authored-by: Tal Zwick Co-authored-by: Jelle Zijlstra --- mypy/plugins/attrs.py | 189 ++++++++++++++++++--------------- test-data/unit/check-attr.test | 37 ++++++- 2 files changed, 134 insertions(+), 92 deletions(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index dbce8a402141..06c11f130f11 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -12,7 +12,7 @@ TupleExpr, ListExpr, NameExpr, CallExpr, RefExpr, FuncDef, is_class_var, TempNode, Decorator, MemberExpr, Expression, SymbolTableNode, MDEF, JsonDict, OverloadedFuncDef, ARG_NAMED_OPT, ARG_NAMED, - TypeVarExpr, PlaceholderNode + TypeVarExpr, PlaceholderNode, LambdaExpr ) from mypy.plugin import SemanticAnalyzerPluginInterface from mypy.plugins.common import ( @@ -60,19 +60,16 @@ class Converter: """Holds information about a `converter=` argument""" def __init__(self, - type: Optional[Type] = None, - is_attr_converters_optional: bool = False, - is_invalid_converter: bool = False) -> None: - self.type = type - self.is_attr_converters_optional = is_attr_converters_optional - self.is_invalid_converter = is_invalid_converter + init_type: Optional[Type] = None, + ) -> None: + self.init_type = init_type class Attribute: """The value of an attr.ib() call.""" def __init__(self, name: str, info: TypeInfo, - has_default: bool, init: bool, kw_only: bool, converter: Converter, + has_default: bool, init: bool, kw_only: bool, converter: Optional[Converter], context: Context, init_type: Optional[Type]) -> None: self.name = name @@ -88,54 +85,35 @@ def argument(self, ctx: 'mypy.plugin.ClassDefContext') -> Argument: """Return this attribute as an argument to __init__.""" assert self.init - init_type = self.init_type or self.info[self.name].type - - if self.converter.type and not self.converter.is_invalid_converter: - # When a converter is set the init_type is overridden by the first argument - # of the converter method. - converter_type = self.converter.type - init_type = None - converter_type = get_proper_type(converter_type) - if isinstance(converter_type, CallableType) and converter_type.arg_types: - init_type = converter_type.arg_types[0] - elif isinstance(converter_type, Overloaded): - types: List[Type] = [] - for item in converter_type.items: - # Walk the overloads looking for methods that can accept one argument. - num_arg_types = len(item.arg_types) - if not num_arg_types: - continue - if num_arg_types > 1 and any(kind == ARG_POS for kind in item.arg_kinds[1:]): - continue - types.append(item.arg_types[0]) - # Make a union of all the valid types. - if types: - init_type = make_simplified_union(types) - - if self.converter.is_attr_converters_optional and init_type: - # If the converter was attr.converter.optional(type) then add None to - # the allowed init_type. 
- init_type = UnionType.make_union([init_type, NoneType()]) - - if not init_type: + init_type: Optional[Type] = None + if self.converter: + if self.converter.init_type: + init_type = self.converter.init_type + else: ctx.api.fail("Cannot determine __init__ type from converter", self.context) init_type = AnyType(TypeOfAny.from_error) - elif self.converter.is_invalid_converter: - # This means we had a converter but it's not of a type we can infer. - init_type = AnyType(TypeOfAny.from_error) + else: # There is no converter, the init type is the normal type. + init_type = self.init_type or self.info[self.name].type + unannotated = False if init_type is None: - if ctx.api.options.disallow_untyped_defs: - # This is a compromise. If you don't have a type here then the - # __init__ will be untyped. But since the __init__ is added it's - # pointing at the decorator. So instead we also show the error in the - # assignment, which is where you would fix the issue. - node = self.info[self.name].node - assert node is not None - ctx.api.msg.need_annotation_for_var(node, self.context) - + unannotated = True # Convert type not set to Any. init_type = AnyType(TypeOfAny.unannotated) + else: + proper_type = get_proper_type(init_type) + if isinstance(proper_type, AnyType): + if proper_type.type_of_any == TypeOfAny.unannotated: + unannotated = True + + if unannotated and ctx.api.options.disallow_untyped_defs: + # This is a compromise. If you don't have a type here then the + # __init__ will be untyped. But since the __init__ is added it's + # pointing at the decorator. So instead we also show the error in the + # assignment, which is where you would fix the issue. + node = self.info[self.name].node + assert node is not None + ctx.api.msg.need_annotation_for_var(node, self.context) if self.kw_only: arg_kind = ARG_NAMED_OPT if self.has_default else ARG_NAMED @@ -154,9 +132,9 @@ def serialize(self) -> JsonDict: 'has_default': self.has_default, 'init': self.init, 'kw_only': self.kw_only, - 'converter_type': self.converter.type.serialize() if self.converter.type else None, - 'converter_is_attr_converters_optional': self.converter.is_attr_converters_optional, - 'converter_is_invalid_converter': self.converter.is_invalid_converter, + 'has_converter': self.converter is not None, + 'converter_init_type': self.converter.init_type.serialize() + if self.converter and self.converter.init_type else None, 'context_line': self.context.line, 'context_column': self.context.column, 'init_type': self.init_type.serialize() if self.init_type else None, @@ -169,17 +147,16 @@ def deserialize(cls, info: TypeInfo, """Return the Attribute that was serialized.""" raw_init_type = data['init_type'] init_type = deserialize_and_fixup_type(raw_init_type, api) if raw_init_type else None + raw_converter_init_type = data['converter_init_type'] + converter_init_type = (deserialize_and_fixup_type(raw_converter_init_type, api) + if raw_converter_init_type else None) - converter_type = None - if data['converter_type']: - converter_type = deserialize_and_fixup_type(data['converter_type'], api) return Attribute(data['name'], info, data['has_default'], data['init'], data['kw_only'], - Converter(converter_type, data['converter_is_attr_converters_optional'], - data['converter_is_invalid_converter']), + Converter(converter_init_type) if data['has_converter'] else None, Context(line=data['context_line'], column=data['context_column']), init_type) @@ -542,7 +519,7 @@ def _attribute_from_auto_attrib(ctx: 'mypy.plugin.ClassDefContext', has_rhs = not 
isinstance(rvalue, TempNode) sym = ctx.cls.info.names.get(name) init_type = sym.type if sym else None - return Attribute(name, ctx.cls.info, has_rhs, True, kw_only, Converter(), stmt, init_type) + return Attribute(name, ctx.cls.info, has_rhs, True, kw_only, None, stmt, init_type) def _attribute_from_attrib_maker(ctx: 'mypy.plugin.ClassDefContext', @@ -613,40 +590,76 @@ def _attribute_from_attrib_maker(ctx: 'mypy.plugin.ClassDefContext', def _parse_converter(ctx: 'mypy.plugin.ClassDefContext', - converter: Optional[Expression]) -> Converter: + converter_expr: Optional[Expression]) -> Optional[Converter]: """Return the Converter object from an Expression.""" # TODO: Support complex converters, e.g. lambdas, calls, etc. - if converter: - if isinstance(converter, RefExpr) and converter.node: - if (isinstance(converter.node, FuncDef) - and converter.node.type - and isinstance(converter.node.type, FunctionLike)): - return Converter(converter.node.type) - elif (isinstance(converter.node, OverloadedFuncDef) - and is_valid_overloaded_converter(converter.node)): - return Converter(converter.node.type) - elif isinstance(converter.node, TypeInfo): - from mypy.checkmember import type_object_type # To avoid import cycle. - return Converter(type_object_type(converter.node, ctx.api.named_type)) - - if (isinstance(converter, CallExpr) - and isinstance(converter.callee, RefExpr) - and converter.callee.fullname in attr_optional_converters - and converter.args - and converter.args[0]): - # Special handling for attr.converters.optional(type) - # We extract the type and add make the init_args Optional in Attribute.argument - argument = _parse_converter(ctx, converter.args[0]) - argument.is_attr_converters_optional = True - return argument - + if not converter_expr: + return None + converter_info = Converter() + if (isinstance(converter_expr, CallExpr) + and isinstance(converter_expr.callee, RefExpr) + and converter_expr.callee.fullname in attr_optional_converters + and converter_expr.args + and converter_expr.args[0]): + # Special handling for attr.converters.optional(type) + # We extract the type and add make the init_args Optional in Attribute.argument + converter_expr = converter_expr.args[0] + is_attr_converters_optional = True + else: + is_attr_converters_optional = False + + converter_type: Optional[Type] = None + if isinstance(converter_expr, RefExpr) and converter_expr.node: + if isinstance(converter_expr.node, FuncDef): + if converter_expr.node.type and isinstance(converter_expr.node.type, FunctionLike): + converter_type = converter_expr.node.type + else: # The converter is an unannotated function. + converter_info.init_type = AnyType(TypeOfAny.unannotated) + return converter_info + elif (isinstance(converter_expr.node, OverloadedFuncDef) + and is_valid_overloaded_converter(converter_expr.node)): + converter_type = converter_expr.node.type + elif isinstance(converter_expr.node, TypeInfo): + from mypy.checkmember import type_object_type # To avoid import cycle. + converter_type = type_object_type(converter_expr.node, ctx.api.named_type) + if isinstance(converter_expr, LambdaExpr): + # TODO: should we send a fail if converter_expr.min_args > 1? + converter_info.init_type = AnyType(TypeOfAny.unannotated) + return converter_info + + if not converter_type: # Signal that we have an unsupported converter. 
ctx.api.fail( - "Unsupported converter, only named functions and types are currently supported", - converter + "Unsupported converter, only named functions, types and lambdas are currently " + "supported", + converter_expr ) - return Converter(None, is_invalid_converter=True) - return Converter(None) + converter_info.init_type = AnyType(TypeOfAny.from_error) + return converter_info + + converter_type = get_proper_type(converter_type) + if isinstance(converter_type, CallableType) and converter_type.arg_types: + converter_info.init_type = converter_type.arg_types[0] + elif isinstance(converter_type, Overloaded): + types: List[Type] = [] + for item in converter_type.items: + # Walk the overloads looking for methods that can accept one argument. + num_arg_types = len(item.arg_types) + if not num_arg_types: + continue + if num_arg_types > 1 and any(kind == ARG_POS for kind in item.arg_kinds[1:]): + continue + types.append(item.arg_types[0]) + # Make a union of all the valid types. + if types: + converter_info.init_type = make_simplified_union(types) + + if is_attr_converters_optional and converter_info.init_type: + # If the converter was attr.converter.optional(type) then add None to + # the allowed init_type. + converter_info.init_type = UnionType.make_union([converter_info.init_type, NoneType()]) + + return converter_info def is_valid_overloaded_converter(defn: OverloadedFuncDef) -> bool: diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index 021be93bdd21..4e09e10a6726 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -878,9 +878,9 @@ def factory(default: int): ... @attr.s class C: - x: str = attr.ib(converter=thing.do_it) # E: Unsupported converter, only named functions and types are currently supported - y: str = attr.ib(converter=lambda x: x) # E: Unsupported converter, only named functions and types are currently supported - z: str = attr.ib(converter=factory(8)) # E: Unsupported converter, only named functions and types are currently supported + x: str = attr.ib(converter=thing.do_it) # E: Unsupported converter, only named functions, types and lambdas are currently supported + y: str = attr.ib(converter=lambda x: x) + z: str = attr.ib(converter=factory(8)) # E: Unsupported converter, only named functions, types and lambdas are currently supported reveal_type(C) # N: Revealed type is "def (x: Any, y: Any, z: Any) -> __main__.C" [builtins fixtures/list.pyi] @@ -1731,10 +1731,39 @@ class C: name: Union[str, None] = attr.ib(default=None) options: Mapping[str, Mapping[str, Any]] = attr.ib( default=None, converter=default_if_none(factory=dict) \ - # E: Unsupported converter, only named functions and types are currently supported + # E: Unsupported converter, only named functions, types and lambdas are currently supported ) [builtins fixtures/dict.pyi] +[case testAttrsUnannotatedConverter] +import attr + +def foo(value): + return value.split() + +@attr.s +class Bar: + field = attr.ib(default=None, converter=foo) + +reveal_type(Bar) # N: Revealed type is "def (field: Any =) -> __main__.Bar" +bar = Bar("Hello") +reveal_type(bar.field) # N: Revealed type is "Any" + +[builtins fixtures/tuple.pyi] + +[case testAttrsLambdaConverter] +import attr + +@attr.s +class Bar: + name: str = attr.ib(converter=lambda s: s.lower()) + +reveal_type(Bar) # N: Revealed type is "def (name: Any) -> __main__.Bar" +bar = Bar("Hello") +reveal_type(bar.name) # N: Revealed type is "builtins.str" + +[builtins fixtures/tuple.pyi] + [case testAttrsNestedClass] from 
typing import List import attr From e046e20882916f7c18b20cf7502b8c420fbe859b Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 24 Jun 2022 05:07:49 -0700 Subject: [PATCH 58/80] Disallow undesirable implicit reexport with ImportFrom (#12704) Fixes #12689 We always hid e.g. `import concurrent`, but looks like mypy never hid `from concurrent import futures`. It's possible this fix is pretty breaking for users, let's see what primer thinks. I last touched this logic in #11707, which fixed cases involving implicitly reexported symbols that shared the name of a module Co-authored-by: hauntsaninja <> --- mypy/semanal.py | 12 ++++++++---- test-data/unit/check-modules.test | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index e00913a8cde4..684d1f0601ab 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1900,10 +1900,14 @@ def process_imported_symbol(self, fullname: str, module_public: bool, context: ImportBase) -> None: - module_hidden = not module_public and not ( - # `from package import module` should work regardless of whether package - # re-exports module - isinstance(node.node, MypyFile) and fullname in self.modules + module_hidden = not module_public and ( + # `from package import submodule` should work regardless of whether package + # re-exports submodule, so we shouldn't hide it + not isinstance(node.node, MypyFile) + or fullname not in self.modules + # but given `from somewhere import random_unrelated_module` we should hide + # random_unrelated_module + or not fullname.startswith(self.cur_mod_id + ".") ) if isinstance(node.node, PlaceholderNode): diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 67767a9114e1..609a1b8ce0b0 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1929,6 +1929,28 @@ from package import mod as mod from package import mod as internal_detail [builtins fixtures/module.pyi] +[case testNoReExportUnrelatedModule] +from mod2 import unrelated # E: Module "mod2" has no attribute "unrelated" + +[file mod1/__init__.pyi] +[file mod1/unrelated.pyi] +x: int + +[file mod2.pyi] +from mod1 import unrelated +[builtins fixtures/module.pyi] + +[case testNoReExportUnrelatedSiblingPrefix] +from pkg.unrel import unrelated # E: Module "pkg.unrel" has no attribute "unrelated" + +[file pkg/__init__.pyi] +[file pkg/unrelated.pyi] +x: int + +[file pkg/unrel.pyi] +from pkg import unrelated +[builtins fixtures/module.pyi] + [case testNoReExportChildStubs] import mod from mod import C, D # E: Module "mod" has no attribute "C" From 203bd64d8c14e8ddbed7911eb6f8f8aa61d728de Mon Sep 17 00:00:00 2001 From: Wesley Collin Wright Date: Fri, 24 Jun 2022 13:14:14 +0000 Subject: [PATCH 59/80] Replace hard crash with typecheck error when subclass method has the same name as type alias (#13015) Work on #5425 Co-authored-by: Wesley Wright --- mypy/checker.py | 3 ++- test-data/unit/check-classes.test | 28 ++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/mypy/checker.py b/mypy/checker.py index 39f3fa42942d..62dd15da896c 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1600,7 +1600,8 @@ def check_method_override_for_base_with_name( else: original_type = NoneType() else: - assert False, str(base_attr.node) + # Will always fail to typecheck below, since we know the node is a method + original_type = NoneType() if isinstance(original_node, (FuncDef, 
OverloadedFuncDef)): original_class_or_static = original_node.is_class or original_node.is_static elif isinstance(original_node, Decorator): diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 5c1e8dfa44f4..ee560de89208 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -7300,3 +7300,31 @@ def identity_wrapper(func: FuncT) -> FuncT: def foo(self: Any) -> str: return "" +[case testParentClassWithTypeAliasAndSubclassWithMethod] +from typing import Any, Callable, TypeVar + +class Parent: + foo = Callable[..., int] + class bar: + pass + import typing as baz + foobar = TypeVar("foobar") + +class Child(Parent): + def foo(self, val: int) -> int: # E: Signature of "foo" incompatible with supertype "Parent" + return val + def bar(self, val: str) -> str: # E: Signature of "bar" incompatible with supertype "Parent" + return val + def baz(self, val: float) -> float: # E: Signature of "baz" incompatible with supertype "Parent" + return val + def foobar(self) -> bool: # E: Signature of "foobar" incompatible with supertype "Parent" + return False + +x: Parent.foo = lambda: 5 +y: Parent.bar = Parent.bar() +z: Parent.baz.Any = 1 +child = Child() +a: int = child.foo(1) +b: str = child.bar("abc") +c: float = child.baz(3.4) +d: bool = child.foobar() From f663828a66900a438c4a4be6f036c95c46636551 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 26 Jun 2022 13:18:59 +0530 Subject: [PATCH 60/80] stubtest: better formatting for runtime objects (#13023) Co-authored-by: hauntsaninja <> --- mypy/stubtest.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index a85e9335a60d..2fbd0027c8b0 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -55,6 +55,12 @@ def _style(message: str, **kwargs: Any) -> str: return _formatter.style(message, **kwargs) +def _truncate(message: str, length: int) -> str: + if len(message) > length: + return message[:length - 3] + "..." 
+ return message + + class StubtestFailure(Exception): pass @@ -86,7 +92,7 @@ def __init__( self.stub_object = stub_object self.runtime_object = runtime_object self.stub_desc = stub_desc or str(getattr(stub_object, "type", stub_object)) - self.runtime_desc = runtime_desc or str(runtime_object) + self.runtime_desc = runtime_desc or _truncate(repr(runtime_object), 100) def is_missing_stub(self) -> bool: """Whether or not the error is for something missing from the stub.""" From 1c1f3495772a4f02c7a0e646466d2c35a69c9813 Mon Sep 17 00:00:00 2001 From: bruno messias Date: Sun, 26 Jun 2022 11:49:10 -0300 Subject: [PATCH 61/80] Exposes `end_col_offset` attr from python AST (#12972) Co-authored-by: Jelle Zijlstra --- mypy/fastparse.py | 2 ++ mypy/fastparse2.py | 1 + mypy/nodes.py | 28 +++++++++++++++++++--------- 3 files changed, 22 insertions(+), 9 deletions(-) diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 242b6d260c1e..b5b31a60b539 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -373,6 +373,8 @@ def set_line(self, node: N, n: AstNode) -> N: node.line = n.lineno node.column = n.col_offset node.end_line = getattr(n, "end_lineno", None) if isinstance(n, ast3.expr) else None + node.end_column = getattr(n, "end_col_offset", None) if isinstance(n, ast3.expr) else None + return node def translate_opt_expr_list(self, l: Sequence[Optional[AST]]) -> List[Optional[Expression]]: diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py index e42a1e3c52c5..cc8d9599b741 100644 --- a/mypy/fastparse2.py +++ b/mypy/fastparse2.py @@ -599,6 +599,7 @@ def visit_ClassDef(self, n: ast27.ClassDef) -> ClassDef: cdef.line = n.lineno + len(n.decorator_list) cdef.column = n.col_offset cdef.end_line = n.lineno + cdef.end_column = None self.class_and_function_stack.pop() return cdef diff --git a/mypy/nodes.py b/mypy/nodes.py index abc8666e390d..f54564154e2c 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -23,17 +23,19 @@ class Context: """Base type for objects that are valid as error message locations.""" - __slots__ = ('line', 'column', 'end_line') + __slots__ = ('line', 'column', 'end_line', 'end_column') def __init__(self, line: int = -1, column: int = -1) -> None: self.line = line self.column = column self.end_line: Optional[int] = None + self.end_column: Optional[int] = None def set_line(self, target: Union['Context', int], column: Optional[int] = None, - end_line: Optional[int] = None) -> None: + end_line: Optional[int] = None, + end_column: Optional[int] = None) -> None: """If target is a node, pull line (and column) information into this node. If column is specified, this will override any column information coming from a node. @@ -44,6 +46,7 @@ def set_line(self, self.line = target.line self.column = target.column self.end_line = target.end_line + self.end_column = target.end_column if column is not None: self.column = column @@ -51,6 +54,9 @@ def set_line(self, if end_line is not None: self.end_line = end_line + if end_column is not None: + self.end_column = end_column + def get_line(self) -> int: """Don't use. 
Use x.line.""" return self.line @@ -631,13 +637,16 @@ def __init__(self, def set_line(self, target: Union[Context, int], column: Optional[int] = None, - end_line: Optional[int] = None) -> None: - super().set_line(target, column, end_line) + end_line: Optional[int] = None, + end_column: Optional[int] = None) -> None: + super().set_line(target, column, end_line, end_column) if self.initializer and self.initializer.line < 0: - self.initializer.set_line(self.line, self.column, self.end_line) + self.initializer.set_line( + self.line, self.column, self.end_line, self.end_column) - self.variable.set_line(self.line, self.column, self.end_line) + self.variable.set_line( + self.line, self.column, self.end_line, self.end_column) FUNCITEM_FLAGS: Final = FUNCBASE_FLAGS + [ @@ -698,10 +707,11 @@ def max_fixed_argc(self) -> int: def set_line(self, target: Union[Context, int], column: Optional[int] = None, - end_line: Optional[int] = None) -> None: - super().set_line(target, column, end_line) + end_line: Optional[int] = None, + end_column: Optional[int] = None) -> None: + super().set_line(target, column, end_line, end_column) for arg in self.arguments: - arg.set_line(self.line, self.column, self.end_line) + arg.set_line(self.line, self.column, self.end_line, end_column) def is_dynamic(self) -> bool: return self.type is None From a4ae0adb6fb0872042e0459f915254140df28b5e Mon Sep 17 00:00:00 2001 From: denballakh <47365157+denballakh@users.noreply.github.com> Date: Tue, 28 Jun 2022 08:50:02 +0300 Subject: [PATCH 62/80] Remove slots already defined in base class (#13028) --- mypy/nodes.py | 4 ++-- mypy/types.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index f54564154e2c..180d35eb9cf4 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -3101,7 +3101,7 @@ def f(x: B[T]) -> T: ... # without T, Any would be used here within functions that can't be looked up from the symbol table) """ __slots__ = ('target', '_fullname', 'alias_tvars', 'no_args', 'normalized', - 'line', 'column', '_is_recursive', 'eager') + '_is_recursive', 'eager') def __init__(self, target: 'mypy.types.Type', fullname: str, line: int, column: int, *, @@ -3209,7 +3209,7 @@ class C(Sequence[C]): ... something that can support general recursive types. """ - __slots__ = ('_fullname', 'node', 'line', 'becomes_typeinfo') + __slots__ = ('_fullname', 'node', 'becomes_typeinfo') def __init__(self, fullname: str, node: Node, line: int, *, becomes_typeinfo: bool = False) -> None: diff --git a/mypy/types.py b/mypy/types.py index f0f7add2d92f..f5cdb951daac 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -258,7 +258,7 @@ class Node: can be represented in a tree-like manner. """ - __slots__ = ('alias', 'args', 'line', 'column', 'type_ref') + __slots__ = ('alias', 'args', 'type_ref') def __init__(self, alias: Optional[mypy.nodes.TypeAlias], args: List[Type], line: int = -1, column: int = -1) -> None: @@ -1838,7 +1838,7 @@ class Overloaded(FunctionLike): implementation. """ - __slots__ = ('_items', 'fallback') + __slots__ = ('_items',) _items: List[CallableType] # Must not be empty From 1c03e1068f4b1f79aa760639cd12e631783dadcc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Perrin?= Date: Wed, 29 Jun 2022 13:26:30 +0100 Subject: [PATCH 63/80] Fix "attribute 'arguments' of 'FuncDef' undefined" incremental crash (#12324) When deserializing from cache, FuncDef.arguments is not set, so check before use. 
--- mypy/messages.py | 4 +++- mypy/nodes.py | 2 +- mypy/types.py | 19 +++++++++--------- test-data/unit/check-modules.test | 32 +++++++++++++++++++++++++++++++ 4 files changed, 45 insertions(+), 12 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 1d6641c00a61..628c2cbaf0a4 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1978,7 +1978,9 @@ def [T <: int] f(self, x: int, y: T) -> None s += ' = ...' # If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list - if isinstance(tp.definition, FuncDef) and tp.definition.name is not None: + if (isinstance(tp.definition, FuncDef) and + tp.definition.name is not None and + hasattr(tp.definition, 'arguments')): definition_args = [arg.variable.name for arg in tp.definition.arguments] if definition_args and tp.arg_names != definition_args \ and len(definition_args) > 0 and definition_args[0]: diff --git a/mypy/nodes.py b/mypy/nodes.py index 180d35eb9cf4..660adcc63053 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -658,7 +658,7 @@ def set_line(self, class FuncItem(FuncBase): """Base class for nodes usable as overloaded function items.""" - __slots__ = ('arguments', # Note that can be None if deserialized (type is a lie!) + __slots__ = ('arguments', # Note that can be unset if deserialized (type is a lie!) 'arg_names', # Names of arguments 'arg_kinds', # Kinds of arguments 'min_args', # Minimum number of arguments diff --git a/mypy/types.py b/mypy/types.py index f5cdb951daac..4c595d9105a1 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1524,16 +1524,15 @@ def __init__(self, # after serialization, but it is useful in error messages. # TODO: decide how to add more info here (file, line, column) # without changing interface hash. - self.def_extras = { - 'first_arg': ( - definition.arguments[0].variable.name - if (getattr(definition, 'arguments', None) - and definition.arg_names - and definition.info - and not definition.is_static) - else None - ), - } + first_arg: Optional[str] = None + if (definition.arg_names and + definition.info and + not definition.is_static): + if getattr(definition, 'arguments', None): + first_arg = definition.arguments[0].variable.name + else: + first_arg = definition.arg_names[0] + self.def_extras = {'first_arg': first_arg} else: self.def_extras = {} self.type_guard = type_guard diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 609a1b8ce0b0..17e5386a0b6d 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -3212,3 +3212,35 @@ from dir1 import * from .test2 import * [file dir1/test2.py] from test1 import aaaa # E: Module "test1" has no attribute "aaaa" + +[case testIncompatibleOverrideFromCachedModuleIncremental] +import b +[file a.py] +class Foo: + def frobnicate(self, *args, **kwargs): pass +[file b.py] +from a import Foo +class Bar(Foo): + def frobnicate(self) -> None: pass +[file b.py.2] +from a import Foo +class Bar(Foo): + def frobnicate(self, *args) -> None: pass +[file b.py.3] +from a import Foo +class Bar(Foo): + def frobnicate(self, *args) -> None: pass # type: ignore[override] # I know +[builtins fixtures/tuple.pyi] +[builtins fixtures/dict.pyi] +[out1] +tmp/b.py:3: error: Signature of "frobnicate" incompatible with supertype "Foo" +tmp/b.py:3: note: Superclass: +tmp/b.py:3: note: def frobnicate(self, *args: Any, **kwargs: Any) -> Any +tmp/b.py:3: note: Subclass: +tmp/b.py:3: note: def frobnicate(self) -> None +[out2] +tmp/b.py:3: error: Signature of "frobnicate" incompatible with supertype 
"Foo" +tmp/b.py:3: note: Superclass: +tmp/b.py:3: note: def frobnicate(self, *args: Any, **kwargs: Any) -> Any +tmp/b.py:3: note: Subclass: +tmp/b.py:3: note: def frobnicate(self, *args: Any) -> None From 914297e9486b141c01b34593938fdf423d892cef Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 29 Jun 2022 20:51:08 +0530 Subject: [PATCH 64/80] stubtest: find submodules missing from stubs (#13030) Co-authored-by: hauntsaninja <> --- mypy/stubtest.py | 37 ++++++++++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 9 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 2fbd0027c8b0..3928ee009f7f 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -10,6 +10,7 @@ import importlib import inspect import os +import pkgutil import re import sys import types @@ -164,6 +165,17 @@ def get_description(self, concise: bool = False) -> str: # Core logic # ==================== +def silent_import_module(module_name: str) -> types.ModuleType: + with open(os.devnull, "w") as devnull: + with warnings.catch_warnings(), redirect_stdout(devnull), redirect_stderr(devnull): + warnings.simplefilter("ignore") + runtime = importlib.import_module(module_name) + # Also run the equivalent of `from module import *` + # This could have the additional effect of loading not-yet-loaded submodules + # mentioned in __all__ + __import__(module_name, fromlist=["*"]) + return runtime + def test_module(module_name: str) -> Iterator[Error]: """Tests a given module's stub against introspecting it at runtime. @@ -175,18 +187,14 @@ def test_module(module_name: str) -> Iterator[Error]: """ stub = get_stub(module_name) if stub is None: - yield Error([module_name], "failed to find stubs", MISSING, None, runtime_desc="N/A") + runtime_desc = repr(sys.modules[module_name]) if module_name in sys.modules else "N/A" + yield Error( + [module_name], "failed to find stubs", MISSING, None, runtime_desc=runtime_desc + ) return try: - with open(os.devnull, "w") as devnull: - with warnings.catch_warnings(), redirect_stdout(devnull), redirect_stderr(devnull): - warnings.simplefilter("ignore") - runtime = importlib.import_module(module_name) - # Also run the equivalent of `from module import *` - # This could have the additional effect of loading not-yet-loaded submodules - # mentioned in __all__ - __import__(module_name, fromlist=["*"]) + runtime = silent_import_module(module_name) except Exception as e: yield Error([module_name], f"failed to import, {type(e).__name__}: {e}", stub, MISSING) return @@ -1289,7 +1297,18 @@ def build_stubs(modules: List[str], options: Options, find_submodules: bool = Fa else: found_sources = find_module_cache.find_modules_recursive(module) sources.extend(found_sources) + # find submodules via mypy all_modules.extend(s.module for s in found_sources if s.module not in all_modules) + # find submodules via pkgutil + try: + runtime = silent_import_module(module) + all_modules.extend( + m.name + for m in pkgutil.walk_packages(runtime.__path__, runtime.__name__ + ".") + if m.name not in all_modules + ) + except Exception: + pass if sources: try: From 86aefb14ffb92975ccd312f12c65919b26002c8d Mon Sep 17 00:00:00 2001 From: Richard Si <63936253+ichard26@users.noreply.github.com> Date: Sun, 3 Jul 2022 17:15:14 -0400 Subject: [PATCH 65/80] [mypyc] Add LoadAddress primitive op for PySet_Type & PyFrozenSet_Type (#13057) This also fixes https://github.com/mypyc/mypyc/issues/917 RE above, the root issue is that mypyc didn't know builtins.set was a built-in 
name, so it guessed it comes from the module globals. This didn't blow up anything up somehow... until the dataclasses commit[^1] which made the `__annotations__` logic for dataclasses try to better preserve the type annotations (previously they would be erased to builtins.type). This new logic would use `load_type` to load `builtins.set` (so it can be put in `__annotations__`) which went poorly as only types registered with `load_address_op` are considered built-ins. [^1]: https://github.com/python/mypy/commit/1bcfc041bb767ee93e90676b0a61f3e40267e858 --- mypyc/primitives/set_ops.py | 16 +++++++++++++++- mypyc/test-data/fixtures/ir.py | 10 ++++++++-- mypyc/test-data/run-python37.test | 10 ++++++++-- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/mypyc/primitives/set_ops.py b/mypyc/primitives/set_ops.py index 70d59d749070..5d18e45ad528 100644 --- a/mypyc/primitives/set_ops.py +++ b/mypyc/primitives/set_ops.py @@ -1,6 +1,8 @@ """Primitive set (and frozenset) ops.""" -from mypyc.primitives.registry import function_op, method_op, binary_op, ERR_NEG_INT +from mypyc.primitives.registry import ( + load_address_op, function_op, method_op, binary_op, ERR_NEG_INT +) from mypyc.ir.ops import ERR_MAGIC, ERR_FALSE from mypyc.ir.rtypes import ( object_rprimitive, bool_rprimitive, set_rprimitive, c_int_rprimitive, pointer_rprimitive, @@ -8,6 +10,18 @@ ) +# Get the 'builtins.set' type object. +load_address_op( + name='builtins.set', + type=object_rprimitive, + src='PySet_Type') + +# Get the 'builtins.frozenset' tyoe object. +load_address_op( + name='builtins.frozenset', + type=object_rprimitive, + src='PyFrozenSet_Type') + # Construct an empty set. new_set_op = function_op( name='builtins.set', diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index a6914ccc36e5..d8c4333cafad 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -3,7 +3,7 @@ from typing import ( TypeVar, Generic, List, Iterator, Iterable, Dict, Optional, Tuple, Any, Set, - overload, Mapping, Union, Callable, Sequence, + overload, Mapping, Union, Callable, Sequence, FrozenSet ) T = TypeVar('T') @@ -211,7 +211,13 @@ def discard(self, x: T) -> None: pass def clear(self) -> None: pass def pop(self) -> T: pass def update(self, x: Iterable[S]) -> None: pass - def __or__(self, s: Set[S]) -> Set[Union[T, S]]: ... + def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... + +class frozenset(Generic[T]): + def __init__(self, i: Optional[Iterable[T]] = None) -> None: pass + def __iter__(self) -> Iterator[T]: pass + def __len__(self) -> int: pass + def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... 
class slice: pass diff --git a/mypyc/test-data/run-python37.test b/mypyc/test-data/run-python37.test index 734e116c1335..5bf2c29263e1 100644 --- a/mypyc/test-data/run-python37.test +++ b/mypyc/test-data/run-python37.test @@ -3,7 +3,7 @@ [case testRunDataclass] import dataclasses from dataclasses import dataclass, field -from typing import Set, List, Callable, Any +from typing import Set, FrozenSet, List, Callable, Any @dataclass class Person1: @@ -68,8 +68,13 @@ class Person4: def name(self) -> str: return self._name +@dataclass +class Person5: + friends: Set[str] = field(default_factory=set) + parents: FrozenSet[str] = frozenset() + [file other.py] -from native import Person1, Person1b, Person2, Person3, Person4, testBool +from native import Person1, Person1b, Person2, Person3, Person4, Person5, testBool i1 = Person1(age = 5, name = 'robot') assert i1.age == 5 assert i1.name == 'robot' @@ -117,6 +122,7 @@ assert i8 > i9 assert Person1.__annotations__ == {'age': int, 'name': str} assert Person2.__annotations__ == {'age': int, 'name': str} +assert Person5.__annotations__ == {'friends': set, 'parents': frozenset} [file driver.py] import sys From eaf60916a494ff8d36cab05331205d2d4bfe38bc Mon Sep 17 00:00:00 2001 From: Poruri Sai Rahul Date: Wed, 6 Jul 2022 15:05:03 +0100 Subject: [PATCH 66/80] docs: remove duplicate word (#13080) --- docs/source/running_mypy.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index caf05dcdf258..afcc8c588ab3 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -205,7 +205,7 @@ will continue to be of type ``Any``. 1. To suppress a *single* missing import error, add a ``# type: ignore`` at the end of the line containing the import. -2. To suppress *all* missing import imports errors from a single library, add +2. To suppress *all* missing import errors from a single library, add a section to your :ref:`mypy config file ` for that library setting :confval:`ignore_missing_imports` to True. For example, suppose your codebase makes heavy use of an (untyped) library named ``foobar``. 
You can silence From 7a6ecd3c96b61a9df59fa580acade0fde2a77d90 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 8 Jul 2022 16:07:54 +0530 Subject: [PATCH 67/80] Sync typeshed (#12982) * Sync typeshed Source commit: https://github.com/python/typeshed/commit/91d6383d9d1ca38157ce46bb498a11347658db1d * fix tests Co-authored-by: hauntsaninja <> --- mypy/typeshed/stdlib/@python2/ssl.pyi | 7 +- mypy/typeshed/stdlib/VERSIONS | 1 + mypy/typeshed/stdlib/__future__.pyi | 40 +- mypy/typeshed/stdlib/_ast.pyi | 2 + mypy/typeshed/stdlib/_codecs.pyi | 63 +- mypy/typeshed/stdlib/_dummy_thread.pyi | 10 + mypy/typeshed/stdlib/_dummy_threading.pyi | 74 +-- mypy/typeshed/stdlib/_imp.pyi | 10 +- mypy/typeshed/stdlib/_pydecimal.pyi | 118 ++-- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 22 +- mypy/typeshed/stdlib/_weakrefset.pyi | 7 +- mypy/typeshed/stdlib/argparse.pyi | 131 +++-- mypy/typeshed/stdlib/array.pyi | 20 +- mypy/typeshed/stdlib/ast.pyi | 3 + mypy/typeshed/stdlib/asyncio/__init__.pyi | 1 + mypy/typeshed/stdlib/asyncio/base_events.pyi | 147 ++++- mypy/typeshed/stdlib/asyncio/constants.pyi | 4 + mypy/typeshed/stdlib/asyncio/events.pyi | 202 ++++++- mypy/typeshed/stdlib/asyncio/exceptions.pyi | 32 +- mypy/typeshed/stdlib/asyncio/futures.pyi | 1 + mypy/typeshed/stdlib/asyncio/locks.pyi | 59 +- mypy/typeshed/stdlib/asyncio/mixins.pyi | 4 +- mypy/typeshed/stdlib/asyncio/protocols.pyi | 3 +- mypy/typeshed/stdlib/asyncio/queues.pyi | 6 +- mypy/typeshed/stdlib/asyncio/runners.pyi | 27 +- mypy/typeshed/stdlib/asyncio/sslproto.pyi | 74 ++- mypy/typeshed/stdlib/asyncio/streams.pyi | 5 + mypy/typeshed/stdlib/asyncio/taskgroups.pyi | 5 +- mypy/typeshed/stdlib/asyncio/tasks.pyi | 18 +- mypy/typeshed/stdlib/asyncio/timeouts.pyi | 19 + mypy/typeshed/stdlib/asyncio/trsock.pyi | 2 +- mypy/typeshed/stdlib/base64.pyi | 65 +-- mypy/typeshed/stdlib/bdb.pyi | 5 + mypy/typeshed/stdlib/binascii.pyi | 7 +- mypy/typeshed/stdlib/builtins.pyi | 213 +++++-- mypy/typeshed/stdlib/calendar.pyi | 88 +-- mypy/typeshed/stdlib/cgi.pyi | 50 +- mypy/typeshed/stdlib/codecs.pyi | 98 +--- mypy/typeshed/stdlib/collections/__init__.pyi | 45 +- .../stdlib/concurrent/futures/process.pyi | 48 +- mypy/typeshed/stdlib/contextlib.pyi | 80 +-- mypy/typeshed/stdlib/contextvars.pyi | 5 +- mypy/typeshed/stdlib/csv.pyi | 11 +- mypy/typeshed/stdlib/ctypes/__init__.pyi | 2 +- mypy/typeshed/stdlib/dataclasses.pyi | 67 ++- mypy/typeshed/stdlib/difflib.pyi | 11 +- mypy/typeshed/stdlib/dis.pyi | 28 +- .../stdlib/distutils/command/check.pyi | 3 +- mypy/typeshed/stdlib/distutils/filelist.pyi | 12 +- mypy/typeshed/stdlib/enum.pyi | 67 ++- mypy/typeshed/stdlib/filecmp.pyi | 14 +- mypy/typeshed/stdlib/fileinput.pyi | 240 +++++++- mypy/typeshed/stdlib/fractions.pyi | 7 +- mypy/typeshed/stdlib/ftplib.pyi | 2 +- mypy/typeshed/stdlib/functools.pyi | 69 +-- mypy/typeshed/stdlib/genericpath.pyi | 4 +- mypy/typeshed/stdlib/gettext.pyi | 141 ++--- mypy/typeshed/stdlib/graphlib.pyi | 7 +- mypy/typeshed/stdlib/hmac.pyi | 2 +- mypy/typeshed/stdlib/imaplib.pyi | 2 +- mypy/typeshed/stdlib/importlib/abc.pyi | 16 + .../stdlib/importlib/metadata/__init__.pyi | 41 +- .../stdlib/importlib/metadata/_meta.pyi | 6 +- mypy/typeshed/stdlib/importlib/resources.pyi | 42 +- mypy/typeshed/stdlib/io.pyi | 64 +-- mypy/typeshed/stdlib/itertools.pyi | 2 +- .../stdlib/lib2to3/pgen2/tokenize.pyi | 216 +++---- mypy/typeshed/stdlib/locale.pyi | 8 +- mypy/typeshed/stdlib/logging/__init__.pyi | 20 +- mypy/typeshed/stdlib/logging/handlers.pyi | 3 + 
mypy/typeshed/stdlib/macpath.pyi | 35 ++ mypy/typeshed/stdlib/macurl2path.pyi | 2 + mypy/typeshed/stdlib/mailbox.pyi | 7 +- mypy/typeshed/stdlib/mmap.pyi | 2 +- mypy/typeshed/stdlib/modulefinder.pyi | 13 +- .../stdlib/multiprocessing/__init__.pyi | 118 ++-- .../stdlib/multiprocessing/managers.pyi | 82 ++- .../stdlib/multiprocessing/shared_memory.pyi | 7 +- mypy/typeshed/stdlib/ntpath.pyi | 131 ++--- mypy/typeshed/stdlib/operator.pyi | 172 ++---- mypy/typeshed/stdlib/optparse.pyi | 5 +- mypy/typeshed/stdlib/os/__init__.pyi | 13 +- mypy/typeshed/stdlib/pdb.pyi | 6 +- mypy/typeshed/stdlib/pickle.pyi | 247 +++----- mypy/typeshed/stdlib/posixpath.pyi | 39 +- mypy/typeshed/stdlib/pydoc.pyi | 42 +- mypy/typeshed/stdlib/pyexpat/__init__.pyi | 3 + mypy/typeshed/stdlib/pyexpat/errors.pyi | 9 + mypy/typeshed/stdlib/random.pyi | 86 +-- mypy/typeshed/stdlib/re.pyi | 129 ++--- mypy/typeshed/stdlib/shutil.pyi | 12 +- mypy/typeshed/stdlib/smtplib.pyi | 49 +- mypy/typeshed/stdlib/socket.pyi | 64 ++- mypy/typeshed/stdlib/socketserver.pyi | 48 +- mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 18 +- mypy/typeshed/stdlib/sre_constants.pyi | 3 +- mypy/typeshed/stdlib/sre_parse.pyi | 8 +- mypy/typeshed/stdlib/ssl.pyi | 7 +- mypy/typeshed/stdlib/statistics.pyi | 76 +-- mypy/typeshed/stdlib/string.pyi | 3 + mypy/typeshed/stdlib/subprocess.pyi | 246 ++++---- mypy/typeshed/stdlib/symtable.pyi | 7 +- mypy/typeshed/stdlib/sys.pyi | 3 + mypy/typeshed/stdlib/tarfile.pyi | 4 +- mypy/typeshed/stdlib/tempfile.pyi | 130 +++-- mypy/typeshed/stdlib/termios.pyi | 3 + mypy/typeshed/stdlib/threading.pyi | 114 ++-- mypy/typeshed/stdlib/tkinter/__init__.pyi | 70 +-- mypy/typeshed/stdlib/tkinter/ttk.pyi | 82 +-- mypy/typeshed/stdlib/token.pyi | 351 +++--------- mypy/typeshed/stdlib/tokenize.pyi | 376 +++--------- mypy/typeshed/stdlib/traceback.pyi | 47 +- mypy/typeshed/stdlib/turtle.pyi | 21 +- mypy/typeshed/stdlib/types.pyi | 332 +++++------ mypy/typeshed/stdlib/typing.pyi | 540 +++--------------- mypy/typeshed/stdlib/typing_extensions.pyi | 4 +- mypy/typeshed/stdlib/unicodedata.pyi | 3 + mypy/typeshed/stdlib/unittest/__init__.pyi | 80 +-- mypy/typeshed/stdlib/unittest/async_case.pyi | 8 + mypy/typeshed/stdlib/unittest/case.pyi | 72 ++- mypy/typeshed/stdlib/unittest/mock.pyi | 8 +- mypy/typeshed/stdlib/urllib/parse.pyi | 3 +- mypy/typeshed/stdlib/urllib/request.pyi | 9 + .../typeshed/stdlib/xml/etree/ElementTree.pyi | 129 ++--- mypy/typeshed/stdlib/xml/sax/handler.pyi | 40 +- mypy/typeshed/stdlib/xmlrpc/server.pyi | 5 +- mypy/typeshed/stdlib/zipfile.pyi | 82 +-- .../stubs/mypy-extensions/mypy_extensions.pyi | 38 +- test-data/unit/cmdline.test | 14 +- 129 files changed, 3480 insertions(+), 3705 deletions(-) create mode 100644 mypy/typeshed/stdlib/asyncio/timeouts.pyi diff --git a/mypy/typeshed/stdlib/@python2/ssl.pyi b/mypy/typeshed/stdlib/@python2/ssl.pyi index 2c6b32567249..edc22ff1515a 100644 --- a/mypy/typeshed/stdlib/@python2/ssl.pyi +++ b/mypy/typeshed/stdlib/@python2/ssl.pyi @@ -225,7 +225,12 @@ class SSLContext: def load_verify_locations( self, cafile: StrPath | None = ..., capath: StrPath | None = ..., cadata: Text | bytes | None = ... ) -> None: ... - def get_ca_certs(self, binary_form: bool = ...) -> list[_PeerCertRetDictType] | list[bytes]: ... + @overload + def get_ca_certs(self, binary_form: Literal[False] = ...) -> list[_PeerCertRetDictType]: ... + @overload + def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... + @overload + def get_ca_certs(self, binary_form: bool = ...) -> Any: ... 
def set_default_verify_paths(self) -> None: ... def set_ciphers(self, __cipherlist: str) -> None: ... def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None: ... diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS index eefc7b895436..acf392d97816 100644 --- a/mypy/typeshed/stdlib/VERSIONS +++ b/mypy/typeshed/stdlib/VERSIONS @@ -70,6 +70,7 @@ asyncio.runners: 3.7- asyncio.staggered: 3.8- asyncio.taskgroups: 3.11- asyncio.threads: 3.9- +asyncio.timeouts: 3.11- asyncio.trsock: 3.8- asyncore: 2.7- atexit: 2.7- diff --git a/mypy/typeshed/stdlib/__future__.pyi b/mypy/typeshed/stdlib/__future__.pyi index 1a465c3e213d..52941a0c5229 100644 --- a/mypy/typeshed/stdlib/__future__.pyi +++ b/mypy/typeshed/stdlib/__future__.pyi @@ -21,30 +21,18 @@ if sys.version_info >= (3, 7): all_feature_names: list[str] # undocumented +__all__ = [ + "all_feature_names", + "absolute_import", + "division", + "generators", + "nested_scopes", + "print_function", + "unicode_literals", + "with_statement", + "barry_as_FLUFL", + "generator_stop", +] + if sys.version_info >= (3, 7): - __all__ = [ - "all_feature_names", - "absolute_import", - "division", - "generators", - "nested_scopes", - "print_function", - "unicode_literals", - "with_statement", - "barry_as_FLUFL", - "generator_stop", - "annotations", - ] -else: - __all__ = [ - "all_feature_names", - "absolute_import", - "division", - "generators", - "nested_scopes", - "print_function", - "unicode_literals", - "with_statement", - "barry_as_FLUFL", - "generator_stop", - ] + __all__ += ["annotations"] diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi index 1305b0c94d9b..81cb9ffbf26e 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -415,6 +415,8 @@ class Tuple(expr): __match_args__ = ("elts", "ctx") elts: list[expr] ctx: expr_context + if sys.version_info >= (3, 9): + dims: list[expr] class expr_context(AST): ... diff --git a/mypy/typeshed/stdlib/_codecs.pyi b/mypy/typeshed/stdlib/_codecs.pyi index e335f6d5119a..8fabf94d827e 100644 --- a/mypy/typeshed/stdlib/_codecs.pyi +++ b/mypy/typeshed/stdlib/_codecs.pyi @@ -1,22 +1,71 @@ import codecs import sys from collections.abc import Callable -from typing import Any -from typing_extensions import TypeAlias +from typing import overload +from typing_extensions import Literal, TypeAlias # This type is not exposed; it is defined in unicodeobject.c class _EncodingMap: def size(self) -> int: ... _MapT: TypeAlias = dict[int, int] | _EncodingMap -_Handler: TypeAlias = Callable[[Exception], tuple[str, int]] +_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]] +_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None] + +def register(__search_function: _SearchFunction) -> None: ... + +if sys.version_info >= (3, 10): + def unregister(__search_function: _SearchFunction) -> None: ... -def register(__search_function: Callable[[str], Any]) -> None: ... def register_error(__errors: str, __handler: _Handler) -> None: ... -def lookup(__encoding: str) -> codecs.CodecInfo: ... def lookup_error(__name: str) -> _Handler: ... -def decode(obj: Any, encoding: str = ..., errors: str | None = ...) -> Any: ... -def encode(obj: Any, encoding: str = ..., errors: str | None = ...) -> Any: ... 
+ +# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 +# https://docs.python.org/3/library/codecs.html#binary-transforms +_BytesToBytesEncoding: TypeAlias = Literal[ + "base64", + "base_64", + "base64_codec", + "bz2", + "bz2_codec", + "hex", + "hex_codec", + "quopri", + "quotedprintable", + "quoted_printable", + "quopri_codec", + "uu", + "uu_codec", + "zip", + "zlib", + "zlib_codec", +] +# https://docs.python.org/3/library/codecs.html#text-transforms +_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] + +@overload +def encode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... +@overload +def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] +@overload +def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... +@overload +def decode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] +@overload +def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... + +# these are documented as text encodings but in practice they also accept str as input +@overload +def decode( + obj: str, encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], errors: str = ... +) -> str: ... + +# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str +@overload +def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... +@overload +def decode(obj: bytes, encoding: str = ..., errors: str = ...) -> str: ... +def lookup(__encoding: str) -> codecs.CodecInfo: ... def charmap_build(__map: str) -> _MapT: ... def ascii_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... diff --git a/mypy/typeshed/stdlib/_dummy_thread.pyi b/mypy/typeshed/stdlib/_dummy_thread.pyi index f257b758eeab..4bcf84964add 100644 --- a/mypy/typeshed/stdlib/_dummy_thread.pyi +++ b/mypy/typeshed/stdlib/_dummy_thread.pyi @@ -1,7 +1,13 @@ +import sys from collections.abc import Callable from types import TracebackType from typing import Any, NoReturn +__all__ = ["error", "start_new_thread", "exit", "get_ident", "allocate_lock", "interrupt_main", "LockType"] + +if sys.version_info >= (3, 7): + __all__ += ["RLock"] + TIMEOUT_MAX: int error = RuntimeError @@ -20,4 +26,8 @@ class LockType: def release(self) -> bool: ... def locked(self) -> bool: ... +if sys.version_info >= (3, 7): + class RLock(LockType): + def release(self) -> None: ... # type: ignore[override] + def interrupt_main() -> None: ... 
diff --git a/mypy/typeshed/stdlib/_dummy_threading.pyi b/mypy/typeshed/stdlib/_dummy_threading.pyi index 2daceaedd4ad..6f888b3dda70 100644 --- a/mypy/typeshed/stdlib/_dummy_threading.pyi +++ b/mypy/typeshed/stdlib/_dummy_threading.pyi @@ -10,56 +10,32 @@ _TF: TypeAlias = Callable[[FrameType, str, Any], Callable[..., Any] | None] _PF: TypeAlias = Callable[[FrameType, str, Any], None] _T = TypeVar("_T") +__all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", +] + if sys.version_info >= (3, 8): - __all__ = [ - "get_ident", - "active_count", - "Condition", - "current_thread", - "enumerate", - "main_thread", - "TIMEOUT_MAX", - "Event", - "Lock", - "RLock", - "Semaphore", - "BoundedSemaphore", - "Thread", - "Barrier", - "BrokenBarrierError", - "Timer", - "ThreadError", - "setprofile", - "settrace", - "local", - "stack_size", - "excepthook", - "ExceptHookArgs", - ] -else: - __all__ = [ - "get_ident", - "active_count", - "Condition", - "current_thread", - "enumerate", - "main_thread", - "TIMEOUT_MAX", - "Event", - "Lock", - "RLock", - "Semaphore", - "BoundedSemaphore", - "Thread", - "Barrier", - "BrokenBarrierError", - "Timer", - "ThreadError", - "setprofile", - "settrace", - "local", - "stack_size", - ] + __all__ += ["ExceptHookArgs", "excepthook"] def active_count() -> int: ... def current_thread() -> Thread: ... diff --git a/mypy/typeshed/stdlib/_imp.pyi b/mypy/typeshed/stdlib/_imp.pyi index e2bbb9385ceb..856188dfbcd2 100644 --- a/mypy/typeshed/stdlib/_imp.pyi +++ b/mypy/typeshed/stdlib/_imp.pyi @@ -1,10 +1,12 @@ import sys import types +from _typeshed import ReadableBuffer from importlib.machinery import ModuleSpec from typing import Any if sys.version_info >= (3, 7): check_hash_based_pycs: str + def source_hash(key: int, source: ReadableBuffer) -> bytes: ... def create_builtin(__spec: ModuleSpec) -> types.ModuleType: ... def create_dynamic(__spec: ModuleSpec, __file: Any = ...) -> types.ModuleType: ... @@ -12,10 +14,16 @@ def acquire_lock() -> None: ... def exec_builtin(__mod: types.ModuleType) -> int: ... def exec_dynamic(__mod: types.ModuleType) -> int: ... def extension_suffixes() -> list[str]: ... -def get_frozen_object(__name: str) -> types.CodeType: ... def init_frozen(__name: str) -> types.ModuleType: ... def is_builtin(__name: str) -> int: ... def is_frozen(__name: str) -> bool: ... def is_frozen_package(__name: str) -> bool: ... def lock_held() -> bool: ... def release_lock() -> None: ... + +if sys.version_info >= (3, 11): + def find_frozen(__name: str, *, withdata: bool = ...) -> tuple[memoryview | None, bool, str | None] | None: ... + def get_frozen_object(__name: str, __data: ReadableBuffer | None = ...) -> types.CodeType: ... + +else: + def get_frozen_object(__name: str) -> types.CodeType: ... 
diff --git a/mypy/typeshed/stdlib/_pydecimal.pyi b/mypy/typeshed/stdlib/_pydecimal.pyi index 90dbef1dc2e2..0d639bc164d4 100644 --- a/mypy/typeshed/stdlib/_pydecimal.pyi +++ b/mypy/typeshed/stdlib/_pydecimal.pyi @@ -4,82 +4,44 @@ import sys # However, in all likelihood, the differences are inconsequential from _decimal import * +__all__ = [ + "Decimal", + "Context", + "DecimalTuple", + "DefaultContext", + "BasicContext", + "ExtendedContext", + "DecimalException", + "Clamped", + "InvalidOperation", + "DivisionByZero", + "Inexact", + "Rounded", + "Subnormal", + "Overflow", + "Underflow", + "FloatOperation", + "DivisionImpossible", + "InvalidContext", + "ConversionSyntax", + "DivisionUndefined", + "ROUND_DOWN", + "ROUND_HALF_UP", + "ROUND_HALF_EVEN", + "ROUND_CEILING", + "ROUND_FLOOR", + "ROUND_UP", + "ROUND_HALF_DOWN", + "ROUND_05UP", + "setcontext", + "getcontext", + "localcontext", + "MAX_PREC", + "MAX_EMAX", + "MIN_EMIN", + "MIN_ETINY", + "HAVE_THREADS", +] + if sys.version_info >= (3, 7): - __all__ = [ - "Decimal", - "Context", - "DecimalTuple", - "DefaultContext", - "BasicContext", - "ExtendedContext", - "DecimalException", - "Clamped", - "InvalidOperation", - "DivisionByZero", - "Inexact", - "Rounded", - "Subnormal", - "Overflow", - "Underflow", - "FloatOperation", - "DivisionImpossible", - "InvalidContext", - "ConversionSyntax", - "DivisionUndefined", - "ROUND_DOWN", - "ROUND_HALF_UP", - "ROUND_HALF_EVEN", - "ROUND_CEILING", - "ROUND_FLOOR", - "ROUND_UP", - "ROUND_HALF_DOWN", - "ROUND_05UP", - "setcontext", - "getcontext", - "localcontext", - "MAX_PREC", - "MAX_EMAX", - "MIN_EMIN", - "MIN_ETINY", - "HAVE_THREADS", - "HAVE_CONTEXTVAR", - ] -else: - __all__ = [ - "Decimal", - "Context", - "DecimalTuple", - "DefaultContext", - "BasicContext", - "ExtendedContext", - "DecimalException", - "Clamped", - "InvalidOperation", - "DivisionByZero", - "Inexact", - "Rounded", - "Subnormal", - "Overflow", - "Underflow", - "FloatOperation", - "DivisionImpossible", - "InvalidContext", - "ConversionSyntax", - "DivisionUndefined", - "ROUND_DOWN", - "ROUND_HALF_UP", - "ROUND_HALF_EVEN", - "ROUND_CEILING", - "ROUND_FLOOR", - "ROUND_UP", - "ROUND_HALF_DOWN", - "ROUND_05UP", - "setcontext", - "getcontext", - "localcontext", - "MAX_PREC", - "MAX_EMAX", - "MIN_EMIN", - "MIN_ETINY", - "HAVE_THREADS", - ] + __all__ += ["HAVE_CONTEXTVAR"] diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index d5e0c691e8c0..162c40522224 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -10,8 +10,8 @@ import sys from collections.abc import Awaitable, Container, Iterable, Set as AbstractSet from os import PathLike from types import TracebackType -from typing import Any, Generic, Protocol, TypeVar, Union -from typing_extensions import Final, Literal, TypeAlias, final +from typing import Any, AnyStr, Generic, Protocol, TypeVar, Union +from typing_extensions import Final, Literal, LiteralString, TypeAlias, final _KT = TypeVar("_KT") _KT_co = TypeVar("_KT_co", covariant=True) @@ -26,6 +26,9 @@ _T_contra = TypeVar("_T_contra", contravariant=True) # def __enter__(self: Self) -> Self: ... Self = TypeVar("Self") # noqa: Y001 +# covariant version of typing.AnyStr, useful for protocols +AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001 + # For partially known annotations. 
Usually, fields where type annotations # haven't been added are left unannotated, but in some situations this # isn't possible or a type is already partially known. In cases like these, @@ -66,8 +69,14 @@ SupportsRichComparisonT = TypeVar("SupportsRichComparisonT", bound=SupportsRichC # Dunder protocols -class SupportsAdd(Protocol): - def __add__(self, __x: Any) -> Any: ... +class SupportsAdd(Protocol[_T_contra, _T_co]): + def __add__(self, __x: _T_contra) -> _T_co: ... + +class SupportsRAdd(Protocol[_T_contra, _T_co]): + def __radd__(self, __x: _T_contra) -> _T_co: ... + +class SupportsSub(Protocol[_T_contra, _T_co]): + def __sub__(self, __x: _T_contra) -> _T_co: ... class SupportsDivMod(Protocol[_T_contra, _T_co]): def __divmod__(self, __other: _T_contra) -> _T_co: ... @@ -112,9 +121,9 @@ class SupportsItemAccess(SupportsGetItem[_KT_contra, _VT], Protocol[_KT_contra, def __setitem__(self, __k: _KT_contra, __v: _VT) -> None: ... def __delitem__(self, __v: _KT_contra) -> None: ... -# These aliases are simple strings in Python 2. StrPath: TypeAlias = str | PathLike[str] # stable BytesPath: TypeAlias = bytes | PathLike[bytes] # stable +GenericPath: TypeAlias = AnyStr | PathLike[AnyStr] StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes] # stable OpenTextModeUpdating: TypeAlias = Literal[ @@ -248,3 +257,6 @@ class structseq(Generic[_T_co]): # but only has any meaning if you supply it a dict where the keys are strings. # https://github.com/python/typeshed/pull/6560#discussion_r767149830 def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ... + +# Superset of typing.AnyStr that also inclues LiteralString +AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001 diff --git a/mypy/typeshed/stdlib/_weakrefset.pyi b/mypy/typeshed/stdlib/_weakrefset.pyi index 382dbdeb6c8a..9e9269758b00 100644 --- a/mypy/typeshed/stdlib/_weakrefset.pyi +++ b/mypy/typeshed/stdlib/_weakrefset.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import Self from collections.abc import Iterable, Iterator, MutableSet -from typing import Any, Generic, TypeVar +from typing import Any, Generic, TypeVar, overload if sys.version_info >= (3, 9): from types import GenericAlias @@ -12,7 +12,10 @@ _S = TypeVar("_S") _T = TypeVar("_T") class WeakSet(MutableSet[_T], Generic[_T]): - def __init__(self, data: Iterable[_T] | None = ...) -> None: ... + @overload + def __init__(self, data: None = ...) -> None: ... + @overload + def __init__(self, data: Iterable[_T]) -> None: ... def add(self, item: _T) -> None: ... def clear(self) -> None: ... def discard(self, item: _T) -> None: ... 
diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index 759027d3a890..4f6cb6720988 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -3,47 +3,28 @@ from collections.abc import Callable, Generator, Iterable, Sequence from typing import IO, Any, Generic, NewType, NoReturn, Pattern, Protocol, TypeVar, overload from typing_extensions import Literal, TypeAlias +__all__ = [ + "ArgumentParser", + "ArgumentError", + "ArgumentTypeError", + "FileType", + "HelpFormatter", + "ArgumentDefaultsHelpFormatter", + "RawDescriptionHelpFormatter", + "RawTextHelpFormatter", + "MetavarTypeHelpFormatter", + "Namespace", + "Action", + "ONE_OR_MORE", + "OPTIONAL", + "PARSER", + "REMAINDER", + "SUPPRESS", + "ZERO_OR_MORE", +] + if sys.version_info >= (3, 9): - __all__ = [ - "ArgumentParser", - "ArgumentError", - "ArgumentTypeError", - "BooleanOptionalAction", - "FileType", - "HelpFormatter", - "ArgumentDefaultsHelpFormatter", - "RawDescriptionHelpFormatter", - "RawTextHelpFormatter", - "MetavarTypeHelpFormatter", - "Namespace", - "Action", - "ONE_OR_MORE", - "OPTIONAL", - "PARSER", - "REMAINDER", - "SUPPRESS", - "ZERO_OR_MORE", - ] -else: - __all__ = [ - "ArgumentParser", - "ArgumentError", - "ArgumentTypeError", - "FileType", - "HelpFormatter", - "ArgumentDefaultsHelpFormatter", - "RawDescriptionHelpFormatter", - "RawTextHelpFormatter", - "MetavarTypeHelpFormatter", - "Namespace", - "Action", - "ONE_OR_MORE", - "OPTIONAL", - "PARSER", - "REMAINDER", - "SUPPRESS", - "ZERO_OR_MORE", - ] + __all__ += ["BooleanOptionalAction"] _T = TypeVar("_T") _ActionT = TypeVar("_ActionT", bound=Action) @@ -212,7 +193,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): title: str = ..., description: str | None = ..., prog: str = ..., - parser_class: type[_ArgumentParserT] = ..., + parser_class: type[_ArgumentParserT], action: type[Action] = ..., option_string: str = ..., dest: str | None = ..., @@ -241,7 +222,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): title: str = ..., description: str | None = ..., prog: str = ..., - parser_class: type[_ArgumentParserT] = ..., + parser_class: type[_ArgumentParserT], action: type[Action] = ..., option_string: str = ..., dest: str | None = ..., @@ -309,7 +290,7 @@ class HelpFormatter: def format_help(self) -> str: ... def _join_parts(self, part_strings: Iterable[str]) -> str: ... def _format_usage( - self, usage: str, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None + self, usage: str | None, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None ) -> str: ... def _format_actions_usage(self, actions: Iterable[Action], groups: Iterable[_ArgumentGroup]) -> str: ... def _format_text(self, text: str) -> str: ... @@ -409,16 +390,28 @@ class _StoreAction(Action): ... # undocumented class _StoreConstAction(Action): - def __init__( - self, - option_strings: Sequence[str], - dest: str, - const: Any, - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., - ) -> None: ... + if sys.version_info >= (3, 11): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = ..., + default: Any = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | tuple[str, ...] | None = ..., + ) -> None: ... 
+ else: + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any, + default: Any = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | tuple[str, ...] | None = ..., + ) -> None: ... # undocumented class _StoreTrueAction(_StoreConstAction): @@ -437,16 +430,28 @@ class _AppendAction(Action): ... # undocumented class _AppendConstAction(Action): - def __init__( - self, - option_strings: Sequence[str], - dest: str, - const: Any, - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., - ) -> None: ... + if sys.version_info >= (3, 11): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = ..., + default: Any = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | tuple[str, ...] | None = ..., + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any, + default: Any = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | tuple[str, ...] | None = ..., + ) -> None: ... # undocumented class _CountAction(Action): diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi index d69f02d338cf..4797bd067008 100644 --- a/mypy/typeshed/stdlib/array.pyi +++ b/mypy/typeshed/stdlib/array.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import Self +from _typeshed import ReadableBuffer, Self, SupportsRead, SupportsWrite from collections.abc import Iterable # pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence -from typing import Any, BinaryIO, Generic, MutableSequence, TypeVar, overload # noqa: Y027 +from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y027 from typing_extensions import Literal, SupportsIndex, TypeAlias _IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] @@ -21,20 +21,22 @@ class array(MutableSequence[_T], Generic[_T]): @property def itemsize(self) -> int: ... @overload - def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... + def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | Iterable[int] = ...) -> None: ... @overload - def __init__(self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... + def __init__(self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | Iterable[float] = ...) -> None: ... @overload - def __init__(self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... + def __init__(self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | Iterable[str] = ...) -> None: ... @overload - def __init__(self, typecode: str, __initializer: bytes | Iterable[_T] = ...) -> None: ... + def __init__(self, __typecode: str, __initializer: Iterable[_T]) -> None: ... + @overload + def __init__(self, __typecode: str, __initializer: bytes = ...) -> None: ... def append(self, __v: _T) -> None: ... def buffer_info(self) -> tuple[int, int]: ... def byteswap(self) -> None: ... def count(self, __v: _T) -> int: ... def extend(self, __bb: Iterable[_T]) -> None: ... - def frombytes(self, __buffer: bytes) -> None: ... - def fromfile(self, __f: BinaryIO, __n: int) -> None: ... + def frombytes(self, __buffer: ReadableBuffer) -> None: ... + def fromfile(self, __f: SupportsRead[bytes], __n: int) -> None: ... def fromlist(self, __list: list[_T]) -> None: ... 
def fromunicode(self, __ustr: str) -> None: ... if sys.version_info >= (3, 10): @@ -47,7 +49,7 @@ class array(MutableSequence[_T], Generic[_T]): def remove(self, __v: _T) -> None: ... def reverse(self) -> None: ... def tobytes(self) -> bytes: ... - def tofile(self, __f: BinaryIO) -> None: ... + def tofile(self, __f: SupportsWrite[bytes]) -> None: ... def tolist(self) -> list[_T]: ... def tounicode(self) -> str: ... if sys.version_info < (3, 9): diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index 199e4f2acb68..3a54d158affd 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -259,3 +259,6 @@ if sys.version_info >= (3, 8): def get_source_segment(source: str, node: AST, *, padded: bool = ...) -> str | None: ... def walk(node: AST) -> Iterator[AST]: ... + +if sys.version_info >= (3, 9): + def main() -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/__init__.pyi b/mypy/typeshed/stdlib/asyncio/__init__.pyi index 2f4823b22b24..24a86caed66e 100644 --- a/mypy/typeshed/stdlib/asyncio/__init__.pyi +++ b/mypy/typeshed/stdlib/asyncio/__init__.pyi @@ -24,6 +24,7 @@ if sys.version_info >= (3, 9): if sys.version_info >= (3, 11): from .taskgroups import * + from .timeouts import * if sys.platform == "win32": from .windows_events import * diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index 7742651fea2a..310a9f585591 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import FileDescriptorLike +from _typeshed import FileDescriptorLike, WriteableBuffer from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle from asyncio.futures import Future from asyncio.protocols import BaseProtocol @@ -29,7 +29,18 @@ _ProtocolFactory: TypeAlias = Callable[[], BaseProtocol] _SSLContext: TypeAlias = bool | None | ssl.SSLContext class Server(AbstractServer): - if sys.version_info >= (3, 7): + if sys.version_info >= (3, 11): + def __init__( + self, + loop: AbstractEventLoop, + sockets: Iterable[socket], + protocol_factory: _ProtocolFactory, + ssl_context: _SSLContext, + backlog: int, + ssl_handshake_timeout: float | None, + ssl_shutdown_timeout: float | None = ..., + ) -> None: ... + elif sys.version_info >= (3, 7): def __init__( self, loop: AbstractEventLoop, @@ -39,12 +50,13 @@ class Server(AbstractServer): backlog: int, ssl_handshake_timeout: float | None, ) -> None: ... + else: + def __init__(self, loop: AbstractEventLoop, sockets: list[socket]) -> None: ... + if sys.version_info >= (3, 7): def get_loop(self) -> AbstractEventLoop: ... def is_serving(self) -> bool: ... async def start_serving(self) -> None: ... async def serve_forever(self) -> None: ... - else: - def __init__(self, loop: AbstractEventLoop, sockets: list[socket]) -> None: ... if sys.version_info >= (3, 8): @property def sockets(self) -> tuple[socket, ...]: ... @@ -86,7 +98,11 @@ class BaseEventLoop(AbstractEventLoop): # Future methods def create_future(self) -> Future[Any]: ... # Tasks methods - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 11): + def create_task( + self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = ..., context: Context | None = ... + ) -> Task[_T]: ... + elif sys.version_info >= (3, 8): def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = ...) -> Task[_T]: ... 
else: def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T]) -> Task[_T]: ... @@ -113,7 +129,46 @@ class BaseEventLoop(AbstractEventLoop): flags: int = ..., ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 11): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: None = ..., + local_addr: tuple[str, int] | None = ..., + server_hostname: str | None = ..., + ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., + happy_eyeballs_delay: float | None = ..., + interleave: int | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = ..., + port: None = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: socket, + local_addr: None = ..., + server_hostname: str | None = ..., + ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., + happy_eyeballs_delay: float | None = ..., + interleave: int | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... + elif sys.version_info >= (3, 8): @overload async def create_connection( self, @@ -214,10 +269,7 @@ class BaseEventLoop(AbstractEventLoop): local_addr: None = ..., server_hostname: str | None = ..., ) -> tuple[BaseTransport, _ProtocolT]: ... - if sys.version_info >= (3, 7): - async def sock_sendfile( - self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... - ) -> int: ... + if sys.version_info >= (3, 11): @overload async def create_server( self, @@ -233,6 +285,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_address: bool | None = ..., reuse_port: bool | None = ..., ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., start_serving: bool = ..., ) -> Server: ... @overload @@ -250,8 +303,20 @@ class BaseEventLoop(AbstractEventLoop): reuse_address: bool | None = ..., reuse_port: bool | None = ..., ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., start_serving: bool = ..., ) -> Server: ... + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = ..., + server_hostname: str | None = ..., + ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., + ) -> BaseTransport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -259,10 +324,43 @@ class BaseEventLoop(AbstractEventLoop): *, ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., ) -> tuple[BaseTransport, _ProtocolT]: ... - async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... - ) -> int: ... 
+ elif sys.version_info >= (3, 7): + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = ..., + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: bool | None = ..., + reuse_port: bool | None = ..., + ssl_handshake_timeout: float | None = ..., + start_serving: bool = ..., + ) -> Server: ... + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = ..., + port: None = ..., + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: bool | None = ..., + reuse_port: bool | None = ..., + ssl_handshake_timeout: float | None = ..., + start_serving: bool = ..., + ) -> Server: ... async def start_tls( self, transport: BaseTransport, @@ -273,6 +371,14 @@ class BaseEventLoop(AbstractEventLoop): server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ) -> BaseTransport: ... + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = ..., + ssl_handshake_timeout: float | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... else: @overload async def create_server( @@ -307,6 +413,13 @@ class BaseEventLoop(AbstractEventLoop): async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], sock: socket, *, ssl: _SSLContext = ... ) -> tuple[BaseTransport, _ProtocolT]: ... + if sys.version_info >= (3, 7): + async def sock_sendfile( + self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... + ) -> int: ... + async def sendfile( + self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + ) -> int: ... if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] self, @@ -378,10 +491,12 @@ class BaseEventLoop(AbstractEventLoop): def remove_reader(self, fd: FileDescriptorLike) -> bool: ... def add_writer(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... def remove_writer(self, fd: FileDescriptorLike) -> bool: ... + # The sock_* methods (and probably some others) are not actually implemented on + # BaseEventLoop, only on subclasses. We list them here for now for convenience. # Completion based I/O methods returning Futures prior to 3.7 if sys.version_info >= (3, 7): async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... - async def sock_recv_into(self, sock: socket, buf: bytearray) -> int: ... + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... async def sock_sendall(self, sock: socket, data: bytes) -> None: ... async def sock_connect(self, sock: socket, address: _Address) -> None: ... async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... @@ -390,6 +505,10 @@ class BaseEventLoop(AbstractEventLoop): def sock_sendall(self, sock: socket, data: bytes) -> Future[None]: ... def sock_connect(self, sock: socket, address: _Address) -> Future[None]: ... def sock_accept(self, sock: socket) -> Future[tuple[socket, _RetAddress]]: ... + if sys.version_info >= (3, 11): + async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... 
+ async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/asyncio/constants.pyi b/mypy/typeshed/stdlib/asyncio/constants.pyi index 230cf4faf483..1fa643c7414b 100644 --- a/mypy/typeshed/stdlib/asyncio/constants.pyi +++ b/mypy/typeshed/stdlib/asyncio/constants.pyi @@ -8,6 +8,10 @@ DEBUG_STACK_DEPTH: Literal[10] if sys.version_info >= (3, 7): SSL_HANDSHAKE_TIMEOUT: float SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144] +if sys.version_info >= (3, 11): + SSL_SHUTDOWN_TIMEOUT: float + FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256] + FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512] class _SendfileMode(enum.Enum): UNSUPPORTED: int diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index ae566234160b..8396f0957a1e 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import FileDescriptorLike, Self +from _typeshed import FileDescriptorLike, Self, WriteableBuffer from abc import ABCMeta, abstractmethod from collections.abc import Awaitable, Callable, Coroutine, Generator, Sequence from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket @@ -184,7 +184,16 @@ class AbstractEventLoop: @abstractmethod def create_future(self) -> Future[Any]: ... # Tasks methods - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 11): + @abstractmethod + def create_task( + self, + coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], + *, + name: str | None = ..., + context: Context | None = ..., + ) -> Task[_T]: ... + elif sys.version_info >= (3, 8): @abstractmethod def create_task( self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: str | None = ... @@ -223,7 +232,48 @@ class AbstractEventLoop: ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... @abstractmethod async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 11): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: None = ..., + local_addr: tuple[str, int] | None = ..., + server_hostname: str | None = ..., + ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., + happy_eyeballs_delay: float | None = ..., + interleave: int | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = ..., + port: None = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: socket, + local_addr: None = ..., + server_hostname: str | None = ..., + ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., + happy_eyeballs_delay: float | None = ..., + interleave: int | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... 
+ elif sys.version_info >= (3, 8): @overload @abstractmethod async def create_connection( @@ -330,11 +380,7 @@ class AbstractEventLoop: local_addr: None = ..., server_hostname: str | None = ..., ) -> tuple[BaseTransport, _ProtocolT]: ... - if sys.version_info >= (3, 7): - @abstractmethod - async def sock_sendfile( - self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... - ) -> int: ... + if sys.version_info >= (3, 11): @overload @abstractmethod async def create_server( @@ -351,6 +397,7 @@ class AbstractEventLoop: reuse_address: bool | None = ..., reuse_port: bool | None = ..., ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., start_serving: bool = ..., ) -> Server: ... @overload @@ -369,18 +416,21 @@ class AbstractEventLoop: reuse_address: bool | None = ..., reuse_port: bool | None = ..., ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., start_serving: bool = ..., ) -> Server: ... - async def create_unix_connection( + @abstractmethod + async def start_tls( self, - protocol_factory: Callable[[], _ProtocolT], - path: str | None = ..., + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, *, - ssl: _SSLContext = ..., - sock: socket | None = ..., + server_side: bool = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl_shutdown_timeout: float | None = ..., + ) -> BaseTransport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -390,12 +440,46 @@ class AbstractEventLoop: backlog: int = ..., ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., start_serving: bool = ..., ) -> Server: ... + elif sys.version_info >= (3, 7): + @overload @abstractmethod - async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... - ) -> int: ... + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = ..., + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: bool | None = ..., + reuse_port: bool | None = ..., + ssl_handshake_timeout: float | None = ..., + start_serving: bool = ..., + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = ..., + port: None = ..., + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: bool | None = ..., + reuse_port: bool | None = ..., + ssl_handshake_timeout: float | None = ..., + start_serving: bool = ..., + ) -> Server: ... @abstractmethod async def start_tls( self, @@ -407,6 +491,17 @@ class AbstractEventLoop: server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ) -> BaseTransport: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: str | None = ..., + *, + sock: socket | None = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + ssl_handshake_timeout: float | None = ..., + start_serving: bool = ..., + ) -> Server: ... else: @overload @abstractmethod @@ -440,24 +535,76 @@ class AbstractEventLoop: reuse_address: bool | None = ..., reuse_port: bool | None = ..., ) -> Server: ... 
+ async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: str, + *, + sock: socket | None = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + ) -> Server: ... + if sys.version_info >= (3, 11): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = ..., + ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... + elif sys.version_info >= (3, 10): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = ..., + ssl_handshake_timeout: float | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... + if sys.version_info >= (3, 11): async def create_unix_connection( self, protocol_factory: Callable[[], _ProtocolT], - path: str, + path: str | None = ..., *, ssl: _SSLContext = ..., sock: socket | None = ..., server_hostname: str | None = ..., + ssl_handshake_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = ..., ) -> tuple[BaseTransport, _ProtocolT]: ... - async def create_unix_server( + elif sys.version_info >= (3, 7): + async def create_unix_connection( self, - protocol_factory: _ProtocolFactory, - path: str, + protocol_factory: Callable[[], _ProtocolT], + path: str | None = ..., *, + ssl: _SSLContext = ..., sock: socket | None = ..., - backlog: int = ..., + server_hostname: str | None = ..., + ssl_handshake_timeout: float | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... + else: + async def create_unix_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + path: str, + *, ssl: _SSLContext = ..., - ) -> Server: ... + sock: socket | None = ..., + server_hostname: str | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... + if sys.version_info >= (3, 7): + @abstractmethod + async def sock_sendfile( + self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... + ) -> int: ... + @abstractmethod + async def sendfile( + self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + ) -> int: ... @abstractmethod async def create_datagram_endpoint( @@ -529,7 +676,7 @@ class AbstractEventLoop: @abstractmethod async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... @abstractmethod - async def sock_recv_into(self, sock: socket, buf: bytearray) -> int: ... + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... @abstractmethod async def sock_sendall(self, sock: socket, data: bytes) -> None: ... @abstractmethod @@ -545,6 +692,13 @@ class AbstractEventLoop: def sock_connect(self, sock: socket, address: _Address) -> Future[None]: ... @abstractmethod def sock_accept(self, sock: socket) -> Future[tuple[socket, _RetAddress]]: ... + if sys.version_info >= (3, 11): + @abstractmethod + async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... + @abstractmethod + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... + @abstractmethod + async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... # Signal handling. @abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/exceptions.pyi b/mypy/typeshed/stdlib/asyncio/exceptions.pyi index a1bc2c16ab1f..075fbb805bb9 100644 --- a/mypy/typeshed/stdlib/asyncio/exceptions.pyi +++ b/mypy/typeshed/stdlib/asyncio/exceptions.pyi @@ -1,11 +1,24 @@ -__all__ = ( - "CancelledError", - "InvalidStateError", - "TimeoutError", - "IncompleteReadError", - "LimitOverrunError", - "SendfileNotAvailableError", -) +import sys + +if sys.version_info >= (3, 11): + __all__ = ( + "BrokenBarrierError", + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) +else: + __all__ = ( + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) class CancelledError(BaseException): ... class TimeoutError(Exception): ... @@ -20,3 +33,6 @@ class IncompleteReadError(EOFError): class LimitOverrunError(Exception): consumed: int def __init__(self, message: str, consumed: int) -> None: ... + +if sys.version_info >= (3, 11): + class BrokenBarrierError(RuntimeError): ... diff --git a/mypy/typeshed/stdlib/asyncio/futures.pyi b/mypy/typeshed/stdlib/asyncio/futures.pyi index 692d263f673b..21bfe86e44c6 100644 --- a/mypy/typeshed/stdlib/asyncio/futures.pyi +++ b/mypy/typeshed/stdlib/asyncio/futures.pyi @@ -55,6 +55,7 @@ class Future(Awaitable[_T], Iterable[_T]): def __del__(self) -> None: ... if sys.version_info >= (3, 7): def get_loop(self) -> AbstractEventLoop: ... + @property def _callbacks(self: Self) -> list[tuple[Callable[[Self], Any], Context]]: ... def add_done_callback(self: Self, __fn: Callable[[Self], Any], *, context: Context | None = ...) -> None: ... else: diff --git a/mypy/typeshed/stdlib/asyncio/locks.pyi b/mypy/typeshed/stdlib/asyncio/locks.pyi index 2758e0c46919..269602c7bc66 100644 --- a/mypy/typeshed/stdlib/asyncio/locks.pyi +++ b/mypy/typeshed/stdlib/asyncio/locks.pyi @@ -1,4 +1,6 @@ +import enum import sys +from _typeshed import Self from collections import deque from collections.abc import Callable, Generator from types import TracebackType @@ -8,7 +10,12 @@ from typing_extensions import Literal from .events import AbstractEventLoop from .futures import Future -if sys.version_info >= (3, 7): +if sys.version_info >= (3, 11): + from .mixins import _LoopBoundMixin + +if sys.version_info >= (3, 11): + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore", "Barrier") +elif sys.version_info >= (3, 7): __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore") else: __all__ = ["Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore"] @@ -40,20 +47,32 @@ else: ) -> None: ... class Lock(_ContextManagerMixin): - def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + if sys.version_info >= (3, 11): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... class Event: - def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + if sys.version_info >= (3, 11): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... async def wait(self) -> Literal[True]: ... 
class Condition(_ContextManagerMixin): - def __init__(self, lock: Lock | None = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + if sys.version_info >= (3, 11): + def __init__(self, lock: Lock | None = ...) -> None: ... + else: + def __init__(self, lock: Lock | None = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... @@ -65,11 +84,39 @@ class Condition(_ContextManagerMixin): class Semaphore(_ContextManagerMixin): _value: int _waiters: deque[Future[Any]] - def __init__(self, value: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + if sys.version_info >= (3, 11): + def __init__(self, value: int = ...) -> None: ... + else: + def __init__(self, value: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... def _wake_up_next(self) -> None: ... class BoundedSemaphore(Semaphore): - def __init__(self, value: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + if sys.version_info >= (3, 11): + def __init__(self, value: int = ...) -> None: ... + else: + def __init__(self, value: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + +if sys.version_info >= (3, 11): + class _BarrierState(enum.Enum): # undocumented + FILLING: str + DRAINING: str + RESETTING: str + BROKEN: str + + class Barrier(_LoopBoundMixin): + def __init__(self, parties: int) -> None: ... + async def __aenter__(self: Self) -> Self: ... + async def __aexit__(self, *args: object) -> None: ... + async def wait(self) -> int: ... + async def abort(self) -> None: ... + async def reset(self) -> None: ... + @property + def parties(self) -> int: ... + @property + def n_waiting(self) -> int: ... + @property + def broken(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/asyncio/mixins.pyi b/mypy/typeshed/stdlib/asyncio/mixins.pyi index 4c11865c8968..3e04f2b37518 100644 --- a/mypy/typeshed/stdlib/asyncio/mixins.pyi +++ b/mypy/typeshed/stdlib/asyncio/mixins.pyi @@ -1,7 +1,9 @@ +import sys import threading from typing import NoReturn _global_lock: threading.Lock class _LoopBoundMixin: - def __init__(self, *, loop: NoReturn = ...) -> None: ... + if sys.version_info < (3, 11): + def __init__(self, *, loop: NoReturn = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/protocols.pyi b/mypy/typeshed/stdlib/asyncio/protocols.pyi index 7b5169702dba..e2fc118947bc 100644 --- a/mypy/typeshed/stdlib/asyncio/protocols.pyi +++ b/mypy/typeshed/stdlib/asyncio/protocols.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import ReadableBuffer from asyncio import transports from typing import Any @@ -19,7 +20,7 @@ class Protocol(BaseProtocol): if sys.version_info >= (3, 7): class BufferedProtocol(BaseProtocol): - def get_buffer(self, sizehint: int) -> bytearray: ... + def get_buffer(self, sizehint: int) -> ReadableBuffer: ... def buffer_updated(self, nbytes: int) -> None: ... def eof_received(self) -> bool | None: ... diff --git a/mypy/typeshed/stdlib/asyncio/queues.pyi b/mypy/typeshed/stdlib/asyncio/queues.pyi index 93ea9d9fc6fe..0e1a0b2808df 100644 --- a/mypy/typeshed/stdlib/asyncio/queues.pyi +++ b/mypy/typeshed/stdlib/asyncio/queues.pyi @@ -16,7 +16,11 @@ class QueueFull(Exception): ... _T = TypeVar("_T") class Queue(Generic[_T]): - def __init__(self, maxsize: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... 
+ if sys.version_info >= (3, 11): + def __init__(self, maxsize: int = ...) -> None: ... + else: + def __init__(self, maxsize: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def _init(self, maxsize: int) -> None: ... def _get(self) -> _T: ... def _put(self, item: _T) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/runners.pyi b/mypy/typeshed/stdlib/asyncio/runners.pyi index 32cd839f2f79..49d236bbee9e 100644 --- a/mypy/typeshed/stdlib/asyncio/runners.pyi +++ b/mypy/typeshed/stdlib/asyncio/runners.pyi @@ -1,11 +1,28 @@ import sys -from collections.abc import Awaitable -from typing import TypeVar +from _typeshed import Self +from collections.abc import Callable, Coroutine +from contextvars import Context +from typing import Any, TypeVar -__all__ = ("run",) +from .events import AbstractEventLoop + +if sys.version_info >= (3, 11): + __all__ = ("Runner", "run") +else: + __all__ = ("run",) _T = TypeVar("_T") + +if sys.version_info >= (3, 11): + class Runner: + def __init__(self, *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ...) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, exc_type: object, exc_val: object, exc_tb: object) -> None: ... + def close(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = ...) -> _T: ... + if sys.version_info >= (3, 8): - def run(main: Awaitable[_T], *, debug: bool | None = ...) -> _T: ... + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = ...) -> _T: ... else: - def run(main: Awaitable[_T], *, debug: bool = ...) -> _T: ... + def run(main: Coroutine[Any, Any, _T], *, debug: bool = ...) -> _T: ... diff --git a/mypy/typeshed/stdlib/asyncio/sslproto.pyi b/mypy/typeshed/stdlib/asyncio/sslproto.pyi index 619e329bfb43..77807743f749 100644 --- a/mypy/typeshed/stdlib/asyncio/sslproto.pyi +++ b/mypy/typeshed/stdlib/asyncio/sslproto.pyi @@ -2,17 +2,36 @@ import ssl import sys from collections import deque from collections.abc import Callable +from enum import Enum from typing import Any, ClassVar -from typing_extensions import Literal +from typing_extensions import Literal, TypeAlias from . import constants, events, futures, protocols, transports def _create_transport_context(server_side: bool, server_hostname: str | None) -> ssl.SSLContext: ... -_UNWRAPPED: Literal["UNWRAPPED"] -_DO_HANDSHAKE: Literal["DO_HANDSHAKE"] -_WRAPPED: Literal["WRAPPED"] -_SHUTDOWN: Literal["SHUTDOWN"] +if sys.version_info >= (3, 11): + SSLAgainErrors: tuple[type[ssl.SSLWantReadError], type[ssl.SSLSyscallError]] + + class SSLProtocolState(Enum): + UNWRAPPED: str + DO_HANDSHAKE: str + WRAPPED: str + FLUSHING: str + SHUTDOWN: str + + class AppProtocolState(Enum): + STATE_INIT: str + STATE_CON_MADE: str + STATE_EOF: str + STATE_CON_LOST: str + def add_flowcontrol_defaults(high: int | None, low: int | None, kb: int) -> tuple[int, int]: ... + +else: + _UNWRAPPED: Literal["UNWRAPPED"] + _DO_HANDSHAKE: Literal["DO_HANDSHAKE"] + _WRAPPED: Literal["WRAPPED"] + _SHUTDOWN: Literal["SHUTDOWN"] class _SSLPipe: @@ -70,8 +89,20 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): def write(self, data: bytes) -> None: ... def can_write_eof(self) -> Literal[False]: ... def abort(self) -> None: ... + if sys.version_info >= (3, 11): + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def get_read_buffer_limits(self) -> tuple[int, int]: ... 
+ def set_read_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... + def get_read_buffer_size(self) -> int: ... -class SSLProtocol(protocols.Protocol): +if sys.version_info >= (3, 11): + _SSLProtocolBase: TypeAlias = protocols.BufferedProtocol +else: + _SSLProtocolBase: TypeAlias = protocols.Protocol + +class SSLProtocol(_SSLProtocolBase): + if sys.version_info >= (3, 11): + max_size: ClassVar[int] _server_side: bool _server_hostname: str | None @@ -92,7 +123,20 @@ class SSLProtocol(protocols.Protocol): _app_protocol: protocols.BaseProtocol _app_protocol_is_buffer: bool - if sys.version_info >= (3, 7): + if sys.version_info >= (3, 11): + def __init__( + self, + loop: events.AbstractEventLoop, + app_protocol: protocols.BaseProtocol, + sslcontext: ssl.SSLContext, + waiter: futures.Future[Any], + server_side: bool = ..., + server_hostname: str | None = ..., + call_connection_made: bool = ..., + ssl_handshake_timeout: int | None = ..., + ssl_shutdown_timeout: float | None = ..., + ) -> None: ... + elif sys.version_info >= (3, 7): def __init__( self, loop: events.AbstractEventLoop, @@ -123,17 +167,25 @@ class SSLProtocol(protocols.Protocol): def connection_lost(self, exc: BaseException | None) -> None: ... def pause_writing(self) -> None: ... def resume_writing(self) -> None: ... - def data_received(self, data: bytes) -> None: ... def eof_received(self) -> None: ... def _get_extra_info(self, name: str, default: Any | None = ...) -> Any: ... def _start_shutdown(self) -> None: ... - def _write_appdata(self, data: bytes) -> None: ... + if sys.version_info >= (3, 11): + def _write_appdata(self, list_of_data: list[bytes]) -> None: ... + else: + def _write_appdata(self, data: bytes) -> None: ... + def _start_handshake(self) -> None: ... if sys.version_info >= (3, 7): def _check_handshake_timeout(self) -> None: ... def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ... - def _process_write_backlog(self) -> None: ... def _fatal_error(self, exc: BaseException, message: str = ...) -> None: ... - def _finalize(self) -> None: ... def _abort(self) -> None: ... + if sys.version_info >= (3, 11): + def buffer_updated(self, nbytes: int) -> None: ... + def get_buffer(self, n: int) -> memoryview: ... + else: + def _finalize(self) -> None: ... + def _process_write_backlog(self) -> None: ... + def data_received(self, data: bytes) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi index 14a6d2c4d8fe..0f24d01d50cf 100644 --- a/mypy/typeshed/stdlib/asyncio/streams.pyi +++ b/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -1,3 +1,4 @@ +import ssl import sys from _typeshed import Self, StrPath from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence @@ -179,6 +180,10 @@ class StreamWriter: def get_extra_info(self, name: str, default: Any = ...) -> Any: ... async def drain(self) -> None: ... + if sys.version_info >= (3, 11): + async def start_tls( + self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ... + ) -> None: ... class StreamReader(AsyncIterator[bytes]): def __init__(self, limit: int = ..., loop: events.AbstractEventLoop | None = ...) -> None: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi index 58e3d41e53f1..9b2f15506c50 100644 --- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi @@ -2,6 +2,7 @@ from _typeshed import Self from collections.abc import Coroutine, Generator +from contextvars import Context from types import TracebackType from typing import Any, TypeVar @@ -15,4 +16,6 @@ class TaskGroup: def __init__(self) -> None: ... async def __aenter__(self: Self) -> Self: ... async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... - def create_task(self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ...) -> Task[_T]: ... + def create_task( + self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ..., context: Context | None = ... + ) -> Task[_T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index 4a8e565afb2f..d7119b0400ba 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -10,6 +10,8 @@ from .futures import Future if sys.version_info >= (3, 9): from types import GenericAlias +if sys.version_info >= (3, 11): + from contextvars import Context if sys.version_info >= (3, 7): __all__ = ( @@ -268,7 +270,10 @@ def run_coroutine_threadsafe(coro: _FutureT[_T], loop: AbstractEventLoop) -> con if sys.version_info >= (3, 10): def shield(arg: _FutureT[_T]) -> Future[_T]: ... - async def sleep(delay: float, result: _T = ...) -> _T: ... + @overload + async def sleep(delay: float) -> None: ... + @overload + async def sleep(delay: float, result: _T) -> _T: ... @overload async def wait(fs: Iterable[_FT], *, timeout: float | None = ..., return_when: str = ...) -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] @overload @@ -279,7 +284,10 @@ if sys.version_info >= (3, 10): else: def shield(arg: _FutureT[_T], *, loop: AbstractEventLoop | None = ...) -> Future[_T]: ... - async def sleep(delay: float, result: _T = ..., *, loop: AbstractEventLoop | None = ...) -> _T: ... + @overload + async def sleep(delay: float, *, loop: AbstractEventLoop | None = ...) -> None: ... + @overload + async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = ...) -> _T: ... @overload async def wait( # type: ignore[misc] fs: Iterable[_FT], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ... @@ -329,7 +337,11 @@ class Task(Future[_T], Generic[_T]): if sys.version_info >= (3, 7): def all_tasks(loop: AbstractEventLoop | None = ...) -> set[Task[Any]]: ... - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 11): + def create_task( + coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ..., context: Context | None = ... + ) -> Task[_T]: ... + elif sys.version_info >= (3, 8): def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ...) -> Task[_T]: ... else: def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T]) -> Task[_T]: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/timeouts.pyi b/mypy/typeshed/stdlib/asyncio/timeouts.pyi new file mode 100644 index 000000000000..be516b5851d1 --- /dev/null +++ b/mypy/typeshed/stdlib/asyncio/timeouts.pyi @@ -0,0 +1,19 @@ +from _typeshed import Self +from types import TracebackType +from typing_extensions import final + +__all__ = ("Timeout", "timeout", "timeout_at") + +@final +class Timeout: + def __init__(self, when: float | None) -> None: ... + def when(self) -> float | None: ... + def reschedule(self, when: float | None) -> None: ... + def expired(self) -> bool: ... + async def __aenter__(self: Self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +def timeout(delay: float | None) -> Timeout: ... +def timeout_at(when: float | None) -> Timeout: ... diff --git a/mypy/typeshed/stdlib/asyncio/trsock.pyi b/mypy/typeshed/stdlib/asyncio/trsock.pyi index 20df2a78a5ab..b8972e43d255 100644 --- a/mypy/typeshed/stdlib/asyncio/trsock.pyi +++ b/mypy/typeshed/stdlib/asyncio/trsock.pyi @@ -14,7 +14,6 @@ _CMSG: TypeAlias = tuple[int, int, bytes] class TransportSocket: def __init__(self, sock: socket.socket) -> None: ... - def _na(self, what: str) -> None: ... @property def family(self) -> int: ... @property @@ -41,6 +40,7 @@ class TransportSocket: def gettimeout(self) -> float | None: ... def setblocking(self, flag: bool) -> None: ... if sys.version_info < (3, 11): + def _na(self, what: str) -> None: ... def accept(self) -> tuple[socket.socket, _RetAddress]: ... def connect(self, address: _Address | bytes) -> None: ... def connect_ex(self, address: _Address | bytes) -> int: ... diff --git a/mypy/typeshed/stdlib/base64.pyi b/mypy/typeshed/stdlib/base64.pyi index ceed7d018d82..c2ec85cac40a 100644 --- a/mypy/typeshed/stdlib/base64.pyi +++ b/mypy/typeshed/stdlib/base64.pyi @@ -2,50 +2,29 @@ import sys from _typeshed import ReadableBuffer from typing import IO +__all__ = [ + "encode", + "decode", + "encodebytes", + "decodebytes", + "b64encode", + "b64decode", + "b32encode", + "b32decode", + "b16encode", + "b16decode", + "b85encode", + "b85decode", + "a85encode", + "a85decode", + "standard_b64encode", + "standard_b64decode", + "urlsafe_b64encode", + "urlsafe_b64decode", +] + if sys.version_info >= (3, 10): - __all__ = [ - "encode", - "decode", - "encodebytes", - "decodebytes", - "b64encode", - "b64decode", - "b32encode", - "b32decode", - "b32hexencode", - "b32hexdecode", - "b16encode", - "b16decode", - "b85encode", - "b85decode", - "a85encode", - "a85decode", - "standard_b64encode", - "standard_b64decode", - "urlsafe_b64encode", - "urlsafe_b64decode", - ] -else: - __all__ = [ - "encode", - "decode", - "encodebytes", - "decodebytes", - "b64encode", - "b64decode", - "b32encode", - "b32decode", - "b16encode", - "b16decode", - "b85encode", - "b85decode", - "a85encode", - "a85decode", - "standard_b64encode", - "standard_b64decode", - "urlsafe_b64encode", - "urlsafe_b64decode", - ] + __all__ += ["b32hexencode", "b32hexdecode"] def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = ...) -> bytes: ... def b64decode(s: str | ReadableBuffer, altchars: ReadableBuffer | None = ..., validate: bool = ...) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/bdb.pyi b/mypy/typeshed/stdlib/bdb.pyi index bbd0f20af6c8..f4d1875efb69 100644 --- a/mypy/typeshed/stdlib/bdb.pyi +++ b/mypy/typeshed/stdlib/bdb.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import ExcInfo from collections.abc import Callable, Iterable, Mapping from types import CodeType, FrameType, TracebackType @@ -86,6 +87,10 @@ class Breakpoint: def __init__( self, file: str, line: int, temporary: bool = ..., cond: str | None = ..., funcname: str | None = ... ) -> None: ... + if sys.version_info >= (3, 11): + @staticmethod + def clearBreakpoints() -> None: ... + def deleteMe(self) -> None: ... def enable(self) -> None: ... def disable(self) -> None: ... diff --git a/mypy/typeshed/stdlib/binascii.pyi b/mypy/typeshed/stdlib/binascii.pyi index 53f72ad6a88f..0656794d39d9 100644 --- a/mypy/typeshed/stdlib/binascii.pyi +++ b/mypy/typeshed/stdlib/binascii.pyi @@ -14,7 +14,12 @@ if sys.version_info >= (3, 7): else: def b2a_uu(__data: ReadableBuffer) -> bytes: ... -def a2b_base64(__data: _AsciiBuffer) -> bytes: ... +if sys.version_info >= (3, 11): + def a2b_base64(__data: _AsciiBuffer, *, strict_mode: bool = ...) -> bytes: ... + +else: + def a2b_base64(__data: _AsciiBuffer) -> bytes: ... + def b2a_base64(__data: ReadableBuffer, *, newline: bool = ...) -> bytes: ... def a2b_qp(data: _AsciiBuffer, header: bool = ...) -> bytes: ... def b2a_qp(data: ReadableBuffer, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index d3d34c72fcfc..577d5fd99e36 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -3,6 +3,7 @@ import types from _ast import AST from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( + AnyStr_co, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, @@ -19,6 +20,7 @@ from _typeshed import ( SupportsKeysAndGetItem, SupportsLenAndGetItem, SupportsNext, + SupportsRAdd, SupportsRDivMod, SupportsRichComparison, SupportsRichComparisonT, @@ -52,7 +54,7 @@ from typing import ( # noqa: Y027 TypeVar, overload, ) -from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Literal, LiteralString, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -398,21 +400,39 @@ class str(Sequence[str]): def __new__(cls: type[Self], object: object = ...) -> Self: ... @overload def __new__(cls: type[Self], object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... - def capitalize(self) -> str: ... - def casefold(self) -> str: ... - def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... + @overload + def capitalize(self: LiteralString) -> LiteralString: ... + @overload + def capitalize(self) -> str: ... # type: ignore[misc] + @overload + def casefold(self: LiteralString) -> LiteralString: ... + @overload + def casefold(self) -> str: ... # type: ignore[misc] + @overload + def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... + @overload + def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ... 
def endswith( self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... + @overload + def expandtabs(self: LiteralString, tabsize: SupportsIndex = ...) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... # type: ignore[misc] else: - def expandtabs(self, tabsize: int = ...) -> str: ... + @overload + def expandtabs(self: LiteralString, tabsize: int = ...) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: int = ...) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def format(self, *args: object, **kwargs: object) -> str: ... + @overload + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload + def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def isalnum(self) -> bool: ... @@ -429,40 +449,102 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... - def join(self, __iterable: Iterable[str]) -> str: ... - def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... - def lower(self) -> str: ... - def lstrip(self, __chars: str | None = ...) -> str: ... - def partition(self, __sep: str) -> tuple[str, str, str]: ... - def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... + @overload + def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... + @overload + def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] + @overload + def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... + @overload + def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + @overload + def lower(self: LiteralString) -> LiteralString: ... + @overload + def lower(self) -> str: ... # type: ignore[misc] + @overload + def lstrip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... + @overload + def lstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload + def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def replace( + self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = ... + ) -> LiteralString: ... + @overload + def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): - def removeprefix(self, __prefix: str) -> str: ... - def removesuffix(self, __suffix: str) -> str: ... + @overload + def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... + @overload + def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... + @overload + def removesuffix(self, __suffix: str) -> str: ... 
# type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... - def rpartition(self, __sep: str) -> tuple[str, str, str]: ... - def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... - def rstrip(self, __chars: str | None = ...) -> str: ... - def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... - def splitlines(self, keepends: bool = ...) -> list[str]: ... + @overload + def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... + @overload + def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + @overload + def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload + def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def rsplit(self: LiteralString, sep: LiteralString | None = ..., maxsplit: SupportsIndex = ...) -> list[LiteralString]: ... + @overload + def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] + @overload + def rstrip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... + @overload + def rstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def split(self: LiteralString, sep: LiteralString | None = ..., maxsplit: SupportsIndex = ...) -> list[LiteralString]: ... + @overload + def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] + @overload + def splitlines(self: LiteralString, keepends: bool = ...) -> list[LiteralString]: ... + @overload + def splitlines(self, keepends: bool = ...) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - def strip(self, __chars: str | None = ...) -> str: ... - def swapcase(self) -> str: ... - def title(self) -> str: ... + @overload + def strip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... + @overload + def strip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def swapcase(self: LiteralString) -> LiteralString: ... + @overload + def swapcase(self) -> str: ... # type: ignore[misc] + @overload + def title(self: LiteralString) -> LiteralString: ... + @overload + def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: Mapping[int, int | str | None] | Sequence[int | str | None]) -> str: ... - def upper(self) -> str: ... - def zfill(self, __width: SupportsIndex) -> str: ... + @overload + def upper(self: LiteralString) -> LiteralString: ... + @overload + def upper(self) -> str: ... # type: ignore[misc] + @overload + def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... + @overload + def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload def maketrans(__x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... @staticmethod @overload def maketrans(__x: str, __y: str, __z: str | None = ...) -> dict[int, int | None]: ... - def __add__(self, __s: str) -> str: ... 
+ @overload + def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... + @overload + def __add__(self, __s: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __o: str) -> bool: ... # type: ignore[override] def __eq__(self, __x: object) -> bool: ... @@ -470,14 +552,26 @@ class str(Sequence[str]): def __getitem__(self, __i: SupportsIndex | slice) -> str: ... def __gt__(self, __x: str) -> bool: ... def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[str]: ... + @overload + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + @overload + def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __x: str) -> bool: ... - def __mod__(self, __x: Any) -> str: ... - def __mul__(self, __n: SupportsIndex) -> str: ... + @overload + def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... + @overload + def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] + @overload + def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload + def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __x: object) -> bool: ... - def __rmul__(self, __n: SupportsIndex) -> str: ... + @overload + def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload + def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... class bytes(ByteString): @@ -871,11 +965,16 @@ class list(MutableSequence[_T], Generic[_T]): def extend(self, __iterable: Iterable[_T]) -> None: ... def pop(self, __index: SupportsIndex = ...) -> _T: ... # Signature of `list.index` should be kept in line with `collections.UserList.index()` + # and multiprocessing.managers.ListProxy.index() def index(self, __value: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... def count(self, __value: _T) -> int: ... def insert(self, __index: SupportsIndex, __object: _T) -> None: ... def remove(self, __value: _T) -> None: ... # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` + # and multiprocessing.managers.ListProxy.sort() + # + # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] + # to work around invariance @overload def sort(self: list[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... @overload @@ -908,8 +1007,9 @@ class list(MutableSequence[_T], Generic[_T]): class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics + # Also multiprocessing.managers.SyncManager.dict() @overload - def __init__(self: dict[_KT, _VT]) -> None: ... + def __init__(self) -> None: ... @overload def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ... @overload @@ -962,7 +1062,10 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ior__(self: Self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... class set(MutableSet[_T], Generic[_T]): - def __init__(self, __iterable: Iterable[_T] = ...) -> None: ... + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, __iterable: Iterable[_T]) -> None: ... def add(self, __element: _T) -> None: ... def copy(self) -> set[_T]: ... def difference(self, *s: Iterable[Any]) -> set[_T]: ... 
@@ -998,7 +1101,10 @@ class set(MutableSet[_T], Generic[_T]): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class frozenset(AbstractSet[_T_co], Generic[_T_co]): - def __init__(self, __iterable: Iterable[_T_co] = ...) -> None: ... + @overload + def __new__(cls: type[Self]) -> Self: ... + @overload + def __new__(cls: type[Self], __iterable: Iterable[_T_co]) -> Self: ... def copy(self) -> frozenset[_T_co]: ... def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... @@ -1092,10 +1198,8 @@ def chr(__i: int) -> str: ... # We define this here instead of using os.PathLike to avoid import cycle issues. # See https://github.com/python/typeshed/pull/991#issuecomment-288160993 -_AnyStr_co = TypeVar("_AnyStr_co", str, bytes, covariant=True) - -class _PathLike(Protocol[_AnyStr_co]): - def __fspath__(self) -> _AnyStr_co: ... +class _PathLike(Protocol[AnyStr_co]): + def __fspath__(self) -> AnyStr_co: ... if sys.version_info >= (3, 10): def aiter(__async_iterable: SupportsAiter[_SupportsAnextT]) -> _SupportsAnextT: ... @@ -1534,19 +1638,35 @@ def sorted( @overload def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> list[_T]: ... -_SumT = TypeVar("_SumT", bound=SupportsAdd) -_SumS = TypeVar("_SumS", bound=SupportsAdd) +_AddableT1 = TypeVar("_AddableT1", bound=SupportsAdd[Any, Any]) +_AddableT2 = TypeVar("_AddableT2", bound=SupportsAdd[Any, Any]) + +class _SupportsSumWithNoDefaultGiven(SupportsAdd[Any, Any], SupportsRAdd[int, Any], Protocol): ... + +_SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWithNoDefaultGiven) + +# In general, the return type of `x + x` is *not* guaranteed to be the same type as x. +# However, we can't express that in the stub for `sum()` +# without creating many false-positive errors (see #7578). +# Instead, we special-case the most common example of this: bool. +if sys.version_info >= (3, 8): + @overload + def sum(__iterable: Iterable[bool], start: int = ...) -> int: ... # type: ignore[misc] + +else: + @overload + def sum(__iterable: Iterable[bool], __start: int = ...) -> int: ... # type: ignore[misc] @overload -def sum(__iterable: Iterable[_SumT]) -> _SumT | Literal[0]: ... +def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[_SumT], start: _SumS) -> _SumT | _SumS: ... + def sum(__iterable: Iterable[_AddableT1], start: _AddableT2) -> _AddableT1 | _AddableT2: ... else: @overload - def sum(__iterable: Iterable[_SumT], __start: _SumS) -> _SumT | _SumS: ... + def sum(__iterable: Iterable[_AddableT1], __start: _AddableT2) -> _AddableT1 | _AddableT2: ... # The argument to `vars()` has to have a `__dict__` attribute, so can't be annotated with `object` # (A "SupportsDunderDict" protocol doesn't work) @@ -1654,10 +1774,13 @@ class BaseException: __context__: BaseException | None __suppress_context__: bool __traceback__: TracebackType | None - if sys.version_info >= (3, 11): - __note__: str | None def __init__(self, *args: object) -> None: ... + def __setstate__(self, __state: dict[str, Any] | None) -> None: ... def with_traceback(self: Self, __tb: TracebackType | None) -> Self: ... + if sys.version_info >= (3, 11): + # only present after add_note() is called + __notes__: list[str] + def add_note(self, __note: str) -> None: ... class GeneratorExit(BaseException): ... 
class KeyboardInterrupt(BaseException): ... diff --git a/mypy/typeshed/stdlib/calendar.pyi b/mypy/typeshed/stdlib/calendar.pyi index c7e0a6b4606f..00b7054ba60a 100644 --- a/mypy/typeshed/stdlib/calendar.pyi +++ b/mypy/typeshed/stdlib/calendar.pyi @@ -4,67 +4,35 @@ from collections.abc import Iterable, Sequence from time import struct_time from typing_extensions import Literal, TypeAlias +__all__ = [ + "IllegalMonthError", + "IllegalWeekdayError", + "setfirstweekday", + "firstweekday", + "isleap", + "leapdays", + "weekday", + "monthrange", + "monthcalendar", + "prmonth", + "month", + "prcal", + "calendar", + "timegm", + "month_name", + "month_abbr", + "day_name", + "day_abbr", + "Calendar", + "TextCalendar", + "HTMLCalendar", + "LocaleTextCalendar", + "LocaleHTMLCalendar", + "weekheader", +] + if sys.version_info >= (3, 10): - __all__ = [ - "IllegalMonthError", - "IllegalWeekdayError", - "setfirstweekday", - "firstweekday", - "isleap", - "leapdays", - "weekday", - "monthrange", - "monthcalendar", - "prmonth", - "month", - "prcal", - "calendar", - "timegm", - "month_name", - "month_abbr", - "day_name", - "day_abbr", - "Calendar", - "TextCalendar", - "HTMLCalendar", - "LocaleTextCalendar", - "LocaleHTMLCalendar", - "weekheader", - "FRIDAY", - "MONDAY", - "SATURDAY", - "SUNDAY", - "THURSDAY", - "TUESDAY", - "WEDNESDAY", - ] -else: - __all__ = [ - "IllegalMonthError", - "IllegalWeekdayError", - "setfirstweekday", - "firstweekday", - "isleap", - "leapdays", - "weekday", - "monthrange", - "monthcalendar", - "prmonth", - "month", - "prcal", - "calendar", - "timegm", - "month_name", - "month_abbr", - "day_name", - "day_abbr", - "Calendar", - "TextCalendar", - "HTMLCalendar", - "LocaleTextCalendar", - "LocaleHTMLCalendar", - "weekheader", - ] + __all__ += ["FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"] _LocaleType: TypeAlias = tuple[str | None, str | None] diff --git a/mypy/typeshed/stdlib/cgi.pyi b/mypy/typeshed/stdlib/cgi.pyi index 5e7bebc2a7f8..59c0a27067f1 100644 --- a/mypy/typeshed/stdlib/cgi.pyi +++ b/mypy/typeshed/stdlib/cgi.pyi @@ -5,39 +5,23 @@ from collections.abc import Iterable, Iterator, Mapping from types import TracebackType from typing import IO, Any, Protocol -if sys.version_info >= (3, 8): - __all__ = [ - "MiniFieldStorage", - "FieldStorage", - "parse", - "parse_multipart", - "parse_header", - "test", - "print_exception", - "print_environ", - "print_form", - "print_directory", - "print_arguments", - "print_environ_usage", - ] -else: - __all__ = [ - "MiniFieldStorage", - "FieldStorage", - "parse", - "parse_qs", - "parse_qsl", - "parse_multipart", - "parse_header", - "test", - "print_exception", - "print_environ", - "print_form", - "print_directory", - "print_arguments", - "print_environ_usage", - "escape", - ] +__all__ = [ + "MiniFieldStorage", + "FieldStorage", + "parse", + "parse_multipart", + "parse_header", + "test", + "print_exception", + "print_environ", + "print_form", + "print_directory", + "print_arguments", + "print_environ_usage", +] + +if sys.version_info < (3, 8): + __all__ += ["parse_qs", "parse_qsl", "escape"] def parse( fp: IO[Any] | None = ..., diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index bba7703c7d33..64d1c93ba3a3 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -1,10 +1,11 @@ -import sys import types from _typeshed import Self from abc import abstractmethod -from collections.abc import Callable, Generator, Iterable, Iterator -from typing import IO, Any, 
BinaryIO, Protocol, TextIO, overload -from typing_extensions import Literal, TypeAlias +from collections.abc import Callable, Generator, Iterable +from typing import Any, BinaryIO, Protocol, TextIO +from typing_extensions import Literal + +from _codecs import * __all__ = [ "register", @@ -58,6 +59,18 @@ BOM32_LE: Literal[b"\xff\xfe"] BOM64_BE: Literal[b"\x00\x00\xfe\xff"] BOM64_LE: Literal[b"\xff\xfe\x00\x00"] +class _WritableStream(Protocol): + def write(self, __data: bytes) -> object: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class _ReadableStream(Protocol): + def read(self, __size: int = ...) -> bytes: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class _Stream(_WritableStream, _ReadableStream, Protocol): ... + # TODO: this only satisfies the most common interface, where # bytes is the raw form and str is the cooked form. # In the long run, both should become template parameters maybe? @@ -71,10 +84,10 @@ class _Decoder(Protocol): def __call__(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... # signature of Codec().decode class _StreamReader(Protocol): - def __call__(self, stream: IO[bytes], errors: str = ...) -> StreamReader: ... + def __call__(self, stream: _ReadableStream, errors: str = ...) -> StreamReader: ... class _StreamWriter(Protocol): - def __call__(self, stream: IO[bytes], errors: str = ...) -> StreamWriter: ... + def __call__(self, stream: _WritableStream, errors: str = ...) -> StreamWriter: ... class _IncrementalEncoder(Protocol): def __call__(self, errors: str = ...) -> IncrementalEncoder: ... @@ -82,49 +95,6 @@ class _IncrementalEncoder(Protocol): class _IncrementalDecoder(Protocol): def __call__(self, errors: str = ...) -> IncrementalDecoder: ... -# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 -# https://docs.python.org/3/library/codecs.html#binary-transforms -_BytesToBytesEncoding: TypeAlias = Literal[ - "base64", - "base_64", - "base64_codec", - "bz2", - "bz2_codec", - "hex", - "hex_codec", - "quopri", - "quotedprintable", - "quoted_printable", - "quopri_codec", - "uu", - "uu_codec", - "zip", - "zlib", - "zlib_codec", -] -# https://docs.python.org/3/library/codecs.html#text-transforms -_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] - -@overload -def encode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... -@overload -def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] -@overload -def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... -@overload -def decode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] -@overload -def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... - -# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str -@overload -def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... -@overload -def decode(obj: bytes, encoding: str = ..., errors: str = ...) -> str: ... -def lookup(__encoding: str) -> CodecInfo: ... -def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... # undocumented -def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... 
# undocumented - class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): @property def encode(self) -> _Encoder: ... @@ -158,17 +128,13 @@ def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... def getreader(encoding: str) -> _StreamReader: ... def getwriter(encoding: str) -> _StreamWriter: ... -def register(__search_function: Callable[[str], CodecInfo | None]) -> None: ... def open( filename: str, mode: str = ..., encoding: str | None = ..., errors: str = ..., buffering: int = ... ) -> StreamReaderWriter: ... -def EncodedFile(file: IO[bytes], data_encoding: str, file_encoding: str | None = ..., errors: str = ...) -> StreamRecoder: ... +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = ..., errors: str = ...) -> StreamRecoder: ... def iterencode(iterator: Iterable[str], encoding: str, errors: str = ...) -> Generator[bytes, None, None]: ... def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = ...) -> Generator[str, None, None]: ... -if sys.version_info >= (3, 10): - def unregister(__search_function: Callable[[str], CodecInfo | None]) -> None: ... - BOM: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` BOM_BE: Literal[b"\xfe\xff"] BOM_LE: Literal[b"\xff\xfe"] @@ -180,11 +146,6 @@ BOM_UTF32: Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"] # depends on `sys. BOM_UTF32_BE: Literal[b"\x00\x00\xfe\xff"] BOM_UTF32_LE: Literal[b"\xff\xfe\x00\x00"] -# It is expected that different actions be taken depending on which of the -# three subclasses of `UnicodeError` is actually ...ed. However, the Union -# is still needed for at least one of the cases. -def register_error(__errors: str, __handler: Callable[[UnicodeError], tuple[str | bytes, int]]) -> None: ... -def lookup_error(__name: str) -> Callable[[UnicodeError], tuple[str | bytes, int]]: ... def strict_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... def replace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... def ignore_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... @@ -235,8 +196,9 @@ class BufferedIncrementalDecoder(IncrementalDecoder): # TODO: it is not possible to specify the requirement that all other # attributes and methods are passed-through from the stream. class StreamWriter(Codec): + stream: _WritableStream errors: str - def __init__(self, stream: IO[bytes], errors: str = ...) -> None: ... + def __init__(self, stream: _WritableStream, errors: str = ...) -> None: ... def write(self, object: str) -> None: ... def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... @@ -244,9 +206,10 @@ class StreamWriter(Codec): def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... -class StreamReader(Codec, Iterator[str]): +class StreamReader(Codec): + stream: _ReadableStream errors: str - def __init__(self, stream: IO[bytes], errors: str = ...) -> None: ... + def __init__(self, stream: _ReadableStream, errors: str = ...) -> None: ... def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> str: ... def readline(self, size: int | None = ..., keepends: bool = ...) -> str: ... def readlines(self, sizehint: int | None = ..., keepends: bool = ...) -> list[str]: ... 
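With the stream protocols introduced above, anything providing write/seek/close (or read/seek/close) is an acceptable stream; a full IO[bytes] is no longer required. A sketch using a hypothetical ChunkSink:

import codecs

class ChunkSink:
    # Minimal object satisfying the _WritableStream protocol above:
    # write/seek/close are all that is required, not a full IO[bytes].
    def __init__(self) -> None:
        self.chunks: list[bytes] = []
    def write(self, data: bytes) -> object:
        self.chunks.append(data)
        return None
    def seek(self, offset: int, whence: int) -> object:
        return None
    def close(self) -> object:
        return None

sink = ChunkSink()
writer = codecs.getwriter("utf-8")(sink)
writer.write("héllo")
print(b"".join(sink.chunks).decode("utf-8"))   # héllo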
@@ -260,7 +223,8 @@ class StreamReader(Codec, Iterator[str]): # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): - def __init__(self, stream: IO[bytes], Reader: _StreamReader, Writer: _StreamWriter, errors: str = ...) -> None: ... + stream: _Stream + def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ...) -> None: ... def read(self, size: int = ...) -> str: ... def readline(self, size: int | None = ...) -> str: ... def readlines(self, sizehint: int | None = ...) -> list[str]: ... @@ -287,13 +251,7 @@ class StreamReaderWriter(TextIO): class StreamRecoder(BinaryIO): def __init__( - self, - stream: IO[bytes], - encode: _Encoder, - decode: _Decoder, - Reader: _StreamReader, - Writer: _StreamWriter, - errors: str = ..., + self, stream: _Stream, encode: _Encoder, decode: _Decoder, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ... ) -> None: ... def read(self, size: int = ...) -> bytes: ... def readline(self, size: int | None = ...) -> bytes: ... diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 2e88c0d8f474..5fff9f48c489 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -14,6 +14,35 @@ else: __all__ = ["ChainMap", "Counter", "OrderedDict", "UserDict", "UserList", "UserString", "defaultdict", "deque", "namedtuple"] +if sys.version_info < (3, 7): + __all__ += [ + "Awaitable", + "Coroutine", + "AsyncIterable", + "AsyncIterator", + "AsyncGenerator", + "Hashable", + "Iterable", + "Iterator", + "Generator", + "Reversible", + "Sized", + "Container", + "Callable", + "Collection", + "Set", + "MutableSet", + "Mapping", + "MutableMapping", + "MappingView", + "KeysView", + "ItemsView", + "ValuesView", + "Sequence", + "MutableSequence", + "ByteString", + ] + _S = TypeVar("_S") _T = TypeVar("_T") _T1 = TypeVar("_T1") @@ -43,7 +72,7 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): data: dict[_KT, _VT] # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics @overload - def __init__(self: UserDict[_KT, _VT], __dict: None = ...) -> None: ... + def __init__(self, __dict: None = ...) -> None: ... @overload def __init__(self: UserDict[str, _VT], __dict: None = ..., **kwargs: _VT) -> None: ... @overload @@ -82,7 +111,10 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): class UserList(MutableSequence[_T]): data: list[_T] - def __init__(self, initlist: Iterable[_T] | None = ...) -> None: ... + @overload + def __init__(self, initlist: None = ...) -> None: ... + @overload + def __init__(self, initlist: Iterable[_T]) -> None: ... def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ... def __le__(self, other: list[_T] | UserList[_T]) -> bool: ... def __gt__(self, other: list[_T] | UserList[_T]) -> bool: ... @@ -214,7 +246,10 @@ class UserString(Sequence[UserString]): class deque(MutableSequence[_T], Generic[_T]): @property def maxlen(self) -> int | None: ... - def __init__(self, iterable: Iterable[_T] = ..., maxlen: int | None = ...) -> None: ... + @overload + def __init__(self, *, maxlen: int | None = ...) -> None: ... + @overload + def __init__(self, iterable: Iterable[_T], maxlen: int | None = ...) -> None: ... def append(self, __x: _T) -> None: ... def appendleft(self, __x: _T) -> None: ... 
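A sketch of what the new deque.__init__ overloads above let a checker infer; the values are arbitrary:

from __future__ import annotations
from collections import deque

window: deque[int] = deque(maxlen=3)     # maxlen-only overload; _T from the annotation
for i in range(5):
    window.append(i)
print(list(window))                      # [2, 3, 4]

letters = deque("abc", maxlen=None)      # iterable overload; _T inferred as str
print(letters.popleft())                 # 'a'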
def copy(self: Self) -> Self: ... @@ -248,7 +283,7 @@ class deque(MutableSequence[_T], Generic[_T]): class Counter(dict[_T, int], Generic[_T]): @overload - def __init__(self: Counter[_T], __iterable: None = ...) -> None: ... + def __init__(self, __iterable: None = ...) -> None: ... @overload def __init__(self: Counter[str], __iterable: None = ..., **kwargs: int) -> None: ... @overload @@ -340,7 +375,7 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): default_factory: Callable[[], _VT] | None @overload - def __init__(self: defaultdict[_KT, _VT]) -> None: ... + def __init__(self) -> None: ... @overload def __init__(self: defaultdict[str, _VT], **kwargs: _VT) -> None: ... @overload diff --git a/mypy/typeshed/stdlib/concurrent/futures/process.pyi b/mypy/typeshed/stdlib/concurrent/futures/process.pyi index 4cfa8276897f..1dd2ee0a6105 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/process.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/process.pyi @@ -53,7 +53,13 @@ class _ResultItem: work_id: int exception: Exception result: Any - def __init__(self, work_id: int, exception: Exception | None = ..., result: Any | None = ...) -> None: ... + if sys.version_info >= (3, 11): + exit_pid: int | None + def __init__( + self, work_id: int, exception: Exception | None = ..., result: Any | None = ..., exit_pid: int | None = ... + ) -> None: ... + else: + def __init__(self, work_id: int, exception: Exception | None = ..., result: Any | None = ...) -> None: ... class _CallItem: work_id: int @@ -86,11 +92,31 @@ if sys.version_info >= (3, 7): def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: ... def _process_chunk(fn: Callable[..., Any], chunk: tuple[Any, None, None]) -> Generator[Any, None, None]: ... -def _sendback_result( - result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = ..., exception: Exception | None = ... -) -> None: ... -if sys.version_info >= (3, 7): +if sys.version_info >= (3, 11): + def _sendback_result( + result_queue: SimpleQueue[_WorkItem[Any]], + work_id: int, + result: Any | None = ..., + exception: Exception | None = ..., + exit_pid: int | None = ..., + ) -> None: ... + +else: + def _sendback_result( + result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = ..., exception: Exception | None = ... + ) -> None: ... + +if sys.version_info >= (3, 11): + def _process_worker( + call_queue: Queue[_CallItem], + result_queue: SimpleQueue[_ResultItem], + initializer: Callable[..., None] | None, + initargs: tuple[Any, ...], + max_tasks: int | None = ..., + ) -> None: ... + +elif sys.version_info >= (3, 7): def _process_worker( call_queue: Queue[_CallItem], result_queue: SimpleQueue[_ResultItem], @@ -153,7 +179,17 @@ class ProcessPoolExecutor(Executor): _executor_manager_thread_wakeup: _ThreadWakeup _result_queue: SimpleQueue[Any] _work_ids: Queue[Any] - if sys.version_info >= (3, 7): + if sys.version_info >= (3, 11): + def __init__( + self, + max_workers: int | None = ..., + mp_context: BaseContext | None = ..., + initializer: Callable[..., None] | None = ..., + initargs: tuple[Any, ...] = ..., + *, + max_tasks_per_child: int | None = ..., + ) -> None: ... 
+ elif sys.version_info >= (3, 7): def __init__( self, max_workers: int | None = ..., diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi index 1b6ee4298174..81213b954093 100644 --- a/mypy/typeshed/stdlib/contextlib.pyi +++ b/mypy/typeshed/stdlib/contextlib.pyi @@ -5,65 +5,25 @@ from types import TracebackType from typing import IO, Any, ContextManager, Generic, Protocol, TypeVar, overload # noqa: Y027 from typing_extensions import ParamSpec, TypeAlias +__all__ = [ + "contextmanager", + "closing", + "AbstractContextManager", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", +] + +if sys.version_info >= (3, 7): + __all__ += ["AbstractAsyncContextManager", "AsyncExitStack", "asynccontextmanager", "nullcontext"] + +if sys.version_info >= (3, 10): + __all__ += ["aclosing"] + if sys.version_info >= (3, 11): - __all__ = [ - "asynccontextmanager", - "contextmanager", - "closing", - "nullcontext", - "AbstractContextManager", - "AbstractAsyncContextManager", - "AsyncExitStack", - "ContextDecorator", - "ExitStack", - "redirect_stdout", - "redirect_stderr", - "suppress", - "aclosing", - "chdir", - ] -elif sys.version_info >= (3, 10): - __all__ = [ - "asynccontextmanager", - "contextmanager", - "closing", - "nullcontext", - "AbstractContextManager", - "AbstractAsyncContextManager", - "AsyncExitStack", - "ContextDecorator", - "ExitStack", - "redirect_stdout", - "redirect_stderr", - "suppress", - "aclosing", - ] -elif sys.version_info >= (3, 7): - __all__ = [ - "asynccontextmanager", - "contextmanager", - "closing", - "nullcontext", - "AbstractContextManager", - "AbstractAsyncContextManager", - "AsyncExitStack", - "ContextDecorator", - "ExitStack", - "redirect_stdout", - "redirect_stderr", - "suppress", - ] -else: - __all__ = [ - "contextmanager", - "closing", - "AbstractContextManager", - "ContextDecorator", - "ExitStack", - "redirect_stdout", - "redirect_stderr", - "suppress", - ] + __all__ += ["chdir"] AbstractContextManager = ContextManager if sys.version_info >= (3, 7): @@ -163,7 +123,7 @@ class _RedirectStream(AbstractContextManager[_T_io]): class redirect_stdout(_RedirectStream[_T_io]): ... class redirect_stderr(_RedirectStream[_T_io]): ... -class ExitStack(AbstractContextManager[ExitStack]): +class ExitStack: def __init__(self) -> None: ... def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... @@ -179,7 +139,7 @@ if sys.version_info >= (3, 7): _ExitCoroFunc: TypeAlias = Callable[[type[BaseException] | None, BaseException | None, TracebackType | None], Awaitable[bool]] _ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any] | _ExitCoroFunc) - class AsyncExitStack(AbstractAsyncContextManager[AsyncExitStack]): + class AsyncExitStack: def __init__(self) -> None: ... def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... async def enter_async_context(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... diff --git a/mypy/typeshed/stdlib/contextvars.pyi b/mypy/typeshed/stdlib/contextvars.pyi index 341cd8491caf..266d96bce6ff 100644 --- a/mypy/typeshed/stdlib/contextvars.pyi +++ b/mypy/typeshed/stdlib/contextvars.pyi @@ -14,7 +14,10 @@ _P = ParamSpec("_P") @final class ContextVar(Generic[_T]): - def __init__(self, name: str, *, default: _T = ...) -> None: ... + @overload + def __init__(self, name: str) -> None: ... + @overload + def __init__(self, name: str, *, default: _T) -> None: ... @property def name(self) -> str: ... 
@overload diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi index de69c71ad941..e9552c759c16 100644 --- a/mypy/typeshed/stdlib/csv.pyi +++ b/mypy/typeshed/stdlib/csv.pyi @@ -31,6 +31,9 @@ if sys.version_info >= (3, 8): else: from collections import OrderedDict as _DictReadMapping +if sys.version_info >= (3, 12): + from types import GenericAlias + __all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", @@ -76,7 +79,7 @@ class unix_dialect(Dialect): lineterminator: str quoting: _QuotingType -class DictReader(Generic[_T], Iterator[_DictReadMapping[_T, str]]): +class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): fieldnames: Sequence[_T] | None restkey: str | None restval: str | None @@ -120,7 +123,9 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T, str]]): strict: bool = ..., ) -> None: ... def __iter__(self: Self) -> Self: ... - def __next__(self) -> _DictReadMapping[_T, str]: ... + def __next__(self) -> _DictReadMapping[_T | Any, str | Any]: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... class DictWriter(Generic[_T]): fieldnames: Collection[_T] @@ -151,6 +156,8 @@ class DictWriter(Generic[_T]): def writerow(self, rowdict: Mapping[_T, Any]) -> Any: ... def writerows(self, rowdicts: Iterable[Mapping[_T, Any]]) -> None: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... class Sniffer: preferred: list[str] diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 53a382ec0e71..ee26cbddefe4 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -164,7 +164,7 @@ def POINTER(type: type[_CT]) -> type[pointer[_CT]]: ... class pointer(Generic[_CT], _PointerLike, _CData): _type_: type[_CT] contents: _CT - def __init__(self, arg: _CT = ...) -> None: ... + def __init__(self, arg: _CT) -> None: ... @overload def __getitem__(self, __i: int) -> _CT: ... @overload diff --git a/mypy/typeshed/stdlib/dataclasses.pyi b/mypy/typeshed/stdlib/dataclasses.pyi index 1cbf998dd303..04ae771fc064 100644 --- a/mypy/typeshed/stdlib/dataclasses.pyi +++ b/mypy/typeshed/stdlib/dataclasses.pyi @@ -12,37 +12,23 @@ if sys.version_info >= (3, 9): _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) +__all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", +] + if sys.version_info >= (3, 10): - __all__ = [ - "dataclass", - "field", - "Field", - "FrozenInstanceError", - "InitVar", - "KW_ONLY", - "MISSING", - "fields", - "asdict", - "astuple", - "make_dataclass", - "replace", - "is_dataclass", - ] -else: - __all__ = [ - "dataclass", - "field", - "Field", - "FrozenInstanceError", - "InitVar", - "MISSING", - "fields", - "asdict", - "astuple", - "make_dataclass", - "replace", - "is_dataclass", - ] + __all__ += ["KW_ONLY"] # define _MISSING_TYPE as an enum within the type stubs, # even though that is not really its type at runtime @@ -240,7 +226,26 @@ class InitVar(Generic[_T]): @overload def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 11): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] 
= ..., + namespace: dict[str, Any] | None = ..., + init: bool = ..., + repr: bool = ..., + eq: bool = ..., + order: bool = ..., + unsafe_hash: bool = ..., + frozen: bool = ..., + match_args: bool = ..., + kw_only: bool = ..., + slots: bool = ..., + weakref_slot: bool = ..., + ) -> type: ... + +elif sys.version_info >= (3, 10): def make_dataclass( cls_name: str, fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], diff --git a/mypy/typeshed/stdlib/difflib.pyi b/mypy/typeshed/stdlib/difflib.pyi index 87e3768034bf..854a53d433ae 100644 --- a/mypy/typeshed/stdlib/difflib.pyi +++ b/mypy/typeshed/stdlib/difflib.pyi @@ -28,8 +28,17 @@ class Match(NamedTuple): size: int class SequenceMatcher(Generic[_T]): + @overload + def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = ...) -> None: ... + @overload + def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = ...) -> None: ... + @overload def __init__( - self, isjunk: Callable[[_T], bool] | None = ..., a: Sequence[_T] = ..., b: Sequence[_T] = ..., autojunk: bool = ... + self: SequenceMatcher[str], + isjunk: Callable[[str], bool] | None = ..., + a: Sequence[str] = ..., + b: Sequence[str] = ..., + autojunk: bool = ..., ) -> None: ... def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... def set_seq1(self, a: Sequence[_T]) -> None: ... diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index 9a99d4498668..0b78e17b360b 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -80,9 +80,12 @@ class Bytecode: first_line: int | None = ..., current_offset: int | None = ..., show_caches: bool = ..., + adaptive: bool = ..., ) -> None: ... @classmethod - def from_traceback(cls: type[Self], tb: types.TracebackType, *, show_caches: bool = ...) -> Self: ... + def from_traceback( + cls: type[Self], tb: types.TracebackType, *, show_caches: bool = ..., adaptive: bool = ... + ) -> Self: ... else: def __init__( self, x: _HaveCodeOrStringType, *, first_line: int | None = ..., current_offset: int | None = ... @@ -103,7 +106,12 @@ def code_info(x: _HaveCodeOrStringType) -> str: ... if sys.version_info >= (3, 11): def dis( - x: _HaveCodeOrStringType | None = ..., *, file: IO[str] | None = ..., depth: int | None = ..., show_caches: bool = ... + x: _HaveCodeOrStringType | None = ..., + *, + file: IO[str] | None = ..., + depth: int | None = ..., + show_caches: bool = ..., + adaptive: bool = ..., ) -> None: ... elif sys.version_info >= (3, 7): @@ -113,10 +121,18 @@ else: def dis(x: _HaveCodeOrStringType | None = ..., *, file: IO[str] | None = ...) -> None: ... if sys.version_info >= (3, 11): - def disassemble(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ...) -> None: ... - def disco(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ...) -> None: ... - def distb(tb: types.TracebackType | None = ..., *, file: IO[str] | None = ..., show_caches: bool = ...) -> None: ... - def get_instructions(x: _HaveCodeType, *, first_line: int | None = ..., show_caches: bool = ...) -> Iterator[Instruction]: ... + def disassemble( + co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + ) -> None: ... + def disco( + co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + ) -> None: ... 
+ def distb( + tb: types.TracebackType | None = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + ) -> None: ... + def get_instructions( + x: _HaveCodeType, *, first_line: int | None = ..., show_caches: bool = ..., adaptive: bool = ... + ) -> Iterator[Instruction]: ... else: def disassemble(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/command/check.pyi b/mypy/typeshed/stdlib/distutils/command/check.pyi index 36895d2c16f1..cdbe40fff71d 100644 --- a/mypy/typeshed/stdlib/distutils/command/check.pyi +++ b/mypy/typeshed/stdlib/distutils/command/check.pyi @@ -1,8 +1,9 @@ from typing import Any +from typing_extensions import TypeAlias from ..cmd import Command -_Reporter = Any # really docutils.utils.Reporter +_Reporter: TypeAlias = Any # really docutils.utils.Reporter # Only defined if docutils is installed. class SilentReporter(_Reporter): diff --git a/mypy/typeshed/stdlib/distutils/filelist.pyi b/mypy/typeshed/stdlib/distutils/filelist.pyi index 361cb13f0c47..d8b87e251509 100644 --- a/mypy/typeshed/stdlib/distutils/filelist.pyi +++ b/mypy/typeshed/stdlib/distutils/filelist.pyi @@ -17,34 +17,34 @@ class FileList: def process_template_line(self, line: str) -> None: ... @overload def include_pattern( - self, pattern: str, anchor: int | bool = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... ) -> bool: ... @overload def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... @overload def include_pattern( - self, pattern: str | Pattern[str], anchor: int | bool = ..., prefix: str | None = ..., is_regex: int | bool = ... + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... ) -> bool: ... @overload def exclude_pattern( - self, pattern: str, anchor: int | bool = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... ) -> bool: ... @overload def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... @overload def exclude_pattern( - self, pattern: str | Pattern[str], anchor: int | bool = ..., prefix: str | None = ..., is_regex: int | bool = ... + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... ) -> bool: ... def findall(dir: str = ...) -> list[str]: ... def glob_to_re(pattern: str) -> str: ... @overload def translate_pattern( - pattern: str, anchor: int | bool = ..., prefix: str | None = ..., is_regex: Literal[False, 0] = ... + pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[False, 0] = ... ) -> Pattern[str]: ... @overload def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> Pattern[str]: ... @overload def translate_pattern( - pattern: str | Pattern[str], anchor: int | bool = ..., prefix: str | None = ..., is_regex: int | bool = ... + pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... ) -> Pattern[str]: ... 
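A sketch of the difflib.SequenceMatcher overloads earlier in this patch: the no-argument and str defaults keep working, while non-str sequences now drive the inference of _T:

from difflib import SequenceMatcher

# Default construction still type-checks as SequenceMatcher[str].
sm = SequenceMatcher()
sm.set_seqs("abcd", "bcde")
print(sm.ratio())                        # 0.75

# For non-str element types, a and b are supplied up front, so _T is
# inferred from the sequences instead of defaulting to str.
nums = SequenceMatcher(None, [1, 2, 3, 4], [2, 3, 4, 5])
print(nums.get_matching_blocks())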
diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index 9ebeba37ab71..6063dc47b004 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -1,44 +1,37 @@ import sys import types -from _typeshed import Self +from _typeshed import Self, SupportsKeysAndGetItem from abc import ABCMeta from builtins import property as _builtins_property from collections.abc import Iterable, Iterator, Mapping from typing import Any, Generic, TypeVar, overload from typing_extensions import Literal, TypeAlias +__all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] + if sys.version_info >= (3, 11): - __all__ = [ - "EnumType", - "EnumMeta", - "Enum", - "IntEnum", - "StrEnum", - "Flag", - "IntFlag", - "ReprEnum", - "auto", - "unique", - "property", - "verify", - "member", - "nonmember", - "FlagBoundary", - "STRICT", + __all__ += [ "CONFORM", + "CONTINUOUS", "EJECT", - "KEEP", - "global_flag_repr", - "global_enum_repr", - "global_str", - "global_enum", "EnumCheck", - "CONTINUOUS", + "EnumType", + "FlagBoundary", + "KEEP", "NAMED_FLAGS", + "ReprEnum", + "STRICT", + "StrEnum", "UNIQUE", + "global_enum", + "global_enum_repr", + "global_flag_repr", + "global_str", + "member", + "nonmember", + "property", + "verify", ] -else: - __all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] _EnumMemberT = TypeVar("_EnumMemberT") _EnumerationT = TypeVar("_EnumerationT", bound=type[Enum]) @@ -68,6 +61,16 @@ if sys.version_info >= (3, 11): class _EnumDict(dict[str, Any]): def __init__(self) -> None: ... def __setitem__(self, key: str, value: Any) -> None: ... + if sys.version_info >= (3, 11): + # See comment above `typing.MutableMapping.update` + # for why overloads are preferable to a Union here + # + # Unlike with MutableMapping.update(), the first argument is required, + # hence the type: ignore + @overload # type: ignore[override] + def update(self, members: SupportsKeysAndGetItem[str, Any], **more_members: Any) -> None: ... + @overload + def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ... # Note: EnumMeta actually subclasses type directly, not ABCMeta. # This is a temporary workaround to allow multiple creation of enums with builtins @@ -213,15 +216,21 @@ class Flag(Enum): def __and__(self: Self, other: Self) -> Self: ... def __xor__(self: Self, other: Self) -> Self: ... def __invert__(self: Self) -> Self: ... + if sys.version_info >= (3, 11): + def __iter__(self: Self) -> Iterator[Self]: ... + def __len__(self) -> int: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ class IntFlag(int, Flag): def __new__(cls: type[Self], value: int) -> Self: ... def __or__(self: Self, other: int) -> Self: ... def __and__(self: Self, other: int) -> Self: ... def __xor__(self: Self, other: int) -> Self: ... - def __ror__(self: Self, other: int) -> Self: ... - def __rand__(self: Self, other: int) -> Self: ... - def __rxor__(self: Self, other: int) -> Self: ... 
+ __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ if sys.version_info >= (3, 11): class StrEnum(str, ReprEnum): diff --git a/mypy/typeshed/stdlib/filecmp.pyi b/mypy/typeshed/stdlib/filecmp.pyi index a6747dd504a3..dd4a0628b026 100644 --- a/mypy/typeshed/stdlib/filecmp.pyi +++ b/mypy/typeshed/stdlib/filecmp.pyi @@ -1,7 +1,6 @@ import sys -from _typeshed import StrOrBytesPath +from _typeshed import GenericPath, StrOrBytesPath from collections.abc import Callable, Iterable, Sequence -from os import PathLike from typing import Any, AnyStr, Generic from typing_extensions import Literal @@ -13,19 +12,16 @@ __all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] DEFAULT_IGNORES: list[str] BUFSIZE: Literal[8192] -def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: int | bool = ...) -> bool: ... +def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = ...) -> bool: ... def cmpfiles( - a: AnyStr | PathLike[AnyStr], - b: AnyStr | PathLike[AnyStr], - common: Iterable[AnyStr | PathLike[AnyStr]], - shallow: int | bool = ..., + a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = ... ) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... class dircmp(Generic[AnyStr]): def __init__( self, - a: AnyStr | PathLike[AnyStr], - b: AnyStr | PathLike[AnyStr], + a: GenericPath[AnyStr], + b: GenericPath[AnyStr], ignore: Sequence[AnyStr] | None = ..., hide: Sequence[AnyStr] | None = ..., ) -> None: ... diff --git a/mypy/typeshed/stdlib/fileinput.pyi b/mypy/typeshed/stdlib/fileinput.pyi index 0ef8c14ddaac..e0babbcd40cc 100644 --- a/mypy/typeshed/stdlib/fileinput.pyi +++ b/mypy/typeshed/stdlib/fileinput.pyi @@ -1,8 +1,12 @@ import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import AnyStr_co, Self, StrOrBytesPath from collections.abc import Callable, Iterable, Iterator from types import TracebackType -from typing import IO, Any, AnyStr, Generic +from typing import IO, Any, AnyStr, Generic, Protocol, overload +from typing_extensions import Literal, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias __all__ = [ "input", @@ -19,40 +23,131 @@ __all__ = [ "hook_encoded", ] -if sys.version_info >= (3, 9): - from types import GenericAlias +if sys.version_info >= (3, 11): + _TextMode: TypeAlias = Literal["r"] +else: + _TextMode: TypeAlias = Literal["r", "rU", "U"] + +class _HasReadlineAndFileno(Protocol[AnyStr_co]): + def readline(self) -> AnyStr_co: ... + def fileno(self) -> int: ... if sys.version_info >= (3, 10): + # encoding and errors are added + @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., inplace: bool = ..., backup: str = ..., *, - mode: str = ..., - openhook: Callable[[StrOrBytesPath, str], IO[AnyStr]] = ..., + mode: _TextMode = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., encoding: str | None = ..., errors: str | None = ..., - ) -> FileInput[AnyStr]: ... + ) -> FileInput[str]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + encoding: None = ..., + errors: None = ..., + ) -> FileInput[bytes]: ... 
+ @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + encoding: str | None = ..., + errors: str | None = ..., + ) -> FileInput[Any]: ... elif sys.version_info >= (3, 8): + # bufsize is dropped and mode and openhook become keyword-only + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: _TextMode = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + ) -> FileInput[str]: ... + @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., inplace: bool = ..., backup: str = ..., *, - mode: str = ..., - openhook: Callable[[StrOrBytesPath, str], IO[AnyStr]] = ..., - ) -> FileInput[AnyStr]: ... + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + ) -> FileInput[Any]: ... else: + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + bufsize: int = ..., + mode: _TextMode = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + ) -> FileInput[str]: ... + # Because mode isn't keyword-only here yet, we need two overloads each for + # the bytes case and the fallback case. + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + bufsize: int = ..., + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + ) -> FileInput[bytes]: ... + @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., inplace: bool = ..., backup: str = ..., bufsize: int = ..., - mode: str = ..., - openhook: Callable[[StrOrBytesPath, str], IO[AnyStr]] = ..., - ) -> FileInput[AnyStr]: ... + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + ) -> FileInput[Any]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + ) -> FileInput[Any]: ... def close() -> None: ... def nextfile() -> None: ... @@ -65,36 +160,131 @@ def isstdin() -> bool: ... 
class FileInput(Iterator[AnyStr], Generic[AnyStr]): if sys.version_info >= (3, 10): + # encoding and errors are added + @overload def __init__( - self, - files: None | StrOrBytesPath | Iterable[StrOrBytesPath] = ..., + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., inplace: bool = ..., backup: str = ..., *, - mode: str = ..., - openhook: Callable[[StrOrBytesPath, str], IO[AnyStr]] = ..., + mode: _TextMode = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., encoding: str | None = ..., errors: str | None = ..., ) -> None: ... + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + encoding: None = ..., + errors: None = ..., + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + encoding: str | None = ..., + errors: str | None = ..., + ) -> None: ... + elif sys.version_info >= (3, 8): + # bufsize is dropped and mode and openhook become keyword-only + @overload + def __init__( + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: _TextMode = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + ) -> None: ... + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + ) -> None: ... + @overload def __init__( - self, - files: None | StrOrBytesPath | Iterable[StrOrBytesPath] = ..., + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., inplace: bool = ..., backup: str = ..., *, - mode: str = ..., - openhook: Callable[[StrOrBytesPath, str], IO[AnyStr]] = ..., + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., ) -> None: ... + else: + @overload def __init__( - self, - files: None | StrOrBytesPath | Iterable[StrOrBytesPath] = ..., + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., inplace: bool = ..., backup: str = ..., bufsize: int = ..., - mode: str = ..., - openhook: Callable[[StrOrBytesPath, str], IO[AnyStr]] = ..., + mode: _TextMode = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + ) -> None: ... + # Because mode isn't keyword-only here yet, we need two overloads each for + # the bytes case and the fallback case. + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + bufsize: int = ..., + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + ) -> None: ... 
+ @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., + inplace: bool = ..., + backup: str = ..., + bufsize: int = ..., + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., ) -> None: ... def __del__(self) -> None: ... diff --git a/mypy/typeshed/stdlib/fractions.pyi b/mypy/typeshed/stdlib/fractions.pyi index 0d787a011f5b..fb64c659224a 100644 --- a/mypy/typeshed/stdlib/fractions.pyi +++ b/mypy/typeshed/stdlib/fractions.pyi @@ -1,9 +1,10 @@ import sys from _typeshed import Self +from collections.abc import Callable from decimal import Decimal from numbers import Integral, Rational, Real from typing import Any, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, SupportsIndex, TypeAlias _ComparableNum: TypeAlias = int | float | Decimal | Real @@ -118,7 +119,7 @@ class Fraction(Rational): @overload def __pow__(self, b: complex) -> complex: ... @overload - def __rpow__(self, a: int | float | Fraction) -> float: ... + def __rpow__(self, a: float | Fraction) -> float: ... @overload def __rpow__(self, a: complex) -> complex: ... def __pos__(self) -> Fraction: ... @@ -141,7 +142,7 @@ class Fraction(Rational): def __copy__(self: Self) -> Self: ... def __deepcopy__(self: Self, memo: Any) -> Self: ... if sys.version_info >= (3, 11): - def __int__(self) -> int: ... + def __int__(self, _index: Callable[[SupportsIndex], int] = ...) -> int: ... # Not actually defined within fractions.py, but provides more useful # overrides @property diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi index 925ad5884700..49c680a6f0c7 100644 --- a/mypy/typeshed/stdlib/ftplib.pyi +++ b/mypy/typeshed/stdlib/ftplib.pyi @@ -70,7 +70,7 @@ class FTP: def getwelcome(self) -> str: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... - def set_pasv(self, val: bool | int) -> None: ... + def set_pasv(self, val: bool | Literal[0, 1]) -> None: ... def sanitize(self, s: str) -> str: ... def putline(self, line: str) -> None: ... def putcmd(self, line: str) -> None: ... 
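A sketch of the fileinput overloads above, which key the element type off mode; notes.txt is a placeholder file name assumed to exist:

import fileinput

# Text mode (the default) is FileInput[str] under the new overloads.
with fileinput.input(files=["notes.txt"]) as fi:
    for line in fi:
        print(line.rstrip())             # line: str

# mode="rb" selects the bytes overload, giving FileInput[bytes].
with fileinput.input(files=["notes.txt"], mode="rb") as fb:
    for raw in fb:
        print(len(raw))                  # raw: bytes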
diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 44feeed63f6a..3003ef061a84 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -1,6 +1,6 @@ import sys import types -from _typeshed import Self, SupportsAllComparisons, SupportsItems +from _typeshed import IdentityFunction, Self, SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload from typing_extensions import Literal, TypeAlias, final @@ -8,52 +8,25 @@ from typing_extensions import Literal, TypeAlias, final if sys.version_info >= (3, 9): from types import GenericAlias - __all__ = [ - "update_wrapper", - "wraps", - "WRAPPER_ASSIGNMENTS", - "WRAPPER_UPDATES", - "total_ordering", - "cache", - "cmp_to_key", - "lru_cache", - "reduce", - "partial", - "partialmethod", - "singledispatch", - "singledispatchmethod", - "cached_property", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "update_wrapper", - "wraps", - "WRAPPER_ASSIGNMENTS", - "WRAPPER_UPDATES", - "total_ordering", - "cmp_to_key", - "lru_cache", - "reduce", - "partial", - "partialmethod", - "singledispatch", - "singledispatchmethod", - "cached_property", - ] -else: - __all__ = [ - "update_wrapper", - "wraps", - "WRAPPER_ASSIGNMENTS", - "WRAPPER_UPDATES", - "total_ordering", - "cmp_to_key", - "lru_cache", - "reduce", - "partial", - "partialmethod", - "singledispatch", - ] +__all__ = [ + "update_wrapper", + "wraps", + "WRAPPER_ASSIGNMENTS", + "WRAPPER_UPDATES", + "total_ordering", + "cmp_to_key", + "lru_cache", + "reduce", + "partial", + "partialmethod", + "singledispatch", +] + +if sys.version_info >= (3, 8): + __all__ += ["cached_property", "singledispatchmethod"] + +if sys.version_info >= (3, 9): + __all__ += ["cache"] _AnyCallable: TypeAlias = Callable[..., Any] @@ -95,7 +68,7 @@ WRAPPER_ASSIGNMENTS: tuple[ WRAPPER_UPDATES: tuple[Literal["__dict__"]] def update_wrapper(wrapper: _T, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _T: ... -def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_T], _T]: ... +def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> IdentityFunction: ... def total_ordering(cls: type[_T]) -> type[_T]: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... diff --git a/mypy/typeshed/stdlib/genericpath.pyi b/mypy/typeshed/stdlib/genericpath.pyi index 984d0c3cf51e..911d582fd538 100644 --- a/mypy/typeshed/stdlib/genericpath.pyi +++ b/mypy/typeshed/stdlib/genericpath.pyi @@ -2,7 +2,7 @@ import os from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRichComparisonT from collections.abc import Sequence from typing import overload -from typing_extensions import Literal +from typing_extensions import Literal, LiteralString __all__ = [ "commonprefix", @@ -22,6 +22,8 @@ __all__ = [ # Iterable[T], so that list[T] | Literal[""] could be used as a return # type. But because this only works when T is str, we need Sequence[T] instead. @overload +def commonprefix(m: Sequence[LiteralString]) -> LiteralString: ... +@overload def commonprefix(m: Sequence[StrPath]) -> str: ... @overload def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... 
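A sketch of the extra commonprefix() overload above, assuming a checker that implements PEP 675 literal-string inference:

import os.path
import sys
from typing_extensions import LiteralString

# Both elements are literals, so the new overload can keep LiteralString.
prefix: LiteralString = os.path.commonprefix(["lib/python", "lib/pypy"])
print(prefix)                            # lib/py

# One element is not statically known, so this falls back to plain str.
runtime_prefix = os.path.commonprefix(["lib/python", sys.executable])
print(type(runtime_prefix))              # <class 'str'>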
diff --git a/mypy/typeshed/stdlib/gettext.pyi b/mypy/typeshed/stdlib/gettext.pyi index 829ade96cb40..3c07abeb2d8a 100644 --- a/mypy/typeshed/stdlib/gettext.pyi +++ b/mypy/typeshed/stdlib/gettext.pyi @@ -1,72 +1,30 @@ +import io import sys from _typeshed import StrPath -from collections.abc import Container, Iterable, Sequence +from collections.abc import Callable, Container, Iterable, Sequence from typing import Any, Protocol, TypeVar, overload from typing_extensions import Final, Literal -if sys.version_info >= (3, 11): - __all__ = [ - "NullTranslations", - "GNUTranslations", - "Catalog", - "bindtextdomain", - "find", - "translation", - "install", - "textdomain", - "dgettext", - "dngettext", - "gettext", - "ngettext", - "pgettext", - "dpgettext", - "npgettext", - "dnpgettext", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "NullTranslations", - "GNUTranslations", - "Catalog", - "find", - "translation", - "install", - "textdomain", - "bindtextdomain", - "bind_textdomain_codeset", - "dgettext", - "dngettext", - "gettext", - "lgettext", - "ldgettext", - "ldngettext", - "lngettext", - "ngettext", - "pgettext", - "dpgettext", - "npgettext", - "dnpgettext", - ] -else: - __all__ = [ - "NullTranslations", - "GNUTranslations", - "Catalog", - "find", - "translation", - "install", - "textdomain", - "bindtextdomain", - "bind_textdomain_codeset", - "dgettext", - "dngettext", - "gettext", - "lgettext", - "ldgettext", - "ldngettext", - "lngettext", - "ngettext", - ] +__all__ = [ + "NullTranslations", + "GNUTranslations", + "Catalog", + "find", + "translation", + "install", + "textdomain", + "bindtextdomain", + "dgettext", + "dngettext", + "gettext", + "ngettext", +] + +if sys.version_info < (3, 11): + __all__ += ["bind_textdomain_codeset", "ldgettext", "ldngettext", "lgettext", "lngettext"] + +if sys.version_info >= (3, 8): + __all__ += ["dnpgettext", "dpgettext", "npgettext", "pgettext"] class _TranslationsReader(Protocol): def read(self) -> bytes: ... @@ -78,9 +36,7 @@ class NullTranslations: def _parse(self, fp: _TranslationsReader) -> None: ... def add_fallback(self, fallback: NullTranslations) -> None: ... def gettext(self, message: str) -> str: ... - def lgettext(self, message: str) -> str: ... def ngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... - def lngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... if sys.version_info >= (3, 8): def pgettext(self, context: str, message: str) -> str: ... def npgettext(self, context: str, msgid1: str, msgid2: str, n: int) -> str: ... @@ -90,6 +46,8 @@ class NullTranslations: if sys.version_info < (3, 11): def output_charset(self) -> str | None: ... def set_output_charset(self, charset: str) -> None: ... + def lgettext(self, message: str) -> str: ... + def lngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... def install(self, names: Container[str] | None = ...) -> None: ... @@ -110,7 +68,7 @@ def find( @overload def find(domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: bool = ...) -> Any: ... -_T = TypeVar("_T") +_NullTranslationsT = TypeVar("_NullTranslationsT", bound=NullTranslations) if sys.version_info >= (3, 11): @overload @@ -119,25 +77,34 @@ if sys.version_info >= (3, 11): localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., class_: None = ..., - fallback: bool = ..., - ) -> NullTranslations: ... + fallback: Literal[False] = ..., + ) -> GNUTranslations: ... 
@overload def translation( domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., - class_: type[_T] = ..., + *, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = ..., + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None, + languages: Iterable[str] | None, + class_: Callable[[io.BufferedReader], _NullTranslationsT], fallback: Literal[False] = ..., - ) -> _T: ... + ) -> _NullTranslationsT: ... @overload def translation( domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., - class_: type[Any] = ..., - fallback: Literal[True] = ..., - ) -> Any: ... - def install(domain: str, localedir: StrPath | None = ..., names: Container[str] | None = ...) -> None: ... + class_: Callable[[io.BufferedReader], NullTranslations] | None = ..., + fallback: bool = ..., + ) -> NullTranslations: ... + def install(domain: str, localedir: StrPath | None = ..., *, names: Container[str] | None = ...) -> None: ... else: @overload @@ -146,27 +113,37 @@ else: localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., class_: None = ..., - fallback: bool = ..., + fallback: Literal[False] = ..., codeset: str | None = ..., - ) -> NullTranslations: ... + ) -> GNUTranslations: ... @overload def translation( domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., - class_: type[_T] = ..., + *, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = ..., + codeset: str | None = ..., + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None, + languages: Iterable[str] | None, + class_: Callable[[io.BufferedReader], _NullTranslationsT], fallback: Literal[False] = ..., codeset: str | None = ..., - ) -> _T: ... + ) -> _NullTranslationsT: ... @overload def translation( domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., - class_: type[Any] = ..., - fallback: Literal[True] = ..., + class_: Callable[[io.BufferedReader], NullTranslations] | None = ..., + fallback: bool = ..., codeset: str | None = ..., - ) -> Any: ... + ) -> NullTranslations: ... def install( domain: str, localedir: StrPath | None = ..., codeset: str | None = ..., names: Container[str] | None = ... ) -> None: ... diff --git a/mypy/typeshed/stdlib/graphlib.pyi b/mypy/typeshed/stdlib/graphlib.pyi index 2fca402bf906..4c6959decc4b 100644 --- a/mypy/typeshed/stdlib/graphlib.pyi +++ b/mypy/typeshed/stdlib/graphlib.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import SupportsItems from collections.abc import Iterable -from typing import Any, Generic, TypeVar +from typing import Any, Generic, TypeVar, overload __all__ = ["TopologicalSorter", "CycleError"] @@ -11,7 +11,10 @@ if sys.version_info >= (3, 11): from types import GenericAlias class TopologicalSorter(Generic[_T]): - def __init__(self, graph: SupportsItems[_T, Iterable[_T]] | None = ...) -> None: ... + @overload + def __init__(self, graph: None = ...) -> None: ... + @overload + def __init__(self, graph: SupportsItems[_T, Iterable[_T]]) -> None: ... def add(self, node: _T, *predecessors: _T) -> None: ... def prepare(self) -> None: ... def is_active(self) -> bool: ... 
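A sketch of the graphlib.TopologicalSorter overloads above: with a graph argument _T is inferred from it, and the no-argument form no longer needs a dummy default (requires Python 3.9+):

from __future__ import annotations
from graphlib import TopologicalSorter

# _T inferred as str from the mapping argument.
ts = TopologicalSorter({"c": ["a", "b"], "b": ["a"]})
print(list(ts.static_order()))           # ['a', 'b', 'c']

# With no graph, construction is still fine; _T comes from later add() calls.
empty: TopologicalSorter[int] = TopologicalSorter()
empty.add(2, 1)                          # node 2 depends on node 1
print(list(empty.static_order()))        # [1, 2]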
diff --git a/mypy/typeshed/stdlib/hmac.pyi b/mypy/typeshed/stdlib/hmac.pyi index 858e46a71b68..a7bf15493f0b 100644 --- a/mypy/typeshed/stdlib/hmac.pyi +++ b/mypy/typeshed/stdlib/hmac.pyi @@ -42,4 +42,4 @@ def compare_digest(__a: ReadableBuffer, __b: ReadableBuffer) -> bool: ... def compare_digest(__a: AnyStr, __b: AnyStr) -> bool: ... if sys.version_info >= (3, 7): - def digest(key: bytes, msg: ReadableBuffer, digest: str) -> bytes: ... + def digest(key: bytes, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi index eef1c1957769..b082100774c0 100644 --- a/mypy/typeshed/stdlib/imaplib.pyi +++ b/mypy/typeshed/stdlib/imaplib.pyi @@ -169,7 +169,7 @@ class _Authenticator: def encode(self, inp: bytes) -> str: ... def decode(self, inp: str) -> bytes: ... -def Internaldate2tuple(resp: str) -> time.struct_time: ... +def Internaldate2tuple(resp: bytes) -> time.struct_time: ... def Int2AP(num: int) -> str: ... def ParseFlags(resp: str) -> tuple[str, ...]: ... def Time2Internaldate(date_time: float | time.struct_time | str) -> str: ... diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index 63fd02f7b3d5..805910329b64 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -16,6 +16,22 @@ from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWra from typing import IO, Any, BinaryIO, NoReturn, Protocol, overload, runtime_checkable from typing_extensions import Literal, TypeAlias +if sys.version_info >= (3, 11): + __all__ = [ + "Loader", + "Finder", + "MetaPathFinder", + "PathEntryFinder", + "ResourceLoader", + "InspectLoader", + "ExecutionLoader", + "FileLoader", + "SourceLoader", + "ResourceReader", + "Traversable", + "TraversableResources", + ] + _Path: TypeAlias = bytes | str class Finder(metaclass=ABCMeta): ... 
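A sketch of the hmac.digest() change above: the digest parameter accepts the same forms as hmac.new()'s digestmod (a name, a hashlib constructor, or a module), not just a name string:

import hashlib
import hmac

key, msg = b"secret", b"payload"
print(hmac.digest(key, msg, "sha256").hex())         # by name
print(hmac.digest(key, msg, hashlib.sha256).hex())   # by constructor
assert hmac.digest(key, msg, hashlib.sha256) == hmac.new(key, msg, hashlib.sha256).digest()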
diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi index 1f32f0770b37..6466ce0a23ac 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -9,34 +9,21 @@ from os import PathLike from pathlib import Path from typing import Any, ClassVar, NamedTuple, Pattern, overload +__all__ = [ + "Distribution", + "DistributionFinder", + "PackageNotFoundError", + "distribution", + "distributions", + "entry_points", + "files", + "metadata", + "requires", + "version", +] + if sys.version_info >= (3, 10): - __all__ = [ - "Distribution", - "DistributionFinder", - "PackageMetadata", - "PackageNotFoundError", - "distribution", - "distributions", - "entry_points", - "files", - "metadata", - "packages_distributions", - "requires", - "version", - ] -else: - __all__ = [ - "Distribution", - "DistributionFinder", - "PackageNotFoundError", - "distribution", - "distributions", - "entry_points", - "files", - "metadata", - "requires", - "version", - ] + __all__ += ["PackageMetadata", "packages_distributions"] if sys.version_info >= (3, 10): from importlib.metadata._meta import PackageMetadata as PackageMetadata diff --git a/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi b/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi index a1101df0d5ce..6a7cd858c80b 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi @@ -1,3 +1,4 @@ +import sys from collections.abc import Iterator from typing import Any, Protocol, TypeVar @@ -14,6 +15,9 @@ class PackageMetadata(Protocol): class SimplePath(Protocol): def joinpath(self) -> SimplePath: ... - def __div__(self) -> SimplePath: ... def parent(self) -> SimplePath: ... def read_text(self) -> str: ... + if sys.version_info >= (3, 11): + def __truediv__(self) -> SimplePath: ... + else: + def __div__(self) -> SimplePath: ... diff --git a/mypy/typeshed/stdlib/importlib/resources.pyi b/mypy/typeshed/stdlib/importlib/resources.pyi index 04d7e8dcc967..28ca107f4195 100644 --- a/mypy/typeshed/stdlib/importlib/resources.pyi +++ b/mypy/typeshed/stdlib/importlib/resources.pyi @@ -7,40 +7,20 @@ from types import ModuleType from typing import Any, BinaryIO, TextIO from typing_extensions import TypeAlias +__all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] + +if sys.version_info >= (3, 9): + __all__ += ["as_file", "files"] + if sys.version_info >= (3, 10): - __all__ = [ - "Package", - "Resource", - "ResourceReader", - "as_file", - "contents", - "files", - "is_resource", - "open_binary", - "open_text", - "path", - "read_binary", - "read_text", - ] -elif sys.version_info >= (3, 9): - __all__ = [ - "Package", - "Resource", - "as_file", - "contents", - "files", - "is_resource", - "open_binary", - "open_text", - "path", - "read_binary", - "read_text", - ] -else: - __all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] + __all__ += ["ResourceReader"] Package: TypeAlias = str | ModuleType -Resource: TypeAlias = str | os.PathLike[Any] + +if sys.version_info >= (3, 11): + Resource: TypeAlias = str +else: + Resource: TypeAlias = str | os.PathLike[Any] def open_binary(package: Package, resource: Resource) -> BinaryIO: ... def open_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> TextIO: ... 
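For reference, a minimal sketch (not from the patch) of the conditional re-export idiom that importlib.metadata, importlib.resources and the other modules in this series switch to: one base __all__ plus version-gated additions, instead of two near-duplicate lists. The names mirror the hunk above.

    import sys

    __all__ = ["Distribution", "distribution", "version"]

    if sys.version_info >= (3, 10):
        # Only newer Pythons re-export these two names.
        __all__ += ["PackageMetadata", "packages_distributions"]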
diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index 327fd0d94a4d..0670b65fe359 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -8,49 +8,29 @@ from types import TracebackType from typing import IO, Any, BinaryIO, TextIO from typing_extensions import Literal +__all__ = [ + "BlockingIOError", + "open", + "IOBase", + "RawIOBase", + "FileIO", + "BytesIO", + "StringIO", + "BufferedIOBase", + "BufferedReader", + "BufferedWriter", + "BufferedRWPair", + "BufferedRandom", + "TextIOBase", + "TextIOWrapper", + "UnsupportedOperation", + "SEEK_SET", + "SEEK_CUR", + "SEEK_END", +] + if sys.version_info >= (3, 8): - __all__ = [ - "BlockingIOError", - "open", - "open_code", - "IOBase", - "RawIOBase", - "FileIO", - "BytesIO", - "StringIO", - "BufferedIOBase", - "BufferedReader", - "BufferedWriter", - "BufferedRWPair", - "BufferedRandom", - "TextIOBase", - "TextIOWrapper", - "UnsupportedOperation", - "SEEK_SET", - "SEEK_CUR", - "SEEK_END", - ] -else: - __all__ = [ - "BlockingIOError", - "open", - "IOBase", - "RawIOBase", - "FileIO", - "BytesIO", - "StringIO", - "BufferedIOBase", - "BufferedReader", - "BufferedWriter", - "BufferedRWPair", - "BufferedRandom", - "TextIOBase", - "TextIOWrapper", - "UnsupportedOperation", - "SEEK_SET", - "SEEK_CUR", - "SEEK_END", - ] + __all__ += ["open_code"] DEFAULT_BUFFER_SIZE: Literal[8192] diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index 43a0e8038d69..7299ee8200db 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -18,7 +18,7 @@ _T4 = TypeVar("_T4") _T5 = TypeVar("_T5") _T6 = TypeVar("_T6") -_Step: TypeAlias = int | float | SupportsFloat | SupportsInt | SupportsIndex | SupportsComplex +_Step: TypeAlias = SupportsFloat | SupportsInt | SupportsIndex | SupportsComplex _Predicate: TypeAlias = Callable[[_T], object] diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi index a998ad5fe49e..34df53994c92 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -3,149 +3,79 @@ from collections.abc import Callable, Iterable, Iterator from lib2to3.pgen2.token import * from typing_extensions import TypeAlias -if sys.version_info >= (3, 8): - __all__ = [ - "AMPER", - "AMPEREQUAL", - "ASYNC", - "AT", - "ATEQUAL", - "AWAIT", - "BACKQUOTE", - "CIRCUMFLEX", - "CIRCUMFLEXEQUAL", - "COLON", - "COLONEQUAL", - "COMMA", - "COMMENT", - "DEDENT", - "DOT", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "DOUBLESTAR", - "DOUBLESTAREQUAL", - "ENDMARKER", - "EQEQUAL", - "EQUAL", - "ERRORTOKEN", - "GREATER", - "GREATEREQUAL", - "INDENT", - "ISEOF", - "ISNONTERMINAL", - "ISTERMINAL", - "LBRACE", - "LEFTSHIFT", - "LEFTSHIFTEQUAL", - "LESS", - "LESSEQUAL", - "LPAR", - "LSQB", - "MINEQUAL", - "MINUS", - "NAME", - "NEWLINE", - "NL", - "NOTEQUAL", - "NT_OFFSET", - "NUMBER", - "N_TOKENS", - "OP", - "PERCENT", - "PERCENTEQUAL", - "PLUS", - "PLUSEQUAL", - "RARROW", - "RBRACE", - "RIGHTSHIFT", - "RIGHTSHIFTEQUAL", - "RPAR", - "RSQB", - "SEMI", - "SLASH", - "SLASHEQUAL", - "STAR", - "STAREQUAL", - "STRING", - "TILDE", - "VBAR", - "VBAREQUAL", - "tok_name", - "tokenize", - "generate_tokens", - "untokenize", - ] -else: - __all__ = [ - "AMPER", - "AMPEREQUAL", - "ASYNC", - "AT", - "ATEQUAL", - "AWAIT", - "BACKQUOTE", - "CIRCUMFLEX", - "CIRCUMFLEXEQUAL", - "COLON", - "COMMA", - "COMMENT", - "DEDENT", - "DOT", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "DOUBLESTAR", - 
"DOUBLESTAREQUAL", - "ENDMARKER", - "EQEQUAL", - "EQUAL", - "ERRORTOKEN", - "GREATER", - "GREATEREQUAL", - "INDENT", - "ISEOF", - "ISNONTERMINAL", - "ISTERMINAL", - "LBRACE", - "LEFTSHIFT", - "LEFTSHIFTEQUAL", - "LESS", - "LESSEQUAL", - "LPAR", - "LSQB", - "MINEQUAL", - "MINUS", - "NAME", - "NEWLINE", - "NL", - "NOTEQUAL", - "NT_OFFSET", - "NUMBER", - "N_TOKENS", - "OP", - "PERCENT", - "PERCENTEQUAL", - "PLUS", - "PLUSEQUAL", - "RARROW", - "RBRACE", - "RIGHTSHIFT", - "RIGHTSHIFTEQUAL", - "RPAR", - "RSQB", - "SEMI", - "SLASH", - "SLASHEQUAL", - "STAR", - "STAREQUAL", - "STRING", - "TILDE", - "VBAR", - "VBAREQUAL", - "tok_name", - "tokenize", - "generate_tokens", - "untokenize", - ] +__all__ = [ + "AMPER", + "AMPEREQUAL", + "ASYNC", + "AT", + "ATEQUAL", + "AWAIT", + "BACKQUOTE", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "VBAR", + "VBAREQUAL", + "tok_name", + "tokenize", + "generate_tokens", + "untokenize", +] + +if sys.version_info >= (3, 7): + __all__ += ["COLONEQUAL"] _Coord: TypeAlias = tuple[int, int] _TokenEater: TypeAlias = Callable[[int, str, _Coord, _Coord, str], None] diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi index 959054e847a8..393ddcbda841 100644 --- a/mypy/typeshed/stdlib/locale.pyi +++ b/mypy/typeshed/stdlib/locale.pyi @@ -29,6 +29,9 @@ __all__ = [ "CHAR_MAX", ] +if sys.version_info >= (3, 11): + __all__ += ["getencoding"] + # This module defines a function "str()", which is why "str" can't be used # as a type annotation or type alias. from builtins import str as _str @@ -126,7 +129,7 @@ if sys.version_info >= (3, 7): else: def format_string(f: _str, val: Any, grouping: bool = ...) -> _str: ... -def currency(val: int | float | Decimal, symbol: bool = ..., grouping: bool = ..., international: bool = ...) -> _str: ... +def currency(val: float | Decimal, symbol: bool = ..., grouping: bool = ..., international: bool = ...) -> _str: ... def delocalize(string: _str) -> _str: ... def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... def atoi(string: _str) -> int: ... @@ -143,6 +146,9 @@ if sys.platform == "linux" or sys.platform == "darwin": def bindtextdomain(__domain: _str, __dir: StrPath | None) -> _str: ... def bind_textdomain_codeset(__domain: _str, __codeset: _str | None) -> _str | None: ... +if sys.version_info >= (3, 11): + def getencoding() -> _str: ... 
+ locale_alias: dict[_str, _str] # undocumented locale_encoding_alias: dict[_str, _str] # undocumented windows_locale: dict[int, _str] # undocumented diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi index 6ad4cd4f94e7..6a8f66871a67 100644 --- a/mypy/typeshed/stdlib/logging/__init__.pyi +++ b/mypy/typeshed/stdlib/logging/__init__.pyi @@ -9,6 +9,9 @@ from types import FrameType, TracebackType from typing import Any, ClassVar, Generic, Pattern, TextIO, TypeVar, Union, overload from typing_extensions import Literal, TypeAlias +if sys.version_info >= (3, 11): + from types import GenericAlias + __all__ = [ "BASIC_FORMAT", "BufferingFormatter", @@ -54,6 +57,9 @@ __all__ = [ "raiseExceptions", ] +if sys.version_info >= (3, 11): + __all__ += ["getLevelNamesMapping"] + _SysExcInfoType: TypeAlias = Union[tuple[type[BaseException], BaseException, TracebackType | None], tuple[None, None, None]] _ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException _ArgsType: TypeAlias = tuple[object, ...] | Mapping[str, object] @@ -407,6 +413,8 @@ class LogRecord: sinfo: str | None = ..., ) -> None: ... def getMessage(self) -> str: ... + # Allows setting contextual information on LogRecord objects as per the docs, see #7833 + def __setattr__(self, __name: str, __value: Any) -> None: ... _L = TypeVar("_L", bound=Logger | LoggerAdapter[Any]) @@ -593,6 +601,8 @@ class LoggerAdapter(Generic[_L]): ) -> None: ... # undocumented @property def name(self) -> str: ... # undocumented + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... def getLogger(name: str | None = ...) -> Logger: ... def getLoggerClass() -> type[Logger]: ... @@ -706,6 +716,10 @@ else: def addLevelName(level: int, levelName: str) -> None: ... def getLevelName(level: _Level) -> Any: ... + +if sys.version_info >= (3, 11): + def getLevelNamesMapping() -> dict[str, int]: ... + def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: ... if sys.version_info >= (3, 9): @@ -769,6 +783,8 @@ class StreamHandler(Handler, Generic[_StreamT]): def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... if sys.version_info >= (3, 7): def setStream(self, stream: _StreamT) -> _StreamT | None: ... + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... class FileHandler(StreamHandler[TextIOWrapper]): baseFilename: str # undocumented @@ -818,8 +834,8 @@ class PercentStyle: # undocumented def format(self, record: Any) -> str: ... class StrFormatStyle(PercentStyle): # undocumented - fmt_spec = Any - field_spec = Any + fmt_spec: Pattern[str] + field_spec: Pattern[str] class StringTemplateStyle(PercentStyle): # undocumented _tpl: Template diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index 640236149000..d3ea29075b81 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -181,6 +181,9 @@ class SysLogHandler(Handler): facility_names: ClassVar[dict[str, int]] # undocumented priority_map: ClassVar[dict[str, str]] # undocumented def __init__(self, address: tuple[str, int] | str = ..., facility: int = ..., socktype: SocketKind | None = ...) -> None: ... + if sys.version_info >= (3, 11): + def createSocket(self) -> None: ... + def encodePriority(self, facility: int | str, priority: int | str) -> int: ... def mapPriority(self, levelName: str) -> str: ... 
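A short usage sketch (not from the patch) motivating the LogRecord.__setattr__ addition above: filters and adapters routinely attach contextual attributes to records, which the stub previously rejected.

    import logging

    record = logging.LogRecord(
        "demo", logging.INFO, __file__, 1, "hello %s", ("world",), None
    )
    # Arbitrary contextual attributes are allowed, as the logging docs describe.
    record.request_id = "abc123"
    print(record.getMessage(), record.request_id)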
diff --git a/mypy/typeshed/stdlib/macpath.pyi b/mypy/typeshed/stdlib/macpath.pyi index 2512e086b735..37821f44b200 100644 --- a/mypy/typeshed/stdlib/macpath.pyi +++ b/mypy/typeshed/stdlib/macpath.pyi @@ -34,6 +34,41 @@ from posixpath import ( ) from typing import AnyStr, overload +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "expanduser", + "expandvars", + "normpath", + "abspath", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", +] + altsep: str | None @overload diff --git a/mypy/typeshed/stdlib/macurl2path.pyi b/mypy/typeshed/stdlib/macurl2path.pyi index 6aac6dfeace5..af74b11c7850 100644 --- a/mypy/typeshed/stdlib/macurl2path.pyi +++ b/mypy/typeshed/stdlib/macurl2path.pyi @@ -1,3 +1,5 @@ +__all__ = ["url2pathname", "pathname2url"] + def url2pathname(pathname: str) -> str: ... def pathname2url(pathname: str) -> str: ... def _pncomp2url(component: str | bytes) -> str: ... diff --git a/mypy/typeshed/stdlib/mailbox.pyi b/mypy/typeshed/stdlib/mailbox.pyi index 64183cd0b3a4..3169e8cfa689 100644 --- a/mypy/typeshed/stdlib/mailbox.pyi +++ b/mypy/typeshed/stdlib/mailbox.pyi @@ -46,9 +46,10 @@ class Mailbox(Generic[_MessageT]): _path: bytes | str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], _MessageT] | None = ..., create: bool = ... - ) -> None: ... + @overload + def __init__(self, path: StrOrBytesPath, factory: Callable[[IO[Any]], _MessageT], create: bool = ...) -> None: ... + @overload + def __init__(self, path: StrOrBytesPath, factory: None = ..., create: bool = ...) -> None: ... @abstractmethod def add(self, message: _MessageData) -> str: ... 
@abstractmethod diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 96bb01a271fc..8dbec2388838 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -79,6 +79,7 @@ if sys.version_info >= (3, 8) and sys.platform != "win32": MADV_SEQUENTIAL: int MADV_WILLNEED: int MADV_DONTNEED: int + MADV_FREE: int if sys.platform == "linux": MADV_REMOVE: int @@ -94,7 +95,6 @@ if sys.version_info >= (3, 8) and sys.platform != "win32": MADV_NOHUGEPAGE: int MADV_DONTDUMP: int MADV_DODUMP: int - MADV_FREE: int # This Values are defined for FreeBSD but type checkers do not support conditions for these if sys.platform != "linux" and sys.platform != "darwin": diff --git a/mypy/typeshed/stdlib/modulefinder.pyi b/mypy/typeshed/stdlib/modulefinder.pyi index cd01e0e1381f..caed7efadccc 100644 --- a/mypy/typeshed/stdlib/modulefinder.pyi +++ b/mypy/typeshed/stdlib/modulefinder.pyi @@ -3,12 +3,13 @@ from collections.abc import Container, Iterable, Iterator, Sequence from types import CodeType from typing import IO, Any -LOAD_CONST: int # undocumented -IMPORT_NAME: int # undocumented -STORE_NAME: int # undocumented -STORE_GLOBAL: int # undocumented -STORE_OPS: tuple[int, int] # undocumented -EXTENDED_ARG: int # undocumented +if sys.version_info < (3, 11): + LOAD_CONST: int # undocumented + IMPORT_NAME: int # undocumented + STORE_NAME: int # undocumented + STORE_GLOBAL: int # undocumented + STORE_OPS: tuple[int, int] # undocumented + EXTENDED_ARG: int # undocumented packagePathMap: dict[str, list[str]] # undocumented diff --git a/mypy/typeshed/stdlib/multiprocessing/__init__.pyi b/mypy/typeshed/stdlib/multiprocessing/__init__.pyi index 87ceda10573d..41af971bc619 100644 --- a/mypy/typeshed/stdlib/multiprocessing/__init__.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/__init__.pyi @@ -29,85 +29,47 @@ if sys.version_info >= (3, 8): if sys.platform != "win32": from multiprocessing.context import ForkContext, ForkServerContext +__all__ = [ + "Array", + "AuthenticationError", + "Barrier", + "BoundedSemaphore", + "BufferTooShort", + "Condition", + "Event", + "JoinableQueue", + "Lock", + "Manager", + "Pipe", + "Pool", + "Process", + "ProcessError", + "Queue", + "RLock", + "RawArray", + "RawValue", + "Semaphore", + "SimpleQueue", + "TimeoutError", + "Value", + "active_children", + "allow_connection_pickling", + "cpu_count", + "current_process", + "freeze_support", + "get_all_start_methods", + "get_context", + "get_logger", + "get_start_method", + "log_to_stderr", + "reducer", + "set_executable", + "set_forkserver_preload", + "set_start_method", +] + if sys.version_info >= (3, 8): - __all__ = [ - "Array", - "AuthenticationError", - "Barrier", - "BoundedSemaphore", - "BufferTooShort", - "Condition", - "Event", - "JoinableQueue", - "Lock", - "Manager", - "Pipe", - "Pool", - "Process", - "ProcessError", - "Queue", - "RLock", - "RawArray", - "RawValue", - "Semaphore", - "SimpleQueue", - "TimeoutError", - "Value", - "active_children", - "allow_connection_pickling", - "cpu_count", - "current_process", - "freeze_support", - "get_all_start_methods", - "get_context", - "get_logger", - "get_start_method", - "parent_process", - "log_to_stderr", - "reducer", - "set_executable", - "set_forkserver_preload", - "set_start_method", - ] -else: - __all__ = [ - "Array", - "AuthenticationError", - "Barrier", - "BoundedSemaphore", - "BufferTooShort", - "Condition", - "Event", - "JoinableQueue", - "Lock", - "Manager", - "Pipe", - "Pool", - "Process", - "ProcessError", - "Queue", - "RLock", - 
"RawArray", - "RawValue", - "Semaphore", - "SimpleQueue", - "TimeoutError", - "Value", - "active_children", - "allow_connection_pickling", - "cpu_count", - "current_process", - "freeze_support", - "get_all_start_methods", - "get_context", - "get_logger", - "get_start_method", - "log_to_stderr", - "reducer", - "set_executable", - "set_forkserver_preload", - "set_start_method", - ] + __all__ += ["parent_process"] # The following type aliases can be used to annotate the return values of # the corresponding functions. They are not defined at runtime. diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi index b8d5ddda0f35..212ffcbf5a3a 100644 --- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -1,12 +1,11 @@ import queue import sys import threading -from _typeshed import Self -from builtins import dict as _dict, list as _list # Conflicts with method names -from collections.abc import Callable, Iterable, Mapping, Sequence +from _typeshed import Self, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Sequence from types import TracebackType -from typing import Any, AnyStr, Generic, TypeVar -from typing_extensions import TypeAlias +from typing import Any, AnyStr, ClassVar, Generic, TypeVar, overload +from typing_extensions import SupportsIndex, TypeAlias from .connection import Connection from .context import BaseContext @@ -66,6 +65,62 @@ class ValueProxy(BaseProxy, Generic[_T]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... +class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): + __builtins__: ClassVar[dict[str, Any]] + def __len__(self) -> int: ... + def __getitem__(self, __k: _KT) -> _VT: ... + def __setitem__(self, __k: _KT, __v: _VT) -> None: ... + def __delitem__(self, __v: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> dict[_KT, _VT]: ... + @overload + def get(self, __key: _KT) -> _VT | None: ... + @overload + def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + @overload + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def keys(self) -> list[_KT]: ... # type: ignore[override] + def values(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] + def items(self) -> list[_VT]: ... # type: ignore[override] + if sys.version_info < (3, 7): + def has_key(self, k: _KT) -> bool: ... + +class BaseListProxy(BaseProxy, MutableSequence[_T]): + __builtins__: ClassVar[dict[str, Any]] + def __len__(self) -> int: ... + def __add__(self, __x: list[_T]) -> list[_T]: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, __s: slice) -> list[_T]: ... + @overload + def __setitem__(self, __i: SupportsIndex, __o: _T) -> None: ... + @overload + def __setitem__(self, __s: slice, __o: Iterable[_T]) -> None: ... + def __mul__(self, __n: SupportsIndex) -> list[_T]: ... + def __rmul__(self, __n: SupportsIndex) -> list[_T]: ... + def __reversed__(self) -> Iterator[_T]: ... + def append(self, __object: _T) -> None: ... + def extend(self, __iterable: Iterable[_T]) -> None: ... + def pop(self, __index: SupportsIndex = ...) -> _T: ... 
+ def index(self, __value: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def count(self, __value: _T) -> int: ... + def insert(self, __index: SupportsIndex, __object: _T) -> None: ... + def remove(self, __value: _T) -> None: ... + # Use BaseListProxy[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] + # to work around invariance + @overload + def sort(self: BaseListProxy[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + +class ListProxy(BaseListProxy[_T]): + def __iadd__(self: Self, __x: Iterable[_T]) -> Self: ... # type: ignore[override] + def __imul__(self: Self, __n: SupportsIndex) -> Self: ... # type: ignore[override] + # Returned by BaseManager.get_server() class Server: address: Any @@ -124,8 +179,21 @@ class SyncManager(BaseManager): def Semaphore(self, value: Any = ...) -> threading.Semaphore: ... def Array(self, typecode: Any, sequence: Sequence[_T]) -> Sequence[_T]: ... def Value(self, typecode: Any, value: _T) -> ValueProxy[_T]: ... - def dict(self, sequence: Mapping[_KT, _VT] = ...) -> _dict[_KT, _VT]: ... - def list(self, sequence: Sequence[_T] = ...) -> _list[_T]: ... + # Overloads are copied from builtins.dict.__init__ + @overload + def dict(self) -> DictProxy[Any, Any]: ... + @overload + def dict(self, **kwargs: _VT) -> DictProxy[str, _VT]: ... + @overload + def dict(self, __map: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> DictProxy[_KT, _VT]: ... + @overload + def dict(self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> DictProxy[_KT, _VT]: ... + @overload + def dict(self, __iterable: Iterable[list[str]]) -> DictProxy[str, str]: ... + @overload + def list(self, __sequence: Sequence[_T]) -> ListProxy[_T]: ... + @overload + def list(self) -> ListProxy[Any]: ... class RemoteError(Exception): ... diff --git a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi index 76ccedaf478e..3ce0ca3863cc 100644 --- a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import Self from collections.abc import Iterable -from typing import Any, Generic, TypeVar +from typing import Any, Generic, TypeVar, overload if sys.version_info >= (3, 9): from types import GenericAlias @@ -23,7 +23,10 @@ class SharedMemory: class ShareableList(Generic[_SLT]): shm: SharedMemory - def __init__(self, sequence: Iterable[_SLT] | None = ..., *, name: str | None = ...) -> None: ... + @overload + def __init__(self, sequence: None = ..., *, name: str | None = ...) -> None: ... + @overload + def __init__(self, sequence: Iterable[_SLT], *, name: str | None = ...) -> None: ... def __getitem__(self, position: int) -> _SLT: ... def __setitem__(self, position: int, value: _SLT) -> None: ... def __reduce__(self: Self) -> tuple[Self, tuple[_SLT, ...]]: ... 
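Illustrative use (not from the patch) of the new DictProxy/ListProxy return types of SyncManager.dict() and SyncManager.list() above; previously these were typed as plain builtins.dict and builtins.list.

    from multiprocessing import Manager

    if __name__ == "__main__":
        with Manager() as manager:
            d = manager.dict({"a": 1})      # DictProxy[str, int]
            lst = manager.list([1, 2, 3])   # ListProxy[int]
            d["b"] = 2
            lst.append(4)
            print(d.copy(), list(lst))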
diff --git a/mypy/typeshed/stdlib/ntpath.pyi b/mypy/typeshed/stdlib/ntpath.pyi index ffe5cc1e5a2d..78aa2346835c 100644 --- a/mypy/typeshed/stdlib/ntpath.pyi +++ b/mypy/typeshed/stdlib/ntpath.pyi @@ -43,99 +43,62 @@ from posixpath import ( supports_unicode_filenames as supports_unicode_filenames, ) from typing import AnyStr, overload +from typing_extensions import LiteralString -if sys.version_info >= (3, 7) or sys.platform != "win32": - __all__ = [ - "normcase", - "isabs", - "join", - "splitdrive", - "split", - "splitext", - "basename", - "dirname", - "commonprefix", - "getsize", - "getmtime", - "getatime", - "getctime", - "islink", - "exists", - "lexists", - "isdir", - "isfile", - "ismount", - "expanduser", - "expandvars", - "normpath", - "abspath", - "curdir", - "pardir", - "sep", - "pathsep", - "defpath", - "altsep", - "extsep", - "devnull", - "realpath", - "supports_unicode_filenames", - "relpath", - "samefile", - "sameopenfile", - "samestat", - "commonpath", - ] -else: - __all__ = [ - "normcase", - "isabs", - "join", - "splitdrive", - "split", - "splitext", - "basename", - "dirname", - "commonprefix", - "getsize", - "getmtime", - "getatime", - "getctime", - "islink", - "exists", - "lexists", - "isdir", - "isfile", - "ismount", - "expanduser", - "expandvars", - "normpath", - "abspath", - "splitunc", - "curdir", - "pardir", - "sep", - "pathsep", - "defpath", - "altsep", - "extsep", - "devnull", - "realpath", - "supports_unicode_filenames", - "relpath", - "samefile", - "sameopenfile", - "samestat", - "commonpath", - ] +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "samefile", + "sameopenfile", + "samestat", + "commonpath", +] + +if sys.version_info < (3, 7): + __all__ += ["splitunc"] def splitunc(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... # deprecated -altsep: str +altsep: LiteralString # First parameter is not actually pos-only, # but must be defined as pos-only in the stub or cross-platform code doesn't type-check, # as the parameter name is different in posixpath.join() @overload +def join(__path: LiteralString, *paths: LiteralString) -> LiteralString: ... +@overload def join(__path: StrPath, *paths: StrPath) -> str: ... @overload def join(__path: BytesPath, *paths: BytesPath) -> bytes: ... 
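A sketch (not from the patch) of what the LiteralString overload of ntpath.join() above buys a type checker: joining literal pieces preserves LiteralString, while mixing in an ordinary str falls back to str.

    import ntpath

    base = ntpath.join("C:\\", "Program Files")  # LiteralString for a checker
    user_part: str = "data"                      # e.g. untrusted input
    mixed = ntpath.join(base, user_part)         # degrades to plain str
    print(base, mixed)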
diff --git a/mypy/typeshed/stdlib/operator.pyi b/mypy/typeshed/stdlib/operator.pyi index 603e15ebc7be..c3fc4b0a8503 100644 --- a/mypy/typeshed/stdlib/operator.pyi +++ b/mypy/typeshed/stdlib/operator.pyi @@ -2,121 +2,65 @@ import sys from _operator import * +__all__ = [ + "abs", + "add", + "and_", + "attrgetter", + "concat", + "contains", + "countOf", + "delitem", + "eq", + "floordiv", + "ge", + "getitem", + "gt", + "iadd", + "iand", + "iconcat", + "ifloordiv", + "ilshift", + "imatmul", + "imod", + "imul", + "index", + "indexOf", + "inv", + "invert", + "ior", + "ipow", + "irshift", + "is_", + "is_not", + "isub", + "itemgetter", + "itruediv", + "ixor", + "le", + "length_hint", + "lshift", + "lt", + "matmul", + "methodcaller", + "mod", + "mul", + "ne", + "neg", + "not_", + "or_", + "pos", + "pow", + "rshift", + "setitem", + "sub", + "truediv", + "truth", + "xor", +] + if sys.version_info >= (3, 11): - __all__ = [ - "abs", - "add", - "and_", - "attrgetter", - "call", - "concat", - "contains", - "countOf", - "delitem", - "eq", - "floordiv", - "ge", - "getitem", - "gt", - "iadd", - "iand", - "iconcat", - "ifloordiv", - "ilshift", - "imatmul", - "imod", - "imul", - "index", - "indexOf", - "inv", - "invert", - "ior", - "ipow", - "irshift", - "is_", - "is_not", - "isub", - "itemgetter", - "itruediv", - "ixor", - "le", - "length_hint", - "lshift", - "lt", - "matmul", - "methodcaller", - "mod", - "mul", - "ne", - "neg", - "not_", - "or_", - "pos", - "pow", - "rshift", - "setitem", - "sub", - "truediv", - "truth", - "xor", - ] -else: - __all__ = [ - "abs", - "add", - "and_", - "attrgetter", - "concat", - "contains", - "countOf", - "delitem", - "eq", - "floordiv", - "ge", - "getitem", - "gt", - "iadd", - "iand", - "iconcat", - "ifloordiv", - "ilshift", - "imatmul", - "imod", - "imul", - "index", - "indexOf", - "inv", - "invert", - "ior", - "ipow", - "irshift", - "is_", - "is_not", - "isub", - "itemgetter", - "itruediv", - "ixor", - "le", - "length_hint", - "lshift", - "lt", - "matmul", - "methodcaller", - "mod", - "mul", - "ne", - "neg", - "not_", - "or_", - "pos", - "pow", - "rshift", - "setitem", - "sub", - "truediv", - "truth", - "xor", - ] + __all__ += ["call"] __lt__ = lt __le__ = le diff --git a/mypy/typeshed/stdlib/optparse.pyi b/mypy/typeshed/stdlib/optparse.pyi index 1e7428f59a95..b571ff0680b7 100644 --- a/mypy/typeshed/stdlib/optparse.pyi +++ b/mypy/typeshed/stdlib/optparse.pyi @@ -240,7 +240,10 @@ class OptionParser(OptionContainer): def get_prog_name(self) -> str: ... def get_usage(self) -> str: ... def get_version(self) -> str: ... - def parse_args(self, args: Sequence[AnyStr] | None = ..., values: Values | None = ...) -> tuple[Values, list[AnyStr]]: ... + @overload + def parse_args(self, args: None = ..., values: Values | None = ...) -> tuple[Values, list[str]]: ... + @overload + def parse_args(self, args: Sequence[AnyStr], values: Values | None = ...) -> tuple[Values, list[AnyStr]]: ... def print_usage(self, file: IO[str] | None = ...) -> None: ... def print_help(self, file: IO[str] | None = ...) -> None: ... def print_version(self, file: IO[str] | None = ...) -> None: ... 
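Usage sketch (not from the patch) for the OptionParser.parse_args() overloads above: the default args=None reads sys.argv and yields list[str] leftovers, while an explicit sequence keeps its str/bytes type through AnyStr.

    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option("-n", dest="name")
    opts, rest = parser.parse_args(["-n", "demo", "extra"])
    print(opts.name, rest)  # demo ['extra']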
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index 2310de701d54..68ea2948f17e 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -1,7 +1,9 @@ import sys from _typeshed import ( + AnyStr_co, BytesPath, FileDescriptorLike, + GenericPath, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, @@ -32,7 +34,6 @@ path = _path _T = TypeVar("_T") _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") -_AnyStr_co = TypeVar("_AnyStr_co", str, bytes, covariant=True) # ----- os variables ----- @@ -356,9 +357,9 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 @runtime_checkable -class PathLike(Protocol[_AnyStr_co]): +class PathLike(Protocol[AnyStr_co]): @abstractmethod - def __fspath__(self) -> _AnyStr_co: ... + def __fspath__(self) -> AnyStr_co: ... @overload def listdir(path: StrPath | None = ...) -> list[str]: ... @@ -726,7 +727,7 @@ if sys.platform != "win32": def makedev(__major: int, __minor: int) -> int: ... def pathconf(path: _FdOrAnyPath, name: str | int) -> int: ... # Unix only -def readlink(path: AnyStr | PathLike[AnyStr], *, dir_fd: int | None = ...) -> AnyStr: ... +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = ...) -> AnyStr: ... def remove(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... def removedirs(name: StrOrBytesPath) -> None: ... def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = ..., dst_dir_fd: int | None = ...) -> None: ... @@ -747,7 +748,7 @@ if sys.version_info >= (3, 7): def scandir(path: int) -> _ScandirIterator[str]: ... @overload -def scandir(path: AnyStr | PathLike[AnyStr]) -> _ScandirIterator[AnyStr]: ... +def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... def stat(path: _FdOrAnyPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> stat_result: ... if sys.version_info < (3, 7): @@ -778,7 +779,7 @@ def utime( _OnError: TypeAlias = Callable[[OSError], Any] def walk( - top: AnyStr | PathLike[AnyStr], topdown: bool = ..., onerror: _OnError | None = ..., followlinks: bool = ... + top: GenericPath[AnyStr], topdown: bool = ..., onerror: _OnError | None = ..., followlinks: bool = ... ) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... if sys.platform != "win32": diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi index e787b4a4c416..3c2cabe8abe2 100644 --- a/mypy/typeshed/stdlib/pdb.pyi +++ b/mypy/typeshed/stdlib/pdb.pyi @@ -74,7 +74,9 @@ class Pdb(Bdb, Cmd): def print_stack_trace(self) -> None: ... def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = ...) -> None: ... def lookupmodule(self, filename: str) -> str | None: ... - def _runscript(self, filename: str) -> None: ... + if sys.version_info < (3, 11): + def _runscript(self, filename: str) -> None: ... + def do_commands(self, arg: str) -> bool | None: ... def do_break(self, arg: str, temporary: bool = ...) -> bool | None: ... def do_tbreak(self, arg: str) -> bool | None: ... @@ -165,7 +167,7 @@ class Pdb(Bdb, Cmd): complete_whatis = _complete_expression complete_display = _complete_expression - if sys.version_info >= (3, 7): + if sys.version_info >= (3, 7) and sys.version_info < (3, 11): def _runmodule(self, module_name: str) -> None: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/pickle.pyi b/mypy/typeshed/stdlib/pickle.pyi index d58cf8ed9d50..088adc8196c2 100644 --- a/mypy/typeshed/stdlib/pickle.pyi +++ b/mypy/typeshed/stdlib/pickle.pyi @@ -4,172 +4,89 @@ from collections.abc import Callable, Iterable, Iterator, Mapping from typing import Any, ClassVar, Protocol, SupportsBytes, Union from typing_extensions import SupportsIndex, TypeAlias, final +__all__ = [ + "PickleError", + "PicklingError", + "UnpicklingError", + "Pickler", + "Unpickler", + "dump", + "dumps", + "load", + "loads", + "ADDITEMS", + "APPEND", + "APPENDS", + "BINBYTES", + "BINBYTES8", + "BINFLOAT", + "BINGET", + "BININT", + "BININT1", + "BININT2", + "BINPERSID", + "BINPUT", + "BINSTRING", + "BINUNICODE", + "BINUNICODE8", + "BUILD", + "DEFAULT_PROTOCOL", + "DICT", + "DUP", + "EMPTY_DICT", + "EMPTY_LIST", + "EMPTY_SET", + "EMPTY_TUPLE", + "EXT1", + "EXT2", + "EXT4", + "FALSE", + "FLOAT", + "FRAME", + "FROZENSET", + "GET", + "GLOBAL", + "HIGHEST_PROTOCOL", + "INST", + "INT", + "LIST", + "LONG", + "LONG1", + "LONG4", + "LONG_BINGET", + "LONG_BINPUT", + "MARK", + "MEMOIZE", + "NEWFALSE", + "NEWOBJ", + "NEWOBJ_EX", + "NEWTRUE", + "NONE", + "OBJ", + "PERSID", + "POP", + "POP_MARK", + "PROTO", + "PUT", + "REDUCE", + "SETITEM", + "SETITEMS", + "SHORT_BINBYTES", + "SHORT_BINSTRING", + "SHORT_BINUNICODE", + "STACK_GLOBAL", + "STOP", + "STRING", + "TRUE", + "TUPLE", + "TUPLE1", + "TUPLE2", + "TUPLE3", + "UNICODE", +] + if sys.version_info >= (3, 8): - __all__ = [ - "PickleError", - "PicklingError", - "UnpicklingError", - "Pickler", - "Unpickler", - "dump", - "dumps", - "load", - "loads", - "PickleBuffer", - "ADDITEMS", - "APPEND", - "APPENDS", - "BINBYTES", - "BINBYTES8", - "BINFLOAT", - "BINGET", - "BININT", - "BININT1", - "BININT2", - "BINPERSID", - "BINPUT", - "BINSTRING", - "BINUNICODE", - "BINUNICODE8", - "BUILD", - "BYTEARRAY8", - "DEFAULT_PROTOCOL", - "DICT", - "DUP", - "EMPTY_DICT", - "EMPTY_LIST", - "EMPTY_SET", - "EMPTY_TUPLE", - "EXT1", - "EXT2", - "EXT4", - "FALSE", - "FLOAT", - "FRAME", - "FROZENSET", - "GET", - "GLOBAL", - "HIGHEST_PROTOCOL", - "INST", - "INT", - "LIST", - "LONG", - "LONG1", - "LONG4", - "LONG_BINGET", - "LONG_BINPUT", - "MARK", - "MEMOIZE", - "NEWFALSE", - "NEWOBJ", - "NEWOBJ_EX", - "NEWTRUE", - "NEXT_BUFFER", - "NONE", - "OBJ", - "PERSID", - "POP", - "POP_MARK", - "PROTO", - "PUT", - "READONLY_BUFFER", - "REDUCE", - "SETITEM", - "SETITEMS", - "SHORT_BINBYTES", - "SHORT_BINSTRING", - "SHORT_BINUNICODE", - "STACK_GLOBAL", - "STOP", - "STRING", - "TRUE", - "TUPLE", - "TUPLE1", - "TUPLE2", - "TUPLE3", - "UNICODE", - ] -else: - __all__ = [ - "PickleError", - "PicklingError", - "UnpicklingError", - "Pickler", - "Unpickler", - "dump", - "dumps", - "load", - "loads", - "ADDITEMS", - "APPEND", - "APPENDS", - "BINBYTES", - "BINBYTES8", - "BINFLOAT", - "BINGET", - "BININT", - "BININT1", - "BININT2", - "BINPERSID", - "BINPUT", - "BINSTRING", - "BINUNICODE", - "BINUNICODE8", - "BUILD", - "DEFAULT_PROTOCOL", - "DICT", - "DUP", - "EMPTY_DICT", - "EMPTY_LIST", - "EMPTY_SET", - "EMPTY_TUPLE", - "EXT1", - "EXT2", - "EXT4", - "FALSE", - "FLOAT", - "FRAME", - "FROZENSET", - "GET", - "GLOBAL", - "HIGHEST_PROTOCOL", - "INST", - "INT", - "LIST", - "LONG", - "LONG1", - "LONG4", - "LONG_BINGET", - "LONG_BINPUT", - "MARK", - "MEMOIZE", - "NEWFALSE", - "NEWOBJ", - "NEWOBJ_EX", - "NEWTRUE", - "NONE", - "OBJ", - "PERSID", - "POP", - "POP_MARK", - "PROTO", - "PUT", - "REDUCE", - "SETITEM", - "SETITEMS", - "SHORT_BINBYTES", - "SHORT_BINSTRING", - 
"SHORT_BINUNICODE", - "STACK_GLOBAL", - "STOP", - "STRING", - "TRUE", - "TUPLE", - "TUPLE1", - "TUPLE2", - "TUPLE3", - "UNICODE", - ] + __all__ += ["BYTEARRAY8", "NEXT_BUFFER", "PickleBuffer", "READONLY_BUFFER"] HIGHEST_PROTOCOL: int DEFAULT_PROTOCOL: int diff --git a/mypy/typeshed/stdlib/posixpath.pyi b/mypy/typeshed/stdlib/posixpath.pyi index 4cec7c770ea3..8d880a072dfb 100644 --- a/mypy/typeshed/stdlib/posixpath.pyi +++ b/mypy/typeshed/stdlib/posixpath.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import BytesPath, StrOrBytesPath, StrPath +from _typeshed import AnyOrLiteralStr, BytesPath, StrOrBytesPath, StrPath from collections.abc import Sequence from genericpath import ( commonprefix as commonprefix, @@ -16,6 +16,7 @@ from genericpath import ( ) from os import PathLike from typing import AnyStr, overload +from typing_extensions import LiteralString __all__ = [ "normcase", @@ -60,14 +61,14 @@ __all__ = [ supports_unicode_filenames: bool # aliases (also in os) -curdir: str -pardir: str -sep: str -altsep: str | None -extsep: str -pathsep: str -defpath: str -devnull: str +curdir: LiteralString +pardir: LiteralString +sep: LiteralString +altsep: LiteralString | None +extsep: LiteralString +pathsep: LiteralString +defpath: LiteralString +devnull: LiteralString # Overloads are necessary to work around python/mypy#3644. @overload @@ -77,11 +78,11 @@ def abspath(path: AnyStr) -> AnyStr: ... @overload def basename(p: PathLike[AnyStr]) -> AnyStr: ... @overload -def basename(p: AnyStr) -> AnyStr: ... +def basename(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload def dirname(p: PathLike[AnyStr]) -> AnyStr: ... @overload -def dirname(p: AnyStr) -> AnyStr: ... +def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload def expanduser(path: PathLike[AnyStr]) -> AnyStr: ... @overload @@ -93,11 +94,13 @@ def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(s: PathLike[AnyStr]) -> AnyStr: ... @overload -def normcase(s: AnyStr) -> AnyStr: ... +def normcase(s: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload def normpath(path: PathLike[AnyStr]) -> AnyStr: ... @overload -def normpath(path: AnyStr) -> AnyStr: ... +def normpath(path: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def commonpath(paths: Sequence[LiteralString]) -> LiteralString: ... @overload def commonpath(paths: Sequence[StrPath]) -> str: ... @overload @@ -107,6 +110,8 @@ def commonpath(paths: Sequence[BytesPath]) -> bytes: ... # but must be defined as pos-only in the stub or cross-platform code doesn't type-check, # as the parameter name is different in ntpath.join() @overload +def join(__a: LiteralString, *paths: LiteralString) -> LiteralString: ... +@overload def join(__a: StrPath, *paths: StrPath) -> str: ... @overload def join(__a: BytesPath, *paths: BytesPath) -> bytes: ... @@ -123,6 +128,8 @@ else: @overload def realpath(filename: AnyStr) -> AnyStr: ... +@overload +def relpath(path: LiteralString, start: LiteralString | None = ...) -> LiteralString: ... @overload def relpath(path: BytesPath, start: BytesPath | None = ...) -> bytes: ... @overload @@ -130,15 +137,15 @@ def relpath(path: StrPath, start: StrPath | None = ...) -> str: ... @overload def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload -def split(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... +def split(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload -def splitdrive(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... 
+def splitdrive(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload -def splitext(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... +def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... def isabs(s: StrOrBytesPath) -> bool: ... def islink(path: StrOrBytesPath | int) -> bool: ... def ismount(path: StrOrBytesPath | int) -> bool: ... diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi index 6e5d3e818f83..6ea4a74a9d28 100644 --- a/mypy/typeshed/stdlib/pydoc.pyi +++ b/mypy/typeshed/stdlib/pydoc.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import OptExcInfo, SupportsWrite from abc import abstractmethod from builtins import list as _list # "list" conflicts with method name @@ -78,21 +79,36 @@ class HTMLDoc(Doc): repr = _repr_instance.repr escape = _repr_instance.escape def page(self, title: str, contents: str) -> str: ... - def heading(self, title: str, fgcol: str, bgcol: str, extras: str = ...) -> str: ... - def section( - self, - title: str, - fgcol: str, - bgcol: str, - contents: str, - width: int = ..., - prelude: str = ..., - marginalia: str | None = ..., - gap: str = ..., - ) -> str: ... + if sys.version_info >= (3, 11): + def heading(self, title: str, extras: str = ...) -> str: ... + def section( + self, + title: str, + cls: str, + contents: str, + width: int = ..., + prelude: str = ..., + marginalia: str | None = ..., + gap: str = ..., + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: ... + else: + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = ...) -> str: ... + def section( + self, + title: str, + fgcol: str, + bgcol: str, + contents: str, + width: int = ..., + prelude: str = ..., + marginalia: str | None = ..., + gap: str = ..., + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = ...) -> str: ... + def bigsection(self, title: str, *args: Any) -> str: ... def preformat(self, text: str) -> str: ... - def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = ...) -> str: ... def grey(self, text: str) -> str: ... def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... def classlink(self, object: object, modname: str) -> str: ... diff --git a/mypy/typeshed/stdlib/pyexpat/__init__.pyi b/mypy/typeshed/stdlib/pyexpat/__init__.pyi index 2fe76a3d61b6..6a4ed891fe10 100644 --- a/mypy/typeshed/stdlib/pyexpat/__init__.pyi +++ b/mypy/typeshed/stdlib/pyexpat/__init__.pyi @@ -33,6 +33,8 @@ class XMLParserType: def ExternalEntityParserCreate(self, __context: str | None, __encoding: str = ...) -> XMLParserType: ... def SetParamEntityParsing(self, __flag: int) -> int: ... def UseForeignDTD(self, __flag: bool = ...) -> None: ... + @property + def intern(self) -> dict[str, str]: ... buffer_size: int buffer_text: bool buffer_used: int @@ -69,6 +71,7 @@ class XMLParserType: DefaultHandlerExpand: Callable[[str], Any] | None NotStandaloneHandler: Callable[[], int] | None ExternalEntityRefHandler: Callable[[str, str | None, str | None, str | None], int] | None + SkippedEntityHandler: Callable[[str, bool], Any] | None def ErrorString(__code: int) -> str: ... 
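A small check (not from the patch) of the intern property added to XMLParserType above; it exposes the ordinary dict the parser uses to intern element and attribute names.

    from xml.parsers.expat import ParserCreate

    parser = ParserCreate()
    parser.Parse("<root><child/></root>", True)
    print(type(parser.intern))  # <class 'dict'>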
diff --git a/mypy/typeshed/stdlib/pyexpat/errors.pyi b/mypy/typeshed/stdlib/pyexpat/errors.pyi index 61826e12da00..2e512eb12989 100644 --- a/mypy/typeshed/stdlib/pyexpat/errors.pyi +++ b/mypy/typeshed/stdlib/pyexpat/errors.pyi @@ -1,3 +1,5 @@ +import sys + codes: dict[str, int] messages: dict[int, str] @@ -38,3 +40,10 @@ XML_ERROR_UNDEFINED_ENTITY: str XML_ERROR_UNEXPECTED_STATE: str XML_ERROR_UNKNOWN_ENCODING: str XML_ERROR_XML_DECL: str +if sys.version_info >= (3, 11): + XML_ERROR_RESERVED_PREFIX_XML: str + XML_ERROR_RESERVED_PREFIX_XMLNS: str + XML_ERROR_RESERVED_NAMESPACE_URI: str + XML_ERROR_INVALID_ARGUMENT: str + XML_ERROR_NO_BUFFER: str + XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str diff --git a/mypy/typeshed/stdlib/random.pyi b/mypy/typeshed/stdlib/random.pyi index 255436dc377d..3bb999bfaaa6 100644 --- a/mypy/typeshed/stdlib/random.pyi +++ b/mypy/typeshed/stdlib/random.pyi @@ -5,61 +5,35 @@ from collections.abc import Callable, Iterable, MutableSequence, Sequence, Set a from fractions import Fraction from typing import Any, ClassVar, NoReturn, TypeVar +__all__ = [ + "Random", + "seed", + "random", + "uniform", + "randint", + "choice", + "sample", + "randrange", + "shuffle", + "normalvariate", + "lognormvariate", + "expovariate", + "vonmisesvariate", + "gammavariate", + "triangular", + "gauss", + "betavariate", + "paretovariate", + "weibullvariate", + "getstate", + "setstate", + "getrandbits", + "choices", + "SystemRandom", +] + if sys.version_info >= (3, 9): - __all__ = [ - "Random", - "SystemRandom", - "betavariate", - "choice", - "choices", - "expovariate", - "gammavariate", - "gauss", - "getrandbits", - "getstate", - "lognormvariate", - "normalvariate", - "paretovariate", - "randbytes", - "randint", - "random", - "randrange", - "sample", - "seed", - "setstate", - "shuffle", - "triangular", - "uniform", - "vonmisesvariate", - "weibullvariate", - ] -else: - __all__ = [ - "Random", - "seed", - "random", - "uniform", - "randint", - "choice", - "sample", - "randrange", - "shuffle", - "normalvariate", - "lognormvariate", - "expovariate", - "vonmisesvariate", - "gammavariate", - "triangular", - "gauss", - "betavariate", - "paretovariate", - "weibullvariate", - "getstate", - "setstate", - "getrandbits", - "choices", - "SystemRandom", - ] + __all__ += ["randbytes"] _T = TypeVar("_T") @@ -67,8 +41,10 @@ class Random(_random.Random): VERSION: ClassVar[int] def __init__(self, x: Any = ...) -> None: ... # Using other `seed` types is deprecated since 3.9 and removed in 3.11 + # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit + # int better documents conventional usage of random.seed. if sys.version_info >= (3, 9): - def seed(self, a: int | float | str | bytes | bytearray | None = ..., version: int = ...) -> None: ... # type: ignore[override] + def seed(self, a: int | float | str | bytes | bytearray | None = ..., version: int = ...) -> None: ... # type: ignore[override] # noqa: Y041 else: def seed(self, a: Any = ..., version: int = ...) -> None: ... 
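Usage sketch (not from the patch) for the seed() annotation above: an int stays the conventional argument, and re-seeding with the same value reproduces the same stream.

    import random

    random.seed(42)
    first = random.random()
    random.seed(42)
    assert first == random.random()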
diff --git a/mypy/typeshed/stdlib/re.pyi b/mypy/typeshed/stdlib/re.pyi index 2f4f3a3a0ed4..bdabf32d895e 100644 --- a/mypy/typeshed/stdlib/re.pyi +++ b/mypy/typeshed/stdlib/re.pyi @@ -13,102 +13,41 @@ if sys.version_info >= (3, 7): else: from typing import Match, Pattern +__all__ = [ + "match", + "fullmatch", + "search", + "sub", + "subn", + "split", + "findall", + "finditer", + "compile", + "purge", + "template", + "escape", + "error", + "A", + "I", + "L", + "M", + "S", + "X", + "U", + "ASCII", + "IGNORECASE", + "LOCALE", + "MULTILINE", + "DOTALL", + "VERBOSE", + "UNICODE", +] + +if sys.version_info >= (3, 7): + __all__ += ["Match", "Pattern"] + if sys.version_info >= (3, 11): - __all__ = [ - "match", - "fullmatch", - "search", - "sub", - "subn", - "split", - "findall", - "finditer", - "compile", - "purge", - "template", - "escape", - "error", - "Pattern", - "Match", - "A", - "I", - "L", - "M", - "S", - "X", - "U", - "ASCII", - "IGNORECASE", - "LOCALE", - "MULTILINE", - "DOTALL", - "VERBOSE", - "UNICODE", - "RegexFlag", - "NOFLAG", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "match", - "fullmatch", - "search", - "sub", - "subn", - "split", - "findall", - "finditer", - "compile", - "purge", - "template", - "escape", - "error", - "Pattern", - "Match", - "A", - "I", - "L", - "M", - "S", - "X", - "U", - "ASCII", - "IGNORECASE", - "LOCALE", - "MULTILINE", - "DOTALL", - "VERBOSE", - "UNICODE", - ] -else: - __all__ = [ - "match", - "fullmatch", - "search", - "sub", - "subn", - "split", - "findall", - "finditer", - "compile", - "purge", - "template", - "escape", - "error", - "A", - "I", - "L", - "M", - "S", - "X", - "U", - "ASCII", - "IGNORECASE", - "LOCALE", - "MULTILINE", - "DOTALL", - "VERBOSE", - "UNICODE", - ] + __all__ += ["NOFLAG", "RegexFlag"] class RegexFlag(enum.IntFlag): A = sre_compile.SRE_FLAG_ASCII diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index b367a46fe572..ae62ea4b658f 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -82,7 +82,17 @@ else: ignore_dangling_symlinks: bool = ..., ) -> _PathReturn: ... -def rmtree(path: StrOrBytesPath, ignore_errors: bool = ..., onerror: Callable[[Any, Any, Any], Any] | None = ...) -> None: ... +if sys.version_info >= (3, 11): + def rmtree( + path: StrOrBytesPath, + ignore_errors: bool = ..., + onerror: Callable[[Any, Any, Any], Any] | None = ..., + *, + dir_fd: int | None = ..., + ) -> None: ... + +else: + def rmtree(path: StrOrBytesPath, ignore_errors: bool = ..., onerror: Callable[[Any, Any, Any], Any] | None = ...) -> None: ... 
_CopyFn: TypeAlias = Callable[[str, str], None] | Callable[[StrPath, StrPath], None] diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index d90c744c504a..65a85627b642 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -8,39 +8,24 @@ from types import TracebackType from typing import Any, Pattern, Protocol, overload from typing_extensions import TypeAlias +__all__ = [ + "SMTPException", + "SMTPServerDisconnected", + "SMTPResponseException", + "SMTPSenderRefused", + "SMTPRecipientsRefused", + "SMTPDataError", + "SMTPConnectError", + "SMTPHeloError", + "SMTPAuthenticationError", + "quoteaddr", + "quotedata", + "SMTP", + "SMTP_SSL", +] + if sys.version_info >= (3, 7): - __all__ = [ - "SMTPException", - "SMTPNotSupportedError", - "SMTPServerDisconnected", - "SMTPResponseException", - "SMTPSenderRefused", - "SMTPRecipientsRefused", - "SMTPDataError", - "SMTPConnectError", - "SMTPHeloError", - "SMTPAuthenticationError", - "quoteaddr", - "quotedata", - "SMTP", - "SMTP_SSL", - ] -else: - __all__ = [ - "SMTPException", - "SMTPServerDisconnected", - "SMTPResponseException", - "SMTPSenderRefused", - "SMTPRecipientsRefused", - "SMTPDataError", - "SMTPConnectError", - "SMTPHeloError", - "SMTPAuthenticationError", - "quoteaddr", - "quotedata", - "SMTP", - "SMTP_SSL", - ] + __all__ += ["SMTPNotSupportedError"] _Reply: TypeAlias = tuple[int, bytes] _SendErrs: TypeAlias = dict[str, _Reply] diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index 4f8ec07ccc95..d84fd66b87cf 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -2,8 +2,8 @@ import sys from _typeshed import ReadableBuffer, Self, WriteableBuffer from collections.abc import Iterable from enum import IntEnum, IntFlag -from io import RawIOBase -from typing import Any, BinaryIO, TextIO, overload +from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper +from typing import Any, Protocol, overload from typing_extensions import Literal # Ideally, we'd just do "from _socket import *". Unfortunately, socket @@ -538,6 +538,16 @@ AI_V4MAPPED_CFG: AddressInfo if sys.platform == "win32": errorTab: dict[int, str] # undocumented +class _SendableFile(Protocol): + def read(self, __size: int) -> bytes: ... + def seek(self, __offset: int) -> object: ... + + # optional fields: + # + # @property + # def mode(self) -> str: ... + # def fileno(self) -> int: ... + class socket(_socket.socket): def __init__( self, family: AddressFamily | int = ..., type: SocketKind | int = ..., proto: int = ..., fileno: int | None = ... @@ -549,26 +559,66 @@ class socket(_socket.socket): # Note that the makefile's documented windows-specific behavior is not represented # mode strings with duplicates are intentionally excluded @overload + def makefile( # type: ignore[misc] + self, + mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[0], + *, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> SocketIO: ... + @overload def makefile( self, - mode: Literal["r", "w", "rw", "wr", ""] = ..., - buffering: int | None = ..., + mode: Literal["rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[-1, 1] | None = ..., *, encoding: str | None = ..., errors: str | None = ..., newline: str | None = ..., - ) -> TextIO: ... + ) -> BufferedRWPair: ... 
+ @overload + def makefile( + self, + mode: Literal["rb", "br"], + buffering: Literal[-1, 1] | None = ..., + *, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> BufferedReader: ... + @overload + def makefile( + self, + mode: Literal["wb", "bw"], + buffering: Literal[-1, 1] | None = ..., + *, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> BufferedWriter: ... @overload def makefile( self, mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: int, + *, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> IOBase: ... + @overload + def makefile( + self, + mode: Literal["r", "w", "rw", "wr", ""] = ..., buffering: int | None = ..., *, encoding: str | None = ..., errors: str | None = ..., newline: str | None = ..., - ) -> BinaryIO: ... - def sendfile(self, file: BinaryIO, offset: int = ..., count: int | None = ...) -> int: ... + ) -> TextIOWrapper: ... + def sendfile(self, file: _SendableFile, offset: int = ..., count: int | None = ...) -> int: ... @property def family(self) -> AddressFamily: ... # type: ignore[override] @property diff --git a/mypy/typeshed/stdlib/socketserver.pyi b/mypy/typeshed/stdlib/socketserver.pyi index 20ff5daa718e..8e2a24e7edfd 100644 --- a/mypy/typeshed/stdlib/socketserver.pyi +++ b/mypy/typeshed/stdlib/socketserver.pyi @@ -6,36 +6,26 @@ from socket import socket as _socket from typing import Any, BinaryIO, ClassVar, Union from typing_extensions import TypeAlias -if sys.platform == "win32": - __all__ = [ - "BaseServer", - "TCPServer", - "UDPServer", - "ThreadingUDPServer", - "ThreadingTCPServer", - "BaseRequestHandler", - "StreamRequestHandler", - "DatagramRequestHandler", - "ThreadingMixIn", - ] -else: - __all__ = [ - "BaseServer", - "TCPServer", - "UDPServer", - "ThreadingUDPServer", - "ThreadingTCPServer", - "BaseRequestHandler", - "StreamRequestHandler", - "DatagramRequestHandler", - "ThreadingMixIn", - "ForkingUDPServer", - "ForkingTCPServer", +__all__ = [ + "BaseServer", + "TCPServer", + "UDPServer", + "ThreadingUDPServer", + "ThreadingTCPServer", + "BaseRequestHandler", + "StreamRequestHandler", + "DatagramRequestHandler", + "ThreadingMixIn", +] +if sys.platform != "win32": + __all__ += [ "ForkingMixIn", - "UnixStreamServer", - "UnixDatagramServer", - "ThreadingUnixStreamServer", + "ForkingTCPServer", + "ForkingUDPServer", "ThreadingUnixDatagramServer", + "ThreadingUnixStreamServer", + "UnixDatagramServer", + "UnixStreamServer", ] _RequestType: TypeAlias = Union[_socket, tuple[bytes, _socket]] @@ -91,6 +81,8 @@ class TCPServer(BaseServer): def get_request(self) -> tuple[_socket, Any]: ... class UDPServer(BaseServer): + if sys.version_info >= (3, 11): + allow_reuse_port: bool max_packet_size: ClassVar[int] def get_request(self) -> tuple[tuple[bytes, _socket], Any]: ... 
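A sketch (not from the patch) of what the socket.makefile() overloads above infer: text mode yields a TextIOWrapper, "rb" a BufferedReader, and buffering=0 with a binary mode the raw SocketIO.

    import socket

    a, b = socket.socketpair()
    writer = a.makefile("wb", buffering=0)  # raw SocketIO under the new stubs
    reader = b.makefile("rb")               # BufferedReader
    writer.write(b"ping\n")
    print(reader.readline())                # b'ping\n'
    for f in (writer, reader, a, b):
        f.close()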
diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index dc00a3971b0f..6db4a9294755 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -4,11 +4,10 @@ from _typeshed import ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetIte from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from datetime import date, datetime, time from types import TracebackType -from typing import Any, Generic, Protocol, TypeVar, overload +from typing import Any, Protocol, TypeVar, overload from typing_extensions import Literal, SupportsIndex, TypeAlias, final _T = TypeVar("_T") -_T_co = TypeVar("_T_co", covariant=True) _CursorT = TypeVar("_CursorT", bound=Cursor) _SqliteData: TypeAlias = str | ReadableBuffer | int | float | None # Data that is passed through adapters can be of any type accepted by an adapter. @@ -379,7 +378,7 @@ class Cursor(Iterator[Any]): def description(self) -> tuple[tuple[str, None, None, None, None, None, None], ...] | Any: ... @property def lastrowid(self) -> int | None: ... - row_factory: Callable[[Cursor, Row[Any]], object] | None + row_factory: Callable[[Cursor, Row], object] | None @property def rowcount(self) -> int: ... def __init__(self, __cursor: Connection) -> None: ... @@ -420,15 +419,15 @@ class PrepareProtocol: class ProgrammingError(DatabaseError): ... -class Row(Generic[_T_co]): - def __init__(self, __cursor: Cursor, __data: tuple[_T_co, ...]) -> None: ... +class Row: + def __init__(self, __cursor: Cursor, __data: tuple[Any, ...]) -> None: ... def keys(self) -> list[str]: ... @overload - def __getitem__(self, __index: int | str) -> _T_co: ... + def __getitem__(self, __index: int | str) -> Any: ... @overload - def __getitem__(self, __index: slice) -> tuple[_T_co, ...]: ... + def __getitem__(self, __index: slice) -> tuple[Any, ...]: ... def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[_T_co]: ... + def __iter__(self) -> Iterator[Any]: ... def __len__(self) -> int: ... # These return NotImplemented for anything that is not a Row. def __eq__(self, __other: object) -> bool: ... @@ -451,13 +450,14 @@ else: class Warning(Exception): ... if sys.version_info >= (3, 11): + @final class Blob: def close(self) -> None: ... def read(self, __length: int = ...) -> bytes: ... def write(self, __data: bytes) -> None: ... def tell(self) -> int: ... # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END - def seek(self, __offset: int, __whence: int = ...) -> None: ... + def seek(self, __offset: int, __origin: int = ...) -> None: ... def __len__(self) -> int: ... def __enter__(self: Self) -> Self: ... def __exit__(self, __typ: object, __val: object, __tb: object) -> Literal[False]: ... 
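With `Row` no longer generic, indexing and iteration simply produce `Any`. A small usage sketch (the in-memory table and column names are made up):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row          # row_factory is now typed in terms of Row, not Row[Any]
conn.execute("CREATE TABLE t (name TEXT, age INTEGER)")
conn.execute("INSERT INTO t VALUES ('alice', 30)")

row = conn.execute("SELECT name, age FROM t").fetchone()
name = row["name"]                      # __getitem__ now returns Any
name_col, age_col = row                 # iteration also yields Any
conn.close()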
diff --git a/mypy/typeshed/stdlib/sre_constants.pyi b/mypy/typeshed/stdlib/sre_constants.pyi index 00644994fe3e..20a8437ed007 100644 --- a/mypy/typeshed/stdlib/sre_constants.pyi +++ b/mypy/typeshed/stdlib/sre_constants.pyi @@ -55,7 +55,8 @@ ASSERT: _NamedIntConstant ASSERT_NOT: _NamedIntConstant AT: _NamedIntConstant BRANCH: _NamedIntConstant -CALL: _NamedIntConstant +if sys.version_info < (3, 11): + CALL: _NamedIntConstant CATEGORY: _NamedIntConstant CHARSET: _NamedIntConstant BIGCHARSET: _NamedIntConstant diff --git a/mypy/typeshed/stdlib/sre_parse.pyi b/mypy/typeshed/stdlib/sre_parse.pyi index 0958e73f5176..1e903028ba7e 100644 --- a/mypy/typeshed/stdlib/sre_parse.pyi +++ b/mypy/typeshed/stdlib/sre_parse.pyi @@ -19,7 +19,8 @@ if sys.version_info >= (3, 7): TYPE_FLAGS: int GLOBAL_FLAGS: int -class Verbose(Exception): ... +if sys.version_info < (3, 11): + class Verbose(Exception): ... class _State: flags: int @@ -64,7 +65,7 @@ class SubPattern: def __setitem__(self, index: int | slice, code: _CodeType) -> None: ... def insert(self, index: int, code: _CodeType) -> None: ... def append(self, code: _CodeType) -> None: ... - def getwidth(self) -> int: ... + def getwidth(self) -> tuple[int, int]: ... class Tokenizer: istext: bool @@ -87,6 +88,9 @@ class Tokenizer: def seek(self, index: int) -> None: ... def error(self, msg: str, offset: int = ...) -> _Error: ... + if sys.version_info >= (3, 11): + def checkgroupname(self, name: str, offset: int, nested: int) -> None: ... + def fix_flags(src: str | bytes, flags: int) -> int: ... _TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]] diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 8445435fa346..9f0420029258 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -412,7 +412,12 @@ class SSLContext: def load_verify_locations( self, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., cadata: str | bytes | None = ... ) -> None: ... - def get_ca_certs(self, binary_form: bool = ...) -> list[_PeerCertRetDictType] | list[bytes]: ... + @overload + def get_ca_certs(self, binary_form: Literal[False] = ...) -> list[_PeerCertRetDictType]: ... + @overload + def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... + @overload + def get_ca_certs(self, binary_form: bool = ...) -> Any: ... def get_ciphers(self) -> list[_Cipher]: ... def set_default_verify_paths(self) -> None: ... def set_ciphers(self, __cipherlist: str) -> None: ... 
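The `Literal` overloads on `get_ca_certs()` let a checker pick the return type from the `binary_form` argument; a short sketch of the expected inferences:

import ssl

ctx = ssl.create_default_context()

dict_certs = ctx.get_ca_certs()                    # -> list of peer-cert dicts
der_certs = ctx.get_ca_certs(binary_form=True)     # -> list[bytes]

dynamic = bool(der_certs)
either = ctx.get_ca_certs(binary_form=dynamic)     # non-literal bool falls back to Any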
diff --git a/mypy/typeshed/stdlib/statistics.pyi b/mypy/typeshed/stdlib/statistics.pyi index e6c3d8f35bc6..58e7fd909f1f 100644 --- a/mypy/typeshed/stdlib/statistics.pyi +++ b/mypy/typeshed/stdlib/statistics.pyi @@ -6,64 +6,26 @@ from fractions import Fraction from typing import Any, NamedTuple, SupportsFloat, TypeVar from typing_extensions import Literal, TypeAlias +__all__ = [ + "StatisticsError", + "pstdev", + "pvariance", + "stdev", + "variance", + "median", + "median_low", + "median_high", + "median_grouped", + "mean", + "mode", + "harmonic_mean", +] + +if sys.version_info >= (3, 8): + __all__ += ["geometric_mean", "multimode", "NormalDist", "fmean", "quantiles"] + if sys.version_info >= (3, 10): - __all__ = [ - "NormalDist", - "StatisticsError", - "correlation", - "covariance", - "fmean", - "geometric_mean", - "harmonic_mean", - "linear_regression", - "mean", - "median", - "median_grouped", - "median_high", - "median_low", - "mode", - "multimode", - "pstdev", - "pvariance", - "quantiles", - "stdev", - "variance", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "NormalDist", - "StatisticsError", - "fmean", - "geometric_mean", - "harmonic_mean", - "mean", - "median", - "median_grouped", - "median_high", - "median_low", - "mode", - "multimode", - "pstdev", - "pvariance", - "quantiles", - "stdev", - "variance", - ] -else: - __all__ = [ - "StatisticsError", - "pstdev", - "pvariance", - "stdev", - "variance", - "median", - "median_low", - "median_high", - "median_grouped", - "mean", - "mode", - "harmonic_mean", - ] + __all__ += ["covariance", "correlation", "linear_regression"] # Most functions in this module accept homogeneous collections of one of these types _Number: TypeAlias = float | Decimal | Fraction diff --git a/mypy/typeshed/stdlib/string.pyi b/mypy/typeshed/stdlib/string.pyi index 4404bde8bc4f..525806a74043 100644 --- a/mypy/typeshed/stdlib/string.pyi +++ b/mypy/typeshed/stdlib/string.pyi @@ -45,6 +45,9 @@ class Template: def __init__(self, template: str) -> None: ... def substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... def safe_substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... + if sys.version_info >= (3, 11): + def get_identifiers(self) -> list[str]: ... + def is_valid(self) -> bool: ... # TODO(MichalPokorny): This is probably badly and/or loosely typed. 
class Formatter: diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index 83178e15d9e8..6ce1073002b8 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -8,86 +8,49 @@ from typing_extensions import Literal, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = [ + "Popen", + "PIPE", + "STDOUT", + "call", + "check_call", + "getstatusoutput", + "getoutput", + "check_output", + "run", + "CalledProcessError", + "DEVNULL", + "SubprocessError", + "TimeoutExpired", + "CompletedProcess", +] + if sys.platform == "win32": + __all__ += [ + "CREATE_NEW_CONSOLE", + "CREATE_NEW_PROCESS_GROUP", + "STARTF_USESHOWWINDOW", + "STARTF_USESTDHANDLES", + "STARTUPINFO", + "STD_ERROR_HANDLE", + "STD_INPUT_HANDLE", + "STD_OUTPUT_HANDLE", + "SW_HIDE", + ] + if sys.version_info >= (3, 7): - __all__ = [ - "Popen", - "PIPE", - "STDOUT", - "call", - "check_call", - "getstatusoutput", - "getoutput", - "check_output", - "run", - "CalledProcessError", - "DEVNULL", - "SubprocessError", - "TimeoutExpired", - "CompletedProcess", - "CREATE_NEW_CONSOLE", - "CREATE_NEW_PROCESS_GROUP", - "STD_INPUT_HANDLE", - "STD_OUTPUT_HANDLE", - "STD_ERROR_HANDLE", - "SW_HIDE", - "STARTF_USESTDHANDLES", - "STARTF_USESHOWWINDOW", - "STARTUPINFO", + __all__ += [ "ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS", + "CREATE_BREAKAWAY_FROM_JOB", + "CREATE_DEFAULT_ERROR_MODE", + "CREATE_NO_WINDOW", + "DETACHED_PROCESS", "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS", "NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS", - "CREATE_NO_WINDOW", - "DETACHED_PROCESS", - "CREATE_DEFAULT_ERROR_MODE", - "CREATE_BREAKAWAY_FROM_JOB", - ] - else: - __all__ = [ - "Popen", - "PIPE", - "STDOUT", - "call", - "check_call", - "getstatusoutput", - "getoutput", - "check_output", - "run", - "CalledProcessError", - "DEVNULL", - "SubprocessError", - "TimeoutExpired", - "CompletedProcess", - "CREATE_NEW_CONSOLE", - "CREATE_NEW_PROCESS_GROUP", - "STD_INPUT_HANDLE", - "STD_OUTPUT_HANDLE", - "STD_ERROR_HANDLE", - "SW_HIDE", - "STARTF_USESTDHANDLES", - "STARTF_USESHOWWINDOW", - "STARTUPINFO", ] -else: - __all__ = [ - "Popen", - "PIPE", - "STDOUT", - "call", - "check_call", - "getstatusoutput", - "getoutput", - "check_output", - "run", - "CalledProcessError", - "DEVNULL", - "SubprocessError", - "TimeoutExpired", - "CompletedProcess", - ] # We prefer to annotate inputs to methods (eg subprocess.check_call) with these # union types. @@ -444,49 +407,99 @@ else: ) -> CompletedProcess[Any]: ... # Same args as Popen.__init__ -def call( - args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE = ..., - stdout: _FILE = ..., - stderr: _FILE = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., - *, - timeout: float | None = ..., -) -> int: ... 
+if sys.version_info >= (3, 7): + def call( + args: _CMD, + bufsize: int = ..., + executable: StrOrBytesPath | None = ..., + stdin: _FILE = ..., + stdout: _FILE = ..., + stderr: _FILE = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + shell: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: _ENV | None = ..., + universal_newlines: bool = ..., + startupinfo: Any = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Any = ..., + *, + timeout: float | None = ..., + text: bool | None = ..., + ) -> int: ... + +else: + def call( + args: _CMD, + bufsize: int = ..., + executable: StrOrBytesPath | None = ..., + stdin: _FILE = ..., + stdout: _FILE = ..., + stderr: _FILE = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + shell: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: _ENV | None = ..., + universal_newlines: bool = ..., + startupinfo: Any = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Any = ..., + *, + timeout: float | None = ..., + ) -> int: ... # Same args as Popen.__init__ -def check_call( - args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath = ..., - stdin: _FILE = ..., - stdout: _FILE = ..., - stderr: _FILE = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., - timeout: float | None = ..., -) -> int: ... +if sys.version_info >= (3, 7): + def check_call( + args: _CMD, + bufsize: int = ..., + executable: StrOrBytesPath = ..., + stdin: _FILE = ..., + stdout: _FILE = ..., + stderr: _FILE = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + shell: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: _ENV | None = ..., + universal_newlines: bool = ..., + startupinfo: Any = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Any = ..., + timeout: float | None = ..., + *, + text: bool | None = ..., + ) -> int: ... + +else: + def check_call( + args: _CMD, + bufsize: int = ..., + executable: StrOrBytesPath = ..., + stdin: _FILE = ..., + stdout: _FILE = ..., + stderr: _FILE = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + shell: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: _ENV | None = ..., + universal_newlines: bool = ..., + startupinfo: Any = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Any = ..., + timeout: float | None = ..., + ) -> int: ... if sys.version_info >= (3, 7): # 3.7 added text @@ -1652,8 +1665,13 @@ class Popen(Generic[AnyStr]): def __class_getitem__(cls, item: Any) -> GenericAlias: ... # The result really is always a str. -def getstatusoutput(cmd: _TXT) -> tuple[int, str]: ... -def getoutput(cmd: _TXT) -> str: ... +if sys.version_info >= (3, 11): + def getstatusoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> tuple[int, str]: ... + def getoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> str: ... + +else: + def getstatusoutput(cmd: _TXT) -> tuple[int, str]: ... + def getoutput(cmd: _TXT) -> str: ... 
if sys.version_info >= (3, 8): def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/symtable.pyi b/mypy/typeshed/stdlib/symtable.pyi index 7a95fe5e445f..d44b2d7927b3 100644 --- a/mypy/typeshed/stdlib/symtable.pyi +++ b/mypy/typeshed/stdlib/symtable.pyi @@ -1,4 +1,5 @@ import sys +from _collections_abc import dict_keys from collections.abc import Sequence from typing import Any @@ -15,8 +16,10 @@ class SymbolTable: def is_optimized(self) -> bool: ... def is_nested(self) -> bool: ... def has_children(self) -> bool: ... - def has_exec(self) -> bool: ... - def get_identifiers(self) -> Sequence[str]: ... + if sys.version_info < (3, 9): + def has_exec(self) -> bool: ... + + def get_identifiers(self) -> dict_keys[str, int]: ... def lookup(self, name: str) -> Symbol: ... def get_symbols(self) -> list[Symbol]: ... def get_children(self) -> list[SymbolTable]: ... diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi index 4e24cbd167d9..667b7024fe12 100644 --- a/mypy/typeshed/stdlib/sys.pyi +++ b/mypy/typeshed/stdlib/sys.pyi @@ -221,6 +221,9 @@ def __displayhook__(__value: object) -> None: ... def __excepthook__(__exctype: type[BaseException], __value: BaseException, __traceback: TracebackType | None) -> None: ... def exc_info() -> OptExcInfo: ... +if sys.version_info >= (3, 11): + def exception() -> BaseException | None: ... + # sys.exit() accepts an optional argument of anything printable def exit(__status: object = ...) -> NoReturn: ... def getallocatedblocks() -> int: ... diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index 364bcad0683f..87c57311aa99 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -315,7 +315,9 @@ class TarFile: ) -> None: ... def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = ...) -> None: ... - def gettarinfo(self, name: str | None = ..., arcname: str | None = ..., fileobj: IO[bytes] | None = ...) -> TarInfo: ... + def gettarinfo( + self, name: StrOrBytesPath | None = ..., arcname: str | None = ..., fileobj: IO[bytes] | None = ... + ) -> TarInfo: ... def close(self) -> None: ... if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi index 4f2b4a545ff7..2c096f0fb4de 100644 --- a/mypy/typeshed/stdlib/tempfile.pyi +++ b/mypy/typeshed/stdlib/tempfile.pyi @@ -1,6 +1,6 @@ -import os +import io import sys -from _typeshed import Self +from _typeshed import BytesPath, GenericPath, Self, StrPath, WriteableBuffer from collections.abc import Iterable, Iterator from types import TracebackType from typing import IO, Any, AnyStr, Generic, overload @@ -30,31 +30,32 @@ TMP_MAX: int tempdir: str | None template: str -_DirT: TypeAlias = AnyStr | os.PathLike[AnyStr] +_StrMode: TypeAlias = Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"] +_BytesMode: TypeAlias = Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] if sys.version_info >= (3, 8): @overload def NamedTemporaryFile( - mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"], + mode: _StrMode, buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., delete: bool = ..., *, errors: str | None = ..., ) -> _TemporaryFileWrapper[str]: ... 
@overload def NamedTemporaryFile( - mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., + mode: _BytesMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., delete: bool = ..., *, errors: str | None = ..., @@ -67,7 +68,7 @@ if sys.version_info >= (3, 8): newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., delete: bool = ..., *, errors: str | None = ..., @@ -76,24 +77,24 @@ if sys.version_info >= (3, 8): else: @overload def NamedTemporaryFile( - mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"], + mode: _StrMode, buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., delete: bool = ..., ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( - mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., + mode: _BytesMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., delete: bool = ..., ) -> _TemporaryFileWrapper[bytes]: ... @overload @@ -104,7 +105,7 @@ else: newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., delete: bool = ..., ) -> _TemporaryFileWrapper[Any]: ... @@ -114,25 +115,25 @@ else: if sys.version_info >= (3, 8): @overload def TemporaryFile( - mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"], + mode: _StrMode, buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., *, errors: str | None = ..., ) -> IO[str]: ... @overload def TemporaryFile( - mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., + mode: _BytesMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., *, errors: str | None = ..., ) -> IO[bytes]: ... @@ -144,30 +145,30 @@ else: newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., *, errors: str | None = ..., ) -> IO[Any]: ... else: @overload def TemporaryFile( - mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"], + mode: _StrMode, buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., ) -> IO[str]: ... 
@overload def TemporaryFile( - mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., + mode: _BytesMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., ) -> IO[bytes]: ... @overload def TemporaryFile( @@ -177,7 +178,7 @@ else: newline: str | None = ..., suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + dir: GenericPath[AnyStr] | None = ..., ) -> IO[Any]: ... class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): @@ -217,9 +218,14 @@ class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): def write(self, s: AnyStr) -> int: ... def writelines(self, lines: Iterable[AnyStr]) -> None: ... -# It does not actually derive from IO[AnyStr], but it does implement the -# protocol. -class SpooledTemporaryFile(IO[AnyStr]): +if sys.version_info >= (3, 11): + _SpooledTemporaryFileBase = io.IOBase +else: + _SpooledTemporaryFileBase = object + +# It does not actually derive from IO[AnyStr], but it does mostly behave +# like one. +class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): @property def encoding(self) -> str: ... # undocumented @property @@ -230,7 +236,7 @@ class SpooledTemporaryFile(IO[AnyStr]): def __init__( self: SpooledTemporaryFile[bytes], max_size: int = ..., - mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., + mode: _BytesMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., @@ -244,7 +250,7 @@ class SpooledTemporaryFile(IO[AnyStr]): def __init__( self: SpooledTemporaryFile[str], max_size: int = ..., - mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"] = ..., + mode: _StrMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., @@ -275,7 +281,7 @@ class SpooledTemporaryFile(IO[AnyStr]): def __init__( self: SpooledTemporaryFile[bytes], max_size: int = ..., - mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] = ..., + mode: _BytesMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., @@ -287,7 +293,7 @@ class SpooledTemporaryFile(IO[AnyStr]): def __init__( self: SpooledTemporaryFile[str], max_size: int = ..., - mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"] = ..., + mode: _StrMode = ..., buffering: int = ..., encoding: str | None = ..., newline: str | None = ..., @@ -318,20 +324,28 @@ class SpooledTemporaryFile(IO[AnyStr]): def fileno(self) -> int: ... def flush(self) -> None: ... def isatty(self) -> bool: ... - def read(self, n: int = ...) -> AnyStr: ... - def readline(self, limit: int = ...) -> AnyStr: ... - def readlines(self, hint: int = ...) -> list[AnyStr]: ... + if sys.version_info >= (3, 11): + # These three work only if the SpooledTemporaryFile is opened in binary mode, + # because the underlying object in text mode does not have these methods. + def read1(self, __size: int = ...) -> AnyStr: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def readinto1(self, b: WriteableBuffer) -> int: ... + def detach(self) -> io.RawIOBase: ... + + def read(self, __n: int = ...) -> AnyStr: ... + def readline(self, __limit: int | None = ...) -> AnyStr: ... # type: ignore[override] + def readlines(self, __hint: int = ...) -> list[AnyStr]: ... 
# type: ignore[override] def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def truncate(self, size: int | None = ...) -> None: ... # type: ignore[override] def write(self, s: AnyStr) -> int: ... - def writelines(self, iterable: Iterable[AnyStr]) -> None: ... - def __iter__(self) -> Iterator[AnyStr]: ... - # Other than the following methods, which do not exist on SpooledTemporaryFile + def writelines(self, iterable: Iterable[AnyStr]) -> None: ... # type: ignore[override] + def __iter__(self) -> Iterator[AnyStr]: ... # type: ignore[override] + # These exist at runtime only on 3.11+. def readable(self) -> bool: ... def seekable(self) -> bool: ... def writable(self) -> bool: ... - def __next__(self) -> AnyStr: ... + def __next__(self) -> AnyStr: ... # type: ignore[override] if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -341,24 +355,28 @@ class TemporaryDirectory(Generic[AnyStr]): @overload def __init__( self: TemporaryDirectory[str], - suffix: None = ..., - prefix: None = ..., - dir: None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: StrPath | None = ..., ignore_cleanup_errors: bool = ..., ) -> None: ... @overload def __init__( - self, - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: _DirT[AnyStr] | None = ..., + self: TemporaryDirectory[bytes], + suffix: bytes | None = ..., + prefix: bytes | None = ..., + dir: BytesPath | None = ..., ignore_cleanup_errors: bool = ..., ) -> None: ... else: @overload - def __init__(self: TemporaryDirectory[str], suffix: None = ..., prefix: None = ..., dir: None = ...) -> None: ... + def __init__( + self: TemporaryDirectory[str], suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ... + ) -> None: ... @overload - def __init__(self, suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., dir: _DirT[AnyStr] | None = ...) -> None: ... + def __init__( + self: TemporaryDirectory[bytes], suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ... + ) -> None: ... def cleanup(self) -> None: ... def __enter__(self) -> AnyStr: ... @@ -366,20 +384,22 @@ class TemporaryDirectory(Generic[AnyStr]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... +# The overloads overlap, but they should still work fine. @overload -def mkstemp() -> tuple[int, str]: ... +def mkstemp( # type: ignore[misc] + suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ..., text: bool = ... +) -> tuple[int, str]: ... @overload def mkstemp( - suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., dir: _DirT[AnyStr] | None = ..., text: bool = ... -) -> tuple[int, AnyStr]: ... -@overload -def mkdtemp() -> str: ... -@overload -def mkdtemp(suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., dir: _DirT[AnyStr] | None = ...) -> AnyStr: ... + suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ..., text: bool = ... +) -> tuple[int, bytes]: ... + +# The overloads overlap, but they should still work fine. @overload -def mktemp() -> str: ... +def mkdtemp(suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ...) -> str: ... # type: ignore[misc] @overload -def mktemp(suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., dir: _DirT[AnyStr] | None = ...) -> AnyStr: ... +def mkdtemp(suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ...) -> bytes: ... 
+def mktemp(suffix: str = ..., prefix: str = ..., dir: StrPath | None = ...) -> str: ... def gettempdirb() -> bytes: ... def gettempprefixb() -> bytes: ... def gettempdir() -> str: ... diff --git a/mypy/typeshed/stdlib/termios.pyi b/mypy/typeshed/stdlib/termios.pyi index b2423304b930..494162a49b38 100644 --- a/mypy/typeshed/stdlib/termios.pyi +++ b/mypy/typeshed/stdlib/termios.pyi @@ -244,5 +244,8 @@ if sys.platform != "win32": def tcdrain(__fd: FileDescriptorLike) -> None: ... def tcflush(__fd: FileDescriptorLike, __queue: int) -> None: ... def tcflow(__fd: FileDescriptorLike, __action: int) -> None: ... + if sys.version_info >= (3, 11): + def tcgetwinsize(__fd: FileDescriptorLike) -> tuple[int, int]: ... + def tcsetwinsize(__fd: FileDescriptorLike, __winsize: tuple[int, int]) -> None: ... class error(Exception): ... diff --git a/mypy/typeshed/stdlib/threading.pyi b/mypy/typeshed/stdlib/threading.pyi index 231018ca731a..afc37b771e8c 100644 --- a/mypy/typeshed/stdlib/threading.pyi +++ b/mypy/typeshed/stdlib/threading.pyi @@ -10,86 +10,35 @@ _TF: TypeAlias = Callable[[FrameType, str, Any], Callable[..., Any] | None] _PF: TypeAlias = Callable[[FrameType, str, Any], None] _T = TypeVar("_T") +__all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", +] + +if sys.version_info >= (3, 8): + __all__ += ["ExceptHookArgs", "excepthook", "get_native_id"] + if sys.version_info >= (3, 10): - __all__ = [ - "get_ident", - "active_count", - "Condition", - "current_thread", - "enumerate", - "main_thread", - "TIMEOUT_MAX", - "Event", - "Lock", - "RLock", - "Semaphore", - "BoundedSemaphore", - "Thread", - "Barrier", - "BrokenBarrierError", - "Timer", - "ThreadError", - "setprofile", - "settrace", - "local", - "stack_size", - "excepthook", - "ExceptHookArgs", - "gettrace", - "getprofile", - "get_native_id", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "get_ident", - "active_count", - "Condition", - "current_thread", - "enumerate", - "main_thread", - "TIMEOUT_MAX", - "Event", - "Lock", - "RLock", - "Semaphore", - "BoundedSemaphore", - "Thread", - "Barrier", - "BrokenBarrierError", - "Timer", - "ThreadError", - "setprofile", - "settrace", - "local", - "stack_size", - "excepthook", - "ExceptHookArgs", - "get_native_id", - ] -else: - __all__ = [ - "get_ident", - "active_count", - "Condition", - "current_thread", - "enumerate", - "main_thread", - "TIMEOUT_MAX", - "Event", - "Lock", - "RLock", - "Semaphore", - "BoundedSemaphore", - "Thread", - "Barrier", - "BrokenBarrierError", - "Timer", - "ThreadError", - "setprofile", - "settrace", - "local", - "stack_size", - ] + __all__ += ["getprofile", "gettrace"] _profile_hook: _PF | None @@ -190,6 +139,7 @@ class Condition: def notifyAll(self) -> None: ... # deprecated alias for notify_all() class Semaphore: + _value: int def __init__(self, value: int = ...) -> None: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def acquire(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... 
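The threading `__all__` above follows the same idiom used throughout this typeshed sync: one unconditional base list plus version- or platform-gated `+=` additions. A standalone sketch of the pattern, with hypothetical names:

import sys

__all__ = ["always_exported"]

if sys.version_info >= (3, 8):
    __all__ += ["added_in_3_8"]        # cf. excepthook / get_native_id above

if sys.platform != "win32":
    __all__ += ["posix_only_name"]     # cf. the Unix-only socketserver classes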
@@ -216,6 +166,12 @@ if sys.version_info >= (3, 8): ExceptHookArgs = _ExceptHookArgs class Timer(Thread): + args: Iterable[Any] # undocumented + finished: Event # undocumented + function: Callable[..., Any] # undocumented + interval: float # undocumented + kwargs: Mapping[str, Any] # undocumented + def __init__( self, interval: float, diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi index 582503971e15..0955992d2688 100644 --- a/mypy/typeshed/stdlib/tkinter/__init__.pyi +++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi @@ -2197,31 +2197,29 @@ class Listbox(Widget, XView, YView): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def activate(self, index) -> None: ... - def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def activate(self, index: str | int) -> None: ... + def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: ... # type: ignore[override] def curselection(self): ... - def delete(self, first, last: Any | None = ...) -> None: ... - def get(self, first, last: Any | None = ...): ... - def index(self, index): ... - def insert(self, index, *elements) -> None: ... + def delete(self, first: str | int, last: str | int | None = ...) -> None: ... + def get(self, first: str | int, last: str | int | None = ...): ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, *elements: str | float) -> None: ... def nearest(self, y): ... def scan_mark(self, x, y) -> None: ... def scan_dragto(self, x, y) -> None: ... - def see(self, index) -> None: ... - def selection_anchor(self, index) -> None: ... - select_anchor: Any - def selection_clear(self, first, last: Any | None = ...) -> None: ... # type: ignore[override] - select_clear: Any - def selection_includes(self, index): ... - select_includes: Any - def selection_set(self, first, last: Any | None = ...) -> None: ... - select_set: Any + def see(self, index: str | int) -> None: ... + def selection_anchor(self, index: str | int) -> None: ... + select_anchor = selection_anchor + def selection_clear(self, first: str | int, last: str | int | None = ...) -> None: ... # type: ignore[override] + select_clear = selection_clear + def selection_includes(self, index: str | int): ... + select_includes = selection_includes + def selection_set(self, first: str | int, last: str | int | None = ...) -> None: ... + select_set = selection_set def size(self) -> int: ... # type: ignore[override] - def itemcget(self, index, option): ... - def itemconfigure(self, index, cnf: Any | None = ..., **kw): ... - itemconfig: Any - -_MenuIndex: TypeAlias = str | int + def itemcget(self, index: str | int, option): ... + def itemconfigure(self, index: str | int, cnf: Any | None = ..., **kw): ... + itemconfig = itemconfigure class Menu(Widget): def __init__( @@ -2285,8 +2283,8 @@ class Menu(Widget): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def tk_popup(self, x: int, y: int, entry: _MenuIndex = ...) -> None: ... - def activate(self, index: _MenuIndex) -> None: ... + def tk_popup(self, x: int, y: int, entry: str | int = ...) -> None: ... + def activate(self, index: str | int) -> None: ... def add(self, itemType, cnf=..., **kw): ... # docstring says "Internal function." def insert(self, index, itemType, cnf=..., **kw): ... # docstring says "Internal function." 
def add_cascade( @@ -2384,7 +2382,7 @@ class Menu(Widget): def add_separator(self, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... def insert_cascade( self, - index: _MenuIndex, + index: str | int, cnf: dict[str, Any] | None = ..., *, accelerator: str = ..., @@ -2406,7 +2404,7 @@ class Menu(Widget): ) -> None: ... def insert_checkbutton( self, - index: _MenuIndex, + index: str | int, cnf: dict[str, Any] | None = ..., *, accelerator: str = ..., @@ -2433,7 +2431,7 @@ class Menu(Widget): ) -> None: ... def insert_command( self, - index: _MenuIndex, + index: str | int, cnf: dict[str, Any] | None = ..., *, accelerator: str = ..., @@ -2454,7 +2452,7 @@ class Menu(Widget): ) -> None: ... def insert_radiobutton( self, - index: _MenuIndex, + index: str | int, cnf: dict[str, Any] | None = ..., *, accelerator: str = ..., @@ -2478,20 +2476,20 @@ class Menu(Widget): value: Any = ..., variable: Variable = ..., ) -> None: ... - def insert_separator(self, index: _MenuIndex, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... - def delete(self, index1: _MenuIndex, index2: _MenuIndex | None = ...) -> None: ... - def entrycget(self, index: _MenuIndex, option: str) -> Any: ... + def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... + def delete(self, index1: str | int, index2: str | int | None = ...) -> None: ... + def entrycget(self, index: str | int, option: str) -> Any: ... def entryconfigure( - self, index: _MenuIndex, cnf: dict[str, Any] | None = ..., **kw: Any + self, index: str | int, cnf: dict[str, Any] | None = ..., **kw: Any ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... entryconfig = entryconfigure - def index(self, index: _MenuIndex) -> int | None: ... - def invoke(self, index: _MenuIndex) -> Any: ... + def index(self, index: str | int) -> int | None: ... + def invoke(self, index: str | int) -> Any: ... def post(self, x: int, y: int) -> None: ... - def type(self, index: _MenuIndex) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: ... + def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: ... def unpost(self) -> None: ... - def xposition(self, index: _MenuIndex) -> int: ... - def yposition(self, index: _MenuIndex) -> int: ... + def xposition(self, index: str | int) -> int: ... + def yposition(self, index: str | int) -> int: ... class Menubutton(Widget): def __init__( @@ -3578,3 +3576,5 @@ class PanedWindow(Widget): def paneconfigure(self, tagOrId, cnf: Any | None = ..., **kw): ... paneconfig: Any def panes(self): ... + +def _test() -> None: ... 
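With `Listbox` and `Menu` indices typed as `str | int`, and the `select_*` names declared as real method aliases rather than `Any`, calls like these are expected to type-check. A hedged sketch (creating a Tk root requires a display):

import tkinter

root = tkinter.Tk()
box = tkinter.Listbox(root)
box.insert("end", "first", "second")   # string index accepted
box.selection_set(0)                   # integer index accepted
box.select_clear(0, "end")             # alias of selection_clear()
root.destroy()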
diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi index 7ca8f9b800ce..0fe94ad30ff5 100644 --- a/mypy/typeshed/stdlib/tkinter/ttk.pyi +++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -6,61 +6,35 @@ from tkinter.font import _FontDescription from typing import Any, overload from typing_extensions import Literal, TypeAlias, TypedDict +__all__ = [ + "Button", + "Checkbutton", + "Combobox", + "Entry", + "Frame", + "Label", + "Labelframe", + "LabelFrame", + "Menubutton", + "Notebook", + "Panedwindow", + "PanedWindow", + "Progressbar", + "Radiobutton", + "Scale", + "Scrollbar", + "Separator", + "Sizegrip", + "Style", + "Treeview", + "LabeledScale", + "OptionMenu", + "tclobjs_to_py", + "setup_master", +] + if sys.version_info >= (3, 7): - __all__ = [ - "Button", - "Checkbutton", - "Combobox", - "Entry", - "Frame", - "Label", - "Labelframe", - "LabelFrame", - "Menubutton", - "Notebook", - "Panedwindow", - "PanedWindow", - "Progressbar", - "Radiobutton", - "Scale", - "Scrollbar", - "Separator", - "Sizegrip", - "Spinbox", - "Style", - "Treeview", - "LabeledScale", - "OptionMenu", - "tclobjs_to_py", - "setup_master", - ] -else: - __all__ = [ - "Button", - "Checkbutton", - "Combobox", - "Entry", - "Frame", - "Label", - "Labelframe", - "LabelFrame", - "Menubutton", - "Notebook", - "Panedwindow", - "PanedWindow", - "Progressbar", - "Radiobutton", - "Scale", - "Scrollbar", - "Separator", - "Sizegrip", - "Style", - "Treeview", - "LabeledScale", - "OptionMenu", - "tclobjs_to_py", - "setup_master", - ] + __all__ += ["Spinbox"] def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... def setup_master(master: Any | None = ...): ... diff --git a/mypy/typeshed/stdlib/token.pyi b/mypy/typeshed/stdlib/token.pyi index 49329ec442f0..5fe9db7e230d 100644 --- a/mypy/typeshed/stdlib/token.pyi +++ b/mypy/typeshed/stdlib/token.pyi @@ -1,283 +1,80 @@ import sys +__all__ = [ + "AMPER", + "AMPEREQUAL", + "AT", + "ATEQUAL", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ELLIPSIS", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "VBAR", + "VBAREQUAL", + "tok_name", +] + +if sys.version_info < (3, 7) or sys.version_info >= (3, 8): + __all__ += ["ASYNC", "AWAIT"] + +if sys.version_info >= (3, 7): + __all__ += ["ENCODING", "NL", "COMMENT"] + +if sys.version_info >= (3, 8): + __all__ += ["COLONEQUAL", "TYPE_COMMENT", "TYPE_IGNORE"] + if sys.version_info >= (3, 10): - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - "INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - 
"PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "COLONEQUAL", - "OP", - "AWAIT", - "ASYNC", - "TYPE_IGNORE", - "TYPE_COMMENT", - "SOFT_KEYWORD", - "ERRORTOKEN", - "COMMENT", - "NL", - "ENCODING", - "N_TOKENS", - "NT_OFFSET", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - "INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - "PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "COLONEQUAL", - "OP", - "AWAIT", - "ASYNC", - "TYPE_IGNORE", - "TYPE_COMMENT", - "ERRORTOKEN", - "COMMENT", - "NL", - "ENCODING", - "N_TOKENS", - "NT_OFFSET", - ] -elif sys.version_info >= (3, 7): - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - "INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - "PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "OP", - "ERRORTOKEN", - "COMMENT", - "NL", - "ENCODING", - "N_TOKENS", - "NT_OFFSET", - ] -else: - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - "INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - "PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "OP", - "AWAIT", - "ASYNC", - "ERRORTOKEN", - "N_TOKENS", - "NT_OFFSET", - ] + __all__ += ["SOFT_KEYWORD"] ENDMARKER: int NAME: int diff --git a/mypy/typeshed/stdlib/tokenize.pyi b/mypy/typeshed/stdlib/tokenize.pyi index dea83263b550..3ac136150ab5 100644 --- a/mypy/typeshed/stdlib/tokenize.pyi +++ b/mypy/typeshed/stdlib/tokenize.pyi @@ -6,305 +6,85 @@ from token import * from typing import Any, 
NamedTuple, Pattern, TextIO from typing_extensions import TypeAlias +__all__ = [ + "AMPER", + "AMPEREQUAL", + "AT", + "ATEQUAL", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ELLIPSIS", + "ENCODING", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "TokenInfo", + "VBAR", + "VBAREQUAL", + "detect_encoding", + "tok_name", + "tokenize", + "untokenize", +] + +if sys.version_info < (3, 7) or sys.version_info >= (3, 8): + __all__ += ["ASYNC", "AWAIT"] + +if sys.version_info >= (3, 8): + __all__ += ["COLONEQUAL", "generate_tokens", "TYPE_COMMENT", "TYPE_IGNORE"] + if sys.version_info >= (3, 10): - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - "INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - "PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "COLONEQUAL", - "OP", - "AWAIT", - "ASYNC", - "TYPE_IGNORE", - "TYPE_COMMENT", - "SOFT_KEYWORD", - "ERRORTOKEN", - "COMMENT", - "NL", - "ENCODING", - "N_TOKENS", - "NT_OFFSET", - "tokenize", - "generate_tokens", - "detect_encoding", - "untokenize", - "TokenInfo", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - "INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - "PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "COLONEQUAL", - "OP", - "AWAIT", - "ASYNC", - "TYPE_IGNORE", - "TYPE_COMMENT", - "ERRORTOKEN", - "COMMENT", - "NL", - "ENCODING", - "N_TOKENS", - "NT_OFFSET", - "tokenize", - "generate_tokens", - "detect_encoding", - "untokenize", - "TokenInfo", - ] -elif sys.version_info >= (3, 7): - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - 
"INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - "PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "OP", - "ERRORTOKEN", - "COMMENT", - "NL", - "ENCODING", - "N_TOKENS", - "NT_OFFSET", - "tokenize", - "detect_encoding", - "untokenize", - "TokenInfo", - ] -else: - __all__ = [ - "tok_name", - "ISTERMINAL", - "ISNONTERMINAL", - "ISEOF", - "ENDMARKER", - "NAME", - "NUMBER", - "STRING", - "NEWLINE", - "INDENT", - "DEDENT", - "LPAR", - "RPAR", - "LSQB", - "RSQB", - "COLON", - "COMMA", - "SEMI", - "PLUS", - "MINUS", - "STAR", - "SLASH", - "VBAR", - "AMPER", - "LESS", - "GREATER", - "EQUAL", - "DOT", - "PERCENT", - "LBRACE", - "RBRACE", - "EQEQUAL", - "NOTEQUAL", - "LESSEQUAL", - "GREATEREQUAL", - "TILDE", - "CIRCUMFLEX", - "LEFTSHIFT", - "RIGHTSHIFT", - "DOUBLESTAR", - "PLUSEQUAL", - "MINEQUAL", - "STAREQUAL", - "SLASHEQUAL", - "PERCENTEQUAL", - "AMPEREQUAL", - "VBAREQUAL", - "CIRCUMFLEXEQUAL", - "LEFTSHIFTEQUAL", - "RIGHTSHIFTEQUAL", - "DOUBLESTAREQUAL", - "DOUBLESLASH", - "DOUBLESLASHEQUAL", - "AT", - "ATEQUAL", - "RARROW", - "ELLIPSIS", - "OP", - "AWAIT", - "ASYNC", - "ERRORTOKEN", - "N_TOKENS", - "NT_OFFSET", - "COMMENT", - "tokenize", - "detect_encoding", - "NL", - "untokenize", - "ENCODING", - "TokenInfo", - ] + __all__ += ["SOFT_KEYWORD"] if sys.version_info >= (3, 8): from token import EXACT_TOKEN_TYPES as EXACT_TOKEN_TYPES diff --git a/mypy/typeshed/stdlib/traceback.pyi b/mypy/typeshed/stdlib/traceback.pyi index 5c4d323a2d9f..16151f9431eb 100644 --- a/mypy/typeshed/stdlib/traceback.pyi +++ b/mypy/typeshed/stdlib/traceback.pyi @@ -96,6 +96,12 @@ def clear_frames(tb: TracebackType) -> None: ... def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... +if sys.version_info >= (3, 11): + class _ExceptionPrintContext: + def __init__(self) -> None: ... + def indent(self) -> str: ... + def emit(self, text_gen: str | Iterable[str], margin_char: str | None = ...) -> Generator[str, None, None]: ... + class TracebackException: __cause__: TracebackException __context__: TracebackException @@ -107,7 +113,34 @@ class TracebackException: text: str offset: int msg: str - if sys.version_info >= (3, 10): + if sys.version_info >= (3, 11): + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = ..., + lookup_lines: bool = ..., + capture_locals: bool = ..., + compact: bool = ..., + max_group_width: int = ..., + max_group_depth: int = ..., + _seen: set[int] | None = ..., + ) -> None: ... + @classmethod + def from_exception( + cls: type[Self], + exc: BaseException, + *, + limit: int | None = ..., + lookup_lines: bool = ..., + capture_locals: bool = ..., + compact: bool = ..., + max_group_width: int = ..., + max_group_depth: int = ..., + ) -> Self: ... 
+ elif sys.version_info >= (3, 10): def __init__( self, exc_type: type[BaseException], @@ -148,9 +181,16 @@ class TracebackException: ) -> Self: ... def __eq__(self, other: object) -> bool: ... - def format(self, *, chain: bool = ...) -> Generator[str, None, None]: ... + if sys.version_info >= (3, 11): + def format(self, *, chain: bool = ..., _ctx: _ExceptionPrintContext | None = ...) -> Generator[str, None, None]: ... + else: + def format(self, *, chain: bool = ...) -> Generator[str, None, None]: ... + def format_exception_only(self) -> Generator[str, None, None]: ... + if sys.version_info >= (3, 11): + def print(self, *, file: SupportsWrite[str] | None = ..., chain: bool = ...) -> None: ... + class FrameSummary(Iterable[Any]): if sys.version_info >= (3, 11): def __init__( @@ -213,4 +253,7 @@ class StackSummary(list[FrameSummary]): ) -> StackSummary: ... @classmethod def from_list(cls, a_list: list[_PT]) -> StackSummary: ... + if sys.version_info >= (3, 11): + def format_frame_summary(self, frame_summary: FrameSummary) -> str: ... + def format(self) -> list[str]: ... diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi index 3e91a5eb0ebf..cdacaf63c41f 100644 --- a/mypy/typeshed/stdlib/turtle.pyi +++ b/mypy/typeshed/stdlib/turtle.pyi @@ -142,9 +142,18 @@ _PenState: TypeAlias = dict[str, Any] _Speed: TypeAlias = str | float _PolygonCoords: TypeAlias = Sequence[tuple[float, float]] -# TODO: Type this more accurately -# Vec2D is actually a custom subclass of 'tuple'. -Vec2D: TypeAlias = tuple[float, float] +class Vec2D(tuple[float, float]): + def __new__(cls: type[Self], x: float, y: float) -> Self: ... + def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] + @overload # type: ignore[override] + def __mul__(self, other: Vec2D) -> float: ... + @overload + def __mul__(self, other: float) -> Vec2D: ... + def __rmul__(self, other: float) -> Vec2D: ... # type: ignore[override] + def __sub__(self, other: tuple[float, float]) -> Vec2D: ... + def __neg__(self) -> Vec2D: ... + def __abs__(self) -> float: ... + def rotate(self, angle: float) -> Vec2D: ... # Does not actually inherit from Canvas, but dynamically gets all methods of Canvas class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] @@ -410,7 +419,11 @@ class _Screen(TurtleScreen): def __init__(self) -> None: ... # Note int and float are interpreted differently, hence the Union instead of just float def setup( - self, width: int | float = ..., height: int | float = ..., startx: int | None = ..., starty: int | None = ... + self, + width: int | float = ..., # noqa: Y041 + height: int | float = ..., # noqa: Y041 + startx: int | None = ..., + starty: int | None = ..., ) -> None: ... def title(self, titlestring: str) -> None: ... def bye(self) -> None: ... 
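`Vec2D` is now a proper `tuple[float, float]` subclass with typed arithmetic; a minimal sketch of the operations the new stub describes (importing turtle pulls in tkinter but needs no display):

from turtle import Vec2D

v = Vec2D(3.0, 4.0)
shifted = v + (1.0, 1.0)       # Vec2D + tuple[float, float] -> Vec2D
dot = v * Vec2D(1.0, 0.0)      # Vec2D * Vec2D -> float (dot product)
scaled = v * 2.0               # Vec2D * float -> Vec2D
length = abs(v)                # -> 5.0
rotated = v.rotate(90.0)       # -> Vec2D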
diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index ed2476e44a86..de8c8423d47e 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -20,150 +20,47 @@ from importlib.machinery import ModuleSpec from typing import Any, ClassVar, Generic, Mapping, TypeVar, overload # noqa: Y027 from typing_extensions import Literal, ParamSpec, final -if sys.version_info >= (3, 10): - __all__ = [ - "FunctionType", - "LambdaType", - "CodeType", - "MappingProxyType", - "SimpleNamespace", - "CellType", - "GeneratorType", - "CoroutineType", - "AsyncGeneratorType", - "MethodType", - "BuiltinFunctionType", - "BuiltinMethodType", - "WrapperDescriptorType", - "MethodWrapperType", - "MethodDescriptorType", +__all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + "BuiltinMethodType", +] + +if sys.version_info >= (3, 7): + __all__ += [ "ClassMethodDescriptorType", - "ModuleType", - "TracebackType", - "FrameType", - "GetSetDescriptorType", - "MemberDescriptorType", - "new_class", - "resolve_bases", - "prepare_class", - "DynamicClassAttribute", - "coroutine", - "GenericAlias", - "UnionType", - "EllipsisType", - "NoneType", - "NotImplementedType", - ] -elif sys.version_info >= (3, 9): - __all__ = [ - "FunctionType", - "LambdaType", - "CodeType", - "MappingProxyType", - "SimpleNamespace", - "CellType", - "GeneratorType", - "CoroutineType", - "AsyncGeneratorType", - "MethodType", - "BuiltinFunctionType", - "BuiltinMethodType", - "WrapperDescriptorType", - "MethodWrapperType", "MethodDescriptorType", - "ClassMethodDescriptorType", - "ModuleType", - "TracebackType", - "FrameType", - "GetSetDescriptorType", - "MemberDescriptorType", - "new_class", - "resolve_bases", - "prepare_class", - "DynamicClassAttribute", - "coroutine", - "GenericAlias", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "FunctionType", - "LambdaType", - "CodeType", - "MappingProxyType", - "SimpleNamespace", - "CellType", - "GeneratorType", - "CoroutineType", - "AsyncGeneratorType", - "MethodType", - "BuiltinFunctionType", - "BuiltinMethodType", - "WrapperDescriptorType", "MethodWrapperType", - "MethodDescriptorType", - "ClassMethodDescriptorType", - "ModuleType", - "TracebackType", - "FrameType", - "GetSetDescriptorType", - "MemberDescriptorType", - "new_class", - "resolve_bases", - "prepare_class", - "DynamicClassAttribute", - "coroutine", - ] -elif sys.version_info >= (3, 7): - __all__ = [ - "FunctionType", - "LambdaType", - "CodeType", - "MappingProxyType", - "SimpleNamespace", - "GeneratorType", - "CoroutineType", - "AsyncGeneratorType", - "MethodType", - "BuiltinFunctionType", - "BuiltinMethodType", "WrapperDescriptorType", - "MethodWrapperType", - "MethodDescriptorType", - "ClassMethodDescriptorType", - "ModuleType", - "TracebackType", - "FrameType", - "GetSetDescriptorType", - "MemberDescriptorType", - "new_class", "resolve_bases", - "prepare_class", - "DynamicClassAttribute", - "coroutine", - ] -else: - __all__ = [ - "FunctionType", - "LambdaType", - "CodeType", - "MappingProxyType", - "SimpleNamespace", - "GeneratorType", - "CoroutineType", - "AsyncGeneratorType", - "MethodType", - "BuiltinFunctionType", - "ModuleType", - "TracebackType", - "FrameType", - 
"GetSetDescriptorType", - "MemberDescriptorType", - "new_class", - "prepare_class", - "DynamicClassAttribute", - "coroutine", ] +if sys.version_info >= (3, 8): + __all__ += ["CellType"] + +if sys.version_info >= (3, 9): + __all__ += ["GenericAlias"] + +if sys.version_info >= (3, 10): + __all__ += ["EllipsisType", "NoneType", "NotImplementedType", "UnionType"] + # Note, all classes "defined" here require special handling. _T1 = TypeVar("_T1") @@ -250,46 +147,99 @@ class CodeType: def co_freevars(self) -> tuple[str, ...]: ... @property def co_cellvars(self) -> tuple[str, ...]: ... - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 10): + @property + def co_linetable(self) -> bytes: ... + def co_lines(self) -> Iterator[tuple[int, int, int | None]]: ... + if sys.version_info >= (3, 11): + @property + def co_exceptiontable(self) -> bytes: ... + @property + def co_qualname(self) -> str: ... + def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ... + + if sys.version_info >= (3, 11): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __qualname: str, + __firstlineno: int, + __linetable: bytes, + __exceptiontable: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + elif sys.version_info >= (3, 10): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __linetable: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + elif sys.version_info >= (3, 8): def __init__( self, - argcount: int, - posonlyargcount: int, - kwonlyargcount: int, - nlocals: int, - stacksize: int, - flags: int, - codestring: bytes, - constants: tuple[Any, ...], - names: tuple[str, ...], - varnames: tuple[str, ...], - filename: str, - name: str, - firstlineno: int, - lnotab: bytes, - freevars: tuple[str, ...] = ..., - cellvars: tuple[str, ...] = ..., + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __lnotab: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., ) -> None: ... else: def __init__( self, - argcount: int, - kwonlyargcount: int, - nlocals: int, - stacksize: int, - flags: int, - codestring: bytes, - constants: tuple[Any, ...], - names: tuple[str, ...], - varnames: tuple[str, ...], - filename: str, - name: str, - firstlineno: int, - lnotab: bytes, - freevars: tuple[str, ...] = ..., - cellvars: tuple[str, ...] = ..., + __argcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __lnotab: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., ) -> None: ... 
- if sys.version_info >= (3, 10): + if sys.version_info >= (3, 11): def replace( self, *, @@ -301,18 +251,38 @@ class CodeType: co_flags: int = ..., co_firstlineno: int = ..., co_code: bytes = ..., - co_consts: tuple[Any, ...] = ..., + co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] = ..., co_varnames: tuple[str, ...] = ..., co_freevars: tuple[str, ...] = ..., co_cellvars: tuple[str, ...] = ..., co_filename: str = ..., co_name: str = ..., - co_linetable: object = ..., + co_qualname: str = ..., + co_linetable: bytes = ..., + co_exceptiontable: bytes = ..., + ) -> CodeType: ... + elif sys.version_info >= (3, 10): + def replace( + self, + *, + co_argcount: int = ..., + co_posonlyargcount: int = ..., + co_kwonlyargcount: int = ..., + co_nlocals: int = ..., + co_stacksize: int = ..., + co_flags: int = ..., + co_firstlineno: int = ..., + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_linetable: bytes = ..., ) -> CodeType: ... - def co_lines(self) -> Iterator[tuple[int, int, int | None]]: ... - @property - def co_linetable(self) -> object: ... elif sys.version_info >= (3, 8): def replace( self, @@ -325,7 +295,7 @@ class CodeType: co_flags: int = ..., co_firstlineno: int = ..., co_code: bytes = ..., - co_consts: tuple[Any, ...] = ..., + co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] = ..., co_varnames: tuple[str, ...] = ..., co_freevars: tuple[str, ...] = ..., @@ -334,8 +304,6 @@ class CodeType: co_name: str = ..., co_lnotab: bytes = ..., ) -> CodeType: ... - if sys.version_info >= (3, 11): - def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ... @final class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): @@ -386,6 +354,9 @@ class GeneratorType(Generator[_T_co, _T_contra, _V_co]): def gi_running(self) -> bool: ... @property def gi_yieldfrom(self) -> GeneratorType[_T_co, _T_contra, Any] | None: ... + if sys.version_info >= (3, 11): + @property + def gi_suspended(self) -> bool: ... __name__: str __qualname__: str def __iter__(self) -> GeneratorType[_T_co, _T_contra, _V_co]: ... @@ -439,6 +410,9 @@ class CoroutineType(Coroutine[_T_co, _T_contra, _V_co]): if sys.version_info >= (3, 7): @property def cr_origin(self) -> tuple[tuple[str, int, str], ...] | None: ... + if sys.version_info >= (3, 11): + @property + def cr_suspended(self) -> bool: ... def close(self) -> None: ... def __await__(self) -> Generator[Any, None, _V_co]: ... @@ -654,6 +628,8 @@ if sys.version_info >= (3, 9): if sys.version_info >= (3, 11): @property def __unpacked__(self) -> bool: ... + @property + def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... def __getattr__(self, name: str) -> Any: ... 
# incomplete diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 37ea55c9f2ef..969e61952d5f 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -1,6 +1,6 @@ import collections # Needed by aliases like DefaultDict, see mypy issue 2986 import sys -from _typeshed import ReadableBuffer, Self as TypeshedSelf, SupportsKeysAndGetItem +from _typeshed import IdentityFunction, ReadableBuffer, Self as TypeshedSelf, SupportsKeysAndGetItem from abc import ABCMeta, abstractmethod from types import BuiltinFunctionType, CodeType, FrameType, FunctionType, MethodType, ModuleType, TracebackType from typing_extensions import Literal as _Literal, ParamSpec as _ParamSpec, final as _final @@ -11,466 +11,108 @@ if sys.version_info >= (3, 7): if sys.version_info >= (3, 9): from types import GenericAlias -if sys.version_info >= (3, 11): - __all__ = [ - "Annotated", - "Any", - "Callable", - "ClassVar", - "Concatenate", +__all__ = [ + "AbstractSet", + "Any", + "AnyStr", + "AsyncContextManager", + "AsyncGenerator", + "AsyncIterable", + "AsyncIterator", + "Awaitable", + "ByteString", + "Callable", + "ChainMap", + "ClassVar", + "Collection", + "Container", + "ContextManager", + "Coroutine", + "Counter", + "DefaultDict", + "Deque", + "Dict", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "NamedTuple", + "NewType", + "Optional", + "Reversible", + "Sequence", + "Set", + "Sized", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsInt", + "SupportsRound", + "Text", + "Tuple", + "Type", + "TypeVar", + "Union", + "ValuesView", + "TYPE_CHECKING", + "cast", + "get_type_hints", + "no_type_check", + "no_type_check_decorator", + "overload", +] + +if sys.version_info < (3, 7): + __all__ += ["GenericMeta"] + +if sys.version_info >= (3, 7): + __all__ += ["ForwardRef", "NoReturn", "OrderedDict"] + +if sys.version_info >= (3, 8): + __all__ += [ "Final", - "ForwardRef", - "Generic", "Literal", + "Protocol", + "SupportsIndex", + "TypedDict", + "final", + "get_args", + "get_origin", + "runtime_checkable", + ] + +if sys.version_info >= (3, 9): + __all__ += ["Annotated", "BinaryIO", "IO", "Match", "Pattern", "TextIO"] + +if sys.version_info >= (3, 10): + __all__ += ["Concatenate", "ParamSpec", "ParamSpecArgs", "ParamSpecKwargs", "TypeAlias", "TypeGuard", "is_typeddict"] + +if sys.version_info >= (3, 11): + __all__ += [ "LiteralString", + "Never", "NotRequired", - "Optional", - "ParamSpec", - "Protocol", "Required", - "Tuple", - "Type", - "TypeVar", + "Self", "TypeVarTuple", - "Union", "Unpack", - "AbstractSet", - "ByteString", - "Container", - "ContextManager", - "Hashable", - "ItemsView", - "Iterable", - "Iterator", - "KeysView", - "Mapping", - "MappingView", - "MutableMapping", - "MutableSequence", - "MutableSet", - "Sequence", - "Sized", - "ValuesView", - "Awaitable", - "AsyncIterator", - "AsyncIterable", - "Coroutine", - "Collection", - "AsyncGenerator", - "AsyncContextManager", - "Reversible", - "SupportsAbs", - "SupportsBytes", - "SupportsComplex", - "SupportsFloat", - "SupportsIndex", - "SupportsInt", - "SupportsRound", - "ChainMap", - "Counter", - "Deque", - "Dict", - "DefaultDict", - "List", - "OrderedDict", - "Set", - "FrozenSet", - "NamedTuple", - "TypedDict", - "Generator", - "BinaryIO", - "IO", - "Match", - "Pattern", - "TextIO", - "AnyStr", "assert_never", 
"assert_type", - "cast", "clear_overloads", "dataclass_transform", - "final", - "get_args", - "get_origin", "get_overloads", - "get_type_hints", - "is_typeddict", - "Never", - "NewType", - "no_type_check", - "no_type_check_decorator", - "NoReturn", - "overload", - "ParamSpecArgs", - "ParamSpecKwargs", "reveal_type", - "runtime_checkable", - "Self", - "Text", - "TYPE_CHECKING", - "TypeAlias", - "TypeGuard", - ] -elif sys.version_info >= (3, 10): - __all__ = [ - "Annotated", - "Any", - "Callable", - "ClassVar", - "Concatenate", - "Final", - "ForwardRef", - "Generic", - "Literal", - "Optional", - "ParamSpec", - "Protocol", - "Tuple", - "Type", - "TypeVar", - "Union", - "AbstractSet", - "ByteString", - "Container", - "ContextManager", - "Hashable", - "ItemsView", - "Iterable", - "Iterator", - "KeysView", - "Mapping", - "MappingView", - "MutableMapping", - "MutableSequence", - "MutableSet", - "Sequence", - "Sized", - "ValuesView", - "Awaitable", - "AsyncIterator", - "AsyncIterable", - "Coroutine", - "Collection", - "AsyncGenerator", - "AsyncContextManager", - "Reversible", - "SupportsAbs", - "SupportsBytes", - "SupportsComplex", - "SupportsFloat", - "SupportsIndex", - "SupportsInt", - "SupportsRound", - "ChainMap", - "Counter", - "Deque", - "Dict", - "DefaultDict", - "List", - "OrderedDict", - "Set", - "FrozenSet", - "NamedTuple", - "TypedDict", - "Generator", - "BinaryIO", - "IO", - "Match", - "Pattern", - "TextIO", - "AnyStr", - "cast", - "final", - "get_args", - "get_origin", - "get_type_hints", - "is_typeddict", - "NewType", - "no_type_check", - "no_type_check_decorator", - "NoReturn", - "overload", - "ParamSpecArgs", - "ParamSpecKwargs", - "runtime_checkable", - "Text", - "TYPE_CHECKING", - "TypeAlias", - "TypeGuard", - ] -elif sys.version_info >= (3, 9): - __all__ = [ - "Annotated", - "Any", - "Callable", - "ClassVar", - "Final", - "ForwardRef", - "Generic", - "Literal", - "Optional", - "Protocol", - "Tuple", - "Type", - "TypeVar", - "Union", - "AbstractSet", - "ByteString", - "Container", - "ContextManager", - "Hashable", - "ItemsView", - "Iterable", - "Iterator", - "KeysView", - "Mapping", - "MappingView", - "MutableMapping", - "MutableSequence", - "MutableSet", - "Sequence", - "Sized", - "ValuesView", - "Awaitable", - "AsyncIterator", - "AsyncIterable", - "Coroutine", - "Collection", - "AsyncGenerator", - "AsyncContextManager", - "Reversible", - "SupportsAbs", - "SupportsBytes", - "SupportsComplex", - "SupportsFloat", - "SupportsIndex", - "SupportsInt", - "SupportsRound", - "ChainMap", - "Counter", - "Deque", - "Dict", - "DefaultDict", - "List", - "OrderedDict", - "Set", - "FrozenSet", - "NamedTuple", - "TypedDict", - "Generator", - "BinaryIO", - "IO", - "Match", - "Pattern", - "TextIO", - "AnyStr", - "cast", - "final", - "get_args", - "get_origin", - "get_type_hints", - "NewType", - "no_type_check", - "no_type_check_decorator", - "NoReturn", - "overload", - "runtime_checkable", - "Text", - "TYPE_CHECKING", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "Any", - "Callable", - "ClassVar", - "Final", - "ForwardRef", - "Generic", - "Literal", - "Optional", - "Protocol", - "Tuple", - "Type", - "TypeVar", - "Union", - "AbstractSet", - "ByteString", - "Container", - "ContextManager", - "Hashable", - "ItemsView", - "Iterable", - "Iterator", - "KeysView", - "Mapping", - "MappingView", - "MutableMapping", - "MutableSequence", - "MutableSet", - "Sequence", - "Sized", - "ValuesView", - "Awaitable", - "AsyncIterator", - "AsyncIterable", - "Coroutine", - "Collection", - "AsyncGenerator", - 
"AsyncContextManager", - "Reversible", - "SupportsAbs", - "SupportsBytes", - "SupportsComplex", - "SupportsFloat", - "SupportsIndex", - "SupportsInt", - "SupportsRound", - "ChainMap", - "Counter", - "Deque", - "Dict", - "DefaultDict", - "List", - "OrderedDict", - "Set", - "FrozenSet", - "NamedTuple", - "TypedDict", - "Generator", - "AnyStr", - "cast", - "final", - "get_args", - "get_origin", - "get_type_hints", - "NewType", - "no_type_check", - "no_type_check_decorator", - "NoReturn", - "overload", - "runtime_checkable", - "Text", - "TYPE_CHECKING", - ] -elif sys.version_info >= (3, 7): - __all__ = [ - "Any", - "Callable", - "ClassVar", - "ForwardRef", - "Generic", - "Optional", - "Tuple", - "Type", - "TypeVar", - "Union", - "AbstractSet", - "ByteString", - "Container", - "ContextManager", - "Hashable", - "ItemsView", - "Iterable", - "Iterator", - "KeysView", - "Mapping", - "MappingView", - "MutableMapping", - "MutableSequence", - "MutableSet", - "Sequence", - "Sized", - "ValuesView", - "Awaitable", - "AsyncIterator", - "AsyncIterable", - "Coroutine", - "Collection", - "AsyncGenerator", - "AsyncContextManager", - "Reversible", - "SupportsAbs", - "SupportsBytes", - "SupportsComplex", - "SupportsFloat", - "SupportsInt", - "SupportsRound", - "ChainMap", - "Counter", - "Deque", - "Dict", - "DefaultDict", - "List", - "OrderedDict", - "Set", - "FrozenSet", - "NamedTuple", - "Generator", - "AnyStr", - "cast", - "get_type_hints", - "NewType", - "no_type_check", - "no_type_check_decorator", - "NoReturn", - "overload", - "Text", - "TYPE_CHECKING", - ] -else: - __all__ = [ - "Any", - "Callable", - "ClassVar", - "Generic", - "Optional", - "Tuple", - "Type", - "TypeVar", - "Union", - "AbstractSet", - "GenericMeta", - "ByteString", - "Container", - "ContextManager", - "Hashable", - "ItemsView", - "Iterable", - "Iterator", - "KeysView", - "Mapping", - "MappingView", - "MutableMapping", - "MutableSequence", - "MutableSet", - "Sequence", - "Sized", - "ValuesView", - "Reversible", - "SupportsAbs", - "SupportsBytes", - "SupportsComplex", - "SupportsFloat", - "SupportsInt", - "SupportsRound", - "Counter", - "Deque", - "Dict", - "DefaultDict", - "List", - "Set", - "FrozenSet", - "NamedTuple", - "Generator", - "AnyStr", - "cast", - "get_type_hints", - "NewType", - "no_type_check", - "no_type_check_decorator", - "overload", - "Text", - "TYPE_CHECKING", ] Any = object() @@ -1232,7 +874,7 @@ if sys.version_info >= (3, 11): kw_only_default: bool = ..., field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., **kwargs: Any, - ) -> Callable[[_T], _T]: ... + ) -> IdentityFunction: ... # Type constructors diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index b94daaba9f49..38fb9dec19d9 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -1,6 +1,6 @@ import abc import sys -from _typeshed import Self as TypeshedSelf # see #6932 for why the alias cannot have a leading underscore +from _typeshed import IdentityFunction, Self as TypeshedSelf # see #6932 for why the Self alias cannot have a leading underscore from typing import ( # noqa: Y022,Y027,Y039 TYPE_CHECKING as TYPE_CHECKING, Any, @@ -232,4 +232,4 @@ else: kw_only_default: bool = ..., field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., **kwargs: object, - ) -> Callable[[_T], _T]: ... + ) -> IdentityFunction: ... 
diff --git a/mypy/typeshed/stdlib/unicodedata.pyi b/mypy/typeshed/stdlib/unicodedata.pyi index aec8867df48d..7337ab8789b2 100644 --- a/mypy/typeshed/stdlib/unicodedata.pyi +++ b/mypy/typeshed/stdlib/unicodedata.pyi @@ -38,6 +38,9 @@ class UCD: def decomposition(self, __chr: str) -> str: ... def digit(self, __chr: str, __default: _T = ...) -> int | _T: ... def east_asian_width(self, __chr: str) -> str: ... + if sys.version_info >= (3, 8): + def is_normalized(self, __form: str, __unistr: str) -> bool: ... + def lookup(self, __name: str | bytes) -> str: ... def mirrored(self, __chr: str) -> int: ... def name(self, __chr: str, __default: _T = ...) -> str | _T: ... diff --git a/mypy/typeshed/stdlib/unittest/__init__.pyi b/mypy/typeshed/stdlib/unittest/__init__.pyi index 4bbf98c992c1..673597275b33 100644 --- a/mypy/typeshed/stdlib/unittest/__init__.pyi +++ b/mypy/typeshed/stdlib/unittest/__init__.pyi @@ -32,55 +32,37 @@ if sys.version_info >= (3, 8): from .case import addModuleCleanup as addModuleCleanup - __all__ = [ - "TestResult", - "TestCase", - "IsolatedAsyncioTestCase", - "TestSuite", - "TextTestRunner", - "TestLoader", - "FunctionTestCase", - "main", - "defaultTestLoader", - "SkipTest", - "skip", - "skipIf", - "skipUnless", - "expectedFailure", - "TextTestResult", - "installHandler", - "registerResult", - "removeResult", - "removeHandler", - "addModuleCleanup", - "getTestCaseNames", - "makeSuite", - "findTestCases", - ] +if sys.version_info >= (3, 11): + from .case import doModuleCleanups as doModuleCleanups, enterModuleContext as enterModuleContext -else: - __all__ = [ - "TestResult", - "TestCase", - "TestSuite", - "TextTestRunner", - "TestLoader", - "FunctionTestCase", - "main", - "defaultTestLoader", - "SkipTest", - "skip", - "skipIf", - "skipUnless", - "expectedFailure", - "TextTestResult", - "installHandler", - "registerResult", - "removeResult", - "removeHandler", - "getTestCaseNames", - "makeSuite", - "findTestCases", - ] +__all__ = [ + "TestResult", + "TestCase", + "TestSuite", + "TextTestRunner", + "TestLoader", + "FunctionTestCase", + "main", + "defaultTestLoader", + "SkipTest", + "skip", + "skipIf", + "skipUnless", + "expectedFailure", + "TextTestResult", + "installHandler", + "registerResult", + "removeResult", + "removeHandler", + "getTestCaseNames", + "makeSuite", + "findTestCases", +] + +if sys.version_info >= (3, 8): + __all__ += ["addModuleCleanup", "IsolatedAsyncioTestCase"] + +if sys.version_info >= (3, 11): + __all__ += ["enterModuleContext", "doModuleCleanups"] def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ... diff --git a/mypy/typeshed/stdlib/unittest/async_case.pyi b/mypy/typeshed/stdlib/unittest/async_case.pyi index 55407ec3f1c8..c1de205fbd55 100644 --- a/mypy/typeshed/stdlib/unittest/async_case.pyi +++ b/mypy/typeshed/stdlib/unittest/async_case.pyi @@ -1,11 +1,19 @@ +import sys from collections.abc import Awaitable, Callable +from typing import TypeVar from typing_extensions import ParamSpec from .case import TestCase +if sys.version_info >= (3, 11): + from contextlib import AbstractAsyncContextManager + +_T = TypeVar("_T") _P = ParamSpec("_P") class IsolatedAsyncioTestCase(TestCase): async def asyncSetUp(self) -> None: ... async def asyncTearDown(self) -> None: ... def addAsyncCleanup(self, __func: Callable[_P, Awaitable[object]], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + if sys.version_info >= (3, 11): + async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... 
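A usage sketch (not from the patch) of the newly stubbed enterAsyncContext, which requires Python 3.11+: it enters an async context manager, registers its exit as an async cleanup, and the stub types the result as the context manager's value type.

```
import contextlib
import unittest


@contextlib.asynccontextmanager
async def resource():
    yield "handle"


class Example(unittest.IsolatedAsyncioTestCase):
    async def asyncSetUp(self) -> None:
        # enterAsyncContext returns the __aenter__ result (a str here) and
        # schedules the corresponding __aexit__ as an async cleanup.
        self.handle = await self.enterAsyncContext(resource())

    async def test_handle(self) -> None:
        self.assertEqual(self.handle, "handle")
```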
diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 578bd6d6f271..15b573edeebb 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -1,18 +1,32 @@ -import datetime import logging import sys import unittest.result -from _typeshed import Self +from _typeshed import Self, SupportsDunderGE, SupportsSub from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Set as AbstractSet from contextlib import AbstractContextManager from types import TracebackType -from typing import Any, AnyStr, ClassVar, Generic, NamedTuple, NoReturn, Pattern, TypeVar, overload +from typing import ( + Any, + AnyStr, + ClassVar, + Generic, + NamedTuple, + NoReturn, + Pattern, + Protocol, + SupportsAbs, + SupportsRound, + TypeVar, + overload, +) from typing_extensions import ParamSpec from warnings import WarningMessage if sys.version_info >= (3, 9): from types import GenericAlias +_T = TypeVar("_T") +_S = TypeVar("_S", bound=SupportsSub[Any, Any]) _E = TypeVar("_E", bound=BaseException) _FT = TypeVar("_FT", bound=Callable[..., Any]) _P = ParamSpec("_P") @@ -50,6 +64,9 @@ if sys.version_info >= (3, 8): def addModuleCleanup(__function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... def doModuleCleanups() -> None: ... +if sys.version_info >= (3, 11): + def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: ... + def expectedFailure(test_item: _FT) -> _FT: ... def skip(reason: str) -> Callable[[_FT], _FT]: ... def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ... @@ -58,6 +75,8 @@ def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ... class SkipTest(Exception): def __init__(self, reason: str) -> None: ... +class _SupportsAbsAndDunderGE(SupportsDunderGE, SupportsAbs[Any], Protocol): ... + class TestCase: failureException: type[BaseException] longMessage: bool @@ -79,7 +98,9 @@ class TestCase: def skipTest(self, reason: Any) -> None: ... def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... def debug(self) -> None: ... - def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... + if sys.version_info < (3, 11): + def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... + def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertNotEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... def assertTrue(self, expr: Any, msg: Any = ...) -> None: ... @@ -159,33 +180,35 @@ class TestCase: self, logger: str | logging.Logger | None = ..., level: int | str | None = ... ) -> _AssertLogsContext[None]: ... + @overload + def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload def assertAlmostEqual( - self, first: float, second: float, places: int | None = ..., msg: Any = ..., delta: float | None = ... + self, first: _S, second: _S, places: None = ..., msg: Any = ..., *, delta: _SupportsAbsAndDunderGE ) -> None: ... @overload def assertAlmostEqual( self, - first: datetime.datetime, - second: datetime.datetime, + first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], + second: _T, places: int | None = ..., msg: Any = ..., - delta: datetime.timedelta | None = ..., + delta: None = ..., ) -> None: ... @overload - def assertNotAlmostEqual(self, first: float, second: float, *, msg: Any = ...) -> None: ... 
- @overload - def assertNotAlmostEqual(self, first: float, second: float, places: int | None = ..., msg: Any = ...) -> None: ... + def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload - def assertNotAlmostEqual(self, first: float, second: float, *, msg: Any = ..., delta: float | None = ...) -> None: ... + def assertNotAlmostEqual( + self, first: _S, second: _S, places: None = ..., msg: Any = ..., *, delta: _SupportsAbsAndDunderGE + ) -> None: ... @overload def assertNotAlmostEqual( self, - first: datetime.datetime, - second: datetime.datetime, + first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], + second: _T, places: int | None = ..., msg: Any = ..., - delta: datetime.timedelta | None = ..., + delta: None = ..., ) -> None: ... def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... @@ -209,6 +232,9 @@ class TestCase: else: def addCleanup(self, function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + if sys.version_info >= (3, 11): + def enterContext(self, cm: AbstractContextManager[_T]) -> _T: ... + def doCleanups(self) -> None: ... if sys.version_info >= (3, 8): @classmethod @@ -216,6 +242,10 @@ class TestCase: @classmethod def doClassCleanups(cls) -> None: ... + if sys.version_info >= (3, 11): + @classmethod + def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: ... + def _formatMessage(self, msg: str | None, standardMsg: str) -> str: ... # undocumented def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented if sys.version_info < (3, 12): @@ -236,14 +266,10 @@ class TestCase: ) -> None: ... @overload def failUnlessRaises(self, exception: type[_E] | tuple[type[_E], ...], msg: Any = ...) -> _AssertRaisesContext[_E]: ... - def failUnlessAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... - def assertAlmostEquals( - self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ... - ) -> None: ... - def failIfAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... - def assertNotAlmostEquals( - self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ... - ) -> None: ... + failUnlessAlmostEqual = assertAlmostEqual + assertAlmostEquals = assertAlmostEqual + failIfAlmostEqual = assertNotAlmostEqual + assertNotAlmostEquals = assertNotAlmostEqual def assertRegexpMatches(self, text: AnyStr, regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... def assertNotRegexpMatches(self, text: AnyStr, regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... @overload diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 400bdaac3b41..a7111ff2d090 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -440,7 +440,13 @@ class _SpecState: def mock_open(mock: Any | None = ..., read_data: Any = ...) -> Any: ... -PropertyMock = Any +class PropertyMock(Mock): + if sys.version_info >= (3, 8): + def __get__(self: Self, obj: _T, obj_type: type[_T] | None = ...) -> Self: ... + else: + def __get__(self: Self, obj: _T, obj_type: type[_T] | None) -> Self: ... + + def __set__(self, obj: Any, value: Any) -> None: ... 
if sys.version_info >= (3, 7): def seal(mock: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index c6a6836e6e95..49f3825e0821 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -39,7 +39,8 @@ non_hierarchical: list[str] uses_query: list[str] uses_fragment: list[str] scheme_chars: str -MAX_CACHE_SIZE: int +if sys.version_info < (3, 11): + MAX_CACHE_SIZE: int class _ResultMixinBase(Generic[AnyStr]): def geturl(self) -> AnyStr: ... diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 5e6dde01480a..02ad1bd30052 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -150,6 +150,10 @@ class HTTPRedirectHandler(BaseHandler): def http_error_302(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... def http_error_303(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... def http_error_307(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + if sys.version_info >= (3, 11): + def http_error_308( + self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage + ) -> _UrlopenRet | None: ... class HTTPCookieProcessor(BaseHandler): cookiejar: CookieJar @@ -330,6 +334,11 @@ class FancyURLopener(URLopener): def http_error_307( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented + if sys.version_info >= (3, 11): + def http_error_308( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + ) -> _UrlopenRet | addinfourl | None: ... 
# undocumented + def http_error_401( self, url: str, diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi index dacb6fffcc6b..82cd735bd829 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -1,92 +1,41 @@ import sys +from _collections_abc import dict_keys from _typeshed import FileDescriptor, StrOrBytesPath, SupportsRead, SupportsWrite -from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, KeysView, Mapping, MutableSequence, Sequence +from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence from typing import Any, TypeVar, overload from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard +__all__ = [ + "Comment", + "dump", + "Element", + "ElementTree", + "fromstring", + "fromstringlist", + "iselement", + "iterparse", + "parse", + "ParseError", + "PI", + "ProcessingInstruction", + "QName", + "SubElement", + "tostring", + "tostringlist", + "TreeBuilder", + "VERSION", + "XML", + "XMLID", + "XMLParser", + "XMLPullParser", + "register_namespace", +] + +if sys.version_info >= (3, 8): + __all__ += ["C14NWriterTarget", "canonicalize"] + if sys.version_info >= (3, 9): - __all__ = [ - "Comment", - "dump", - "Element", - "ElementTree", - "fromstring", - "fromstringlist", - "indent", - "iselement", - "iterparse", - "parse", - "ParseError", - "PI", - "ProcessingInstruction", - "QName", - "SubElement", - "tostring", - "tostringlist", - "TreeBuilder", - "VERSION", - "XML", - "XMLID", - "XMLParser", - "XMLPullParser", - "register_namespace", - "canonicalize", - "C14NWriterTarget", - ] -elif sys.version_info >= (3, 8): - __all__ = [ - "Comment", - "dump", - "Element", - "ElementTree", - "fromstring", - "fromstringlist", - "iselement", - "iterparse", - "parse", - "ParseError", - "PI", - "ProcessingInstruction", - "QName", - "SubElement", - "tostring", - "tostringlist", - "TreeBuilder", - "VERSION", - "XML", - "XMLID", - "XMLParser", - "XMLPullParser", - "register_namespace", - "canonicalize", - "C14NWriterTarget", - ] -else: - __all__ = [ - "Comment", - "dump", - "Element", - "ElementTree", - "fromstring", - "fromstringlist", - "iselement", - "iterparse", - "parse", - "ParseError", - "PI", - "ProcessingInstruction", - "QName", - "SubElement", - "tostring", - "tostringlist", - "TreeBuilder", - "VERSION", - "XML", - "XMLID", - "XMLParser", - "XMLPullParser", - "register_namespace", - ] + __all__ += ["indent"] _T = TypeVar("_T") _FileRead: TypeAlias = StrOrBytesPath | FileDescriptor | SupportsRead[bytes] | SupportsRead[str] @@ -132,7 +81,7 @@ if sys.version_info >= (3, 8): exclude_tags: Iterable[str] | None = ..., ) -> None: ... -class Element(MutableSequence[Element]): +class Element: tag: str attrib: dict[str, str] text: str | None @@ -156,7 +105,7 @@ class Element(MutableSequence[Element]): def iter(self, tag: str | None = ...) -> Generator[Element, None, None]: ... def iterfind(self, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... def itertext(self) -> Generator[str, None, None]: ... - def keys(self) -> KeysView[str]: ... + def keys(self) -> dict_keys[str, str]: ... # makeelement returns the type of self in Python impl, but not in C impl def makeelement(self, __tag: str, __attrib: dict[str, str]) -> Element: ... def remove(self, __subelement: Element) -> None: ... 
@@ -167,8 +116,10 @@ class Element(MutableSequence[Element]): @overload def __getitem__(self, __i: SupportsIndex) -> Element: ... @overload - def __getitem__(self, __s: slice) -> MutableSequence[Element]: ... + def __getitem__(self, __s: slice) -> list[Element]: ... def __len__(self) -> int: ... + # Doesn't actually exist at runtime, but instance of the class are indeed iterable due to __getitem__. + def __iter__(self) -> Iterator[Element]: ... @overload def __setitem__(self, __i: SupportsIndex, __o: Element) -> None: ... @overload @@ -194,7 +145,7 @@ class QName: class ElementTree: def __init__(self, element: Element | None = ..., file: _FileRead | None = ...) -> None: ... - def getroot(self) -> Element: ... + def getroot(self) -> Element | Any: ... def parse(self, source: _FileRead, parser: XMLParser | None = ...) -> Element: ... def iter(self, tag: str | None = ...) -> Generator[Element, None, None]: ... if sys.version_info < (3, 9): @@ -321,7 +272,9 @@ class XMLPullParser: def __init__(self, events: Sequence[str] | None = ..., *, _parser: XMLParser | None = ...) -> None: ... def feed(self, data: str | bytes) -> None: ... def close(self) -> None: ... - def read_events(self) -> Iterator[tuple[str, Element]]: ... + # Second element in the tuple could be `Element`, `tuple[str, str]` or `None`. + # Use `Any` to avoid false-positive errors. + def read_events(self) -> Iterator[tuple[str, Any]]: ... def XML(text: str | bytes, parser: XMLParser | None = ...) -> Element: ... def XMLID(text: str | bytes, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... diff --git a/mypy/typeshed/stdlib/xml/sax/handler.pyi b/mypy/typeshed/stdlib/xml/sax/handler.pyi index 3a5193300981..abf124f836cd 100644 --- a/mypy/typeshed/stdlib/xml/sax/handler.pyi +++ b/mypy/typeshed/stdlib/xml/sax/handler.pyi @@ -1,6 +1,6 @@ -from typing import Any +import sys -version: Any +version: str class ErrorHandler: def error(self, exception): ... @@ -30,17 +30,25 @@ class DTDHandler: class EntityResolver: def resolveEntity(self, publicId, systemId): ... -feature_namespaces: Any -feature_namespace_prefixes: Any -feature_string_interning: Any -feature_validation: Any -feature_external_ges: Any -feature_external_pes: Any -all_features: Any -property_lexical_handler: Any -property_declaration_handler: Any -property_dom_node: Any -property_xml_string: Any -property_encoding: Any -property_interning_dict: Any -all_properties: Any +feature_namespaces: str +feature_namespace_prefixes: str +feature_string_interning: str +feature_validation: str +feature_external_ges: str +feature_external_pes: str +all_features: list[str] +property_lexical_handler: str +property_declaration_handler: str +property_dom_node: str +property_xml_string: str +property_encoding: str +property_interning_dict: str +all_properties: list[str] + +if sys.version_info >= (3, 10): + class LexicalHandler: + def comment(self, content: str) -> object: ... + def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> object: ... + def endDTD(self) -> object: ... + def startCDATA(self) -> object: ... + def endCDATA(self) -> object: ... 
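To illustrate why read_events above is now typed as Iterator[tuple[str, Any]] (example not part of the patch): the second element is an Element for "start"/"end" events but a (prefix, uri) pair for "start-ns" and None for "end-ns", so no single precise type fits.

```
from xml.etree.ElementTree import XMLPullParser

parser = XMLPullParser(events=("start-ns", "start", "end"))
parser.feed('<root xmlns:p="urn:example"><p:item/></root>')
parser.close()

for event, payload in parser.read_events():
    if event == "start-ns":
        prefix, uri = payload        # payload is a (prefix, uri) string pair
    else:
        print(event, payload.tag)    # payload is an Element
```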
diff --git a/mypy/typeshed/stdlib/xmlrpc/server.pyi b/mypy/typeshed/stdlib/xmlrpc/server.pyi index 371f1821b29d..237620f70250 100644 --- a/mypy/typeshed/stdlib/xmlrpc/server.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/server.pyi @@ -4,7 +4,7 @@ import socketserver import sys from collections.abc import Callable, Iterable, Mapping from datetime import datetime -from typing import Any, Pattern, Protocol +from typing import Any, ClassVar, Pattern, Protocol from typing_extensions import TypeAlias from xmlrpc.client import Fault @@ -68,8 +68,7 @@ class SimpleXMLRPCDispatcher: # undocumented def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: ... # undocumented class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): - - rpc_paths: tuple[str, str] + rpc_paths: ClassVar[tuple[str, ...]] encode_threshold: int # undocumented aepattern: Pattern[str] # undocumented def accept_encodings(self) -> dict[str, float]: ... diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index 276f8df82a6d..c799cf9b4e12 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -7,37 +7,23 @@ from types import TracebackType from typing import IO, Any, Protocol, overload from typing_extensions import Literal, TypeAlias +__all__ = [ + "BadZipFile", + "BadZipfile", + "error", + "ZIP_STORED", + "ZIP_DEFLATED", + "ZIP_BZIP2", + "ZIP_LZMA", + "is_zipfile", + "ZipInfo", + "ZipFile", + "PyZipFile", + "LargeZipFile", +] + if sys.version_info >= (3, 8): - __all__ = [ - "BadZipFile", - "BadZipfile", - "error", - "ZIP_STORED", - "ZIP_DEFLATED", - "ZIP_BZIP2", - "ZIP_LZMA", - "is_zipfile", - "ZipInfo", - "ZipFile", - "PyZipFile", - "LargeZipFile", - "Path", - ] -else: - __all__ = [ - "BadZipFile", - "BadZipfile", - "error", - "ZIP_STORED", - "ZIP_DEFLATED", - "ZIP_BZIP2", - "ZIP_LZMA", - "is_zipfile", - "ZipInfo", - "ZipFile", - "PyZipFile", - "LargeZipFile", - ] + __all__ += ["Path"] _DateTuple: TypeAlias = tuple[int, int, int, int, int, int] _ReadWriteMode: TypeAlias = Literal["r", "w"] @@ -158,7 +144,32 @@ class ZipFile: compresslevel: int | None # undocumented mode: _ZipFileMode # undocumented pwd: str | None # undocumented - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 11): + @overload + def __init__( + self, + file: StrPath | IO[bytes], + mode: Literal["r"] = ..., + compression: int = ..., + allowZip64: bool = ..., + compresslevel: int | None = ..., + *, + strict_timestamps: bool = ..., + metadata_encoding: str | None, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | IO[bytes], + mode: _ZipFileMode = ..., + compression: int = ..., + allowZip64: bool = ..., + compresslevel: int | None = ..., + *, + strict_timestamps: bool = ..., + metadata_encoding: None = ..., + ) -> None: ... + elif sys.version_info >= (3, 8): def __init__( self, file: StrPath | IO[bytes], @@ -223,7 +234,7 @@ class ZipFile: else: def writestr(self, zinfo_or_arcname: str | ZipInfo, data: bytes | str, compress_type: int | None = ...) -> None: ... if sys.version_info >= (3, 11): - def mkdir(self, zinfo_or_directory: str | ZipInfo, mode: int = ...) -> None: ... + def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = ...) -> None: ... class PyZipFile(ZipFile): def __init__( @@ -275,6 +286,13 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 10): @property def filename(self) -> PathLike[str]: ... # undocumented + if sys.version_info >= (3, 11): + @property + def suffix(self) -> str: ... 
+ @property + def suffixes(self) -> list[str]: ... + @property + def stem(self) -> str: ... def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = ...) -> None: ... if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi b/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi index 412c3cb15142..bf8d72ba8393 100644 --- a/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi +++ b/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi @@ -1,7 +1,7 @@ import abc -from _typeshed import Self -from collections.abc import Callable, ItemsView, KeysView, Mapping, ValuesView -from typing import Any, Generic, TypeVar +from _typeshed import IdentityFunction, Self +from collections.abc import ItemsView, KeysView, Mapping, ValuesView +from typing import Any, Generic, TypeVar, overload _T = TypeVar("_T") _U = TypeVar("_U") @@ -21,12 +21,30 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def __delitem__(self, k: NoReturn) -> None: ... def TypedDict(typename: str, fields: dict[str, type[Any]], total: bool = ...) -> type[dict[str, Any]]: ... -def Arg(type: _T = ..., name: str | None = ...) -> _T: ... -def DefaultArg(type: _T = ..., name: str | None = ...) -> _T: ... -def NamedArg(type: _T = ..., name: str | None = ...) -> _T: ... -def DefaultNamedArg(type: _T = ..., name: str | None = ...) -> _T: ... -def VarArg(type: _T = ...) -> _T: ... -def KwArg(type: _T = ...) -> _T: ... +@overload +def Arg(type: _T, name: str | None = ...) -> _T: ... +@overload +def Arg(*, name: str | None = ...) -> Any: ... +@overload +def DefaultArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def DefaultArg(*, name: str | None = ...) -> Any: ... +@overload +def NamedArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def NamedArg(*, name: str | None = ...) -> Any: ... +@overload +def DefaultNamedArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def DefaultNamedArg(*, name: str | None = ...) -> Any: ... +@overload +def VarArg(type: _T) -> _T: ... +@overload +def VarArg() -> Any: ... +@overload +def KwArg(type: _T) -> _T: ... +@overload +def KwArg() -> Any: ... # Return type that indicates a function does not return. # Deprecated: Use typing.NoReturn instead. @@ -36,6 +54,6 @@ class NoReturn: ... # a class decorator, but mypy does not support type[_T] for abstract # classes until this issue is resolved, https://github.com/python/mypy/issues/4717. def trait(cls: _T) -> _T: ... -def mypyc_attr(*attrs: str, **kwattrs: object) -> Callable[[_T], _T]: ... +def mypyc_attr(*attrs: str, **kwattrs: object) -> IdentityFunction: ... class FlexibleAlias(Generic[_T, _U]): ... 
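A hedged sketch (not part of the diff) of the call forms the new mypy_extensions overloads distinguish: passing a type preserves it, while the bare or name-only forms now evaluate to Any instead of leaving the type parameter unsolved.

```
from typing import Callable
from mypy_extensions import Arg, DefaultArg, VarArg

# Typed forms: the _T overloads keep the annotated type.
Handler = Callable[[Arg(int, "code"), DefaultArg(str, "message")], None]

# Bare / name-only forms: the new no-type overloads return Any.
Loose = Callable[[Arg(name="code"), VarArg()], None]
```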
diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 016d215027ae..3b3b64c038c1 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -335,11 +335,11 @@ file.py:6: error: Argument 1 to "foo" has incompatible type "str"; expected "int \[mypy-x] ignore_errors = True [file x.py] -"" + 0 +x: str = 5 [file y.py] -"" + 0 +x: str = 5 [out] -y.py:1: error: Unsupported operand types for + ("str" and "int") +y.py:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testConfigFollowImportsNormal] # cmd: mypy main.py @@ -840,19 +840,19 @@ src/anamespace/foo/bar.py:2: error: Incompatible return value type (got "int", e x = 0 # type: str [file pkg/a1/b/f.py] from pkg.a1.b.c.d.e import x -x + 1 +x() [file pkg/a2/__init__.py] [file pkg/a2/b/c/d/e.py] x = 0 # type: str [file pkg/a2/b/f.py] from pkg.a2.b.c.d.e import x -x + 1 +x() [out] pkg/a2/b/c/d/e.py:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") pkg/a1/b/c/d/e.py:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") -pkg/a2/b/f.py:2: error: Unsupported operand types for + ("str" and "int") -pkg/a1/b/f.py:2: error: Unsupported operand types for + ("str" and "int") +pkg/a2/b/f.py:2: error: "str" not callable +pkg/a1/b/f.py:2: error: "str" not callable [case testFollowImportStubs1] # cmd: mypy main.py From ce891e32d4306df2b9a8bbcb4b88dae9750aa16c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 8 Jul 2022 14:15:39 +0100 Subject: [PATCH 68/80] Remove use of LiteralString in builtins (#13093) Fixes #13091 Co-authored-by: Ivan Levkivskyi --- mypy/typeshed/stdlib/builtins.pyi | 154 ++++++------------------------ 1 file changed, 31 insertions(+), 123 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 577d5fd99e36..9d1d1f4b1b10 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -54,7 +54,7 @@ from typing import ( # noqa: Y027 TypeVar, overload, ) -from typing_extensions import Literal, LiteralString, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -400,39 +400,21 @@ class str(Sequence[str]): def __new__(cls: type[Self], object: object = ...) -> Self: ... @overload def __new__(cls: type[Self], object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... - @overload - def capitalize(self: LiteralString) -> LiteralString: ... - @overload - def capitalize(self) -> str: ... # type: ignore[misc] - @overload - def casefold(self: LiteralString) -> LiteralString: ... - @overload - def casefold(self) -> str: ... # type: ignore[misc] - @overload - def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... - @overload - def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + def capitalize(self) -> str: ... + def casefold(self) -> str: ... + def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ... def endswith( self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... 
if sys.version_info >= (3, 8): - @overload - def expandtabs(self: LiteralString, tabsize: SupportsIndex = ...) -> LiteralString: ... - @overload - def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... # type: ignore[misc] + def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... else: - @overload - def expandtabs(self: LiteralString, tabsize: int = ...) -> LiteralString: ... - @overload - def expandtabs(self, tabsize: int = ...) -> str: ... # type: ignore[misc] + def expandtabs(self, tabsize: int = ...) -> str: ... def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... - @overload - def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] + def format(self, *args: object, **kwargs: object) -> str: ... def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def isalnum(self) -> bool: ... @@ -449,102 +431,40 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... - @overload - def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... - @overload - def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] - @overload - def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... - @overload - def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] - @overload - def lower(self: LiteralString) -> LiteralString: ... - @overload - def lower(self) -> str: ... # type: ignore[misc] - @overload - def lstrip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... - @overload - def lstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] - @overload - def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload - def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def replace( - self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = ... - ) -> LiteralString: ... - @overload - def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... # type: ignore[misc] + def join(self, __iterable: Iterable[str]) -> str: ... + def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... + def lower(self) -> str: ... + def lstrip(self, __chars: str | None = ...) -> str: ... + def partition(self, __sep: str) -> tuple[str, str, str]: ... + def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... if sys.version_info >= (3, 9): - @overload - def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... - @overload - def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] - @overload - def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... - @overload - def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] + def removeprefix(self, __prefix: str) -> str: ... + def removesuffix(self, __suffix: str) -> str: ... def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... 
def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... - @overload - def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] - @overload - def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload - def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def rsplit(self: LiteralString, sep: LiteralString | None = ..., maxsplit: SupportsIndex = ...) -> list[LiteralString]: ... - @overload - def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] - @overload - def rstrip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... - @overload - def rstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] - @overload - def split(self: LiteralString, sep: LiteralString | None = ..., maxsplit: SupportsIndex = ...) -> list[LiteralString]: ... - @overload - def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] - @overload - def splitlines(self: LiteralString, keepends: bool = ...) -> list[LiteralString]: ... - @overload - def splitlines(self, keepends: bool = ...) -> list[str]: ... # type: ignore[misc] + def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... + def rpartition(self, __sep: str) -> tuple[str, str, str]: ... + def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... + def rstrip(self, __chars: str | None = ...) -> str: ... + def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... + def splitlines(self, keepends: bool = ...) -> list[str]: ... def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - @overload - def strip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... - @overload - def strip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] - @overload - def swapcase(self: LiteralString) -> LiteralString: ... - @overload - def swapcase(self) -> str: ... # type: ignore[misc] - @overload - def title(self: LiteralString) -> LiteralString: ... - @overload - def title(self) -> str: ... # type: ignore[misc] + def strip(self, __chars: str | None = ...) -> str: ... + def swapcase(self) -> str: ... + def title(self) -> str: ... def translate(self, __table: Mapping[int, int | str | None] | Sequence[int | str | None]) -> str: ... - @overload - def upper(self: LiteralString) -> LiteralString: ... - @overload - def upper(self) -> str: ... # type: ignore[misc] - @overload - def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... - @overload - def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] + def upper(self) -> str: ... + def zfill(self, __width: SupportsIndex) -> str: ... @staticmethod @overload def maketrans(__x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... @staticmethod @overload def maketrans(__x: str, __y: str, __z: str | None = ...) -> dict[int, int | None]: ... - @overload - def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... - @overload - def __add__(self, __s: str) -> str: ... 
# type: ignore[misc] + def __add__(self, __s: str) -> str: ... # Incompatible with Sequence.__contains__ def __contains__(self, __o: str) -> bool: ... # type: ignore[override] def __eq__(self, __x: object) -> bool: ... @@ -552,26 +472,14 @@ class str(Sequence[str]): def __getitem__(self, __i: SupportsIndex | slice) -> str: ... def __gt__(self, __x: str) -> bool: ... def __hash__(self) -> int: ... - @overload - def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... - @overload - def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] + def __iter__(self) -> Iterator[str]: ... def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __x: str) -> bool: ... - @overload - def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... - @overload - def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] - @overload - def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... - @overload - def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] + def __mod__(self, __x: Any) -> str: ... + def __mul__(self, __n: SupportsIndex) -> str: ... def __ne__(self, __x: object) -> bool: ... - @overload - def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... - @overload - def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] + def __rmul__(self, __n: SupportsIndex) -> str: ... def __getnewargs__(self) -> tuple[str]: ... class bytes(ByteString): From b0c1556c145a8b1fbd613b55d2144b8757baed54 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 8 Jul 2022 14:17:36 +0100 Subject: [PATCH 69/80] Fix typeshed regression in unittest (#13092) Fixes #13090 Co-authored-by: Ivan Levkivskyi --- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 3 +++ mypy/typeshed/stdlib/unittest/case.pyi | 20 +++++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index 162c40522224..005849e0fb05 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -78,6 +78,9 @@ class SupportsRAdd(Protocol[_T_contra, _T_co]): class SupportsSub(Protocol[_T_contra, _T_co]): def __sub__(self, __x: _T_contra) -> _T_co: ... +class SupportsRSub(Protocol[_T_contra, _T_co]): + def __rsub__(self, __x: _T_contra) -> _T_co: ... + class SupportsDivMod(Protocol[_T_contra, _T_co]): def __divmod__(self, __other: _T_contra) -> _T_co: ... diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 15b573edeebb..4f69c0a1f3b8 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -1,7 +1,7 @@ import logging import sys import unittest.result -from _typeshed import Self, SupportsDunderGE, SupportsSub +from _typeshed import Self, SupportsDunderGE, SupportsRSub, SupportsSub from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Set as AbstractSet from contextlib import AbstractContextManager from types import TracebackType @@ -196,6 +196,15 @@ class TestCase: delta: None = ..., ) -> None: ... @overload + def assertAlmostEqual( + self, + first: _T, + second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], + places: int | None = ..., + msg: Any = ..., + delta: None = ..., + ) -> None: ... + @overload def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... 
@overload def assertNotAlmostEqual( @@ -210,6 +219,15 @@ class TestCase: msg: Any = ..., delta: None = ..., ) -> None: ... + @overload + def assertNotAlmostEqual( + self, + first: _T, + second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], + places: int | None = ..., + msg: Any = ..., + delta: None = ..., + ) -> None: ... def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = ...) -> None: ... From 1fedf2c1adfcaccf7c19adbc8dee5cddebdd5d33 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 8 Jul 2022 14:39:59 +0100 Subject: [PATCH 70/80] Fix false-positive error on multiple enum base classes (#12963) Fixes #12787. Mypy currently emits a false-positive error for this snippet of code, even though it works fine at runtime (and this exact inheritance structure is used in enum.py in the stdlib): ``` from enum import Enum, Flag class ReprEnum(Enum): ... class MyFlag(ReprEnum, Flag): ... ``` This PR fixes that. --- mypy/checker.py | 19 +++++++++++++++++-- test-data/unit/check-enum.test | 34 +++++++++++++++++++++++++++------- 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 62dd15da896c..60be940a327c 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1965,14 +1965,29 @@ def is_final_enum_value(self, sym: SymbolTableNode) -> bool: return False def check_enum_bases(self, defn: ClassDef) -> None: + """ + Non-enum mixins cannot appear after enum bases; this is disallowed at runtime: + + class Foo: ... + class Bar(enum.Enum, Foo): ... + + But any number of enum mixins can appear in a class definition + (even if multiple enum bases define __new__). So this is fine: + + class Foo(enum.Enum): + def __new__(cls, val): ... + class Bar(enum.Enum): + def __new__(cls, val): ... + class Baz(int, Foo, Bar, enum.Flag): ... 
+ """ enum_base: Optional[Instance] = None for base in defn.info.bases: if enum_base is None and base.type.is_enum: enum_base = base continue - elif enum_base is not None: + elif enum_base is not None and not base.type.is_enum: self.fail( - f'No base classes are allowed after "{enum_base}"', + f'No non-enum mixin classes are allowed after "{enum_base}"', defn, ) break diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 84ac3904772a..4bd04cf67354 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -1903,6 +1903,14 @@ class Third: class Mixin: pass +class EnumWithCustomNew(enum.Enum): + def __new__(cls, val): + pass + +class SecondEnumWithCustomNew(enum.Enum): + def __new__(cls, val): + pass + # Correct Enums: class Correct0(enum.Enum): @@ -1920,6 +1928,9 @@ class Correct3(Mixin, enum.Enum): class RegularClass(Mixin, First, Second): pass +class Correct5(enum.Enum): + pass + # Correct inheritance: class _InheritingDataAndMixin(Correct1): @@ -1934,25 +1945,34 @@ class _CorrectWithDataAndMixin(Mixin, First, Correct0): class _CorrectWithMixin(Mixin, Correct2): pass +class _CorrectMultipleEnumBases(Correct0, Correct5): + pass + +class _MultipleEnumBasesAndMixin(int, Correct0, enum.Flag): + pass + +class _MultipleEnumBasesWithCustomNew(int, EnumWithCustomNew, SecondEnumWithCustomNew): + pass + # Wrong Enums: class TwoDataTypesViaInheritance(Second, Correct2): # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Correct2" pass -class TwoDataTypesViaInheritanceAndMixin(Second, Correct2, Mixin): # E: No base classes are allowed after "__main__.Correct2" \ +class TwoDataTypesViaInheritanceAndMixin(Second, Correct2, Mixin): # E: No non-enum mixin classes are allowed after "__main__.Correct2" \ # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Correct2" pass -class MixinAfterEnum1(enum.Enum, Mixin): # E: No base classes are allowed after "enum.Enum" +class MixinAfterEnum1(enum.Enum, Mixin): # E: No non-enum mixin classes are allowed after "enum.Enum" pass -class MixinAfterEnum2(First, enum.Enum, Mixin): # E: No base classes are allowed after "enum.Enum" +class MixinAfterEnum2(First, enum.Enum, Mixin): # E: No non-enum mixin classes are allowed after "enum.Enum" pass class TwoDataTypes(First, Second, enum.Enum): # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Second" pass -class TwoDataTypesAndIntEnumMixin(First, Second, enum.IntEnum, Mixin): # E: No base classes are allowed after "enum.IntEnum" \ +class TwoDataTypesAndIntEnumMixin(First, Second, enum.IntEnum, Mixin): # E: No non-enum mixin classes are allowed after "enum.IntEnum" \ # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Second" pass @@ -1960,16 +1980,16 @@ class ThreeDataTypes(First, Second, Third, enum.Enum): # E: Only a single data # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Third" pass -class ThreeDataTypesAndMixin(First, Second, Third, enum.Enum, Mixin): # E: No base classes are allowed after "enum.Enum" \ +class ThreeDataTypesAndMixin(First, Second, Third, enum.Enum, Mixin): # E: No non-enum mixin classes are allowed after "enum.Enum" \ # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Second" \ # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Third" pass -class FromEnumAndOther1(Correct2, Second, enum.Enum): # E: 
No base classes are allowed after "__main__.Correct2" \ +class FromEnumAndOther1(Correct2, Second, enum.Enum): # E: No non-enum mixin classes are allowed after "__main__.Correct2" \ # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Second" pass -class FromEnumAndOther2(Correct2, Second): # E: No base classes are allowed after "__main__.Correct2" \ +class FromEnumAndOther2(Correct2, Second): # E: No non-enum mixin classes are allowed after "__main__.Correct2" \ # E: Only a single data type mixin is allowed for Enum subtypes, found extra "__main__.Second" pass [builtins fixtures/tuple.pyi] From 49b4b3da336cefb2aceb13ee721b5aa7b15bf8e0 Mon Sep 17 00:00:00 2001 From: Stanislav K <44553725+stkrizh@users.noreply.github.com> Date: Fri, 8 Jul 2022 15:43:27 +0200 Subject: [PATCH 71/80] Don't add __match_args__ for dataclasses and named tuples with --python-version < 3.10 (#12503) Fixes #12489 --- mypy/plugins/dataclasses.py | 3 +- mypy/semanal_namedtuple.py | 3 +- mypy/test/testmerge.py | 2 - test-data/unit/check-dataclasses.test | 45 ++++++++++++++++++++ test-data/unit/check-namedtuple.test | 23 ++++++++++ test-data/unit/deps.test | 32 +++++++++++++- test-data/unit/merge.test | 61 ++++++++++++++++++++++++++- 7 files changed, 163 insertions(+), 6 deletions(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 00c46e1417c5..87b42a499a1c 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -230,7 +230,8 @@ def transform(self) -> bool: if (decorator_arguments['match_args'] and ('__match_args__' not in info.names or info.names['__match_args__'].plugin_generated) and - attributes): + attributes and + py_version >= (3, 10)): str_type = ctx.api.named_type("builtins.str") literals: List[Type] = [LiteralType(attr.name, str_type) for attr in attributes if attr.is_in_init] diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 109ec17cbc89..ef0a38d22277 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -447,7 +447,8 @@ def add_field(var: Var, is_initialized_in_class: bool = False, add_field(Var('_source', strtype), is_initialized_in_class=True) add_field(Var('__annotations__', ordereddictype), is_initialized_in_class=True) add_field(Var('__doc__', strtype), is_initialized_in_class=True) - add_field(Var('__match_args__', match_args_type), is_initialized_in_class=True) + if self.options.python_version >= (3, 10): + add_field(Var('__match_args__', match_args_type), is_initialized_in_class=True) tvd = TypeVarType(SELF_TVAR_NAME, info.fullname + '.' 
+ SELF_TVAR_NAME, -1, [], info.tuple_type) diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index fe0de2a7fe2d..3f07c39f856d 100644 --- a/mypy/test/testmerge.py +++ b/mypy/test/testmerge.py @@ -7,7 +7,6 @@ from mypy import build from mypy.build import BuildResult from mypy.modulefinder import BuildSource -from mypy.defaults import PYTHON3_VERSION from mypy.errors import CompileError from mypy.nodes import ( Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, Var, TypeVarExpr, @@ -107,7 +106,6 @@ def build(self, source: str, testcase: DataDrivenTestCase) -> Optional[BuildResu options.use_builtins_fixtures = True options.export_types = True options.show_traceback = True - options.python_version = PYTHON3_VERSION main_path = os.path.join(test_temp_dir, 'main') with open(main_path, 'w', encoding='utf8') as f: f.write(source) diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index fb1b4a1e8b46..40c6b66d5c39 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1790,3 +1790,48 @@ class MyDataclass: class MyGeneric(Generic[T]): ... class MyClass(MyGeneric[MyDataclass]): ... [builtins fixtures/dataclasses.pyi] + +[case testDataclassWithMatchArgs] +# flags: --python-version 3.10 +from dataclasses import dataclass +@dataclass +class One: + bar: int + baz: str +o: One +reveal_type(o.__match_args__) # N: Revealed type is "Tuple[Literal['bar'], Literal['baz']]" +@dataclass(match_args=True) +class Two: + bar: int +t: Two +reveal_type(t.__match_args__) # N: Revealed type is "Tuple[Literal['bar']]" +[builtins fixtures/dataclasses.pyi] + +[case testDataclassWithoutMatchArgs] +# flags: --python-version 3.10 +from dataclasses import dataclass +@dataclass(match_args=False) +class One: + bar: int + baz: str +o: One +reveal_type(o.__match_args__) # E: "One" has no attribute "__match_args__" \ + # N: Revealed type is "Any" +[builtins fixtures/dataclasses.pyi] + +[case testDataclassWithMatchArgsOldVersion] +# flags: --python-version 3.9 +from dataclasses import dataclass +@dataclass(match_args=True) +class One: + bar: int +o: One +reveal_type(o.__match_args__) # E: "One" has no attribute "__match_args__" \ + # N: Revealed type is "Any" +@dataclass +class Two: + bar: int +t: Two +reveal_type(t.__match_args__) # E: "Two" has no attribute "__match_args__" \ + # N: Revealed type is "Any" +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index c6f1fe3b1d04..034889878c37 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -1134,3 +1134,26 @@ def f(fields) -> None: NT2 = namedtuple("bad", "x") # E: First argument to namedtuple() should be "NT2", not "bad" nt2: NT2 = NT2(x=1) [builtins fixtures/tuple.pyi] + +[case testNamedTupleHasMatchArgs] +# flags: --python-version 3.10 +from typing import NamedTuple +class One(NamedTuple): + bar: int + baz: str +o: One +reveal_type(o.__match_args__) # N: Revealed type is "Tuple[Literal['bar'], Literal['baz']]" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] + +[case testNamedTupleHasNoMatchArgsOldVersion] +# flags: --python-version 3.9 +from typing import NamedTuple +class One(NamedTuple): + bar: int + baz: str +o: One +reveal_type(o.__match_args__) # E: "One" has no attribute "__match_args__" \ + # N: Revealed type is "Any" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] diff --git 
a/test-data/unit/deps.test b/test-data/unit/deps.test index 53156b6f4f48..884b10f166b0 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -1420,7 +1420,7 @@ class D(C): -> m, m.C, m.D -> m.D -[case testDataclassDeps] +[case testDataclassDepsOldVersion] # flags: --python-version 3.7 from dataclasses import dataclass @@ -1435,6 +1435,36 @@ class B(A): y: int [builtins fixtures/dataclasses.pyi] +[out] + -> , m + -> + -> , m.B.__init__ + -> + -> + -> + -> m, m.A, m.B + -> m + -> m + -> m.B + -> m + -> m + -> m + +[case testDataclassDeps] +# flags: --python-version 3.10 +from dataclasses import dataclass + +Z = int + +@dataclass +class A: + x: Z + +@dataclass +class B(A): + y: int +[builtins fixtures/dataclasses.pyi] + [out] -> , m -> diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index 881e21bb1a92..a593a064cbb2 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -646,7 +646,7 @@ TypeInfo<2>( f<3>)) [case testNamedTuple_typeinfo] - +# flags: --python-version 3.10 import target [file target.py] from typing import NamedTuple @@ -707,6 +707,65 @@ TypeInfo<2>( x<19> (target.A<0>) y<20> (target.A<0>))) +[case testNamedTupleOldVersion_typeinfo] +import target +[file target.py] +from typing import NamedTuple +class A: pass +N = NamedTuple('N', [('x', A)]) +[file target.py.next] +from typing import NamedTuple +class A: pass +N = NamedTuple('N', [('x', A), ('y', A)]) +[builtins fixtures/tuple.pyi] +[out] +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) + Names()) +TypeInfo<2>( + Name(target.N) + Bases(builtins.tuple[target.A<0>, ...]<3>) + Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) + Names( + _NT<6> + __annotations__<7> (builtins.object<1>) + __doc__<8> (builtins.str<9>) + __new__<10> + _asdict<11> + _field_defaults<12> (builtins.object<1>) + _field_types<13> (builtins.object<1>) + _fields<14> (Tuple[builtins.str<9>]) + _make<15> + _replace<16> + _source<17> (builtins.str<9>) + x<18> (target.A<0>))) +==> +TypeInfo<0>( + Name(target.A) + Bases(builtins.object<1>) + Mro(target.A<0>, builtins.object<1>) + Names()) +TypeInfo<2>( + Name(target.N) + Bases(builtins.tuple[target.A<0>, ...]<3>) + Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) + Names( + _NT<6> + __annotations__<7> (builtins.object<1>) + __doc__<8> (builtins.str<9>) + __new__<10> + _asdict<11> + _field_defaults<12> (builtins.object<1>) + _field_types<13> (builtins.object<1>) + _fields<14> (Tuple[builtins.str<9>, builtins.str<9>]) + _make<15> + _replace<16> + _source<17> (builtins.str<9>) + x<18> (target.A<0>) + y<19> (target.A<0>))) + [case testUnionType_types] import target [file target.py] From 9f99400b748a4550813c54fb3f7a00268affdd6d Mon Sep 17 00:00:00 2001 From: Jake Lishman Date: Fri, 8 Jul 2022 15:07:15 +0100 Subject: [PATCH 72/80] Fix crash overriding partial-type attribute with method (#12943) Attributes can still be partially typed (e.g. ``) after a parent-class definition if the block they are declared in is deferred. The first pass for child classes might then encounter this type when considering method overrides, which could cause a crash when attempting to determine subtype compatibility. 
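A minimal reproducer (a sketch adapted from the regression test added in this patch) looks roughly like this: the class-body redefinition of `force_deferral` defers inference of `Base.__init__` to a later pass, so `partial_type` still has only a partial list type when `Derived` overrides it with a method:

```
class Base:
    def __init__(self, arg: int) -> None:
        # Inference of this attribute stays partial because __init__ ends up deferred.
        self.partial_type = []
        self.force_deferral = []

    # Redefining the attribute in the class body forces __init__ to be deferred.
    force_deferral = []


class Derived(Base):
    # Overriding the partially typed attribute with a method used to crash mypy.
    def partial_type(self) -> int:
        ...
```
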
Fixes #11686 Fixes #11981 --- mypy/checker.py | 4 +++- test-data/unit/check-classes.test | 18 ++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/mypy/checker.py b/mypy/checker.py index 60be940a327c..d17871039332 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1579,7 +1579,9 @@ def check_method_override_for_base_with_name( # it can be checked for compatibility. original_type = get_proper_type(base_attr.type) original_node = base_attr.node - if original_type is None: + # `original_type` can be partial if (e.g.) it is originally an + # instance variable from an `__init__` block that becomes deferred. + if original_type is None or isinstance(original_type, PartialType): if self.pass_num < self.last_pass: # If there are passes left, defer this node until next pass, # otherwise try reconstructing the method type from available information. diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index ee560de89208..e326e24df0e6 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -151,6 +151,24 @@ class Derived(Base): __hash__ = 1 # E: Incompatible types in assignment (expression has type "int", base class "Base" defined the type as "Callable[[Base], int]") +[case testOverridePartialAttributeWithMethod] +# This was crashing: https://github.com/python/mypy/issues/11686. +class Base: + def __init__(self, arg: int): + self.partial_type = [] # E: Need type annotation for "partial_type" (hint: "partial_type: List[] = ...") + self.force_deferral = [] + + # Force inference of the `force_deferral` attribute in `__init__` to be + # deferred to a later pass by providing a definition in another context, + # which means `partial_type` remains only partially inferred. + force_deferral = [] # E: Need type annotation for "force_deferral" (hint: "force_deferral: List[] = ...") + + +class Derived(Base): + def partial_type(self) -> int: # E: Signature of "partial_type" incompatible with supertype "Base" + ... + + -- Attributes -- ---------- From d06dcf0c0383a95492aef8fe46eafcff10be63c0 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 16 Jul 2022 09:23:20 -0700 Subject: [PATCH 73/80] [0.970 backport] add typing_extensions.NamedTuple (#13149) https://github.com/python/typeshed/pull/8295 --- mypy/typeshed/stdlib/typing_extensions.pyi | 26 ++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index 38fb9dec19d9..358853549bed 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -1,6 +1,8 @@ import abc +import collections import sys from _typeshed import IdentityFunction, Self as TypeshedSelf # see #6932 for why the Self alias cannot have a leading underscore +from collections.abc import Iterable from typing import ( # noqa: Y022,Y027,Y039 TYPE_CHECKING as TYPE_CHECKING, Any, @@ -52,6 +54,7 @@ __all__ = [ "Counter", "Deque", "DefaultDict", + "NamedTuple", "OrderedDict", "TypedDict", "SupportsIndex", @@ -189,9 +192,11 @@ else: def is_typeddict(tp: object) -> bool: ... 
# New things in 3.11 +# NamedTuples are not new, but the ability to create generic NamedTuples is new in 3.11 if sys.version_info >= (3, 11): from typing import ( LiteralString as LiteralString, + NamedTuple as NamedTuple, Never as Never, NotRequired as NotRequired, Required as Required, @@ -233,3 +238,24 @@ else: field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., **kwargs: object, ) -> IdentityFunction: ... + + class NamedTuple(tuple[Any, ...]): + if sys.version_info < (3, 8): + _field_types: collections.OrderedDict[str, type] + elif sys.version_info < (3, 9): + _field_types: dict[str, type] + _field_defaults: dict[str, Any] + _fields: tuple[str, ...] + _source: str + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + @overload + def __init__(self, typename: str, fields: None = ..., **kwargs: Any) -> None: ... + @classmethod + def _make(cls: type[TypeshedSelf], iterable: Iterable[Any]) -> TypeshedSelf: ... + if sys.version_info >= (3, 8): + def _asdict(self) -> dict[str, Any]: ... + else: + def _asdict(self) -> collections.OrderedDict[str, Any]: ... + + def _replace(self: TypeshedSelf, **kwargs: Any) -> TypeshedSelf: ... From 88c1b85c7ca5bb7ac2d7ad36d7dcdfb222412a75 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 16 Jul 2022 09:24:44 -0700 Subject: [PATCH 74/80] [mypyc] Add LoadAddress op for PyFloat_Type & PyTuple_Type (#13078) (#13150) - Fixes https://github.com/mypyc/mypyc/issues/924 - Fixes https://github.com/mypyc/mypyc/issues/926 - Fixes https://github.com/mypyc/mypyc/issues/935 This is a follow-up of commit 7811f085d0081d48302237faa20254c0fc3eb43a. Co-authored-by: Richard Si <63936253+ichard26@users.noreply.github.com> --- mypyc/primitives/float_ops.py | 10 +++++-- mypyc/primitives/tuple_ops.py | 7 ++++- mypyc/test-data/irbuild-dunders.test | 11 +++---- mypyc/test-data/run-python37.test | 4 ++- mypyc/test-data/run-tuples.test | 43 ++++++++++++++++++++++++++++ 5 files changed, 64 insertions(+), 11 deletions(-) diff --git a/mypyc/primitives/float_ops.py b/mypyc/primitives/float_ops.py index ad028a901222..3359cf6fe122 100644 --- a/mypyc/primitives/float_ops.py +++ b/mypyc/primitives/float_ops.py @@ -2,12 +2,18 @@ from mypyc.ir.ops import ERR_MAGIC from mypyc.ir.rtypes import ( - str_rprimitive, float_rprimitive + str_rprimitive, float_rprimitive, object_rprimitive ) from mypyc.primitives.registry import ( - function_op + load_address_op, function_op ) +# Get the 'builtins.float' type object. +load_address_op( + name='builtins.float', + type=object_rprimitive, + src='PyFloat_Type') + # float(str) function_op( name='builtins.float', diff --git a/mypyc/primitives/tuple_ops.py b/mypyc/primitives/tuple_ops.py index ce88a4ee0f4d..33f8e331b56d 100644 --- a/mypyc/primitives/tuple_ops.py +++ b/mypyc/primitives/tuple_ops.py @@ -9,8 +9,13 @@ tuple_rprimitive, int_rprimitive, list_rprimitive, object_rprimitive, c_pyssize_t_rprimitive, bit_rprimitive ) -from mypyc.primitives.registry import method_op, function_op, custom_op +from mypyc.primitives.registry import load_address_op, method_op, function_op, custom_op +# Get the 'builtins.tuple' type object. 
+load_address_op( + name='builtins.tuple', + type=object_rprimitive, + src='PyTuple_Type') # tuple[index] (for an int index) tuple_get_item_op = method_op( diff --git a/mypyc/test-data/irbuild-dunders.test b/mypyc/test-data/irbuild-dunders.test index 808617e43889..d06a570aa7b0 100644 --- a/mypyc/test-data/irbuild-dunders.test +++ b/mypyc/test-data/irbuild-dunders.test @@ -175,16 +175,13 @@ L0: def f(c): c :: __main__.C r0, r1 :: int - r2, r3, r4 :: object - r5 :: str - r6, r7 :: object + r2, r3, r4, r5 :: object L0: r0 = c.__neg__() r1 = c.__invert__() r2 = load_address PyLong_Type r3 = PyObject_CallFunctionObjArgs(r2, c, 0) - r4 = builtins :: module - r5 = 'float' - r6 = CPyObject_GetAttr(r4, r5) - r7 = PyObject_CallFunctionObjArgs(r6, c, 0) + r4 = load_address PyFloat_Type + r5 = PyObject_CallFunctionObjArgs(r4, c, 0) return 1 + diff --git a/mypyc/test-data/run-python37.test b/mypyc/test-data/run-python37.test index 5bf2c29263e1..61d428c17a44 100644 --- a/mypyc/test-data/run-python37.test +++ b/mypyc/test-data/run-python37.test @@ -70,6 +70,7 @@ class Person4: @dataclass class Person5: + weight: float friends: Set[str] = field(default_factory=set) parents: FrozenSet[str] = frozenset() @@ -122,7 +123,8 @@ assert i8 > i9 assert Person1.__annotations__ == {'age': int, 'name': str} assert Person2.__annotations__ == {'age': int, 'name': str} -assert Person5.__annotations__ == {'friends': set, 'parents': frozenset} +assert Person5.__annotations__ == {'weight': float, 'friends': set, + 'parents': frozenset} [file driver.py] import sys diff --git a/mypyc/test-data/run-tuples.test b/mypyc/test-data/run-tuples.test index 759177342fa9..26b039320844 100644 --- a/mypyc/test-data/run-tuples.test +++ b/mypyc/test-data/run-tuples.test @@ -95,6 +95,49 @@ class Sub(NT): pass assert f(Sub(3, 2)) == 3 +-- Ref: https://github.com/mypyc/mypyc/issues/924 +[case testNamedTupleClassSyntax] +from typing import Dict, List, NamedTuple, Optional, Tuple, Union + +class ClassIR: pass + +class FuncIR: pass + +StealsDescription = Union[bool, List[bool]] + +class Record(NamedTuple): + st_mtime: float + st_size: int + is_borrowed: bool + hash: str + python_path: Tuple[str, ...] + type: 'ClassIR' + method: FuncIR + shadow_method: Optional[FuncIR] + classes: Dict[str, 'ClassIR'] + steals: StealsDescription + ordering: Optional[List[int]] + extra_int_constants: List[Tuple[int]] + +[file driver.py] +from typing import Optional +from native import ClassIR, FuncIR, Record + +assert Record.__annotations__ == { + 'st_mtime': float, + 'st_size': int, + 'is_borrowed': bool, + 'hash': str, + 'python_path': tuple, + 'type': ClassIR, + 'method': FuncIR, + 'shadow_method': type, + 'classes': dict, + 'steals': type, + 'ordering': type, + 'extra_int_constants': list, +}, Record.__annotations__ + [case testTupleOps] from typing import Tuple, List, Any, Optional from typing_extensions import Final From ee10d29a0eeded05ad4fa281c90341412f843d0b Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 16 Jul 2022 09:25:38 -0700 Subject: [PATCH 75/80] flush keepalives on operator assignment statements (#13144) (#13151) This PR makes mypyc flush keepalives on operator assignment statements, to prevent accessing undefined variables in the generated C code. Fixes mypyc/mypyc#941. 
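The failing pattern (a sketch based on the run test added in this patch) is an augmented assignment through a borrowed attribute inside a generator that returns early; without flushing the keepalive at the statement boundary, the generated C code could access an undefined variable:

```
from typing import Iterator

class Bar:
    bar = 0

class Foo:
    bar = Bar()

    def f(self) -> Iterator[int]:
        if self:
            # The borrow of self.bar taken for this augmented assignment
            # must be flushed before the early return.
            self.bar.bar += 1
            return
        yield 0
```
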
Co-authored-by: Zsolt Dollenstein --- mypyc/irbuild/statement.py | 1 + mypyc/test-data/run-generators.test | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 93dc5f24158f..c1d9666a34ec 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -136,6 +136,7 @@ def transform_operator_assignment_stmt(builder: IRBuilder, stmt: OperatorAssignm # usually operator assignments are done in-place # but when target doesn't support that we need to manually assign builder.assign(target, res, res.line) + builder.flush_keep_alives() def transform_import(builder: IRBuilder, node: Import) -> None: diff --git a/mypyc/test-data/run-generators.test b/mypyc/test-data/run-generators.test index 8aecce6564c8..8feabd21258b 100644 --- a/mypyc/test-data/run-generators.test +++ b/mypyc/test-data/run-generators.test @@ -595,3 +595,15 @@ class C: self.foo.flag = True yield self.foo.flag = False + +[case testGeneratorEarlyReturnWithBorrows] +from typing import Iterator +class Bar: + bar = 0 +class Foo: + bar = Bar() + def f(self) -> Iterator[int]: + if self: + self.bar.bar += 1 + return + yield 0 \ No newline at end of file From 2132036a019ed26d77389c0a9af9b928911a5846 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 17 Jul 2022 02:22:04 -0700 Subject: [PATCH 76/80] Handle cwd correctly in pyinfo (#13161) (#13162) This fixes a recent regression introduced by the change to use sys.path Fixes #12956 --- mypy/pyinfo.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/mypy/pyinfo.py b/mypy/pyinfo.py index c129063a01a4..b8a1b234e67a 100644 --- a/mypy/pyinfo.py +++ b/mypy/pyinfo.py @@ -10,9 +10,6 @@ import sys import sysconfig -if __name__ == '__main__': - sys.path = sys.path[1:] # we don't want to pick up mypy.types - MYPY = False if MYPY: from typing import List @@ -29,10 +26,17 @@ def getsearchdirs(): ) stdlib = sysconfig.get_path("stdlib") stdlib_ext = os.path.join(stdlib, "lib-dynload") - cwd = os.path.abspath(os.getcwd()) - excludes = set([cwd, stdlib_zip, stdlib, stdlib_ext]) - - abs_sys_path = (os.path.abspath(p) for p in sys.path) + excludes = set([stdlib_zip, stdlib, stdlib_ext]) + + # Drop the first entry of sys.path + # - If pyinfo.py is executed as a script (in a subprocess), this is the directory + # containing pyinfo.py + # - Otherwise, if mypy launched via console script, this is the directory of the script + # - Otherwise, if mypy launched via python -m mypy, this is the current directory + # In all cases, this is safe to drop + # Note that mypy adds the cwd to SearchPaths.python_path, so we still find things on the + # cwd consistently (the return value here sets SearchPaths.package_path) + abs_sys_path = (os.path.abspath(p) for p in sys.path[1:]) return [p for p in abs_sys_path if p not in excludes] From 72fa32526bff25d0738c39835b2ac309976961f8 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 18 Jul 2022 14:22:56 +0100 Subject: [PATCH 77/80] Update version to 0.970 --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index b46cd5b82a66..543def4a3dea 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,7 +5,7 @@ # - Release versions have the form "0.NNN". # - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). # - For 1.0 we'll switch back to 1.2.3 form. 
-__version__ = '0.970+dev' +__version__ = '0.970' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From d8d900c4b8a178d8416ea831864234a21204dadb Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 18 Jul 2022 17:36:10 +0100 Subject: [PATCH 78/80] Update version to 0.971+dev --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index 543def4a3dea..e080326d8cf1 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,7 +5,7 @@ # - Release versions have the form "0.NNN". # - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). # - For 1.0 we'll switch back to 1.2.3 form. -__version__ = '0.970' +__version__ = '0.971+dev' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From 61c0064fc4eb0a97ce1b69c05124fa6c2a4f3670 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 18 Jul 2022 17:23:55 +0100 Subject: [PATCH 79/80] Add back workaround to avoid confusing mypy.types and types in pyinfo (#13176) We run mypy/pyinfo.py as a script, and this means that mypy/types.py could be picked up instead of the stdlib `types` module, which clearly doesn't work. This seems to happen at least on macOS, and it broke PEP 561 tests. The workaround was accidentally removed as part of #13161. This should fix #13174 and help with the wheel builds. --- mypy/pyinfo.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/mypy/pyinfo.py b/mypy/pyinfo.py index b8a1b234e67a..c874530a1799 100644 --- a/mypy/pyinfo.py +++ b/mypy/pyinfo.py @@ -14,6 +14,15 @@ if MYPY: from typing import List +if __name__ == '__main__': + # HACK: We don't want to pick up mypy.types as the top-level types + # module. This could happen if this file is run as a script. + # This workaround fixes it. + old_sys_path = sys.path + sys.path = sys.path[1:] + import types # noqa + sys.path = old_sys_path + def getsearchdirs(): # type: () -> List[str] From 1f08cf44c7ec3dc4111aaf817958f7a51018ba38 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 19 Jul 2022 13:32:01 +0100 Subject: [PATCH 80/80] Update version to 0.971 --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index e080326d8cf1..66c5bb807798 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,7 +5,7 @@ # - Release versions have the form "0.NNN". # - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). # - For 1.0 we'll switch back to 1.2.3 form. -__version__ = '0.971+dev' +__version__ = '0.971' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))