"""Type inference constraints."""
from __future__ import annotations
from typing import TYPE_CHECKING, Final, Iterable, List, Sequence, cast
import mypy.subtypes
import mypy.typeops
from mypy.argmap import ArgTypeExpander
from mypy.erasetype import erase_typevars
from mypy.maptype import map_instance_to_supertype
from mypy.nodes import (
ARG_OPT,
ARG_POS,
ARG_STAR,
ARG_STAR2,
CONTRAVARIANT,
COVARIANT,
ArgKind,
TypeInfo,
)
from mypy.types import (
TUPLE_LIKE_INSTANCE_NAMES,
AnyType,
CallableType,
DeletedType,
ErasedType,
Instance,
LiteralType,
NoneType,
Overloaded,
Parameters,
ParamSpecType,
PartialType,
ProperType,
TupleType,
Type,
TypeAliasType,
TypedDictType,
TypeOfAny,
TypeQuery,
TypeType,
TypeVarId,
TypeVarLikeType,
TypeVarTupleType,
TypeVarType,
TypeVisitor,
UnboundType,
UninhabitedType,
UnionType,
UnpackType,
find_unpack_in_list,
get_proper_type,
has_recursive_types,
has_type_vars,
is_named_instance,
split_with_prefix_and_suffix,
)
from mypy.types_utils import is_union_with_any
from mypy.typestate import type_state
from mypy.typevartuples import extract_unpack, split_with_mapped_and_template
if TYPE_CHECKING:
from mypy.infer import ArgumentInferContext
SUBTYPE_OF: Final = 0
SUPERTYPE_OF: Final = 1
class Constraint:
"""A representation of a type constraint.
It can be either T <: type or T :> type (T is a type variable).
"""
type_var: TypeVarId
op = 0 # SUBTYPE_OF or SUPERTYPE_OF
target: Type
def __init__(self, type_var: TypeVarLikeType, op: int, target: Type) -> None:
self.type_var = type_var.id
self.op = op
# TODO: should we add "assert not isinstance(target, UnpackType)"?
# UnpackType is a synthetic type, and is never valid as a constraint target.
self.target = target
self.origin_type_var = type_var
# These are additional type variables that should be solved for together with type_var.
# TODO: A cleaner solution may be to modify the return type of infer_constraints()
# to include these instead, but this is a rather big refactoring.
self.extra_tvars: list[TypeVarLikeType] = []
def __repr__(self) -> str:
op_str = "<:"
if self.op == SUPERTYPE_OF:
op_str = ":>"
return f"{self.type_var} {op_str} {self.target}"
def __hash__(self) -> int:
return hash((self.type_var, self.op, self.target))
def __eq__(self, other: object) -> bool:
if not isinstance(other, Constraint):
return False
return (self.type_var, self.op, self.target) == (other.type_var, other.op, other.target)
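
# Illustrative sketch (schematic, not real mypy type objects): a bare type
# variable template matched against a concrete actual type yields a single
# constraint in the requested direction, e.g.
#
#     infer_constraints(T, int, SUPERTYPE_OF)
#     # -> [Constraint(T, SUPERTYPE_OF, int)]   i.e. "T :> builtins.int"
#
# where T stands for a TypeVarType and int for the builtins.int Instance.
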
def infer_constraints_for_callable(
callee: CallableType,
arg_types: Sequence[Type | None],
arg_kinds: list[ArgKind],
arg_names: Sequence[str | None] | None,
formal_to_actual: list[list[int]],
context: ArgumentInferContext,
) -> list[Constraint]:
"""Infer type variable constraints for a callable and actual arguments.
Return a list of constraints.
"""
constraints: list[Constraint] = []
mapper = ArgTypeExpander(context)
param_spec = callee.param_spec()
param_spec_arg_types = []
param_spec_arg_names = []
param_spec_arg_kinds = []
incomplete_star_mapping = False
for i, actuals in enumerate(formal_to_actual):
for actual in actuals:
if actual is None and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2):
# We can't use arguments to infer ParamSpec constraint, if only some
# are present in the current inference pass.
incomplete_star_mapping = True
break
for i, actuals in enumerate(formal_to_actual):
if isinstance(callee.arg_types[i], UnpackType):
unpack_type = callee.arg_types[i]
assert isinstance(unpack_type, UnpackType)
# In this case we are binding all of the actuals to *args
# and we want a constraint that the typevar tuple being unpacked
# is equal to a type list of all the actuals.
actual_types = []
for actual in actuals:
actual_arg_type = arg_types[actual]
if actual_arg_type is None:
continue
actual_types.append(
mapper.expand_actual_type(
actual_arg_type,
arg_kinds[actual],
callee.arg_names[i],
callee.arg_kinds[i],
)
)
unpacked_type = get_proper_type(unpack_type.type)
if isinstance(unpacked_type, TypeVarTupleType):
constraints.append(
Constraint(
unpacked_type,
SUPERTYPE_OF,
TupleType(actual_types, unpacked_type.tuple_fallback),
)
)
elif isinstance(unpacked_type, TupleType):
# Prefixes get converted to positional args, so technically the only case we
# should have here is like Tuple[Unpack[Ts], Y1, Y2, Y3]. If this turns out
# not to hold we can always handle the prefixes too.
inner_unpack = unpacked_type.items[0]
assert isinstance(inner_unpack, UnpackType)
inner_unpacked_type = get_proper_type(inner_unpack.type)
suffix_len = len(unpacked_type.items) - 1
if isinstance(inner_unpacked_type, TypeVarTupleType):
# Variadic item can be either *Ts...
constraints.append(
Constraint(
inner_unpacked_type,
SUPERTYPE_OF,
TupleType(
actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback
),
)
)
else:
# ...or it can be a homogeneous tuple.
assert (
isinstance(inner_unpacked_type, Instance)
and inner_unpacked_type.type.fullname == "builtins.tuple"
)
for at in actual_types[:-suffix_len]:
constraints.extend(
infer_constraints(inner_unpacked_type.args[0], at, SUPERTYPE_OF)
)
# Now handle the suffix (if any).
if suffix_len:
for tt, at in zip(unpacked_type.items[1:], actual_types[-suffix_len:]):
constraints.extend(infer_constraints(tt, at, SUPERTYPE_OF))
else:
assert False, "mypy bug: unhandled constraint inference case"
else:
for actual in actuals:
actual_arg_type = arg_types[actual]
if actual_arg_type is None:
continue
actual_type = mapper.expand_actual_type(
actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i]
)
if (
param_spec
and callee.arg_kinds[i] in (ARG_STAR, ARG_STAR2)
and not incomplete_star_mapping
):
# If actual arguments are mapped to ParamSpec type, we can't infer individual
                    # constraints; instead we store them and infer a single constraint at the end.
# It is impossible to map actual kind to formal kind, so use some heuristic.
# This inference is used as a fallback, so relying on heuristic should be OK.
param_spec_arg_types.append(
mapper.expand_actual_type(
actual_arg_type, arg_kinds[actual], None, arg_kinds[actual]
)
)
actual_kind = arg_kinds[actual]
param_spec_arg_kinds.append(
ARG_POS if actual_kind not in (ARG_STAR, ARG_STAR2) else actual_kind
)
param_spec_arg_names.append(arg_names[actual] if arg_names else None)
else:
c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF)
constraints.extend(c)
if (
param_spec
and not any(c.type_var == param_spec.id for c in constraints)
and not incomplete_star_mapping
):
# Use ParamSpec constraint from arguments only if there are no other constraints,
# since as explained above it is quite ad-hoc.
constraints.append(
Constraint(
param_spec,
SUPERTYPE_OF,
Parameters(
arg_types=param_spec_arg_types,
arg_kinds=param_spec_arg_kinds,
arg_names=param_spec_arg_names,
imprecise_arg_kinds=True,
),
)
)
return constraints
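
# Illustrative sketch (schematic, hypothetical callee): for a callee like
# "def f(x: T, y: T) -> None" called as "f(1, 'a')", formal_to_actual maps
# formal 0 -> [0] and formal 1 -> [1], and the function above collects roughly
#
#     [Constraint(T, SUPERTYPE_OF, int), Constraint(T, SUPERTYPE_OF, str)]
#
# which the solver later combines (typically via a join, giving T = object).
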
def infer_constraints(
template: Type, actual: Type, direction: int, skip_neg_op: bool = False
) -> list[Constraint]:
"""Infer type constraints.
Match a template type, which may contain type variable references,
recursively against a type which does not contain (the same) type
    variable references. The result is a list of type constraints of the
    form 'T is a supertype/subtype of x', where T is a type variable
present in the template and x is a type without reference to type
variables present in the template.
Assume T and S are type variables. Now the following results can be
calculated (read as '(template, actual) --> result'):
(T, X) --> T :> X
(X[T], X[Y]) --> T <: Y and T :> Y
((T, T), (X, Y)) --> T :> X and T :> Y
((T, S), (X, Y)) --> T :> X and S :> Y
(X[T], Any) --> T <: Any and T :> Any
The constraints are represented as Constraint objects. If skip_neg_op == True,
then skip adding reverse (polymorphic) constraints (since this is already a call
to infer such constraints).
"""
if any(
get_proper_type(template) == get_proper_type(t)
and get_proper_type(actual) == get_proper_type(a)
for (t, a) in reversed(type_state.inferring)
):
return []
if has_recursive_types(template) or isinstance(get_proper_type(template), Instance):
# This case requires special care because it may cause infinite recursion.
        # Note that we include Instances because they may be recursive (e.g. str, which is a Sequence[str]).
if not has_type_vars(template):
# Return early on an empty branch.
return []
type_state.inferring.append((template, actual))
res = _infer_constraints(template, actual, direction, skip_neg_op)
type_state.inferring.pop()
return res
return _infer_constraints(template, actual, direction, skip_neg_op)
def _infer_constraints(
template: Type, actual: Type, direction: int, skip_neg_op: bool
) -> list[Constraint]:
orig_template = template
template = get_proper_type(template)
actual = get_proper_type(actual)
# Type inference shouldn't be affected by whether union types have been simplified.
# We however keep any ErasedType items, so that the caller will see it when using
# checkexpr.has_erased_component().
if isinstance(template, UnionType):
template = mypy.typeops.make_simplified_union(template.items, keep_erased=True)
if isinstance(actual, UnionType):
actual = mypy.typeops.make_simplified_union(actual.items, keep_erased=True)
# Ignore Any types from the type suggestion engine to avoid them
# causing us to infer Any in situations where a better job could
# be done otherwise. (This can produce false positives but that
# doesn't really matter because it is all heuristic anyway.)
if isinstance(actual, AnyType) and actual.type_of_any == TypeOfAny.suggestion_engine:
return []
# If the template is simply a type variable, emit a Constraint directly.
# We need to handle this case before handling Unions for two reasons:
# 1. "T <: Union[U1, U2]" is not equivalent to "T <: U1 or T <: U2",
# because T can itself be a union (notably, Union[U1, U2] itself).
# 2. "T :> Union[U1, U2]" is logically equivalent to "T :> U1 and
# T :> U2", but they are not equivalent to the constraint solver,
# which never introduces new Union types (it uses join() instead).
if isinstance(template, TypeVarType):
return [Constraint(template, direction, actual)]
# Now handle the case of either template or actual being a Union.
# For a Union to be a subtype of another type, every item of the Union
# must be a subtype of that type, so concatenate the constraints.
if direction == SUBTYPE_OF and isinstance(template, UnionType):
res = []
for t_item in template.items:
res.extend(infer_constraints(t_item, actual, direction))
return res
if direction == SUPERTYPE_OF and isinstance(actual, UnionType):
res = []
for a_item in actual.items:
res.extend(infer_constraints(orig_template, a_item, direction))
return res
# Now the potential subtype is known not to be a Union or a type
# variable that we are solving for. In that case, for a Union to
# be a supertype of the potential subtype, some item of the Union
# must be a supertype of it.
if direction == SUBTYPE_OF and isinstance(actual, UnionType):
        # If some of the items are not complete types, disregard those.
items = simplify_away_incomplete_types(actual.items)
# We infer constraints eagerly -- try to find constraints for a type
# variable if possible. This seems to help with some real-world
# use cases.
return any_constraints(
[infer_constraints_if_possible(template, a_item, direction) for a_item in items],
eager=True,
)
if direction == SUPERTYPE_OF and isinstance(template, UnionType):
# When the template is a union, we are okay with leaving some
# type variables indeterminate. This helps with some special
# cases, though this isn't very principled.
result = any_constraints(
[
infer_constraints_if_possible(t_item, actual, direction)
for t_item in template.items
],
eager=False,
)
if result:
return result
elif has_recursive_types(template) and not has_recursive_types(actual):
return handle_recursive_union(template, actual, direction)
return []
# Remaining cases are handled by ConstraintBuilderVisitor.
return template.accept(ConstraintBuilderVisitor(actual, direction, skip_neg_op))
def infer_constraints_if_possible(
template: Type, actual: Type, direction: int
) -> list[Constraint] | None:
"""Like infer_constraints, but return None if the input relation is
known to be unsatisfiable, for example if template=List[T] and actual=int.
(In this case infer_constraints would return [], just like it would for
an automatically satisfied relation like template=List[T] and actual=object.)
"""
if direction == SUBTYPE_OF and not mypy.subtypes.is_subtype(erase_typevars(template), actual):
return None
if direction == SUPERTYPE_OF and not mypy.subtypes.is_subtype(
actual, erase_typevars(template)
):
return None
if (
direction == SUPERTYPE_OF
and isinstance(template, TypeVarType)
and not mypy.subtypes.is_subtype(actual, erase_typevars(template.upper_bound))
):
# This is not caught by the above branch because of the erase_typevars() call,
# that would return 'Any' for a type variable.
return None
return infer_constraints(template, actual, direction)
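
# Illustrative sketch (schematic): the None vs. [] distinction made above is what
# any_constraints() relies on, e.g.
#
#     infer_constraints_if_possible(List[T], int, SUBTYPE_OF)     # -> None
#     infer_constraints_if_possible(List[T], object, SUBTYPE_OF)  # -> []
#     infer_constraints_if_possible(List[T], List[int], SUPERTYPE_OF)
#     # -> [T :> int, T <: int]   (list is invariant in its type argument)
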
def select_trivial(options: Sequence[list[Constraint] | None]) -> list[list[Constraint]]:
"""Select only those lists where each item is a constraint against Any."""
res = []
for option in options:
if option is None:
continue
if all(isinstance(get_proper_type(c.target), AnyType) for c in option):
res.append(option)
return res
def merge_with_any(constraint: Constraint) -> Constraint:
"""Transform a constraint target into a union with given Any type."""
target = constraint.target
if is_union_with_any(target):
# Do not produce redundant unions.
return constraint
# TODO: if we will support multiple sources Any, use this here instead.
any_type = AnyType(TypeOfAny.implementation_artifact)
return Constraint(
constraint.origin_type_var,
constraint.op,
UnionType.make_union([target, any_type], target.line, target.column),
)
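
# Illustrative sketch (schematic): merging widens a target with Any so that
# structurally similar options can be reconciled by any_constraints() below:
#
#     merge_with_any(Constraint(T, SUBTYPE_OF, int))
#     # -> Constraint(T, SUBTYPE_OF, Union[int, Any])
#
# A target that is already a union containing Any is returned unchanged.
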
def handle_recursive_union(template: UnionType, actual: Type, direction: int) -> list[Constraint]:
# This is a hack to special-case things like Union[T, Inst[T]] in recursive types. Although
# it is quite arbitrary, it is a relatively common pattern, so we should handle it well.
    # This function may be called when inferring against such a union resulted in different
    # constraints for each item. Normally we give up in such a case, but here we instead split
    # the union into two parts, and try inferring sequentially.
non_type_var_items = [t for t in template.items if not isinstance(t, TypeVarType)]
type_var_items = [t for t in template.items if isinstance(t, TypeVarType)]
return infer_constraints(
UnionType.make_union(non_type_var_items), actual, direction
) or infer_constraints(UnionType.make_union(type_var_items), actual, direction)
def any_constraints(options: list[list[Constraint] | None], eager: bool) -> list[Constraint]:
"""Deduce what we can from a collection of constraint lists.
It's a given that at least one of the lists must be satisfied. A
None element in the list of options represents an unsatisfiable
constraint and is ignored. Ignore empty constraint lists if eager
is true -- they are always trivially satisfiable.
"""
if eager:
valid_options = [option for option in options if option]
else:
valid_options = [option for option in options if option is not None]
if not valid_options:
return []
if len(valid_options) == 1:
return valid_options[0]
if all(is_same_constraints(valid_options[0], c) for c in valid_options[1:]):
# Multiple sets of constraints that are all the same. Just pick any one of them.
return valid_options[0]
if all(is_similar_constraints(valid_options[0], c) for c in valid_options[1:]):
# All options have same structure. In this case we can merge-in trivial
# options (i.e. those that only have Any) and try again.
# TODO: More generally, if a given (variable, direction) pair appears in
# every option, combine the bounds with meet/join always, not just for Any.
trivial_options = select_trivial(valid_options)
if trivial_options and len(trivial_options) < len(valid_options):
merged_options = []
for option in valid_options:
if option in trivial_options:
continue
if option is not None:
merged_option: list[Constraint] | None = [merge_with_any(c) for c in option]
else:
merged_option = None
merged_options.append(merged_option)
return any_constraints(list(merged_options), eager)
    # If normal logic didn't work, try excluding trivially unsatisfiable constraints (due to
# upper bounds) from each option, and comparing them again.
filtered_options = [filter_satisfiable(o) for o in options]
if filtered_options != options:
return any_constraints(filtered_options, eager=eager)
# Otherwise, there are either no valid options or multiple, inconsistent valid
# options. Give up and deduce nothing.
return []
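
# Illustrative sketch (schematic): with eager=True empty options are discarded,
# and a result is produced only when the surviving options agree:
#
#     any_constraints([[T :> int], None, [T :> int]], eager=True)   # -> [T :> int]
#     any_constraints([[T :> int], [T :> str]], eager=True)         # -> []
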
def filter_satisfiable(option: list[Constraint] | None) -> list[Constraint] | None:
"""Keep only constraints that can possibly be satisfied.
    Currently, we filter out constraints where the target is not a subtype of the upper bound,
    since those can never be satisfied. We may add more cases in the future if it improves type
    inference.
"""
if not option:
return option
satisfiable = []
for c in option:
if isinstance(c.origin_type_var, TypeVarType) and c.origin_type_var.values:
if any(
mypy.subtypes.is_subtype(c.target, value) for value in c.origin_type_var.values
):
satisfiable.append(c)
elif mypy.subtypes.is_subtype(c.target, c.origin_type_var.upper_bound):
satisfiable.append(c)
if not satisfiable:
return None
return satisfiable
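
# Illustrative sketch (schematic): for a hypothetical "T = TypeVar('T', bound=int)"
# a candidate "T :> str" can never hold (str is not a subtype of the bound), while
# "T :> bool" can (bool is a subtype of int):
#
#     filter_satisfiable([T :> str])             # -> None
#     filter_satisfiable([T :> str, T :> bool])  # -> [T :> bool]
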
def is_same_constraints(x: list[Constraint], y: list[Constraint]) -> bool:
for c1 in x:
if not any(is_same_constraint(c1, c2) for c2 in y):
return False
for c1 in y:
if not any(is_same_constraint(c1, c2) for c2 in x):
return False
return True
def is_same_constraint(c1: Constraint, c2: Constraint) -> bool:
# Ignore direction when comparing constraints against Any.
skip_op_check = isinstance(get_proper_type(c1.target), AnyType) and isinstance(
get_proper_type(c2.target), AnyType
)
return (
c1.type_var == c2.type_var
and (c1.op == c2.op or skip_op_check)
and mypy.subtypes.is_same_type(c1.target, c2.target)
)
def is_similar_constraints(x: list[Constraint], y: list[Constraint]) -> bool:
"""Check that two lists of constraints have similar structure.
    This means that each list has the same type variable plus direction pairs (i.e. we
    ignore the target), except for constraints whose target is Any, for which we ignore
    the direction as well.
"""
return _is_similar_constraints(x, y) and _is_similar_constraints(y, x)
def _is_similar_constraints(x: list[Constraint], y: list[Constraint]) -> bool:
"""Check that every constraint in the first list has a similar one in the second.
See docstring above for definition of similarity.
"""
for c1 in x:
has_similar = False
for c2 in y:
# Ignore direction when either constraint is against Any.
skip_op_check = isinstance(get_proper_type(c1.target), AnyType) or isinstance(
get_proper_type(c2.target), AnyType
)
if c1.type_var == c2.type_var and (c1.op == c2.op or skip_op_check):
has_similar = True
break
if not has_similar:
return False
return True
def simplify_away_incomplete_types(types: Iterable[Type]) -> list[Type]:
complete = [typ for typ in types if is_complete_type(typ)]
if complete:
return complete
else:
return list(types)
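
# Illustrative sketch (schematic): incomplete items (those containing the
# uninhabited type) are dropped as long as at least one complete item remains:
#
#     simplify_away_incomplete_types([<uninhabited>, int])  # -> [int]
#     simplify_away_incomplete_types([<uninhabited>])       # -> [<uninhabited>]
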
def is_complete_type(typ: Type) -> bool:
"""Is a type complete?
    A complete type doesn't have uninhabited type components or (when not in strict
optional mode) None components.
"""
return typ.accept(CompleteTypeVisitor())
class CompleteTypeVisitor(TypeQuery[bool]):
def __init__(self) -> None:
super().__init__(all)
def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
return False
class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]):
"""Visitor class for inferring type constraints."""
# The type that is compared against a template
# TODO: The value may be None. Is that actually correct?
actual: ProperType
def __init__(self, actual: ProperType, direction: int, skip_neg_op: bool) -> None:
# Direction must be SUBTYPE_OF or SUPERTYPE_OF.
self.actual = actual
self.direction = direction
        # Whether to skip polymorphic inference (which involves inference in the opposite
        # direction); this is used to prevent infinite recursion when both template and
        # actual are generic callables.
self.skip_neg_op = skip_neg_op
# Trivial leaf types
def visit_unbound_type(self, template: UnboundType) -> list[Constraint]:
return []
def visit_any(self, template: AnyType) -> list[Constraint]:
return []
def visit_none_type(self, template: NoneType) -> list[Constraint]:
return []
def visit_uninhabited_type(self, template: UninhabitedType) -> list[Constraint]:
return []
def visit_erased_type(self, template: ErasedType) -> list[Constraint]:
return []
def visit_deleted_type(self, template: DeletedType) -> list[Constraint]:
return []
def visit_literal_type(self, template: LiteralType) -> list[Constraint]:
return []
# Errors
def visit_partial_type(self, template: PartialType) -> list[Constraint]:
# We can't do anything useful with a partial type here.
assert False, "Internal error"
# Non-trivial leaf type
def visit_type_var(self, template: TypeVarType) -> list[Constraint]:
assert False, (
"Unexpected TypeVarType in ConstraintBuilderVisitor"
" (should have been handled in infer_constraints)"
)
def visit_param_spec(self, template: ParamSpecType) -> list[Constraint]:
# Can't infer ParamSpecs from component values (only via Callable[P, T]).
return []
def visit_type_var_tuple(self, template: TypeVarTupleType) -> list[Constraint]:
raise NotImplementedError
def visit_unpack_type(self, template: UnpackType) -> list[Constraint]:
raise RuntimeError("Mypy bug: unpack should be handled at a higher level.")
def visit_parameters(self, template: Parameters) -> list[Constraint]:
# Constraining Any against C[P] turns into infer_against_any([P], Any)
        # ... which seems like the only case where this can happen. Better to fail loudly otherwise.
if isinstance(self.actual, AnyType):
return self.infer_against_any(template.arg_types, self.actual)
if type_state.infer_polymorphic and isinstance(self.actual, Parameters):
# For polymorphic inference we need to be able to infer secondary constraints
# in situations like [x: T] <: P <: [x: int]. Note we invert direction, since
# this function expects direction between callables.
return infer_callable_arguments_constraints(
template, self.actual, neg_op(self.direction)
)
raise RuntimeError("Parameters cannot be constrained to")
# Non-leaf types
def visit_instance(self, template: Instance) -> list[Constraint]:
original_actual = actual = self.actual
res: list[Constraint] = []
if isinstance(actual, (CallableType, Overloaded)) and template.type.is_protocol:
if "__call__" in template.type.protocol_members:
# Special case: a generic callback protocol
if not any(template == t for t in template.type.inferring):
template.type.inferring.append(template)
call = mypy.subtypes.find_member(
"__call__", template, actual, is_operator=True
)
assert call is not None
if mypy.subtypes.is_subtype(actual, erase_typevars(call)):
subres = infer_constraints(call, actual, self.direction)
res.extend(subres)
template.type.inferring.pop()
if isinstance(actual, CallableType) and actual.fallback is not None:
if actual.is_type_obj() and template.type.is_protocol:
ret_type = get_proper_type(actual.ret_type)
if isinstance(ret_type, TupleType):
ret_type = mypy.typeops.tuple_fallback(ret_type)
if isinstance(ret_type, Instance):
if self.direction == SUBTYPE_OF:
subtype = template
else:
subtype = ret_type
res.extend(
self.infer_constraints_from_protocol_members(
ret_type, template, subtype, template, class_obj=True
)
)
actual = actual.fallback
if isinstance(actual, TypeType) and template.type.is_protocol:
if isinstance(actual.item, Instance):
if self.direction == SUBTYPE_OF:
subtype = template
else:
subtype = actual.item
res.extend(
self.infer_constraints_from_protocol_members(
actual.item, template, subtype, template, class_obj=True
)
)
if self.direction == SUPERTYPE_OF:
# Infer constraints for Type[T] via metaclass of T when it makes sense.
a_item = actual.item
if isinstance(a_item, TypeVarType):
a_item = get_proper_type(a_item.upper_bound)
if isinstance(a_item, Instance) and a_item.type.metaclass_type:
res.extend(
self.infer_constraints_from_protocol_members(
a_item.type.metaclass_type, template, actual, template
)
)
if isinstance(actual, Overloaded) and actual.fallback is not None:
actual = actual.fallback
if isinstance(actual, TypedDictType):
actual = actual.as_anonymous().fallback
if isinstance(actual, LiteralType):
actual = actual.fallback
if isinstance(actual, Instance):
instance = actual
erased = erase_typevars(template)
assert isinstance(erased, Instance) # type: ignore[misc]
# We always try nominal inference if possible,
# it is much faster than the structural one.
if self.direction == SUBTYPE_OF and template.type.has_base(instance.type.fullname):
mapped = map_instance_to_supertype(template, instance.type)
tvars = mapped.type.defn.type_vars
if instance.type.has_type_var_tuple_type:
assert instance.type.type_var_tuple_prefix is not None
assert instance.type.type_var_tuple_suffix is not None
assert mapped.type.type_var_tuple_prefix is not None
assert mapped.type.type_var_tuple_suffix is not None
unpack_constraints, instance_args, mapped_args = build_constraints_for_unpack(
instance.args,
instance.type.type_var_tuple_prefix,
instance.type.type_var_tuple_suffix,
mapped.args,
mapped.type.type_var_tuple_prefix,
mapped.type.type_var_tuple_suffix,
self.direction,
)
res.extend(unpack_constraints)
tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix(
tuple(tvars),
instance.type.type_var_tuple_prefix,
instance.type.type_var_tuple_suffix,
)
tvars = cast("list[TypeVarLikeType]", list(tvars_prefix + tvars_suffix))
else:
mapped_args = mapped.args
instance_args = instance.args
# N.B: We use zip instead of indexing because the lengths might have
# mismatches during daemon reprocessing.
for tvar, mapped_arg, instance_arg in zip(tvars, mapped_args, instance_args):
if isinstance(tvar, TypeVarType):
# The constraints for generic type parameters depend on variance.
# Include constraints from both directions if invariant.
if tvar.variance != CONTRAVARIANT:
res.extend(infer_constraints(mapped_arg, instance_arg, self.direction))
if tvar.variance != COVARIANT:
res.extend(
infer_constraints(mapped_arg, instance_arg, neg_op(self.direction))
)
elif isinstance(tvar, ParamSpecType) and isinstance(mapped_arg, ParamSpecType):
prefix = mapped_arg.prefix
if isinstance(instance_arg, Parameters):
# No such thing as variance for ParamSpecs, consider them invariant
# TODO: constraints between prefixes using
# infer_callable_arguments_constraints()
suffix: Type = instance_arg.copy_modified(
instance_arg.arg_types[len(prefix.arg_types) :],
instance_arg.arg_kinds[len(prefix.arg_kinds) :],
instance_arg.arg_names[len(prefix.arg_names) :],
)
res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix))
res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
elif isinstance(instance_arg, ParamSpecType):
suffix = instance_arg.copy_modified(
prefix=Parameters(
instance_arg.prefix.arg_types[len(prefix.arg_types) :],
instance_arg.prefix.arg_kinds[len(prefix.arg_kinds) :],
instance_arg.prefix.arg_names[len(prefix.arg_names) :],
)
)
res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix))
res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix))
else:
# This case should have been handled above.
assert not isinstance(tvar, TypeVarTupleType)
return res
elif self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname):
mapped = map_instance_to_supertype(instance, template.type)
tvars = template.type.defn.type_vars
if template.type.has_type_var_tuple_type:
assert mapped.type.type_var_tuple_prefix is not None
assert mapped.type.type_var_tuple_suffix is not None
assert template.type.type_var_tuple_prefix is not None
assert template.type.type_var_tuple_suffix is not None
unpack_constraints, mapped_args, template_args = build_constraints_for_unpack(
mapped.args,
mapped.type.type_var_tuple_prefix,
mapped.type.type_var_tuple_suffix,
template.args,
template.type.type_var_tuple_prefix,
template.type.type_var_tuple_suffix,
self.direction,
)
res.extend(unpack_constraints)
tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix(
tuple(tvars),
template.type.type_var_tuple_prefix,
template.type.type_var_tuple_suffix,
)
tvars = cast("list[TypeVarLikeType]", list(tvars_prefix + tvars_suffix))
else:
mapped_args = mapped.args
template_args = template.args
# N.B: We use zip instead of indexing because the lengths might have
# mismatches during daemon reprocessing.
for tvar, mapped_arg, template_arg in zip(tvars, mapped_args, template_args):
assert not isinstance(tvar, TypeVarTupleType)
if isinstance(tvar, TypeVarType):
# The constraints for generic type parameters depend on variance.
# Include constraints from both directions if invariant.
if tvar.variance != CONTRAVARIANT:
res.extend(infer_constraints(template_arg, mapped_arg, self.direction))
if tvar.variance != COVARIANT:
res.extend(
infer_constraints(template_arg, mapped_arg, neg_op(self.direction))
)
elif isinstance(tvar, ParamSpecType) and isinstance(
template_arg, ParamSpecType
):
prefix = template_arg.prefix
if isinstance(mapped_arg, Parameters):
# No such thing as variance for ParamSpecs, consider them invariant
# TODO: constraints between prefixes using
# infer_callable_arguments_constraints()
suffix = mapped_arg.copy_modified(
mapped_arg.arg_types[len(prefix.arg_types) :],
mapped_arg.arg_kinds[len(prefix.arg_kinds) :],
mapped_arg.arg_names[len(prefix.arg_names) :],
)
res.append(Constraint(template_arg, SUBTYPE_OF, suffix))
res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
elif isinstance(mapped_arg, ParamSpecType):
suffix = mapped_arg.copy_modified(
prefix=Parameters(
mapped_arg.prefix.arg_types[len(prefix.arg_types) :],
mapped_arg.prefix.arg_kinds[len(prefix.arg_kinds) :],
mapped_arg.prefix.arg_names[len(prefix.arg_names) :],
)
)
res.append(Constraint(template_arg, SUBTYPE_OF, suffix))
res.append(Constraint(template_arg, SUPERTYPE_OF, suffix))
else:
# This case should have been handled above.
assert not isinstance(tvar, TypeVarTupleType)
return res
if (
template.type.is_protocol
and self.direction == SUPERTYPE_OF
and
# We avoid infinite recursion for structural subtypes by checking
# whether this type already appeared in the inference chain.
# This is a conservative way to break the inference cycles.
# It never produces any "false" constraints but gives up soon
# on purely structural inference cycles, see #3829.
# Note that we use is_protocol_implementation instead of is_subtype
# because some type may be considered a subtype of a protocol
# due to _promote, but still not implement the protocol.
not any(template == t for t in reversed(template.type.inferring))
and mypy.subtypes.is_protocol_implementation(instance, erased, skip=["__call__"])
):
template.type.inferring.append(template)
res.extend(
self.infer_constraints_from_protocol_members(
instance, template, original_actual, template
)
)
template.type.inferring.pop()
return res
elif (
instance.type.is_protocol
and self.direction == SUBTYPE_OF
and
# We avoid infinite recursion for structural subtypes also here.
not any(instance == i for i in reversed(instance.type.inferring))
and mypy.subtypes.is_protocol_implementation(erased, instance, skip=["__call__"])
):
instance.type.inferring.append(instance)
res.extend(
self.infer_constraints_from_protocol_members(
instance, template, template, instance
)
)
instance.type.inferring.pop()
return res
if res:
return res
if isinstance(actual, AnyType):
return self.infer_against_any(template.args, actual)
if (
isinstance(actual, TupleType)
and is_named_instance(template, TUPLE_LIKE_INSTANCE_NAMES)
and self.direction == SUPERTYPE_OF
):
for item in actual.items:
if isinstance(item, UnpackType):
unpacked = get_proper_type(item.type)
if isinstance(unpacked, TypeVarType):
# Cannot infer anything for T from [T, ...] <: *Ts
continue
assert (
isinstance(unpacked, Instance)
and unpacked.type.fullname == "builtins.tuple"
)
item = unpacked.args[0]
cb = infer_constraints(template.args[0], item, SUPERTYPE_OF)
res.extend(cb)
return res
elif isinstance(actual, TupleType) and self.direction == SUPERTYPE_OF:
return infer_constraints(template, mypy.typeops.tuple_fallback(actual), self.direction)
elif isinstance(actual, TypeVarType):
if not actual.values and not actual.id.is_meta_var():
return infer_constraints(template, actual.upper_bound, self.direction)
return []
elif isinstance(actual, ParamSpecType):
return infer_constraints(template, actual.upper_bound, self.direction)
elif isinstance(actual, TypeVarTupleType):
raise NotImplementedError
else:
return []
def infer_constraints_from_protocol_members(
self,
instance: Instance,
template: Instance,
subtype: Type,
protocol: Instance,
class_obj: bool = False,
) -> list[Constraint]:
"""Infer constraints for situations where either 'template' or 'instance' is a protocol.
        The 'protocol' is whichever of the two is a protocol type; 'subtype' is the type
        used to bind self during inference. Currently, we just infer constraints for
        every protocol member type (both ways for settable members).
"""
res = []
for member in protocol.type.protocol_members:
inst = mypy.subtypes.find_member(member, instance, subtype, class_obj=class_obj)
temp = mypy.subtypes.find_member(member, template, subtype)
if inst is None or temp is None:
if member == "__call__":
continue
return [] # See #11020
# The above is safe since at this point we know that 'instance' is a subtype
# of (erased) 'template', therefore it defines all protocol members
res.extend(infer_constraints(temp, inst, self.direction))
if mypy.subtypes.IS_SETTABLE in mypy.subtypes.get_member_flags(member, protocol):
# Settable members are invariant, add opposite constraints
res.extend(infer_constraints(temp, inst, neg_op(self.direction)))
return res
def visit_callable_type(self, template: CallableType) -> list[Constraint]:
# Normalize callables before matching against each other.
# Note that non-normalized callables can be created in annotations
# using e.g. callback protocols.
# TODO: check that callables match? Ideally we should not infer constraints
        # for callables that can never be subtypes of one another in the given direction.
template = template.with_unpacked_kwargs()
extra_tvars = False
if isinstance(self.actual, CallableType):
res: list[Constraint] = []
cactual = self.actual.with_unpacked_kwargs()
param_spec = template.param_spec()
template_ret_type, cactual_ret_type = template.ret_type, cactual.ret_type
if template.type_guard is not None:
template_ret_type = template.type_guard
if cactual.type_guard is not None:
cactual_ret_type = cactual.type_guard
res.extend(infer_constraints(template_ret_type, cactual_ret_type, self.direction))
if param_spec is None:
# TODO: Erase template variables if it is generic?
if (
type_state.infer_polymorphic
and cactual.variables
and not self.skip_neg_op
# Technically, the correct inferred type for application of e.g.
# Callable[..., T] -> Callable[..., T] (with literal ellipsis), to a generic
# like U -> U, should be Callable[..., Any], but if U is a self-type, we can
# allow it to leak, to be later bound to self. A bunch of existing code
# depends on this old behaviour.
and not any(tv.id.raw_id == 0 for tv in cactual.variables)
):
# If the actual callable is generic, infer constraints in the opposite
# direction, and indicate to the solver there are extra type variables
# to solve for (see more details in mypy/solve.py).
res.extend(
infer_constraints(
cactual, template, neg_op(self.direction), skip_neg_op=True
)
)
extra_tvars = True
# We can't infer constraints from arguments if the template is Callable[..., T]
# (with literal '...').
if not template.is_ellipsis_args:
unpack_present = find_unpack_in_list(template.arg_types)
if unpack_present is not None:
# We need to re-normalize args to the form they appear in tuples,
# for callables we always pack the suffix inside another tuple.
unpack = template.arg_types[unpack_present]
assert isinstance(unpack, UnpackType)
tuple_type = get_tuple_fallback_from_unpack(unpack)
template_types = repack_callable_args(template, tuple_type)
actual_types = repack_callable_args(cactual, tuple_type)
# Now we can use the same general helper as for tuple types.
unpack_constraints = build_constraints_for_simple_unpack(
template_types, actual_types, neg_op(self.direction)
)
res.extend(unpack_constraints)
else:
# Negate direction due to function argument type contravariance.
res.extend(
infer_callable_arguments_constraints(template, cactual, self.direction)
)
else:
prefix = param_spec.prefix
prefix_len = len(prefix.arg_types)
cactual_ps = cactual.param_spec()
if type_state.infer_polymorphic and cactual.variables and not self.skip_neg_op:
# Similar logic to the branch above.
res.extend(
infer_constraints(
cactual, template, neg_op(self.direction), skip_neg_op=True
)
)
extra_tvars = True
# Compare prefixes as well
cactual_prefix = cactual.copy_modified(
arg_types=cactual.arg_types[:prefix_len],
arg_kinds=cactual.arg_kinds[:prefix_len],
arg_names=cactual.arg_names[:prefix_len],
)
res.extend(
infer_callable_arguments_constraints(prefix, cactual_prefix, self.direction)
)
param_spec_target: Type | None = None
skip_imprecise = (
any(c.type_var == param_spec.id for c in res) and cactual.imprecise_arg_kinds
)
if not cactual_ps:
max_prefix_len = len([k for k in cactual.arg_kinds if k in (ARG_POS, ARG_OPT)])
prefix_len = min(prefix_len, max_prefix_len)
                    # This logic matches the top-level callable constraint exception: if we managed
                    # to get other constraints for the ParamSpec, don't infer one with imprecise kinds.
if not skip_imprecise:
param_spec_target = Parameters(
arg_types=cactual.arg_types[prefix_len:],
arg_kinds=cactual.arg_kinds[prefix_len:],
arg_names=cactual.arg_names[prefix_len:],
variables=cactual.variables
if not type_state.infer_polymorphic
else [],
imprecise_arg_kinds=cactual.imprecise_arg_kinds,
)
else:
if (
len(param_spec.prefix.arg_types) <= len(cactual_ps.prefix.arg_types)
and not skip_imprecise
):
param_spec_target = cactual_ps.copy_modified(
prefix=Parameters(
arg_types=cactual_ps.prefix.arg_types[prefix_len:],
arg_kinds=cactual_ps.prefix.arg_kinds[prefix_len:],
arg_names=cactual_ps.prefix.arg_names[prefix_len:],
imprecise_arg_kinds=cactual_ps.prefix.imprecise_arg_kinds,
)
)
if param_spec_target is not None:
res.append(Constraint(param_spec, neg_op(self.direction), param_spec_target))
if extra_tvars:
for c in res:
c.extra_tvars += cactual.variables
return res
elif isinstance(self.actual, AnyType):
param_spec = template.param_spec()
any_type = AnyType(TypeOfAny.from_another_any, source_any=self.actual)
if param_spec is None:
# FIX what if generic
res = self.infer_against_any(template.arg_types, self.actual)
else:
res = [
Constraint(
param_spec,
SUBTYPE_OF,
Parameters([any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None]),
)
]
res.extend(infer_constraints(template.ret_type, any_type, self.direction))
return res
elif isinstance(self.actual, Overloaded):
return self.infer_against_overloaded(self.actual, template)
elif isinstance(self.actual, TypeType):
return infer_constraints(template.ret_type, self.actual.item, self.direction)
elif isinstance(self.actual, Instance):
# Instances with __call__ method defined are considered structural
# subtypes of Callable with a compatible signature.
call = mypy.subtypes.find_member(
"__call__", self.actual, self.actual, is_operator=True
)
if call:
return infer_constraints(template, call, self.direction)
else:
return []
else:
return []
def infer_against_overloaded(
self, overloaded: Overloaded, template: CallableType
) -> list[Constraint]:
# Create constraints by matching an overloaded type against a template.
# This is tricky to do in general. We cheat by only matching against
# the first overload item that is callable compatible. This
# seems to work somewhat well, but we should really use a more
# reliable technique.
item = find_matching_overload_item(overloaded, template)
return infer_constraints(template, item, self.direction)
def visit_tuple_type(self, template: TupleType) -> list[Constraint]:
actual = self.actual
unpack_index = find_unpack_in_list(template.items)
is_varlength_tuple = (
isinstance(actual, Instance) and actual.type.fullname == "builtins.tuple"
)
if isinstance(actual, TupleType) or is_varlength_tuple:
res: list[Constraint] = []
if unpack_index is not None:
if is_varlength_tuple:
unpack_type = template.items[unpack_index]
assert isinstance(unpack_type, UnpackType)
unpacked_type = unpack_type.type
assert isinstance(unpacked_type, TypeVarTupleType)
return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)]
else:
assert isinstance(actual, TupleType)
unpack_constraints = build_constraints_for_simple_unpack(
template.items, actual.items, self.direction
)
actual_items: tuple[Type, ...] = ()
template_items: tuple[Type, ...] = ()
res.extend(unpack_constraints)
elif isinstance(actual, TupleType):
actual_items = tuple(actual.items)
template_items = tuple(template.items)
else:
return res
# Cases above will return if actual wasn't a TupleType.
assert isinstance(actual, TupleType)
if len(actual_items) == len(template_items):
if (
actual.partial_fallback.type.is_named_tuple
and template.partial_fallback.type.is_named_tuple
):
# For named tuples using just the fallbacks usually gives better results.
return res + infer_constraints(
template.partial_fallback, actual.partial_fallback, self.direction
)
for i in range(len(template_items)):
res.extend(
infer_constraints(template_items[i], actual_items[i], self.direction)
)
return res
elif isinstance(actual, AnyType):
return self.infer_against_any(template.items, actual)
else:
return []
def visit_typeddict_type(self, template: TypedDictType) -> list[Constraint]:
actual = self.actual
if isinstance(actual, TypedDictType):
res: list[Constraint] = []
# NOTE: Non-matching keys are ignored. Compatibility is checked
# elsewhere so this shouldn't be unsafe.
for item_name, template_item_type, actual_item_type in template.zip(actual):
res.extend(infer_constraints(template_item_type, actual_item_type, self.direction))
return res
elif isinstance(actual, AnyType):
return self.infer_against_any(template.items.values(), actual)
else:
return []
def visit_union_type(self, template: UnionType) -> list[Constraint]:
assert False, (
"Unexpected UnionType in ConstraintBuilderVisitor"
" (should have been handled in infer_constraints)"
)
def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]:
assert False, f"This should be never called, got {template}"
def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> list[Constraint]:
res: list[Constraint] = []
for t in types:
if isinstance(t, UnpackType) and isinstance(t.type, TypeVarTupleType):
res.append(Constraint(t.type, self.direction, any_type))
else:
# Note that we ignore variance and simply always use the
# original direction. This is because for Any targets direction is
# irrelevant in most cases, see e.g. is_same_constraint().
res.extend(infer_constraints(t, any_type, self.direction))
return res
def visit_overloaded(self, template: Overloaded) -> list[Constraint]:
if isinstance(self.actual, CallableType):
items = find_matching_overload_items(template, self.actual)
else:
items = template.items
res: list[Constraint] = []
for t in items:
res.extend(infer_constraints(t, self.actual, self.direction))
return res
def visit_type_type(self, template: TypeType) -> list[Constraint]:
if isinstance(self.actual, CallableType):
return infer_constraints(template.item, self.actual.ret_type, self.direction)
elif isinstance(self.actual, Overloaded):
return infer_constraints(template.item, self.actual.items[0].ret_type, self.direction)
elif isinstance(self.actual, TypeType):
return infer_constraints(template.item, self.actual.item, self.direction)
elif isinstance(self.actual, AnyType):
return infer_constraints(template.item, self.actual, self.direction)
else:
return []
def neg_op(op: int) -> int:
"""Map SubtypeOf to SupertypeOf and vice versa."""
if op == SUBTYPE_OF:
return SUPERTYPE_OF
elif op == SUPERTYPE_OF:
return SUBTYPE_OF
else:
raise ValueError(f"Invalid operator {op}")
def find_matching_overload_item(overloaded: Overloaded, template: CallableType) -> CallableType:
"""Disambiguate overload item against a template."""
items = overloaded.items
for item in items:
# Return type may be indeterminate in the template, so ignore it when performing a
# subtype check.
if mypy.subtypes.is_callable_compatible(
item, template, is_compat=mypy.subtypes.is_subtype, ignore_return=True
):
return item
# Fall back to the first item if we can't find a match. This is totally arbitrary --
# maybe we should just bail out at this point.
return items[0]
def find_matching_overload_items(
overloaded: Overloaded, template: CallableType
) -> list[CallableType]:
"""Like find_matching_overload_item, but return all matches, not just the first."""
items = overloaded.items
res = []
for item in items:
# Return type may be indeterminate in the template, so ignore it when performing a
# subtype check.
if mypy.subtypes.is_callable_compatible(
item, template, is_compat=mypy.subtypes.is_subtype, ignore_return=True
):
res.append(item)
if not res:
# Falling back to all items if we can't find a match is pretty arbitrary, but
# it maintains backward compatibility.
res = items.copy()
return res
def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo | None:
"""Get builtins.tuple type from available types to construct homogeneous tuples."""
tp = get_proper_type(unpack.type)
if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple":
return tp.type
if isinstance(tp, TypeVarTupleType):
return tp.tuple_fallback.type
if isinstance(tp, TupleType):
for base in tp.partial_fallback.type.mro:
if base.fullname == "builtins.tuple":
return base
return None
def repack_callable_args(callable: CallableType, tuple_type: TypeInfo | None) -> list[Type]:
"""Present callable with star unpack in a normalized form.
    Since positional arguments cannot follow a star argument, the suffix is packed inside
    the unpacked tuple, while the prefix is represented as individual positional args. We
    want to put everything into a single list with the unpack in the middle and the
    prefix/suffix on either side (as they would appear in e.g. a TupleType).
"""
if ARG_STAR not in callable.arg_kinds:
return callable.arg_types
star_index = callable.arg_kinds.index(ARG_STAR)
arg_types = callable.arg_types[:star_index]
star_type = callable.arg_types[star_index]
suffix_types = []
if not isinstance(star_type, UnpackType):
if tuple_type is not None:
# Re-normalize *args: X -> *args: *tuple[X, ...]
star_type = UnpackType(Instance(tuple_type, [star_type]))
else:
# This is unfortunate, something like tuple[Any, ...] would be better.
star_type = UnpackType(AnyType(TypeOfAny.from_error))
else:
tp = get_proper_type(star_type.type)
if isinstance(tp, TupleType):
assert isinstance(tp.items[0], UnpackType)
star_type = tp.items[0]
suffix_types = tp.items[1:]
return arg_types + [star_type] + suffix_types
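
# Illustrative sketch (schematic signatures): the prefix, the unpack and the packed
# suffix end up in one flat list, mirroring how they would appear in a TupleType:
#
#     (x: X, *args: *tuple[*Ts, Y])  =>  [X, *Ts, Y]
#     (x: X, *args: int)             =>  [X, *tuple[int, ...]]
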
def build_constraints_for_simple_unpack(
template_args: list[Type], actual_args: list[Type], direction: int
) -> list[Constraint]:
"""Infer constraints between two lists of types with variadic items.
    This function is only supposed to be called when a variadic item is present in the template.
    If there is no variadic item in the actuals, we simply use split_with_prefix_and_suffix()
    and infer prefix <: prefix, suffix <: suffix, variadic <: middle. If there is a variadic
    item in the actuals, we need to be more careful: only the common prefix/suffix can generate
    constraints, and we can only infer constraints for the variadic template item if the
    template prefix/suffix are shorter than the actual ones; otherwise there may be partial
    overlap between variadic items, for example if the template prefix is longer:
templates: T1, T2, Ts, Ts, Ts, ...
actuals: A1, As, As, As, ...
    Note: this function can only be called for the builtin variadic constructors Tuple and
    Callable; for Instances, variance depends on position, and the much more complex
    build_constraints_for_unpack() should be used.
"""
template_unpack = find_unpack_in_list(template_args)
assert template_unpack is not None
template_prefix = template_unpack
template_suffix = len(template_args) - template_prefix - 1
t_unpack = None
res = []
actual_unpack = find_unpack_in_list(actual_args)
if actual_unpack is None:
t_unpack = template_args[template_unpack]
if template_prefix + template_suffix > len(actual_args):
            # These can't be subtypes of each other, return fast.
assert isinstance(t_unpack, UnpackType)
if isinstance(t_unpack.type, TypeVarTupleType):
# Set TypeVarTuple to empty to improve error messages.
return [
Constraint(
t_unpack.type, direction, TupleType([], t_unpack.type.tuple_fallback)
)
]
else:
return []
common_prefix = template_prefix
common_suffix = template_suffix
else:
actual_prefix = actual_unpack
actual_suffix = len(actual_args) - actual_prefix - 1
common_prefix = min(template_prefix, actual_prefix)
common_suffix = min(template_suffix, actual_suffix)
if actual_prefix >= template_prefix and actual_suffix >= template_suffix:
# This is the only case where we can guarantee there will be no partial overlap.
t_unpack = template_args[template_unpack]
# Handle constraints from prefixes/suffixes first.
start, middle, end = split_with_prefix_and_suffix(
tuple(actual_args), common_prefix, common_suffix
)
for t, a in zip(template_args[:common_prefix], start):
res.extend(infer_constraints(t, a, direction))
if common_suffix:
for t, a in zip(template_args[-common_suffix:], end):
res.extend(infer_constraints(t, a, direction))
if t_unpack is not None:
# Add constraint(s) for variadic item when possible.
assert isinstance(t_unpack, UnpackType)
tp = get_proper_type(t_unpack.type)
if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple":
# Homogeneous case *tuple[T, ...] <: [X, Y, Z, ...].
for a in middle:
# TODO: should we use union instead of join here?
if not isinstance(a, UnpackType):
res.extend(infer_constraints(tp.args[0], a, direction))
else:
a_tp = get_proper_type(a.type)
# This is the case *tuple[T, ...] <: *tuple[A, ...].
if isinstance(a_tp, Instance) and a_tp.type.fullname == "builtins.tuple":
res.extend(infer_constraints(tp.args[0], a_tp.args[0], direction))
elif isinstance(tp, TypeVarTupleType):
res.append(Constraint(tp, direction, TupleType(list(middle), tp.tuple_fallback)))
return res
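
# Illustrative sketch (schematic): matching template items [X, *Ts, Y] against
# actual items [int, str, bytes, bool] pairs up the common prefix and suffix first
# and binds the variadic item to the remainder (in the given direction):
#
#     X vs int,  Y vs bool,  and  Ts -> tuple[str, bytes]
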
def build_constraints_for_unpack(
# TODO: this naming is misleading, these should be "actual", not "mapped"
# both template and actual can be mapped before, depending on direction.
# Also the convention is to put template related args first.
mapped: tuple[Type, ...],
mapped_prefix_len: int | None,
mapped_suffix_len: int | None,
template: tuple[Type, ...],
template_prefix_len: int,
template_suffix_len: int,
direction: int,
) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]:
# TODO: this function looks broken:
# a) it should take into account variances, but it doesn't
# b) it looks like both call sites always pass identical values to args (2, 3) and (5, 6)
# because after map_instance_to_supertype() both template and actual have same TypeInfo.
if mapped_prefix_len is None:
mapped_prefix_len = template_prefix_len
if mapped_suffix_len is None:
mapped_suffix_len = template_suffix_len
split_result = split_with_mapped_and_template(
mapped,
mapped_prefix_len,
mapped_suffix_len,
template,
template_prefix_len,
template_suffix_len,
)
assert split_result is not None
(
mapped_prefix,
mapped_middle,
mapped_suffix,
template_prefix,
template_middle,
template_suffix,
) = split_result
template_unpack = extract_unpack(template_middle)
res = []
if template_unpack is not None:
if isinstance(template_unpack, TypeVarTupleType):
res.append(
Constraint(
template_unpack,
direction,
TupleType(list(mapped_middle), template_unpack.tuple_fallback),
)
)
elif (
isinstance(template_unpack, Instance)
and template_unpack.type.fullname == "builtins.tuple"
):
for item in mapped_middle:
res.extend(infer_constraints(template_unpack.args[0], item, direction))
elif isinstance(template_unpack, TupleType):
if len(template_unpack.items) == len(mapped_middle):
for template_arg, item in zip(template_unpack.items, mapped_middle):
res.extend(infer_constraints(template_arg, item, direction))
return res, mapped_prefix + mapped_suffix, template_prefix + template_suffix
def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> list[Constraint]:
"""Infer constraints between two arguments using direction between original callables."""
if isinstance(left, (ParamSpecType, UnpackType)) or isinstance(
right, (ParamSpecType, UnpackType)
):
# This avoids bogus constraints like T <: P.args
# TODO: can we infer something useful for *T vs P?
return []
if direction == SUBTYPE_OF:
# We invert direction to account for argument contravariance.
return infer_constraints(left, right, neg_op(direction))
else:
return infer_constraints(right, left, neg_op(direction))
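
# Illustrative sketch (schematic): argument positions are contravariant, so the
# direction between whole callables is flipped for individual arguments, e.g.
#
#     infer_directed_arg_constraints(int, T, SUPERTYPE_OF)
#     # -> [Constraint(T, SUBTYPE_OF, int)]   i.e. "T <: int"
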
def infer_callable_arguments_constraints(
template: CallableType | Parameters, actual: CallableType | Parameters, direction: int
) -> list[Constraint]:
"""Infer constraints between argument types of two callables.
This function essentially extracts four steps from are_parameters_compatible() in
subtypes.py that involve subtype checks between argument types. We keep the argument
matching logic, but ignore various strictness flags present there, and checks that
do not involve subtyping. Then in place of every subtype check we put an infer_constraints()
call for the same types.
"""
res = []
if direction == SUBTYPE_OF:
left, right = template, actual
else:
left, right = actual, template
left_star = left.var_arg()
left_star2 = left.kw_arg()
right_star = right.var_arg()
right_star2 = right.kw_arg()
# Numbering of steps below matches the one in are_parameters_compatible() for convenience.
# Phase 1a: compare star vs star arguments.
if left_star is not None and right_star is not None:
res.extend(infer_directed_arg_constraints(left_star.typ, right_star.typ, direction))
if left_star2 is not None and right_star2 is not None:
res.extend(infer_directed_arg_constraints(left_star2.typ, right_star2.typ, direction))
# Phase 1b: compare left args with corresponding non-star right arguments.
for right_arg in right.formal_arguments():
left_arg = mypy.typeops.callable_corresponding_argument(left, right_arg)
if left_arg is None:
continue
res.extend(infer_directed_arg_constraints(left_arg.typ, right_arg.typ, direction))
# Phase 1c: compare left args with right *args.
if right_star is not None:
right_by_position = right.try_synthesizing_arg_from_vararg(None)
assert right_by_position is not None
i = right_star.pos
assert i is not None
while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional():
left_by_position = left.argument_by_position(i)
assert left_by_position is not None
res.extend(
infer_directed_arg_constraints(
left_by_position.typ, right_by_position.typ, direction
)
)
i += 1
# Phase 1d: compare left args with right **kwargs.
if right_star2 is not None:
right_names = {name for name in right.arg_names if name is not None}
left_only_names = set()
for name, kind in zip(left.arg_names, left.arg_kinds):
if name is None or kind.is_star() or name in right_names:
continue
left_only_names.add(name)
right_by_name = right.try_synthesizing_arg_from_kwarg(None)
assert right_by_name is not None
for name in left_only_names:
left_by_name = left.argument_by_name(name)
assert left_by_name is not None
res.extend(
infer_directed_arg_constraints(left_by_name.typ, right_by_name.typ, direction)
)
return res