# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017-2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 rr- <rr-@sakuya.pl>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
# Copyright (c) 2020 Vilnis Termanis <vilnis.termanis@iotics.com>
# Copyright (c) 2020 Ram Rachum <ram@rachum.com>
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
# Copyright (c) 2021 Tushar Sadhwani <86737547+tushar-deepsource@users.noreply.github.com>
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
# Copyright (c) 2021 David Liu <david@cs.toronto.edu>
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
# Copyright (c) 2021 doranid <ddandd@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
- """this module contains a set of functions to handle python protocols for nodes
- where it makes sense.
- """
import collections
import itertools
import operator as operator_mod
import sys
from typing import Any, Generator, List, Optional, Union

from astroid import arguments, bases, decorators, helpers, nodes, util
from astroid.const import Context
from astroid.context import InferenceContext, copy_context
from astroid.exceptions import (
    AstroidIndexError,
    AstroidTypeError,
    AttributeInferenceError,
    InferenceError,
    NoDefault,
)
from astroid.nodes import node_classes

if sys.version_info >= (3, 8):
    from typing import Literal
else:
    from typing_extensions import Literal
raw_building = util.lazy_import("raw_building")
objects = util.lazy_import("objects")


def _reflected_name(name):
    return "__r" + name[2:]


def _augmented_name(name):
    return "__i" + name[2:]


_CONTEXTLIB_MGR = "contextlib.contextmanager"

BIN_OP_METHOD = {
    "+": "__add__",
    "-": "__sub__",
    "/": "__truediv__",
    "//": "__floordiv__",
    "*": "__mul__",
    "**": "__pow__",
    "%": "__mod__",
    "&": "__and__",
    "|": "__or__",
    "^": "__xor__",
    "<<": "__lshift__",
    ">>": "__rshift__",
    "@": "__matmul__",
}

REFLECTED_BIN_OP_METHOD = {
    key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items()
}
AUGMENTED_OP_METHOD = {
    key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items()
}

UNARY_OP_METHOD = {
    "+": "__pos__",
    "-": "__neg__",
    "~": "__invert__",
    "not": None,  # XXX not '__nonzero__'
}
_UNARY_OPERATORS = {
    "+": operator_mod.pos,
    "-": operator_mod.neg,
    "~": operator_mod.invert,
    "not": operator_mod.not_,
}


def _infer_unary_op(obj, op):
    func = _UNARY_OPERATORS[op]
    value = func(obj)
    return nodes.const_factory(value)


nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op)
nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op)
nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op)
nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op)
nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op)

# Binary operations

BIN_OP_IMPL = {
    "+": lambda a, b: a + b,
    "-": lambda a, b: a - b,
    "/": lambda a, b: a / b,
    "//": lambda a, b: a // b,
    "*": lambda a, b: a * b,
    "**": lambda a, b: a ** b,
    "%": lambda a, b: a % b,
    "&": lambda a, b: a & b,
    "|": lambda a, b: a | b,
    "^": lambda a, b: a ^ b,
    "<<": lambda a, b: a << b,
    ">>": lambda a, b: a >> b,
    "@": operator_mod.matmul,
}
for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
    BIN_OP_IMPL[_KEY + "="] = _IMPL


@decorators.yes_if_nothing_inferred
def const_infer_binary_op(self, opnode, operator, other, context, _):
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        try:
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception:  # pylint: disable=broad-except
                yield util.Uninferable
        except TypeError:
            yield not_implemented
    elif isinstance(self.value, str) and operator == "%":
        # TODO(cpopa): implement string interpolation later on.
        yield util.Uninferable
    else:
        yield not_implemented


nodes.Const.infer_binary_op = const_infer_binary_op
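
# Illustrative sketch of the protocol above, assuming astroid's public
# ``extract_node`` helper (kept as comments so nothing runs at import time):
#
#     import astroid
#     astroid.extract_node("1 + 2").inferred()    # -> [Const] with value 3
#     astroid.extract_node("'a' * 3").inferred()  # -> [Const] with value 'aaa'
#
# Operand pairs that raise TypeError at runtime yield Const(NotImplemented) here,
# which lets the caller fall back to the reflected method (see
# REFLECTED_BIN_OP_METHOD above).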


def _multiply_seq_by_int(self, opnode, other, context):
    node = self.__class__(parent=opnode)
    filtered_elts = (
        helpers.safe_infer(elt, context) or util.Uninferable
        for elt in self.elts
        if elt is not util.Uninferable
    )
    node.elts = list(filtered_elts) * other.value
    return node


def _filter_uninferable_nodes(elts, context):
    for elt in elts:
        if elt is util.Uninferable:
            yield nodes.Unknown()
        else:
            for inferred in elt.infer(context):
                if inferred is not util.Uninferable:
                    yield inferred
                else:
                    yield nodes.Unknown()


@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(self, opnode, operator, other, context, method):
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            )
        )
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        else:
            yield _multiply_seq_by_int(self, opnode, as_index, context)
    else:
        yield not_implemented


nodes.Tuple.infer_binary_op = tl_infer_binary_op
nodes.List.infer_binary_op = tl_infer_binary_op


@decorators.yes_if_nothing_inferred
def instance_class_infer_binary_op(self, opnode, operator, other, context, method):
    return method.infer_call_result(self, context)


bases.Instance.infer_binary_op = instance_class_infer_binary_op
nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op


# assignment ##################################################################

"""The assigned_stmts method is responsible for returning the *assigned* statement
(e.g. not inferred) according to the assignment type.

The `assign_path` argument is used to record the lhs path of the original node.
For instance, if we want the assigned statements for 'c' in 'a, (b, c)',
assign_path will be [1, 1] once we reach the Assign node.

The `context` argument is the current inference context, which should be given
to any intermediary inference necessary.
"""


def _resolve_looppart(parts, assign_path, context):
    """Recursive function to resolve multiple assignments on loops."""
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        if part is util.Uninferable:
            continue
        if not hasattr(part, "itered"):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue
        for stmt in itered:
            index_node = nodes.Const(index)
            try:
                assigned = stmt.getitem(index_node, context)
            except (AttributeError, AstroidTypeError, AstroidIndexError):
                continue
            if not assign_path:
                # we have fully resolved the assignment path,
                # don't infer the last part
                yield assigned
            elif assigned is util.Uninferable:
                break
            else:
                # we are not yet on the last part of the path;
                # search on each possibly inferred value
                try:
                    yield from _resolve_looppart(
                        assigned.infer(context), assign_path, context
                    )
                except InferenceError:
                    break


@decorators.raise_if_nothing_inferred
def for_assigned_stmts(
    self: Union[nodes.For, nodes.Comprehension],
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False):
        # Skip inferring of async code for now
        return dict(node=self, unknown=node, assign_path=assign_path, context=context)
    if assign_path is None:
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                yield from lst.elts
    else:
        yield from _resolve_looppart(self.iter.infer(context), assign_path, context)
    return dict(node=self, unknown=node, assign_path=assign_path, context=context)


nodes.For.assigned_stmts = for_assigned_stmts
nodes.Comprehension.assigned_stmts = for_assigned_stmts


def sequence_assigned_stmts(
    self: Union[nodes.Tuple, nodes.List],
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    if assign_path is None:
        assign_path = []
    try:
        index = self.elts.index(node)
    except ValueError as exc:
        raise InferenceError(
            "Tried to retrieve a node {node!r} which does not exist",
            node=self,
            assign_path=assign_path,
            context=context,
        ) from exc

    assign_path.insert(0, index)
    return self.parent.assigned_stmts(
        node=self, context=context, assign_path=assign_path
    )


nodes.Tuple.assigned_stmts = sequence_assigned_stmts
nodes.List.assigned_stmts = sequence_assigned_stmts


def assend_assigned_stmts(
    self: Union[nodes.AssignName, nodes.AssignAttr],
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    return self.parent.assigned_stmts(node=self, context=context)


nodes.AssignName.assigned_stmts = assend_assigned_stmts
nodes.AssignAttr.assigned_stmts = assend_assigned_stmts


def _arguments_infer_argname(self, name, context):
    # argument information may be missing, in which case we can't do anything
    # more
    if not (self.arguments or self.vararg or self.kwarg):
        yield util.Uninferable
        return

    functype = self.parent.type
    # first argument of instance/class method
    if (
        self.arguments
        and getattr(self.arguments[0], "name", None) == name
        and functype != "staticmethod"
    ):
        cls = self.parent.parent.scope()
        is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass"
        # If this is a metaclass, then the first argument will always
        # be the class, not an instance.
        if context.boundnode and isinstance(context.boundnode, bases.Instance):
            cls = context.boundnode._proxied
        if is_metaclass or functype == "classmethod":
            yield cls
            return
        if functype == "method":
            yield cls.instantiate_class()
            return

    if context and context.callcontext:
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
        if getattr(callee, "name", None) == self.parent.name:
            call_site = arguments.CallSite(context.callcontext, context.extra_context)
            yield from call_site.infer_argument(self.parent, name, context)
            return

    if name == self.vararg:
        vararg = nodes.const_factory(())
        vararg.parent = self
        if not self.arguments and self.parent.name == "__init__":
            cls = self.parent.parent.scope()
            vararg.elts = [cls.instantiate_class()]
        yield vararg
        return
    if name == self.kwarg:
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it, and then yield Uninferable to reflect
    # that we can't guess the given argument's value
    try:
        context = copy_context(context)
        yield from self.default_value(name).infer(context)
        yield util.Uninferable
    except NoDefault:
        yield util.Uninferable


def arguments_assigned_stmts(
    self: nodes.Arguments,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    if context.callcontext:
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
    else:
        callee = None
    if (
        context.callcontext
        and node
        and getattr(callee, "name", None) == node.frame(future=True).name
    ):
        # reset call context/name
        callcontext = context.callcontext
        context = copy_context(context)
        context.callcontext = None
        args = arguments.CallSite(callcontext, context=context)
        return args.infer_argument(self.parent, node.name, context)
    return _arguments_infer_argname(self, node.name, context)


nodes.Arguments.assigned_stmts = arguments_assigned_stmts
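
# Illustrative sketch, assuming astroid's public ``extract_node`` helper: with no
# call context available, a parameter falls back to its default value followed by
# Uninferable (see _arguments_infer_argname above).  Comments only:
#
#     import astroid
#     func = astroid.extract_node("def func(value=42):\n    return value\n")
#     func.body[0].value.inferred()    # -> roughly [Const(42), Uninferable]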


@decorators.raise_if_nothing_inferred
def assign_assigned_stmts(
    self: Union[nodes.AugAssign, nodes.Assign, nodes.AnnAssign],
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    if not assign_path:
        yield self.value
        return None
    yield from _resolve_assignment_parts(
        self.value.infer(context), assign_path, context
    )

    return dict(node=self, unknown=node, assign_path=assign_path, context=context)


def assign_annassigned_stmts(
    self: nodes.AnnAssign,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    for inferred in assign_assigned_stmts(self, node, context, assign_path):
        if inferred is None:
            yield util.Uninferable
        else:
            yield inferred


nodes.Assign.assigned_stmts = assign_assigned_stmts
nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts
nodes.AugAssign.assigned_stmts = assign_assigned_stmts


def _resolve_assignment_parts(parts, assign_path, context):
    """Recursive function to resolve multiple assignments."""
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        assigned = None
        if isinstance(part, nodes.Dict):
            # A dictionary in an iterating context
            try:
                assigned, _ = part.items[index]
            except IndexError:
                return
        elif hasattr(part, "getitem"):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            except (AstroidTypeError, AstroidIndexError):
                return

        if not assigned:
            return

        if not assign_path:
            # we have fully resolved the assignment path; don't infer the
            # last part
            yield assigned
        elif assigned is util.Uninferable:
            return
        else:
            # we are not yet on the last part of the path; search on each
            # possibly inferred value
            try:
                yield from _resolve_assignment_parts(
                    assigned.infer(context), assign_path, context
                )
            except InferenceError:
                return


@decorators.raise_if_nothing_inferred
def excepthandler_assigned_stmts(
    self: nodes.ExceptHandler,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    for assigned in node_classes.unpack_infer(self.type):
        if isinstance(assigned, nodes.ClassDef):
            assigned = objects.ExceptionInstance(assigned)

        yield assigned
    return dict(node=self, unknown=node, assign_path=assign_path, context=context)


nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts
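
# Illustrative sketch, assuming astroid's public ``extract_node`` helper: the
# handler target is inferred as an exception *instance* of the declared type
# (comments only):
#
#     import astroid
#     err = astroid.extract_node(
#         "try:\n    pass\nexcept ValueError as err:\n    err #@\n"
#     )
#     err.inferred()    # -> roughly [ExceptionInstance proxying ValueError]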


def _infer_context_manager(self, mgr, context):
    try:
        inferred = next(mgr.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=mgr) from e
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            raise InferenceError(
                "No decorators found on inferred generator %s", node=func
            )

        for decorator_node in func.decorators.nodes:
            decorator = next(decorator_node.infer(context=context), None)
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            raise InferenceError(node=func)
        try:
            yield next(inferred.infer_yield_types())
        except StopIteration as e:
            raise InferenceError(node=func) from e

    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr("__enter__", context=context))
        except (InferenceError, AttributeInferenceError, StopIteration) as exc:
            raise InferenceError(node=inferred) from exc
        if not isinstance(enter, bases.BoundMethod):
            raise InferenceError(node=enter)
        yield from enter.infer_call_result(self, context)
    else:
        raise InferenceError(node=mgr)


@decorators.raise_if_nothing_inferred
def with_assigned_stmts(
    self: nodes.With,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    """Infer names and other nodes from a *with* statement.

    This enables only inference for name binding in a *with* statement.
    For instance, in the following code, inferring ``ContextManager()`` will
    return the ``ContextManager`` class, not whatever ``__enter__`` returns.
    We are doing this intentionally, because we consider that the context
    manager result is whatever ``__enter__`` returns and that it is bound
    using the ``as`` keyword.

        class ContextManager(object):
            def __enter__(self):
                return 42
        with ContextManager() as f:
            pass

        # ContextManager().infer() will return ContextManager
        # f.infer() will return 42.

    Arguments:
        self: nodes.With
        node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    try:
        mgr = next(mgr for (mgr, vars) in self.items if vars == node)
    except StopIteration:
        return None
    if assign_path is None:
        yield from _infer_context_manager(self, mgr, context)
    else:
        for result in _infer_context_manager(self, mgr, context):
            # Walk the assign_path and get the item at the final index.
            obj = result
            for index in assign_path:
                if not hasattr(obj, "elts"):
                    raise InferenceError(
                        "Wrong type ({targets!r}) for {node!r} assignment",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    )
                try:
                    obj = obj.elts[index]
                except IndexError as exc:
                    raise InferenceError(
                        "Tried to infer a nonexistent target with index {index} "
                        "in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
                except TypeError as exc:
                    raise InferenceError(
                        "Tried to unpack a non-iterable value in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
            yield obj
    return dict(node=self, unknown=node, assign_path=assign_path, context=context)


nodes.With.assigned_stmts = with_assigned_stmts
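
# Illustrative sketch of the contextlib.contextmanager branch handled by
# _infer_context_manager above, assuming astroid's public ``extract_node``
# helper (comments only):
#
#     import astroid
#     value = astroid.extract_node(
#         "import contextlib\n"
#         "@contextlib.contextmanager\n"
#         "def manager():\n"
#         "    yield 42\n"
#         "with manager() as value:\n"
#         "    value #@\n"
#     )
#     value.inferred()    # -> roughly [Const] with value 42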


@decorators.raise_if_nothing_inferred
def named_expr_assigned_stmts(
    self: nodes.NamedExpr,
    node: node_classes.AssignedStmtsPossibleNode,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    """Infer names and other nodes from an assignment expression."""
    if self.target == node:
        yield from self.value.infer(context=context)
    else:
        raise InferenceError(
            "Cannot infer NamedExpr node {node!r}",
            node=self,
            assign_path=assign_path,
            context=context,
        )


nodes.NamedExpr.assigned_stmts = named_expr_assigned_stmts


@decorators.yes_if_nothing_inferred
def starred_assigned_stmts(
    self: nodes.Starred,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: Optional[InferenceContext] = None,
    assign_path: Optional[List[int]] = None,
) -> Any:
    """
    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    # pylint: disable=too-many-locals,too-many-statements
    def _determine_starred_iteration_lookups(starred, target, lookups):
        # Determine the lookups for the rhs of the iteration
        itered = target.itered()
        for index, element in enumerate(itered):
            if (
                isinstance(element, nodes.Starred)
                and element.value.name == starred.value.name
            ):
                lookups.append((index, len(itered)))
                break
            if isinstance(element, nodes.Tuple):
                lookups.append((index, len(element.itered())))
                _determine_starred_iteration_lookups(starred, element, lookups)

    stmt = self.statement(future=True)
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise InferenceError(
            "Statement {stmt!r} enclosing {node!r} must be an Assign or For node.",
            node=self,
            stmt=stmt,
            unknown=node,
            context=context,
        )

    if context is None:
        context = InferenceContext()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]

        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise InferenceError(
                "Too many starred arguments in the assignment targets {lhs!r}.",
                node=self,
                targets=lhs,
                unknown=node,
                context=context,
            )

        try:
            rhs = next(value.infer(context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return

        if rhs is util.Uninferable or not hasattr(rhs, "itered"):
            yield util.Uninferable
            return

        try:
            elts = collections.deque(rhs.itered())
        except TypeError:
            yield util.Uninferable
            return

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node. What will remain will
        # be the list of values which the Starred node will represent.
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.

        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                if not elts:
                    break
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    if not elts:
                        break
                    elts.pop()
                    continue

                # We're done unpacking.
                packed = nodes.List(
                    ctx=Context.Store,
                    parent=self,
                    lineno=lhs.lineno,
                    col_offset=lhs.col_offset,
                )
                packed.postinit(elts=list(elts))
                yield packed
                break

    if isinstance(stmt, nodes.For):
        try:
            inferred_iterable = next(stmt.iter.infer(context=context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if inferred_iterable is util.Uninferable or not hasattr(
            inferred_iterable, "itered"
        ):
            yield util.Uninferable
            return
        try:
            itered = inferred_iterable.itered()
        except TypeError:
            yield util.Uninferable
            return

        target = stmt.target

        if not isinstance(target, nodes.Tuple):
            raise InferenceError(
                "Could not make sense of this, the target must be a tuple",
                context=context,
            )

        lookups = []
        _determine_starred_iteration_lookups(self, target, lookups)
        if not lookups:
            raise InferenceError(
                "Could not make sense of this, needs at least a lookup", context=context
            )

        # Make the last lookup a slice, since that's what we want for a Starred node
        last_element_index, last_element_length = lookups[-1]
        is_starred_last = last_element_index == (last_element_length - 1)

        lookup_slice = slice(
            last_element_index,
            None if is_starred_last else (last_element_length - last_element_index),
        )
        lookups[-1] = lookup_slice

        for element in itered:

            # We probably want to infer the potential values *for each* element in an
            # iterable, but we can't infer a list of all values, when only a list of
            # per-step values is expected:
            #
            # for a, *b in [...]:
            #   b
            #
            # *b* should now point to just the elements at that particular iteration
            # step, which astroid can't know about.

            found_element = None
            for lookup in lookups:
                if not hasattr(element, "itered"):
                    break
                if not isinstance(lookup, slice):
                    # Grab just the index, not the whole length
                    lookup = lookup[0]
                try:
                    itered_inner_element = element.itered()
                    element = itered_inner_element[lookup]
                except IndexError:
                    break
                except TypeError:
                    # Most likely the itered() call failed, cannot make sense of this
                    yield util.Uninferable
                    return
                else:
                    found_element = element

            unpacked = nodes.List(
                ctx=Context.Store,
                parent=self,
                lineno=self.lineno,
                col_offset=self.col_offset,
            )
            unpacked.postinit(elts=found_element or [])
            yield unpacked
            return

        yield util.Uninferable


nodes.Starred.assigned_stmts = starred_assigned_stmts
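
# Illustrative sketch of the Assign branch above, assuming astroid's public
# ``extract_node`` helper (comments only):
#
#     import astroid
#     name_b = astroid.extract_node("a, *b, c = [1, 2, 3, 4]\nb")
#     packed = name_b.inferred()[0]          # a nodes.List built by the code above
#     [elt.value for elt in packed.elts]     # -> roughly [2, 3]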


@decorators.yes_if_nothing_inferred
def match_mapping_assigned_stmts(
    self: nodes.MatchMapping,
    node: nodes.AssignName,
    context: Optional[InferenceContext] = None,
    assign_path: Literal[None] = None,
) -> Generator[nodes.NodeNG, None, None]:
    """Return an empty generator (the bare ``return`` raises StopIteration), so the
    inferred value is Uninferable.
    """
    return
    yield


nodes.MatchMapping.assigned_stmts = match_mapping_assigned_stmts


@decorators.yes_if_nothing_inferred
def match_star_assigned_stmts(
    self: nodes.MatchStar,
    node: nodes.AssignName,
    context: Optional[InferenceContext] = None,
    assign_path: Literal[None] = None,
) -> Generator[nodes.NodeNG, None, None]:
    """Return an empty generator (the bare ``return`` raises StopIteration), so the
    inferred value is Uninferable.
    """
    return
    yield


nodes.MatchStar.assigned_stmts = match_star_assigned_stmts


@decorators.yes_if_nothing_inferred
def match_as_assigned_stmts(
    self: nodes.MatchAs,
    node: nodes.AssignName,
    context: Optional[InferenceContext] = None,
    assign_path: Literal[None] = None,
) -> Generator[nodes.NodeNG, None, None]:
    """Infer MatchAs as the Match subject if it's the only MatchCase pattern;
    otherwise the generator stays empty and the inferred value is Uninferable.
    """
    if (
        isinstance(self.parent, nodes.MatchCase)
        and isinstance(self.parent.parent, nodes.Match)
        and self.pattern is None
    ):
        yield self.parent.parent.subject


nodes.MatchAs.assigned_stmts = match_as_assigned_stmts
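
# Illustrative sketch, assuming Python 3.10+ syntax support and astroid's public
# ``extract_node`` helper: a bare capture pattern is inferred as the match
# subject (comments only):
#
#     import astroid
#     name = astroid.extract_node(
#         "match [1, 2]:\n"
#         "    case everything:\n"
#         "        everything #@\n"
#     )
#     name.inferred()    # -> roughly [the List node from the subject]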
|