diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..e84d4a0279 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1 @@ +add5f7b8eba427de9d39caae864bbc6dc37ef980 diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 4c2639b503..9420b7594d 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -24,6 +24,6 @@ jobs: ) ) steps: - - uses: tibdex/backport@2e217641d82d02ba0603f46b1aeedefb258890ac # v2.0.3 + - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4 with: github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3026d491d4..16f2e0ef71 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -24,10 +24,10 @@ jobs: timeout-minutes: 20 steps: - name: Check out code from GitHub - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v4.6.1 + uses: actions/setup-python@v4.7.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -39,7 +39,7 @@ jobs: 'requirements_full.txt', 'requirements_minimal.txt') }}" >> $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: venv key: >- @@ -59,7 +59,7 @@ jobs: hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT - name: Restore pre-commit environment id: cache-precommit - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ env.PRE_COMMIT_CACHE }} key: >- @@ -81,15 +81,15 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.8, 3.9, "3.10", "3.11"] + python-version: [3.8, 3.9, "3.10", "3.11", "3.12-dev"] outputs: python-key: ${{ steps.generate-python-key.outputs.key }} steps: - name: Check out code from GitHub - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v4.6.1 + uses: actions/setup-python@v4.7.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -106,7 +106,7 @@ jobs: 'requirements_full.txt', 'requirements_minimal.txt') }}" >> $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: venv key: >- @@ -125,7 +125,7 @@ jobs: . 
venv/bin/activate pytest --cov - name: Upload coverage artifact - uses: actions/upload-artifact@v3.1.2 + uses: actions/upload-artifact@v3.1.3 with: name: coverage-linux-${{ matrix.python-version }} path: .coverage @@ -138,17 +138,17 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.8, 3.9, "3.10", "3.11"] + python-version: [3.8, 3.9, "3.10", "3.11", "3.12-dev"] steps: - name: Set temp directory run: echo "TEMP=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV # Workaround to set correct temp directory on Windows # https://github.com/actions/virtual-environments/issues/712 - name: Check out code from GitHub - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v4.6.1 + uses: actions/setup-python@v4.7.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -160,7 +160,7 @@ jobs: 'requirements_full.txt', 'requirements_minimal.txt') }}" >> $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: venv key: >- @@ -179,7 +179,7 @@ jobs: . venv\\Scripts\\activate pytest --cov - name: Upload coverage artifact - uses: actions/upload-artifact@v3.1.2 + uses: actions/upload-artifact@v3.1.3 with: name: coverage-windows-${{ matrix.python-version }} path: .coverage @@ -195,10 +195,10 @@ jobs: python-version: ["pypy3.8", "pypy3.10"] steps: - name: Check out code from GitHub - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v4.6.1 + uses: actions/setup-python@v4.7.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -210,7 +210,7 @@ jobs: }}" >> $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: venv key: >- @@ -229,7 +229,7 @@ jobs: . venv/bin/activate pytest --cov - name: Upload coverage artifact - uses: actions/upload-artifact@v3.1.2 + uses: actions/upload-artifact@v3.1.3 with: name: coverage-pypy-${{ matrix.python-version }} path: .coverage @@ -241,10 +241,10 @@ jobs: needs: ["tests-linux", "tests-windows", "tests-pypy"] steps: - name: Check out code from GitHub - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 - name: Set up Python 3.11 id: python - uses: actions/setup-python@v4.6.1 + uses: actions/setup-python@v4.7.0 with: python-version: "3.11" check-latest: true diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 5dcae45a3d..8f5c7121c5 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -46,7 +46,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/release-tests.yml b/.github/workflows/release-tests.yml index eb1b1dd81f..d08f3c30d2 100644 --- a/.github/workflows/release-tests.yml +++ b/.github/workflows/release-tests.yml @@ -13,10 +13,10 @@ jobs: timeout-minutes: 5 steps: - name: Check out code from GitHub - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 - name: Set up Python 3.9 id: python - uses: actions/setup-python@v4.6.1 + uses: actions/setup-python@v4.7.0 with: # virtualenv 15.1.0 cannot be installed on Python 3.10+ python-version: 3.9 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6bbd405a89..03dd02b4f6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -20,10 +20,10 @@ jobs: url: https://pypi.org/project/astroid/ steps: - name: Check out code from Github - uses: actions/checkout@v3.5.3 + uses: actions/checkout@v4.0.0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v4.6.1 + uses: actions/setup-python@v4.7.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true diff --git a/.gitignore b/.gitignore index a5d0a6318d..b83e37f3c3 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,5 @@ astroid.egg-info/ .mypy_cache/ venv doc/_build/ +doc/api/base_nodes/ +doc/api/nodes/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 965b22fb80..432eeca87f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,8 +9,8 @@ repos: exclude: .github/|tests/testdata - id: end-of-file-fixer exclude: tests/testdata - - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.0.272" + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.0.290" hooks: - id: ruff exclude: tests/testdata @@ -23,7 +23,7 @@ repos: exclude: tests/testdata|setup.py types: [python] - repo: https://github.com/asottile/pyupgrade - rev: v3.7.0 + rev: v3.11.0 hooks: - id: pyupgrade exclude: tests/testdata @@ -34,7 +34,7 @@ repos: - id: black-disable-checker exclude: tests/test_nodes_lineno.py - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 23.9.1 hooks: - id: black args: [--safe, --quiet] @@ -54,7 +54,7 @@ repos: ] exclude: tests/testdata|conf.py - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.3.0 + rev: v1.5.1 hooks: - id: mypy name: mypy @@ -66,7 +66,7 @@ repos: additional_dependencies: ["types-typed-ast"] exclude: tests/testdata| # exclude everything, we're not ready - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.0-alpha.9-for-vscode + rev: v3.0.3 hooks: - id: prettier args: [--prose-wrap=always, --print-width=88] diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 0523baf3c2..30e108441b 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -132,6 +132,7 @@ Contributors - Neil Girdhar - Michał Masłowski - Mateusz Bysiek +- Marcelo Trylesinski - Leandro T. C. Melo - Konrad Weihmann - Kian Meng, Ang diff --git a/ChangeLog b/ChangeLog index 3f7a0a1f12..b117deafc6 100644 --- a/ChangeLog +++ b/ChangeLog @@ -6,10 +6,35 @@ What's New in astroid 3.0.0? ============================= Release date: TBA +* Add support for Python 3.12, including PEP 695 type parameter syntax. + + Closes #2201 + * Remove support for Python 3.7. Refs #2137 +* Following a deprecation period starting in astroid 2.7.0, the ``astroid.node_classes`` + and ``astroid.scoped_nodes`` modules have been removed in favor of ``astroid.nodes.node_classes`` + and ``astroid.nodes.scoped_nodes``. 
+ + Closes #1072 + +* Following a deprecation period starting in astroid 2.12.0, the ``astroid.mixins`` module + has been removed in favor of ``astroid.nodes._base_nodes`` (private). + + Refs #1633 + +* Return all existing arguments when calling ``Arguments.arguments()``. This also means ``find_argname`` will now + use the whole list of arguments for its search. + + Closes #2213 + +* Exclude class attributes from the ``__members__`` container of an ``Enum`` class when they are + ``nodes.AnnAssign`` nodes with no assigned value. + + Refs pylint-dev/pylint#7402 + * Remove ``@cached`` and ``@cachedproperty`` decorator (just use ``@cached_property`` from the stdlib). Closes #1780 @@ -20,6 +45,18 @@ Release date: TBA Closes pylint-dev/pylint#6535 +* Remove the ``inference`` module. Node inference methods are now in the module + defining the node, rather than being associated to the node afterward. + + Closes #679 + +* Move ``LookupMixIn`` to ``astroid.nodes._base_nodes`` and make it private. + +* Remove the shims for ``OperationError``, ``BinaryOperationError``, and ``UnaryOperationError`` + in ``exceptions``. They were moved to ``util`` in astroid 1.5.0. + +* Move ``safe_infer()`` from ``helpers`` to ``util``. This avoids some circular imports. + * Reduce file system access in ``ast_from_file()``. * Reduce time to ``import astroid`` by delaying ``astroid_bootstrapping()`` until @@ -40,6 +77,14 @@ Release date: TBA Closes pylint-dev/pylint#7464 Closes pylint-dev/pylint#8074 +* Use the global inference cache when inferring, even without an explicit + ``InferenceContext``. This is a significant performance improvement given how + often methods default to ``None`` for the context argument. (Linting ``astroid`` + itself now takes ~5% less time on Python 3.12; other projects requiring more + complex inference calculations will see greater speedups.) + + Refs #529 + * Fix interrupted ``InferenceContext`` call chains, thereby addressing performance problems when linting ``sqlalchemy``. @@ -66,7 +111,7 @@ Release date: TBA The breaking change allows us to better type and re-use the method within ``astroid``. * Improved signature of the ``__init__`` and ``__postinit__`` methods of most nodes. - This includes makes ``lineno``, ``col_offset``, ``end_lineno``, ``end_col_offset`` and ``parent`` + This includes making ``lineno``, ``col_offset``, ``end_lineno``, ``end_col_offset`` and ``parent`` required arguments for ``nodes.NodeNG`` and its subclasses. For most other nodes, arguments of their ``__postinit__`` methods have been made required to better represent how they would normally be constructed by the standard library ``ast`` module. @@ -110,7 +155,6 @@ Release date: TBA - ``nodes.Slice`` - ``nodes.Starred`` - ``objects.Super``, we also added the ``call`` parameter to its ``__init__`` method. - - ``nodes.TryExcept`` - ``nodes.Subscript`` - ``nodes.UnaryOp`` - ``nodes.While`` @@ -125,7 +169,7 @@ Release date: TBA * Remove dependency on ``wrapt``. * Remove dependency on ``lazy_object_proxy``. This includes the removal - of the assosicated ``lazy_import``, ``lazy_descriptor`` and ``proxy_alias`` utility functions. + of the associated ``lazy_import``, ``lazy_descriptor`` and ``proxy_alias`` utility functions. * ``CallSite._unpack_args`` and ``CallSite._unpack_keywords`` now use ``safe_infer()`` for better inference and fewer false positives. @@ -172,24 +216,62 @@ Release date: TBA Refs #2154 +* Add new ``nodes.Try`` to better match Python AST. 
Replaces the ``TryExcept`` + and ``TryFinally`` nodes which have been removed. + * Publicize ``NodeNG.repr_name()`` to facilitate finding a node's nice name. Refs pylint-dev/pylint#8598 -* Fix a regression in 2.12.0 where settings in AstroidManager would be ignored. - Most notably this addresses pylint-dev/pylint#7433. +* Fix false positives for ``no-member`` and ``invalid-name`` when using the ``_name_``, ``_value_`` and ``_ignore_`` sunders in Enums. + + Closes pylint-dev/pylint#9015 - Refs #2204 + +What's New in astroid 2.15.7? +============================= +Release date: 2023-09-23 + +* Fix a crash when inferring a ``typing.TypeVar`` call. + + Closes pylint-dev/pylint#8802 + +* Infer user-defined enum classes by checking if the class is a subtype of ``enum.Enum``. + + Closes pylint-dev/pylint#8897 + +* Fix inference of functions with ``@functools.lru_cache`` decorators without + parentheses. + + Closes pylint-dev/pylint#8868 + +* Make ``sys.argv`` uninferable because it never is. (It's impossible to infer + the value it will have outside of static analysis where it's our own value.) + + Refs pylint-dev/pylint#7710 What's New in astroid 2.15.6? ============================= -Release date: TBA +Release date: 2023-07-08 * Harden ``get_module_part()`` against ``"."``. Closes pylint-dev/pylint#8749 +* Allow ``AsStringVisitor`` to visit ``objects.PartialFunction``. + + Closes pylint-dev/pylint#8881 + +* Avoid expensive list/tuple multiplication operations that would result in ``MemoryError``. + + Closes pylint-dev/pylint#8748 + +* Fix a regression in 2.12.0 where settings in AstroidManager would be ignored. + Most notably this addresses pylint-dev/pylint#7433. + + Refs #2204 + What's New in astroid 2.15.5? ============================= @@ -273,6 +355,11 @@ Release date: 2023-03-06 Refs #1780 +* ``max_inferable_values`` can now be set on ``AstroidManager`` instances, e.g. ``astroid.MANAGER`` + besides just the ``AstroidManager`` class itself. + + Closes #2280 + * ``Astroid`` now retrieves the default values of keyword only arguments and sets them on ``Arguments.kw_defaults``. diff --git a/MANIFEST.in b/MANIFEST.in index 9561fb1061..5536e876f5 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,5 @@ include README.rst +include requirements*.txt +include tox.ini +recursive-include tests *.py +graft tests/testdata diff --git a/astroid/__init__.py b/astroid/__init__.py index f3c2c79018..4558d06b8d 100644 --- a/astroid/__init__.py +++ b/astroid/__init__.py @@ -16,7 +16,7 @@ Instance attributes are added by a builder object, which can either generate extended ast (let's call them astroid ;) by visiting an existent ast tree or by inspecting living -object. Methods are added by monkey patching ast classes. +object. Main modules are: @@ -32,21 +32,19 @@ import functools import tokenize -from importlib import import_module # isort: off -# We have an isort: off on '__version__' because of a circular import in nodes. +# We have an isort: off on 'astroid.nodes' because of a circular import. 
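A minimal caller-side sketch of the migrations described in the ChangeLog entries above; the module paths come from those entries, while the ``is_try`` helper is purely illustrative:

    # Pre-3.0 locations (now removed or relocated):
    #   from astroid import node_classes, scoped_nodes
    #   from astroid.helpers import safe_infer
    #   isinstance(node, (nodes.TryExcept, nodes.TryFinally))

    # astroid 3.0 equivalents:
    from astroid import nodes
    from astroid.nodes import node_classes, scoped_nodes  # relocated modules
    from astroid.util import safe_infer  # moved out of astroid.helpers

    def is_try(node: nodes.NodeNG) -> bool:
        # nodes.Try replaces the removed TryExcept/TryFinally nodes
        return isinstance(node, nodes.Try)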
from astroid.nodes import node_classes, scoped_nodes # isort: on -from astroid import inference, raw_building +from astroid import raw_building from astroid.__pkginfo__ import __version__, version -from astroid.astroid_manager import MANAGER from astroid.bases import BaseInstance, BoundMethod, Instance, UnboundMethod from astroid.brain.helpers import register_module_extender from astroid.builder import extract_node, parse -from astroid.const import BRAIN_MODULES_DIRECTORY, PY310_PLUS, Context +from astroid.const import PY310_PLUS, Context from astroid.exceptions import ( AstroidBuildingError, AstroidBuildingException, @@ -57,7 +55,6 @@ AstroidTypeError, AstroidValueError, AttributeInferenceError, - BinaryOperationError, DuplicateBasesError, InconsistentMroError, InferenceError, @@ -66,14 +63,12 @@ NameInferenceError, NoDefault, NotFoundError, - OperationError, ParentMissingError, ResolveError, StatementMissing, SuperArgumentTypeError, SuperError, TooManyLevelsError, - UnaryOperationError, UnresolvableName, UseInferenceDefault, ) @@ -86,6 +81,7 @@ # and we need astroid/scoped_nodes and astroid/node_classes to work. So # importing with a wildcard would clash with astroid/nodes/scoped_nodes # and astroid/nodes/node_classes. +from astroid.astroid_manager import MANAGER from astroid.nodes import ( CONST_CLS, AnnAssign, @@ -150,6 +146,7 @@ NamedExpr, NodeNG, Nonlocal, + ParamSpec, Pass, Raise, Return, @@ -158,10 +155,12 @@ Slice, Starred, Subscript, - TryExcept, - TryFinally, + Try, TryStar, Tuple, + TypeAlias, + TypeVar, + TypeVarTuple, UnaryOp, Unknown, While, @@ -186,8 +185,3 @@ and getattr(tokenize._compile, "__wrapped__", None) is None # type: ignore[attr-defined] ): tokenize._compile = functools.lru_cache(tokenize._compile) # type: ignore[attr-defined] - -# load brain plugins -for module in BRAIN_MODULES_DIRECTORY.iterdir(): - if module.suffix == ".py": - import_module(f"astroid.brain.{module.stem}") diff --git a/astroid/__pkginfo__.py b/astroid/__pkginfo__.py index 477466f62a..163cfa17bd 100644 --- a/astroid/__pkginfo__.py +++ b/astroid/__pkginfo__.py @@ -2,5 +2,5 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt -__version__ = "3.0.0a6-dev0" +__version__ = "3.0.0a10-dev0" version = __version__ diff --git a/astroid/arguments.py b/astroid/arguments.py index f016823a98..d2dca776d5 100644 --- a/astroid/arguments.py +++ b/astroid/arguments.py @@ -8,9 +8,8 @@ from astroid.bases import Instance from astroid.context import CallContext, InferenceContext from astroid.exceptions import InferenceError, NoDefault -from astroid.helpers import safe_infer from astroid.typing import InferenceResult -from astroid.util import Uninferable, UninferableBase +from astroid.util import Uninferable, UninferableBase, safe_infer class CallSite: @@ -91,7 +90,7 @@ def _unpack_keywords( keywords: list[tuple[str | None, nodes.NodeNG]], context: InferenceContext | None = None, ): - values = {} + values: dict[str | None, InferenceResult] = {} context = context or InferenceContext() context.extra_context = self.argument_context_map for name, value in keywords: @@ -182,7 +181,13 @@ def infer_argument( positional = self.positional_arguments[: len(funcnode.args.args)] vararg = self.positional_arguments[len(funcnode.args.args) :] - argindex = funcnode.args.find_argname(name)[0] + + # preserving previous behavior, when vararg and kwarg were not included in find_argname results + if name in [funcnode.args.vararg, 
funcnode.args.kwarg]: + argindex = None + else: + argindex = funcnode.args.find_argname(name)[0] + kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs} kwargs = { key: value diff --git a/astroid/astroid_manager.py b/astroid/astroid_manager.py index a7019fcdd0..3031057e14 100644 --- a/astroid/astroid_manager.py +++ b/astroid/astroid_manager.py @@ -12,6 +12,9 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from astroid.brain.helpers import register_all_brains from astroid.manager import AstroidManager MANAGER = AstroidManager() +# Register all brains after instantiating the singleton Manager +register_all_brains(MANAGER) diff --git a/astroid/bases.py b/astroid/bases.py index 2f756a615e..d3b6548052 100644 --- a/astroid/bases.py +++ b/astroid/bases.py @@ -10,9 +10,9 @@ import collections import collections.abc from collections.abc import Iterable, Iterator -from typing import TYPE_CHECKING, Any, ClassVar, Literal +from typing import TYPE_CHECKING, Any, Literal -from astroid import nodes +from astroid import decorators, nodes from astroid.const import PY310_PLUS from astroid.context import ( CallContext, @@ -28,12 +28,11 @@ ) from astroid.interpreter import objectmodel from astroid.typing import ( - InferBinaryOp, InferenceErrorInfo, InferenceResult, SuccessfulInferenceResult, ) -from astroid.util import Uninferable, UninferableBase +from astroid.util import Uninferable, UninferableBase, safe_infer if TYPE_CHECKING: from astroid.constraint import Constraint @@ -69,8 +68,6 @@ def _is_property( meth: nodes.FunctionDef | UnboundMethod, context: InferenceContext | None = None ) -> bool: - from astroid import helpers # pylint: disable=import-outside-toplevel - decoratornames = meth.decoratornames(context=context) if PROPERTIES.intersection(decoratornames): return True @@ -86,7 +83,7 @@ def _is_property( if not meth.decorators: return False for decorator in meth.decorators.nodes or (): - inferred = helpers.safe_infer(decorator, context=context) + inferred = safe_infer(decorator, context=context) if inferred is None or isinstance(inferred, UninferableBase): continue if isinstance(inferred, nodes.ClassDef): @@ -149,7 +146,7 @@ def infer( # type: ignore[return] def _infer_stmts( - stmts: Iterator[InferenceResult], + stmts: Iterable[InferenceResult], context: InferenceContext | None, frame: nodes.NodeNG | BaseInstance | None = None, ) -> collections.abc.Generator[InferenceResult, None, None]: @@ -243,7 +240,7 @@ def getattr( name: str, context: InferenceContext | None = None, lookupclass: bool = True, - ) -> list[SuccessfulInferenceResult]: + ) -> list[InferenceResult]: try: values = self._proxied.instance_attr(name, context) except AttributeInferenceError as exc: @@ -346,7 +343,16 @@ class Instance(BaseInstance): def __init__(self, proxied: nodes.ClassDef | None) -> None: super().__init__(proxied) - infer_binary_op: ClassVar[InferBinaryOp[Instance]] + @decorators.yes_if_nothing_inferred + def infer_binary_op( + self, + opnode: nodes.AugAssign | nodes.BinOp, + operator: str, + other: InferenceResult, + context: InferenceContext, + method: SuccessfulInferenceResult, + ) -> Generator[InferenceResult, None, None]: + return method.infer_call_result(self, context) def __repr__(self) -> str: return "<Instance of {}.{} at 0x{}>".format( diff --git a/astroid/brain/brain_argparse.py b/astroid/brain/brain_argparse.py index da6d5d202c..d0da4080a3 100644 --- a/astroid/brain/brain_argparse.py +++ b/astroid/brain/brain_argparse.py
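A small, assumption-based illustration of the ``Arguments.arguments()``/``find_argname`` change noted in the ChangeLog and special-cased in ``infer_argument`` above (the snippet is not taken from the test suite):

    import astroid

    func = astroid.extract_node("def f(a, b, *args, **kwargs): ...")
    # find_argname now searches the complete argument list, so the vararg and
    # kwarg names are found too; infer_argument keeps the old behaviour by
    # skipping the index lookup for them.
    index, assign_name = func.args.find_argname("b")   # ordinary positional argument
    vararg_hit = func.args.find_argname("args")        # previously no hit, now found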
@@ -46,6 +46,7 @@ def _looks_like_namespace(node) -> bool: return False -AstroidManager().register_transform( - nodes.Call, inference_tip(infer_namespace), _looks_like_namespace -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + nodes.Call, inference_tip(infer_namespace), _looks_like_namespace + ) diff --git a/astroid/brain/brain_attrs.py b/astroid/brain/brain_attrs.py index a2aae00b6a..b7a7eafe1b 100644 --- a/astroid/brain/brain_attrs.py +++ b/astroid/brain/brain_attrs.py @@ -8,10 +8,10 @@ Without this hook pylint reports unsupported-assignment-operation for attrs classes """ -from astroid.helpers import safe_infer from astroid.manager import AstroidManager from astroid.nodes.node_classes import AnnAssign, Assign, AssignName, Call, Unknown from astroid.nodes.scoped_nodes import ClassDef +from astroid.util import safe_infer ATTRIB_NAMES = frozenset( ( @@ -91,6 +91,7 @@ def attr_attributes_transform(node: ClassDef) -> None: node.instance_attrs[target.name] = [rhs_node] -AstroidManager().register_transform( - ClassDef, attr_attributes_transform, is_decorated_with_attrs -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + ClassDef, attr_attributes_transform, is_decorated_with_attrs + ) diff --git a/astroid/brain/brain_boto3.py b/astroid/brain/brain_boto3.py index f874c00734..55bca14fc8 100644 --- a/astroid/brain/brain_boto3.py +++ b/astroid/brain/brain_boto3.py @@ -26,6 +26,7 @@ def _looks_like_boto3_service_request(node) -> bool: return node.qname() == BOTO_SERVICE_FACTORY_QUALIFIED_NAME -AstroidManager().register_transform( - ClassDef, service_request_transform, _looks_like_boto3_service_request -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + ClassDef, service_request_transform, _looks_like_boto3_service_request + ) diff --git a/astroid/brain/brain_builtin_inference.py b/astroid/brain/brain_builtin_inference.py index 937a70f764..d53520dc46 100644 --- a/astroid/brain/brain_builtin_inference.py +++ b/astroid/brain/brain_builtin_inference.py @@ -7,9 +7,9 @@ from __future__ import annotations import itertools -from collections.abc import Callable, Iterator +from collections.abc import Callable, Iterable from functools import partial -from typing import Any, Type, Union, cast +from typing import TYPE_CHECKING, Any, Iterator, NoReturn, Type, Union, cast from astroid import arguments, helpers, inference_tip, nodes, objects, util from astroid.builder import AstroidBuilder @@ -29,6 +29,9 @@ SuccessfulInferenceResult, ) +if TYPE_CHECKING: + from astroid.bases import Instance + ContainerObjects = Union[ objects.FrozenSet, objects.DictItems, @@ -43,6 +46,13 @@ Type[frozenset], ] +CopyResult = Union[ + nodes.Dict, + nodes.List, + nodes.Set, + objects.FrozenSet, +] + OBJECT_DUNDER_NEW = "object.__new__" STR_CLASS = """ @@ -127,6 +137,10 @@ def ljust(self, width, fillchar=None): """ +def _use_default() -> NoReturn: # pragma: no cover + raise UseInferenceDefault() + + def _extend_string_class(class_node, code, rvalue): """Function to extend builtin str/unicode class.""" code = code.format(rvalue=rvalue) @@ -186,14 +200,18 @@ def _builtin_filter_predicate(node, builtin_name) -> bool: return False -def register_builtin_transform(transform, builtin_name) -> None: +def register_builtin_transform( + manager: AstroidManager, transform, builtin_name +) -> None: """Register a new transform function for the given *builtin_name*. The transform function must accept two parameters, a node and an optional context. 
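The ``register(manager)`` hook introduced for ``brain_argparse`` above recurs in every brain module touched by this diff, and is presumably what ``register_all_brains(MANAGER)`` in ``astroid_manager.py`` calls. A rough sketch of a brain module under the new convention, with a hypothetical predicate and transform:

    from astroid import nodes
    from astroid.manager import AstroidManager

    def _looks_like_my_factory(node: nodes.Call) -> bool:
        return isinstance(node.func, nodes.Name) and node.func.name == "my_factory"

    def _transform_my_factory(node: nodes.Call) -> None:
        ...  # mutate or annotate the node in place

    def register(manager: AstroidManager) -> None:
        # Registration happens against the manager passed in, rather than against
        # AstroidManager() as a module-level side effect of the import.
        manager.register_transform(
            nodes.Call, _transform_my_factory, _looks_like_my_factory
        )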
""" - def _transform_wrapper(node, context: InferenceContext | None = None): + def _transform_wrapper( + node: nodes.Call, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator: result = transform(node, context=context) if result: if not result.parent: @@ -210,7 +228,7 @@ def _transform_wrapper(node, context: InferenceContext | None = None): result.col_offset = node.col_offset return iter([result]) - AstroidManager().register_transform( + manager.register_transform( nodes.Call, inference_tip(_transform_wrapper), partial(_builtin_filter_predicate, builtin_name=builtin_name), @@ -257,10 +275,12 @@ def _container_generic_transform( iterables: tuple[type[nodes.BaseContainer] | type[ContainerObjects], ...], build_elts: BuiltContainers, ) -> nodes.BaseContainer | None: + elts: Iterable | str | bytes + if isinstance(arg, klass): return arg if isinstance(arg, iterables): - arg = cast(ContainerObjects, arg) + arg = cast(Union[nodes.BaseContainer, ContainerObjects], arg) if all(isinstance(elt, nodes.Const) for elt in arg.elts): elts = [cast(nodes.Const, elt).value for elt in arg.elts] else: @@ -269,7 +289,7 @@ def _container_generic_transform( for element in arg.elts: if not element: continue - inferred = helpers.safe_infer(element, context=context) + inferred = util.safe_infer(element, context=context) if inferred: evaluated_object = nodes.EvaluatedObject( original=element, value=inferred @@ -277,9 +297,10 @@ def _container_generic_transform( elts.append(evaluated_object) elif isinstance(arg, nodes.Dict): # Dicts need to have consts as strings already. - if not all(isinstance(elt[0], nodes.Const) for elt in arg.items): - raise UseInferenceDefault() - elts = [item[0].value for item in arg.items] + elts = [ + item[0].value if isinstance(item[0], nodes.Const) else _use_default() + for item in arg.items + ] elif isinstance(arg, nodes.Const) and isinstance(arg.value, (str, bytes)): elts = arg.value else: @@ -399,6 +420,7 @@ def infer_dict(node: nodes.Call, context: InferenceContext | None = None) -> nod args = call.positional_arguments kwargs = list(call.keyword_arguments.items()) + items: list[tuple[InferenceResult, InferenceResult]] if not args and not kwargs: # dict() return nodes.Dict( @@ -670,7 +692,7 @@ def infer_slice(node, context: InferenceContext | None = None): if not 0 < len(args) <= 3: raise UseInferenceDefault - infer_func = partial(helpers.safe_infer, context=context) + infer_func = partial(util.safe_infer, context=context) args = [infer_func(arg) for arg in args] for arg in args: if not arg or isinstance(arg, util.UninferableBase): @@ -695,7 +717,9 @@ def infer_slice(node, context: InferenceContext | None = None): return slice_node -def _infer_object__new__decorator(node, context: InferenceContext | None = None): +def _infer_object__new__decorator( + node: nodes.ClassDef, context: InferenceContext | None = None, **kwargs: Any +) -> Iterator[Instance]: # Instantiate class immediately # since that's what @object.__new__ does return iter((node.instantiate_class(),)) @@ -944,10 +968,10 @@ def _build_dict_with_elements(elements): if isinstance(inferred_values, nodes.Const) and isinstance( inferred_values.value, (str, bytes) ): - elements = [ + elements_with_value = [ (nodes.Const(element), default) for element in inferred_values.value ] - return _build_dict_with_elements(elements) + return _build_dict_with_elements(elements_with_value) if isinstance(inferred_values, nodes.Dict): keys = inferred_values.itered() for key in keys: @@ -964,7 +988,7 @@ def 
_build_dict_with_elements(elements): def _infer_copy_method( node: nodes.Call, context: InferenceContext | None = None, **kwargs: Any -) -> Iterator[InferenceResult]: +) -> Iterator[CopyResult]: assert isinstance(node.func, nodes.Attribute) inferred_orig, inferred_copy = itertools.tee(node.func.expr.infer(context=context)) if all( @@ -973,9 +997,9 @@ def _infer_copy_method( ) for inferred_node in inferred_orig ): - return inferred_copy + return cast(Iterator[CopyResult], inferred_copy) - raise UseInferenceDefault() + raise UseInferenceDefault def _is_str_format_call(node: nodes.Call) -> bool: @@ -984,7 +1008,7 @@ def _is_str_format_call(node: nodes.Call) -> bool: return False if isinstance(node.func.expr, nodes.Name): - value = helpers.safe_infer(node.func.expr) + value = util.safe_infer(node.func.expr) else: value = node.func.expr @@ -1000,7 +1024,7 @@ def _infer_str_format_call( value: nodes.Const if isinstance(node.func.expr, nodes.Name): - if not (inferred := helpers.safe_infer(node.func.expr)) or not isinstance( + if not (inferred := util.safe_infer(node.func.expr)) or not isinstance( inferred, nodes.Const ): return iter([util.Uninferable]) @@ -1015,7 +1039,7 @@ def _infer_str_format_call( # Get the positional arguments passed inferred_positional: list[nodes.Const] = [] for i in call.positional_arguments: - one_inferred = helpers.safe_infer(i, context) + one_inferred = util.safe_infer(i, context) if not isinstance(one_inferred, nodes.Const): return iter([util.Uninferable]) inferred_positional.append(one_inferred) @@ -1025,7 +1049,7 @@ def _infer_str_format_call( # Get the keyword arguments passed inferred_keyword: dict[str, nodes.Const] = {} for k, v in call.keyword_arguments.items(): - one_inferred = helpers.safe_infer(v, context) + one_inferred = util.safe_infer(v, context) if not isinstance(one_inferred, nodes.Const): return iter([util.Uninferable]) inferred_keyword[k] = one_inferred @@ -1044,42 +1068,44 @@ def _infer_str_format_call( return iter([nodes.const_factory(formatted_string)]) -# Builtins inference -register_builtin_transform(infer_bool, "bool") -register_builtin_transform(infer_super, "super") -register_builtin_transform(infer_callable, "callable") -register_builtin_transform(infer_property, "property") -register_builtin_transform(infer_getattr, "getattr") -register_builtin_transform(infer_hasattr, "hasattr") -register_builtin_transform(infer_tuple, "tuple") -register_builtin_transform(infer_set, "set") -register_builtin_transform(infer_list, "list") -register_builtin_transform(infer_dict, "dict") -register_builtin_transform(infer_frozenset, "frozenset") -register_builtin_transform(infer_type, "type") -register_builtin_transform(infer_slice, "slice") -register_builtin_transform(infer_isinstance, "isinstance") -register_builtin_transform(infer_issubclass, "issubclass") -register_builtin_transform(infer_len, "len") -register_builtin_transform(infer_str, "str") -register_builtin_transform(infer_int, "int") -register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys") - - -# Infer object.__new__ calls -AstroidManager().register_transform( - nodes.ClassDef, - inference_tip(_infer_object__new__decorator), - _infer_object__new__decorator_check, -) +def register(manager: AstroidManager) -> None: + # Builtins inference + register_builtin_transform(manager, infer_bool, "bool") + register_builtin_transform(manager, infer_super, "super") + register_builtin_transform(manager, infer_callable, "callable") + register_builtin_transform(manager, infer_property, "property") + 
register_builtin_transform(manager, infer_getattr, "getattr") + register_builtin_transform(manager, infer_hasattr, "hasattr") + register_builtin_transform(manager, infer_tuple, "tuple") + register_builtin_transform(manager, infer_set, "set") + register_builtin_transform(manager, infer_list, "list") + register_builtin_transform(manager, infer_dict, "dict") + register_builtin_transform(manager, infer_frozenset, "frozenset") + register_builtin_transform(manager, infer_type, "type") + register_builtin_transform(manager, infer_slice, "slice") + register_builtin_transform(manager, infer_isinstance, "isinstance") + register_builtin_transform(manager, infer_issubclass, "issubclass") + register_builtin_transform(manager, infer_len, "len") + register_builtin_transform(manager, infer_str, "str") + register_builtin_transform(manager, infer_int, "int") + register_builtin_transform(manager, infer_dict_fromkeys, "dict.fromkeys") + + # Infer object.__new__ calls + manager.register_transform( + nodes.ClassDef, + inference_tip(_infer_object__new__decorator), + _infer_object__new__decorator_check, + ) -AstroidManager().register_transform( - nodes.Call, - inference_tip(_infer_copy_method), - lambda node: isinstance(node.func, nodes.Attribute) - and node.func.attrname == "copy", -) + manager.register_transform( + nodes.Call, + inference_tip(_infer_copy_method), + lambda node: isinstance(node.func, nodes.Attribute) + and node.func.attrname == "copy", + ) -AstroidManager().register_transform( - nodes.Call, inference_tip(_infer_str_format_call), _is_str_format_call -) + manager.register_transform( + nodes.Call, + inference_tip(_infer_str_format_call), + _is_str_format_call, + ) diff --git a/astroid/brain/brain_collections.py b/astroid/brain/brain_collections.py index 8de6d2414c..8f1fd6c306 100644 --- a/astroid/brain/brain_collections.py +++ b/astroid/brain/brain_collections.py @@ -81,9 +81,6 @@ def __class_getitem__(cls, item): return cls""" return base_ordered_dict_class -register_module_extender(AstroidManager(), "collections", _collections_transform) - - def _looks_like_subscriptable(node: ClassDef) -> bool: """ Returns True if the node corresponds to a ClassDef of the Collections.abc module @@ -116,11 +113,14 @@ def easy_class_getitem_inference(node, context: InferenceContext | None = None): node.locals["__class_getitem__"] = [func_to_add] -if PY39_PLUS: - # Starting with Python39 some objects of the collection module are subscriptable - # thanks to the __class_getitem__ method but the way it is implemented in - # _collection_abc makes it difficult to infer. (We would have to handle AssignName inference in the - # getitem method of the ClassDef class) Instead we put here a mock of the __class_getitem__ method - AstroidManager().register_transform( - ClassDef, easy_class_getitem_inference, _looks_like_subscriptable - ) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "collections", _collections_transform) + + if PY39_PLUS: + # Starting with Python39 some objects of the collection module are subscriptable + # thanks to the __class_getitem__ method but the way it is implemented in + # _collection_abc makes it difficult to infer. 
(We would have to handle AssignName inference in the + # getitem method of the ClassDef class) Instead we put here a mock of the __class_getitem__ method + manager.register_transform( + ClassDef, easy_class_getitem_inference, _looks_like_subscriptable + ) diff --git a/astroid/brain/brain_crypt.py b/astroid/brain/brain_crypt.py index 6b10b821e6..2a6abbd7ca 100644 --- a/astroid/brain/brain_crypt.py +++ b/astroid/brain/brain_crypt.py @@ -22,4 +22,5 @@ def _re_transform(): ) -register_module_extender(AstroidManager(), "crypt", _re_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "crypt", _re_transform) diff --git a/astroid/brain/brain_ctypes.py b/astroid/brain/brain_ctypes.py index f3d89c55db..863ea1874a 100644 --- a/astroid/brain/brain_ctypes.py +++ b/astroid/brain/brain_ctypes.py @@ -79,6 +79,7 @@ def __init__(self, value): return parse("\n".join(src)) -if not hasattr(sys, "pypy_version_info"): - # No need of this module in pypy where everything is written in python - register_module_extender(AstroidManager(), "ctypes", enrich_ctypes_redefined_types) +def register(manager: AstroidManager) -> None: + if not hasattr(sys, "pypy_version_info"): + # No need of this module in pypy where everything is written in python + register_module_extender(manager, "ctypes", enrich_ctypes_redefined_types) diff --git a/astroid/brain/brain_curses.py b/astroid/brain/brain_curses.py index b617f8627d..f06c52f979 100644 --- a/astroid/brain/brain_curses.py +++ b/astroid/brain/brain_curses.py @@ -180,4 +180,5 @@ def _curses_transform(): ) -register_module_extender(AstroidManager(), "curses", _curses_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "curses", _curses_transform) diff --git a/astroid/brain/brain_dataclasses.py b/astroid/brain/brain_dataclasses.py index f37c09a628..88a4385fda 100644 --- a/astroid/brain/brain_dataclasses.py +++ b/astroid/brain/brain_dataclasses.py @@ -17,14 +17,14 @@ from collections.abc import Iterator from typing import Literal, Tuple, Union -from astroid import bases, context, helpers, nodes +from astroid import bases, context, nodes from astroid.builder import parse from astroid.const import PY39_PLUS, PY310_PLUS from astroid.exceptions import AstroidSyntaxError, InferenceError, UseInferenceDefault from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager from astroid.typing import InferenceResult -from astroid.util import Uninferable, UninferableBase +from astroid.util import Uninferable, UninferableBase, safe_infer _FieldDefaultReturn = Union[ None, @@ -561,7 +561,7 @@ def _is_keyword_only_sentinel(node: nodes.NodeNG) -> bool: """Return True if node is the KW_ONLY sentinel.""" if not PY310_PLUS: return False - inferred = helpers.safe_infer(node) + inferred = safe_infer(node) return ( isinstance(inferred, bases.Instance) and inferred.qname() == "dataclasses._KW_ONLY_TYPE" @@ -617,18 +617,19 @@ def _infer_instance_from_annotation( yield klass.instantiate_class() -AstroidManager().register_transform( - nodes.ClassDef, dataclass_transform, is_decorated_with_dataclass -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + nodes.ClassDef, dataclass_transform, is_decorated_with_dataclass + ) -AstroidManager().register_transform( - nodes.Call, - inference_tip(infer_dataclass_field_call, raise_on_overwrite=True), - _looks_like_dataclass_field_call, -) + manager.register_transform( + nodes.Call, + inference_tip(infer_dataclass_field_call, 
raise_on_overwrite=True), + _looks_like_dataclass_field_call, + ) -AstroidManager().register_transform( - nodes.Unknown, - inference_tip(infer_dataclass_attribute, raise_on_overwrite=True), - _looks_like_dataclass_attribute, -) + manager.register_transform( + nodes.Unknown, + inference_tip(infer_dataclass_attribute, raise_on_overwrite=True), + _looks_like_dataclass_attribute, + ) diff --git a/astroid/brain/brain_datetime.py b/astroid/brain/brain_datetime.py new file mode 100644 index 0000000000..06b011ce49 --- /dev/null +++ b/astroid/brain/brain_datetime.py @@ -0,0 +1,19 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt + +from astroid.brain.helpers import register_module_extender +from astroid.builder import AstroidBuilder +from astroid.const import PY312_PLUS +from astroid.manager import AstroidManager + + +def datetime_transform(): + """The datetime module was C-accelerated in Python 3.12, so use the + Python source.""" + return AstroidBuilder(AstroidManager()).string_build("from _pydatetime import *") + + +def register(manager: AstroidManager) -> None: + if PY312_PLUS: + register_module_extender(manager, "datetime", datetime_transform) diff --git a/astroid/brain/brain_dateutil.py b/astroid/brain/brain_dateutil.py index a1db7fc95c..3630639b0a 100644 --- a/astroid/brain/brain_dateutil.py +++ b/astroid/brain/brain_dateutil.py @@ -23,4 +23,5 @@ def parse(timestr, parserinfo=None, **kwargs): ) -register_module_extender(AstroidManager(), "dateutil.parser", dateutil_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "dateutil.parser", dateutil_transform) diff --git a/astroid/brain/brain_fstrings.py b/astroid/brain/brain_fstrings.py index 935b31a71a..262a27d259 100644 --- a/astroid/brain/brain_fstrings.py +++ b/astroid/brain/brain_fstrings.py @@ -68,4 +68,5 @@ def _transform_formatted_value( # pylint: disable=inconsistent-return-statement # The problem is that FormattedValue.value, which is a Name node, # has wrong line numbers, usually 1. This creates problems for pylint, # which expects correct line numbers for things such as message control. 
-AstroidManager().register_transform(nodes.FormattedValue, _transform_formatted_value) +def register(manager: AstroidManager) -> None: + manager.register_transform(nodes.FormattedValue, _transform_formatted_value) diff --git a/astroid/brain/brain_functools.py b/astroid/brain/brain_functools.py index ded1da3a85..2adf2b604f 100644 --- a/astroid/brain/brain_functools.py +++ b/astroid/brain/brain_functools.py @@ -10,7 +10,7 @@ from functools import partial from itertools import chain -from astroid import BoundMethod, arguments, extract_node, helpers, nodes, objects +from astroid import BoundMethod, arguments, extract_node, nodes, objects from astroid.context import InferenceContext from astroid.exceptions import InferenceError, UseInferenceDefault from astroid.inference_tip import inference_tip @@ -19,7 +19,7 @@ from astroid.nodes.node_classes import AssignName, Attribute, Call, Name from astroid.nodes.scoped_nodes import FunctionDef from astroid.typing import InferenceResult, SuccessfulInferenceResult -from astroid.util import UninferableBase +from astroid.util import UninferableBase, safe_infer LRU_CACHE = "functools.lru_cache" @@ -50,7 +50,7 @@ def infer_call_result( caller: SuccessfulInferenceResult | None, context: InferenceContext | None = None, ) -> Iterator[InferenceResult]: - res = helpers.safe_infer(cache_info) + res = safe_infer(cache_info) assert res is not None yield res @@ -134,15 +134,17 @@ def _looks_like_lru_cache(node) -> bool: if not node.decorators: return False for decorator in node.decorators.nodes: - if not isinstance(decorator, Call): + if not isinstance(decorator, (Attribute, Call)): continue if _looks_like_functools_member(decorator, "lru_cache"): return True return False -def _looks_like_functools_member(node, member) -> bool: - """Check if the given Call node is a functools.partial call.""" +def _looks_like_functools_member(node: Attribute | Call, member: str) -> bool: + """Check if the given Call node is the wanted member of functools.""" + if isinstance(node, Attribute): + return node.attrname == member if isinstance(node.func, Name): return node.func.name == member if isinstance(node.func, Attribute): @@ -157,13 +159,11 @@ def _looks_like_functools_member(node, member) -> bool: _looks_like_partial = partial(_looks_like_functools_member, member="partial") -AstroidManager().register_transform( - FunctionDef, _transform_lru_cache, _looks_like_lru_cache -) +def register(manager: AstroidManager) -> None: + manager.register_transform(FunctionDef, _transform_lru_cache, _looks_like_lru_cache) - -AstroidManager().register_transform( - Call, - inference_tip(_functools_partial_inference), - _looks_like_partial, -) + manager.register_transform( + Call, + inference_tip(_functools_partial_inference), + _looks_like_partial, + ) diff --git a/astroid/brain/brain_gi.py b/astroid/brain/brain_gi.py index 66a034841d..4ebbdde2ab 100644 --- a/astroid/brain/brain_gi.py +++ b/astroid/brain/brain_gi.py @@ -243,7 +243,8 @@ def _register_require_version(node): return node -AstroidManager().register_failed_import_hook(_import_gi_module) -AstroidManager().register_transform( - nodes.Call, _register_require_version, _looks_like_require_version -) +def register(manager: AstroidManager) -> None: + manager.register_failed_import_hook(_import_gi_module) + manager.register_transform( + nodes.Call, _register_require_version, _looks_like_require_version + ) diff --git a/astroid/brain/brain_hashlib.py b/astroid/brain/brain_hashlib.py index 858251bef2..ae0632a901 100644 --- 
a/astroid/brain/brain_hashlib.py +++ b/astroid/brain/brain_hashlib.py @@ -93,4 +93,5 @@ def digest_size(self): return parse(classes) -register_module_extender(AstroidManager(), "hashlib", _hashlib_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "hashlib", _hashlib_transform) diff --git a/astroid/brain/brain_http.py b/astroid/brain/brain_http.py index 0052124e4c..f34f381df8 100644 --- a/astroid/brain/brain_http.py +++ b/astroid/brain/brain_http.py @@ -208,5 +208,6 @@ def _http_client_transform(): ) -register_module_extender(AstroidManager(), "http", _http_transform) -register_module_extender(AstroidManager(), "http.client", _http_client_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "http", _http_transform) + register_module_extender(manager, "http.client", _http_client_transform) diff --git a/astroid/brain/brain_hypothesis.py b/astroid/brain/brain_hypothesis.py index ce49d652f2..6180520f30 100644 --- a/astroid/brain/brain_hypothesis.py +++ b/astroid/brain/brain_hypothesis.py @@ -47,8 +47,9 @@ def remove_draw_parameter_from_composite_strategy(node): return node -AstroidManager().register_transform( - node_class=FunctionDef, - transform=remove_draw_parameter_from_composite_strategy, - predicate=is_decorated_with_st_composite, -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + node_class=FunctionDef, + transform=remove_draw_parameter_from_composite_strategy, + predicate=is_decorated_with_st_composite, + ) diff --git a/astroid/brain/brain_io.py b/astroid/brain/brain_io.py index 80fd18edf4..ab6e607377 100644 --- a/astroid/brain/brain_io.py +++ b/astroid/brain/brain_io.py @@ -35,9 +35,10 @@ def _transform_buffered(node): return _generic_io_transform(node, name="raw", cls=FileIO) -AstroidManager().register_transform( - ClassDef, _transform_buffered, lambda node: node.name in BUFFERED -) -AstroidManager().register_transform( - ClassDef, _transform_text_io_wrapper, lambda node: node.name == TextIOWrapper -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + ClassDef, _transform_buffered, lambda node: node.name in BUFFERED + ) + manager.register_transform( + ClassDef, _transform_text_io_wrapper, lambda node: node.name == TextIOWrapper + ) diff --git a/astroid/brain/brain_mechanize.py b/astroid/brain/brain_mechanize.py index 2ea223fb80..0f0d0193bd 100644 --- a/astroid/brain/brain_mechanize.py +++ b/astroid/brain/brain_mechanize.py @@ -120,4 +120,5 @@ def visit_response(self, response, request=None): ) -register_module_extender(AstroidManager(), "mechanize", mechanize_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "mechanize", mechanize_transform) diff --git a/astroid/brain/brain_multiprocessing.py b/astroid/brain/brain_multiprocessing.py index 292295f8bf..e6413b07c1 100644 --- a/astroid/brain/brain_multiprocessing.py +++ b/astroid/brain/brain_multiprocessing.py @@ -99,9 +99,8 @@ def shutdown(self): ) -register_module_extender( - AstroidManager(), "multiprocessing.managers", _multiprocessing_managers_transform -) -register_module_extender( - AstroidManager(), "multiprocessing", _multiprocessing_transform -) +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "multiprocessing.managers", _multiprocessing_managers_transform + ) + register_module_extender(manager, "multiprocessing", _multiprocessing_transform) diff --git a/astroid/brain/brain_namedtuple_enum.py 
b/astroid/brain/brain_namedtuple_enum.py index 1fd64fe629..71091d8872 100644 --- a/astroid/brain/brain_namedtuple_enum.py +++ b/astroid/brain/brain_namedtuple_enum.py @@ -20,19 +20,10 @@ AstroidTypeError, AstroidValueError, InferenceError, - MroError, UseInferenceDefault, ) from astroid.manager import AstroidManager -ENUM_BASE_NAMES = { - "Enum", - "IntEnum", - "enum.Enum", - "enum.IntEnum", - "IntFlag", - "enum.IntFlag", -} ENUM_QNAME: Final[str] = "enum.Enum" TYPING_NAMEDTUPLE_QUALIFIED: Final = { "typing.NamedTuple", @@ -403,7 +394,10 @@ def infer_enum_class(node: nodes.ClassDef) -> nodes.ClassDef: dunder_members = {} target_names = set() for local, values in node.locals.items(): - if any(not isinstance(value, nodes.AssignName) for value in values): + if ( + any(not isinstance(value, nodes.AssignName) for value in values) + or local == "_ignore_" + ): continue stmt = values[0].statement() @@ -440,8 +434,14 @@ class {name}({types}): def value(self): return {return_value} @property + def _value_(self): + return {return_value} + @property def name(self): return "{name}" + @property + def _name_(self): + return "{name}" """.format( name=target.name, types=", ".join(node.basenames), @@ -462,6 +462,8 @@ def name(self): for method in node.mymethods(): fake.locals[method.name] = [method] new_targets.append(fake.instantiate_class()) + if stmt.value is None: + continue dunder_members[local] = fake node.locals[local] = new_targets @@ -642,34 +644,30 @@ def _get_namedtuple_fields(node: nodes.Call) -> str: def _is_enum_subclass(cls: astroid.ClassDef) -> bool: """Return whether cls is a subclass of an Enum.""" - try: - return any( - klass.name in ENUM_BASE_NAMES - and getattr(klass.root(), "name", None) == "enum" - for klass in cls.mro() - ) - except MroError: - return False + return cls.is_subtype_of("enum.Enum") -AstroidManager().register_transform( - nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple -) -AstroidManager().register_transform( - nodes.Call, inference_tip(infer_enum), _looks_like_enum -) -AstroidManager().register_transform( - nodes.ClassDef, infer_enum_class, predicate=_is_enum_subclass -) -AstroidManager().register_transform( - nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base -) -AstroidManager().register_transform( - nodes.FunctionDef, - inference_tip(infer_typing_namedtuple_function), - lambda node: node.name == "NamedTuple" - and getattr(node.root(), "name", None) == "typing", -) -AstroidManager().register_transform( - nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple + ) + manager.register_transform(nodes.Call, inference_tip(infer_enum), _looks_like_enum) + manager.register_transform( + nodes.ClassDef, infer_enum_class, predicate=_is_enum_subclass + ) + manager.register_transform( + nodes.ClassDef, + inference_tip(infer_typing_namedtuple_class), + _has_namedtuple_base, + ) + manager.register_transform( + nodes.FunctionDef, + inference_tip(infer_typing_namedtuple_function), + lambda node: node.name == "NamedTuple" + and getattr(node.root(), "name", None) == "typing", + ) + manager.register_transform( + nodes.Call, + inference_tip(infer_typing_namedtuple), + _looks_like_typing_namedtuple, + ) diff --git a/astroid/brain/brain_nose.py b/astroid/brain/brain_nose.py index 83078fa817..742418f2d5 100644 --- a/astroid/brain/brain_nose.py +++ 
b/astroid/brain/brain_nose.py @@ -70,9 +70,10 @@ def _nose_tools_trivial_transform(): return stub -register_module_extender( - AstroidManager(), "nose.tools.trivial", _nose_tools_trivial_transform -) -AstroidManager().register_transform( - Module, _nose_tools_transform, lambda n: n.name == "nose.tools" -) +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "nose.tools.trivial", _nose_tools_trivial_transform + ) + manager.register_transform( + Module, _nose_tools_transform, lambda n: n.name == "nose.tools" + ) diff --git a/astroid/brain/brain_numpy_core_einsumfunc.py b/astroid/brain/brain_numpy_core_einsumfunc.py index d916947cba..b72369cb81 100644 --- a/astroid/brain/brain_numpy_core_einsumfunc.py +++ b/astroid/brain/brain_numpy_core_einsumfunc.py @@ -22,6 +22,7 @@ def einsum(*operands, out=None, optimize=False, **kwargs): ) -register_module_extender( - AstroidManager(), "numpy.core.einsumfunc", numpy_core_einsumfunc_transform -) +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "numpy.core.einsumfunc", numpy_core_einsumfunc_transform + ) diff --git a/astroid/brain/brain_numpy_core_fromnumeric.py b/astroid/brain/brain_numpy_core_fromnumeric.py index 13a9e56a3f..c6be20b6ea 100644 --- a/astroid/brain/brain_numpy_core_fromnumeric.py +++ b/astroid/brain/brain_numpy_core_fromnumeric.py @@ -17,6 +17,7 @@ def sum(a, axis=None, dtype=None, out=None, keepdims=None, initial=None): ) -register_module_extender( - AstroidManager(), "numpy.core.fromnumeric", numpy_core_fromnumeric_transform -) +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "numpy.core.fromnumeric", numpy_core_fromnumeric_transform + ) diff --git a/astroid/brain/brain_numpy_core_function_base.py b/astroid/brain/brain_numpy_core_function_base.py index f69826d55e..17e1ad11d2 100644 --- a/astroid/brain/brain_numpy_core_function_base.py +++ b/astroid/brain/brain_numpy_core_function_base.py @@ -23,10 +23,12 @@ return numpy.ndarray([0, 0])""", } -for func_name, func_src in METHODS_TO_BE_INFERRED.items(): - inference_function = functools.partial(infer_numpy_member, func_src) - AstroidManager().register_transform( - Attribute, - inference_tip(inference_function), - functools.partial(attribute_looks_like_numpy_member, func_name), - ) + +def register(manager: AstroidManager) -> None: + for func_name, func_src in METHODS_TO_BE_INFERRED.items(): + inference_function = functools.partial(infer_numpy_member, func_src) + manager.register_transform( + Attribute, + inference_tip(inference_function), + functools.partial(attribute_looks_like_numpy_member, func_name), + ) diff --git a/astroid/brain/brain_numpy_core_multiarray.py b/astroid/brain/brain_numpy_core_multiarray.py index e9c7bacfce..404e21cf1b 100644 --- a/astroid/brain/brain_numpy_core_multiarray.py +++ b/astroid/brain/brain_numpy_core_multiarray.py @@ -31,11 +31,6 @@ def vdot(a, b): ) -register_module_extender( - AstroidManager(), "numpy.core.multiarray", numpy_core_multiarray_transform -) - - METHODS_TO_BE_INFERRED = { "array": """def array(object, dtype=None, copy=True, order='K', subok=False, ndmin=0): return numpy.ndarray([0, 0])""", @@ -90,15 +85,21 @@ def vdot(a, b): return numpy.ndarray([0, 0])""", } -for method_name, function_src in METHODS_TO_BE_INFERRED.items(): - inference_function = functools.partial(infer_numpy_member, function_src) - AstroidManager().register_transform( - Attribute, - inference_tip(inference_function), - functools.partial(attribute_looks_like_numpy_member, 
method_name), - ) - AstroidManager().register_transform( - Name, - inference_tip(inference_function), - functools.partial(name_looks_like_numpy_member, method_name), + +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "numpy.core.multiarray", numpy_core_multiarray_transform ) + + for method_name, function_src in METHODS_TO_BE_INFERRED.items(): + inference_function = functools.partial(infer_numpy_member, function_src) + manager.register_transform( + Attribute, + inference_tip(inference_function), + functools.partial(attribute_looks_like_numpy_member, method_name), + ) + manager.register_transform( + Name, + inference_tip(inference_function), + functools.partial(name_looks_like_numpy_member, method_name), + ) diff --git a/astroid/brain/brain_numpy_core_numeric.py b/astroid/brain/brain_numpy_core_numeric.py index 6fd23a857f..7149c85daf 100644 --- a/astroid/brain/brain_numpy_core_numeric.py +++ b/astroid/brain/brain_numpy_core_numeric.py @@ -29,21 +29,21 @@ def full_like(a, fill_value, dtype=None, order='K', subok=True, shape=None): ret ) -register_module_extender( - AstroidManager(), "numpy.core.numeric", numpy_core_numeric_transform -) - - METHODS_TO_BE_INFERRED = { "ones": """def ones(shape, dtype=None, order='C'): return numpy.ndarray([0, 0])""" } -for method_name, function_src in METHODS_TO_BE_INFERRED.items(): - inference_function = functools.partial(infer_numpy_member, function_src) - AstroidManager().register_transform( - Attribute, - inference_tip(inference_function), - functools.partial(attribute_looks_like_numpy_member, method_name), +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "numpy.core.numeric", numpy_core_numeric_transform ) + + for method_name, function_src in METHODS_TO_BE_INFERRED.items(): + inference_function = functools.partial(infer_numpy_member, function_src) + manager.register_transform( + Attribute, + inference_tip(inference_function), + functools.partial(attribute_looks_like_numpy_member, method_name), + ) diff --git a/astroid/brain/brain_numpy_core_numerictypes.py b/astroid/brain/brain_numpy_core_numerictypes.py index 4f8f1c34da..6de299d72e 100644 --- a/astroid/brain/brain_numpy_core_numerictypes.py +++ b/astroid/brain/brain_numpy_core_numerictypes.py @@ -258,6 +258,7 @@ class int64(signedinteger): pass ) -register_module_extender( - AstroidManager(), "numpy.core.numerictypes", numpy_core_numerictypes_transform -) +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "numpy.core.numerictypes", numpy_core_numerictypes_transform + ) diff --git a/astroid/brain/brain_numpy_core_umath.py b/astroid/brain/brain_numpy_core_umath.py index 948d17c966..61f3354408 100644 --- a/astroid/brain/brain_numpy_core_umath.py +++ b/astroid/brain/brain_numpy_core_umath.py @@ -149,6 +149,5 @@ def __call__(self, x1, x2, {opt_args:s}): ) -register_module_extender( - AstroidManager(), "numpy.core.umath", numpy_core_umath_transform -) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "numpy.core.umath", numpy_core_umath_transform) diff --git a/astroid/brain/brain_numpy_ma.py b/astroid/brain/brain_numpy_ma.py index f8ba2bea08..743e462d20 100644 --- a/astroid/brain/brain_numpy_ma.py +++ b/astroid/brain/brain_numpy_ma.py @@ -28,4 +28,5 @@ def masked_invalid(a, copy=True): ) -register_module_extender(AstroidManager(), "numpy.ma", numpy_ma_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "numpy.ma", 
numpy_ma_transform) diff --git a/astroid/brain/brain_numpy_ndarray.py b/astroid/brain/brain_numpy_ndarray.py index dd35606771..5748421fb9 100644 --- a/astroid/brain/brain_numpy_ndarray.py +++ b/astroid/brain/brain_numpy_ndarray.py @@ -155,8 +155,9 @@ def _looks_like_numpy_ndarray(node) -> bool: return isinstance(node, Attribute) and node.attrname == "ndarray" -AstroidManager().register_transform( - Attribute, - inference_tip(infer_numpy_ndarray), - _looks_like_numpy_ndarray, -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + Attribute, + inference_tip(infer_numpy_ndarray), + _looks_like_numpy_ndarray, + ) diff --git a/astroid/brain/brain_numpy_random_mtrand.py b/astroid/brain/brain_numpy_random_mtrand.py index 68af759763..83b1ab06ad 100644 --- a/astroid/brain/brain_numpy_random_mtrand.py +++ b/astroid/brain/brain_numpy_random_mtrand.py @@ -66,6 +66,7 @@ def zipf(a, size=None): return uninferable ) -register_module_extender( - AstroidManager(), "numpy.random.mtrand", numpy_random_mtrand_transform -) +def register(manager: AstroidManager) -> None: + register_module_extender( + manager, "numpy.random.mtrand", numpy_random_mtrand_transform + ) diff --git a/astroid/brain/brain_pathlib.py b/astroid/brain/brain_pathlib.py index f3847bb385..116cd2eef9 100644 --- a/astroid/brain/brain_pathlib.py +++ b/astroid/brain/brain_pathlib.py @@ -44,8 +44,9 @@ def infer_parents_subscript( raise UseInferenceDefault -AstroidManager().register_transform( - nodes.Subscript, - inference_tip(infer_parents_subscript), - _looks_like_parents_subscript, -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + nodes.Subscript, + inference_tip(infer_parents_subscript), + _looks_like_parents_subscript, + ) diff --git a/astroid/brain/brain_pkg_resources.py b/astroid/brain/brain_pkg_resources.py index 940783d2be..a844d15b31 100644 --- a/astroid/brain/brain_pkg_resources.py +++ b/astroid/brain/brain_pkg_resources.py @@ -67,4 +67,5 @@ def get_distribution(dist): ) -register_module_extender(AstroidManager(), "pkg_resources", pkg_resources_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "pkg_resources", pkg_resources_transform) diff --git a/astroid/brain/brain_pytest.py b/astroid/brain/brain_pytest.py index 7bbcafdfea..0e0db39041 100644 --- a/astroid/brain/brain_pytest.py +++ b/astroid/brain/brain_pytest.py @@ -79,5 +79,6 @@ def pytest_transform(): ) -register_module_extender(AstroidManager(), "pytest", pytest_transform) -register_module_extender(AstroidManager(), "py.test", pytest_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "pytest", pytest_transform) + register_module_extender(manager, "py.test", pytest_transform) diff --git a/astroid/brain/brain_qt.py b/astroid/brain/brain_qt.py index 2979de7fde..4badfce840 100644 --- a/astroid/brain/brain_qt.py +++ b/astroid/brain/brain_qt.py @@ -77,12 +77,13 @@ def emit(self, signal): pass ) -register_module_extender(AstroidManager(), "PyQt4.QtCore", pyqt4_qtcore_transform) -AstroidManager().register_transform( - nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal -) -AstroidManager().register_transform( - nodes.ClassDef, - transform_pyside_signal, - lambda node: node.qname() in {"PySide.QtCore.Signal", "PySide2.QtCore.Signal"}, -) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "PyQt4.QtCore", pyqt4_qtcore_transform) + manager.register_transform( + nodes.FunctionDef, transform_pyqt_signal, 
_looks_like_signal + ) + manager.register_transform( + nodes.ClassDef, + transform_pyside_signal, + lambda node: node.qname() in {"PySide.QtCore.Signal", "PySide2.QtCore.Signal"}, + ) diff --git a/astroid/brain/brain_random.py b/astroid/brain/brain_random.py index d86b2acbfd..48cc121461 100644 --- a/astroid/brain/brain_random.py +++ b/astroid/brain/brain_random.py @@ -6,7 +6,6 @@ import random -from astroid import helpers from astroid.context import InferenceContext from astroid.exceptions import UseInferenceDefault from astroid.inference_tip import inference_tip @@ -21,6 +20,7 @@ Set, Tuple, ) +from astroid.util import safe_infer ACCEPTED_ITERABLES_FOR_SAMPLE = (List, Set, Tuple) @@ -51,13 +51,13 @@ def infer_random_sample(node, context: InferenceContext | None = None): if len(node.args) != 2: raise UseInferenceDefault - inferred_length = helpers.safe_infer(node.args[1], context=context) + inferred_length = safe_infer(node.args[1], context=context) if not isinstance(inferred_length, Const): raise UseInferenceDefault if not isinstance(inferred_length.value, int): raise UseInferenceDefault - inferred_sequence = helpers.safe_infer(node.args[0], context=context) + inferred_sequence = safe_infer(node.args[0], context=context) if not inferred_sequence: raise UseInferenceDefault @@ -97,6 +97,7 @@ def _looks_like_random_sample(node) -> bool: return False -AstroidManager().register_transform( - Call, inference_tip(infer_random_sample), _looks_like_random_sample -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + Call, inference_tip(infer_random_sample), _looks_like_random_sample + ) diff --git a/astroid/brain/brain_re.py b/astroid/brain/brain_re.py index 6214865d97..e675f66112 100644 --- a/astroid/brain/brain_re.py +++ b/astroid/brain/brain_re.py @@ -44,9 +44,6 @@ def _re_transform() -> nodes.Module: ) -register_module_extender(AstroidManager(), "re", _re_transform) - - CLASS_GETITEM_TEMPLATE = """ @classmethod def __class_getitem__(cls, item): @@ -93,6 +90,8 @@ def infer_pattern_match(node: nodes.Call, ctx: context.InferenceContext | None = return iter([class_def]) -AstroidManager().register_transform( - nodes.Call, inference_tip(infer_pattern_match), _looks_like_pattern_or_match -) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "re", _re_transform) + manager.register_transform( + nodes.Call, inference_tip(infer_pattern_match), _looks_like_pattern_or_match + ) diff --git a/astroid/brain/brain_regex.py b/astroid/brain/brain_regex.py index a3cca65ba4..aff0610cb4 100644 --- a/astroid/brain/brain_regex.py +++ b/astroid/brain/brain_regex.py @@ -43,9 +43,6 @@ def _regex_transform() -> nodes.Module: ) -register_module_extender(AstroidManager(), "regex", _regex_transform) - - CLASS_GETITEM_TEMPLATE = """ @classmethod def __class_getitem__(cls, item): @@ -92,6 +89,8 @@ def infer_pattern_match(node: nodes.Call, ctx: context.InferenceContext | None = return iter([class_def]) -AstroidManager().register_transform( - nodes.Call, inference_tip(infer_pattern_match), _looks_like_pattern_or_match -) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "regex", _regex_transform) + manager.register_transform( + nodes.Call, inference_tip(infer_pattern_match), _looks_like_pattern_or_match + ) diff --git a/astroid/brain/brain_responses.py b/astroid/brain/brain_responses.py index 067d569f20..0a0de8b558 100644 --- a/astroid/brain/brain_responses.py +++ b/astroid/brain/brain_responses.py @@ -75,4 +75,5 @@ def 
stop(allow_assert=True): ) -register_module_extender(AstroidManager(), "responses", responses_funcs) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "responses", responses_funcs) diff --git a/astroid/brain/brain_scipy_signal.py b/astroid/brain/brain_scipy_signal.py index 91762b1db8..7d17a1e953 100755 --- a/astroid/brain/brain_scipy_signal.py +++ b/astroid/brain/brain_scipy_signal.py @@ -85,4 +85,5 @@ def tukey(M, alpha=0.5, sym=True): ) -register_module_extender(AstroidManager(), "scipy.signal", scipy_signal) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "scipy.signal", scipy_signal) diff --git a/astroid/brain/brain_signal.py b/astroid/brain/brain_signal.py index c2b831df59..649e9749a9 100644 --- a/astroid/brain/brain_signal.py +++ b/astroid/brain/brain_signal.py @@ -116,4 +116,5 @@ class Sigmasks(enum.IntEnum): return "" -register_module_extender(AstroidManager(), "signal", _signals_enums_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "signal", _signals_enums_transform) diff --git a/astroid/brain/brain_six.py b/astroid/brain/brain_six.py index 93a16a9384..c222a42206 100644 --- a/astroid/brain/brain_six.py +++ b/astroid/brain/brain_six.py @@ -222,18 +222,19 @@ def transform_six_with_metaclass(node): return node -register_module_extender(AstroidManager(), "six", six_moves_transform) -register_module_extender( - AstroidManager(), "requests.packages.urllib3.packages.six", six_moves_transform -) -AstroidManager().register_failed_import_hook(_six_fail_hook) -AstroidManager().register_transform( - nodes.ClassDef, - transform_six_add_metaclass, - _looks_like_decorated_with_six_add_metaclass, -) -AstroidManager().register_transform( - nodes.ClassDef, - transform_six_with_metaclass, - _looks_like_nested_from_six_with_metaclass, -) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "six", six_moves_transform) + register_module_extender( + manager, "requests.packages.urllib3.packages.six", six_moves_transform + ) + manager.register_failed_import_hook(_six_fail_hook) + manager.register_transform( + nodes.ClassDef, + transform_six_add_metaclass, + _looks_like_decorated_with_six_add_metaclass, + ) + manager.register_transform( + nodes.ClassDef, + transform_six_with_metaclass, + _looks_like_nested_from_six_with_metaclass, + ) diff --git a/astroid/brain/brain_sqlalchemy.py b/astroid/brain/brain_sqlalchemy.py index 92722fc200..d37b505bf2 100644 --- a/astroid/brain/brain_sqlalchemy.py +++ b/astroid/brain/brain_sqlalchemy.py @@ -36,4 +36,5 @@ def configure(self, **new_kw): ) -register_module_extender(AstroidManager(), "sqlalchemy.orm.session", _session_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "sqlalchemy.orm.session", _session_transform) diff --git a/astroid/brain/brain_ssl.py b/astroid/brain/brain_ssl.py index a4d89b7481..42018b5bfa 100644 --- a/astroid/brain/brain_ssl.py +++ b/astroid/brain/brain_ssl.py @@ -155,4 +155,5 @@ class VerifyMode(_IntEnum): ) -register_module_extender(AstroidManager(), "ssl", ssl_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "ssl", ssl_transform) diff --git a/astroid/brain/brain_subprocess.py b/astroid/brain/brain_subprocess.py index 553ade59dd..e7e1034bb8 100644 --- a/astroid/brain/brain_subprocess.py +++ b/astroid/brain/brain_subprocess.py @@ -102,4 +102,5 @@ def __class_getitem__(cls, item): return parse(code) 
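The recurring change across these brain modules is the same: registration that previously ran at import time against the `AstroidManager()` singleton now happens inside an explicit `register(manager)` entry point, which is invoked for each module (e.g. by the `register_all_brains` helper added later in this patch). A minimal sketch of the resulting module shape, using a hypothetical package name and transform that are not part of this patch:

```python
# Hypothetical brain plugin showing the shape this patch gives every brain module:
# nothing is registered at import time; register(manager) is called explicitly.
from astroid.brain.helpers import register_module_extender
from astroid.builder import parse
from astroid.manager import AstroidManager


def _example_transform():
    # Stub module whose definitions get grafted onto the real module during inference.
    return parse(
        """
    def helper(value):
        return value
    """
    )


def register(manager: AstroidManager) -> None:
    # Receives the manager to populate instead of reaching for the global singleton.
    register_module_extender(manager, "example_package", _example_transform)
```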
-register_module_extender(AstroidManager(), "subprocess", _subprocess_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "subprocess", _subprocess_transform) diff --git a/astroid/brain/brain_threading.py b/astroid/brain/brain_threading.py index 6b17b126e4..6c6f29bf06 100644 --- a/astroid/brain/brain_threading.py +++ b/astroid/brain/brain_threading.py @@ -28,4 +28,5 @@ def Lock(*args, **kwargs): ) -register_module_extender(AstroidManager(), "threading", _thread_transform) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "threading", _thread_transform) diff --git a/astroid/brain/brain_type.py b/astroid/brain/brain_type.py index dc01693e55..02322ef026 100644 --- a/astroid/brain/brain_type.py +++ b/astroid/brain/brain_type.py @@ -63,7 +63,8 @@ def __class_getitem__(cls, key): return node.infer(context=context) -if PY39_PLUS: - AstroidManager().register_transform( - nodes.Name, inference_tip(infer_type_sub), _looks_like_type_subscript - ) +def register(manager: AstroidManager) -> None: + if PY39_PLUS: + manager.register_transform( + nodes.Name, inference_tip(infer_type_sub), _looks_like_type_subscript + ) diff --git a/astroid/brain/brain_typing.py b/astroid/brain/brain_typing.py index 924f0ac0f9..cb3d1ce969 100644 --- a/astroid/brain/brain_typing.py +++ b/astroid/brain/brain_typing.py @@ -6,14 +6,16 @@ from __future__ import annotations +import textwrap import typing from collections.abc import Iterator from functools import partial from typing import Final from astroid import context, extract_node, inference_tip -from astroid.builder import _extract_single_node -from astroid.const import PY39_PLUS +from astroid.brain.helpers import register_module_extender +from astroid.builder import AstroidBuilder, _extract_single_node +from astroid.const import PY39_PLUS, PY312_PLUS from astroid.exceptions import ( AttributeInferenceError, InferenceError, @@ -116,7 +118,9 @@ def looks_like_typing_typevar_or_newtype(node) -> bool: return False -def infer_typing_typevar_or_newtype(node, context_itton=None): +def infer_typing_typevar_or_newtype( + node: Call, context_itton: context.InferenceContext | None = None +) -> Iterator[ClassDef]: """Infer a typing.TypeVar(...) or typing.NewType(...) 
call.""" try: func = next(node.func.infer(context=context_itton)) @@ -132,7 +136,14 @@ def infer_typing_typevar_or_newtype(node, context_itton=None): raise UseInferenceDefault typename = node.args[0].as_string().strip("'") - node = extract_node(TYPING_TYPE_TEMPLATE.format(typename)) + node = ClassDef( + name=typename, + lineno=node.lineno, + col_offset=node.col_offset, + parent=node.parent, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + ) return node.infer(context=context_itton) @@ -231,7 +242,8 @@ def _looks_like_typing_alias(node: Call) -> bool: """ return ( isinstance(node.func, Name) - and node.func.name == "_alias" + # TODO: remove _DeprecatedGenericAlias when Py3.14 min + and node.func.name in {"_alias", "_DeprecatedGenericAlias"} and ( # _alias function works also for builtins object such as list and dict isinstance(node.args[0], (Attribute, Name)) @@ -273,6 +285,8 @@ def infer_typing_alias( :param node: call node :param context: inference context + + # TODO: evaluate if still necessary when Py3.12 is minimum """ if ( not isinstance(node.parent, Assign) @@ -415,30 +429,57 @@ def infer_typing_cast( return node.args[1].infer(context=ctx) -AstroidManager().register_transform( - Call, - inference_tip(infer_typing_typevar_or_newtype), - looks_like_typing_typevar_or_newtype, -) -AstroidManager().register_transform( - Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript -) -AstroidManager().register_transform( - Call, inference_tip(infer_typing_cast), _looks_like_typing_cast -) +def _typing_transform(): + return AstroidBuilder(AstroidManager()).string_build( + textwrap.dedent( + """ + class Generic: + @classmethod + def __class_getitem__(cls, item): return cls + class ParamSpec: ... + class ParamSpecArgs: ... + class ParamSpecKwargs: ... + class TypeAlias: ... + class Type: + @classmethod + def __class_getitem__(cls, item): return cls + class TypeVar: + @classmethod + def __class_getitem__(cls, item): return cls + class TypeVarTuple: ... 
+ """ + ) + ) -if PY39_PLUS: - AstroidManager().register_transform( - FunctionDef, inference_tip(infer_typedDict), _looks_like_typedDict + +def register(manager: AstroidManager) -> None: + manager.register_transform( + Call, + inference_tip(infer_typing_typevar_or_newtype), + looks_like_typing_typevar_or_newtype, + ) + manager.register_transform( + Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript ) -else: - AstroidManager().register_transform( - ClassDef, inference_tip(infer_old_typedDict), _looks_like_typedDict + manager.register_transform( + Call, inference_tip(infer_typing_cast), _looks_like_typing_cast ) -AstroidManager().register_transform( - Call, inference_tip(infer_typing_alias), _looks_like_typing_alias -) -AstroidManager().register_transform( - Call, inference_tip(infer_special_alias), _looks_like_special_alias -) + if PY39_PLUS: + manager.register_transform( + FunctionDef, inference_tip(infer_typedDict), _looks_like_typedDict + ) + else: + manager.register_transform( + ClassDef, inference_tip(infer_old_typedDict), _looks_like_typedDict + ) + + manager.register_transform( + Call, inference_tip(infer_typing_alias), _looks_like_typing_alias + ) + manager.register_transform( + Call, inference_tip(infer_special_alias), _looks_like_special_alias + ) + + if PY312_PLUS: + register_module_extender(manager, "typing", _typing_transform) diff --git a/astroid/brain/brain_unittest.py b/astroid/brain/brain_unittest.py index db5ea8c985..a94df0a68e 100644 --- a/astroid/brain/brain_unittest.py +++ b/astroid/brain/brain_unittest.py @@ -26,4 +26,5 @@ def IsolatedAsyncioTestCaseImport(): ) -register_module_extender(AstroidManager(), "unittest", IsolatedAsyncioTestCaseImport) +def register(manager: AstroidManager) -> None: + register_module_extender(manager, "unittest", IsolatedAsyncioTestCaseImport) diff --git a/astroid/brain/brain_uuid.py b/astroid/brain/brain_uuid.py index 7d4c85b74b..37800b8e03 100644 --- a/astroid/brain/brain_uuid.py +++ b/astroid/brain/brain_uuid.py @@ -13,6 +13,7 @@ def _patch_uuid_class(node: ClassDef) -> None: node.locals["int"] = [Const(0, parent=node)] -AstroidManager().register_transform( - ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID" -) +def register(manager: AstroidManager) -> None: + manager.register_transform( + ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID" + ) diff --git a/astroid/brain/helpers.py b/astroid/brain/helpers.py index 22e3ec74c2..baf6c5c854 100644 --- a/astroid/brain/helpers.py +++ b/astroid/brain/helpers.py @@ -22,3 +22,112 @@ def transform(node: Module) -> None: obj.parent = node manager.register_transform(Module, transform, lambda n: n.name == module_name) + + +# pylint: disable-next=too-many-locals +def register_all_brains(manager: AstroidManager) -> None: + from astroid.brain import ( # pylint: disable=import-outside-toplevel + brain_argparse, + brain_attrs, + brain_boto3, + brain_builtin_inference, + brain_collections, + brain_crypt, + brain_ctypes, + brain_curses, + brain_dataclasses, + brain_datetime, + brain_dateutil, + brain_fstrings, + brain_functools, + brain_gi, + brain_hashlib, + brain_http, + brain_hypothesis, + brain_io, + brain_mechanize, + brain_multiprocessing, + brain_namedtuple_enum, + brain_nose, + brain_numpy_core_einsumfunc, + brain_numpy_core_fromnumeric, + brain_numpy_core_function_base, + brain_numpy_core_multiarray, + brain_numpy_core_numeric, + brain_numpy_core_numerictypes, + brain_numpy_core_umath, + brain_numpy_ma, + brain_numpy_ndarray, + 
brain_numpy_random_mtrand, + brain_pathlib, + brain_pkg_resources, + brain_pytest, + brain_qt, + brain_random, + brain_re, + brain_regex, + brain_responses, + brain_scipy_signal, + brain_signal, + brain_six, + brain_sqlalchemy, + brain_ssl, + brain_subprocess, + brain_threading, + brain_type, + brain_typing, + brain_unittest, + brain_uuid, + ) + + brain_argparse.register(manager) + brain_attrs.register(manager) + brain_boto3.register(manager) + brain_builtin_inference.register(manager) + brain_collections.register(manager) + brain_crypt.register(manager) + brain_ctypes.register(manager) + brain_curses.register(manager) + brain_dataclasses.register(manager) + brain_datetime.register(manager) + brain_dateutil.register(manager) + brain_fstrings.register(manager) + brain_functools.register(manager) + brain_gi.register(manager) + brain_hashlib.register(manager) + brain_http.register(manager) + brain_hypothesis.register(manager) + brain_io.register(manager) + brain_mechanize.register(manager) + brain_multiprocessing.register(manager) + brain_namedtuple_enum.register(manager) + brain_nose.register(manager) + brain_numpy_core_einsumfunc.register(manager) + brain_numpy_core_fromnumeric.register(manager) + brain_numpy_core_function_base.register(manager) + brain_numpy_core_multiarray.register(manager) + brain_numpy_core_numerictypes.register(manager) + brain_numpy_core_umath.register(manager) + brain_numpy_random_mtrand.register(manager) + brain_numpy_ma.register(manager) + brain_numpy_ndarray.register(manager) + brain_numpy_core_numeric.register(manager) + brain_pathlib.register(manager) + brain_pkg_resources.register(manager) + brain_pytest.register(manager) + brain_qt.register(manager) + brain_random.register(manager) + brain_re.register(manager) + brain_regex.register(manager) + brain_responses.register(manager) + brain_scipy_signal.register(manager) + brain_signal.register(manager) + brain_six.register(manager) + brain_sqlalchemy.register(manager) + brain_ssl.register(manager) + brain_subprocess.register(manager) + brain_threading.register(manager) + brain_type.register(manager) + brain_typing.register(manager) + brain_unittest.register(manager) + brain_uuid.register(manager) diff --git a/astroid/builder.py b/astroid/builder.py index cd2fb1f82e..09a787aad4 100644 --- a/astroid/builder.py +++ b/astroid/builder.py @@ -233,7 +233,6 @@ def delayed_assattr(self, node: nodes.AssignAttr) -> None: from astroid import objects # pylint: disable=import-outside-toplevel try: - frame = node.frame() for inferred in node.expr.infer(): if isinstance(inferred, util.UninferableBase): continue @@ -264,15 +263,7 @@ def delayed_assattr(self, node: nodes.AssignAttr) -> None: values = iattrs.setdefault(node.attrname, []) if node in values: continue - # get assign in __init__ first XXX useful ? 
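As a usage note for the `register_all_brains` helper added to `astroid/brain/helpers.py` above: it imports every `astroid.brain.brain_*` module and calls its `register()` with the supplied manager. Where astroid itself invokes it is not shown in this patch, so the sketch below wires it up by hand:

```python
# Hedged sketch: manually registering all brain plugins on a manager, assuming the
# helpers.py changes above are applied. In normal use astroid is expected to do this.
from astroid.brain.helpers import register_all_brains
from astroid.manager import AstroidManager

manager = AstroidManager()
register_all_brains(manager)  # imports each brain_* module and calls its register(manager)
```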
- if ( - frame.name == "__init__" - and values - and values[0].frame().name != "__init__" - ): - values.insert(0, node) - else: - values.append(node) + values.append(node) except InferenceError: pass diff --git a/astroid/const.py b/astroid/const.py index 95672ae57d..91c2d32f06 100644 --- a/astroid/const.py +++ b/astroid/const.py @@ -4,12 +4,12 @@ import enum import sys -from pathlib import Path PY38 = sys.version_info[:2] == (3, 8) PY39_PLUS = sys.version_info >= (3, 9) PY310_PLUS = sys.version_info >= (3, 10) PY311_PLUS = sys.version_info >= (3, 11) +PY312_PLUS = sys.version_info >= (3, 12) WIN32 = sys.platform == "win32" @@ -26,8 +26,4 @@ class Context(enum.Enum): Del = 3 -ASTROID_INSTALL_DIRECTORY = Path(__file__).parent -BRAIN_MODULES_DIRECTORY = ASTROID_INSTALL_DIRECTORY / "brain" - - _EMPTY_OBJECT_MARKER = object() diff --git a/astroid/constraint.py b/astroid/constraint.py index 6e23b592f1..08bb80e3c9 100644 --- a/astroid/constraint.py +++ b/astroid/constraint.py @@ -8,9 +8,9 @@ import sys from abc import ABC, abstractmethod from collections.abc import Iterator -from typing import Union +from typing import TYPE_CHECKING, Union -from astroid import bases, nodes, util +from astroid import nodes, util from astroid.typing import InferenceResult if sys.version_info >= (3, 11): @@ -18,6 +18,9 @@ else: from typing_extensions import Self +if TYPE_CHECKING: + from astroid import bases + _NameNodes = Union[nodes.AssignAttr, nodes.Attribute, nodes.AssignName, nodes.Name] diff --git a/astroid/exceptions.py b/astroid/exceptions.py index 0bb89872c7..a9806e5eb7 100644 --- a/astroid/exceptions.py +++ b/astroid/exceptions.py @@ -6,10 +6,9 @@ from __future__ import annotations -from collections.abc import Iterator +from collections.abc import Iterable, Iterator from typing import TYPE_CHECKING, Any -from astroid import util from astroid.typing import InferenceResult, SuccessfulInferenceResult if TYPE_CHECKING: @@ -26,7 +25,6 @@ "AstroidTypeError", "AstroidValueError", "AttributeInferenceError", - "BinaryOperationError", "DuplicateBasesError", "InconsistentMroError", "InferenceError", @@ -35,14 +33,12 @@ "NameInferenceError", "NoDefault", "NotFoundError", - "OperationError", "ParentMissingError", "ResolveError", "StatementMissing", "SuperArgumentTypeError", "SuperError", "TooManyLevelsError", - "UnaryOperationError", "UnresolvableName", "UseInferenceDefault", ) @@ -188,7 +184,7 @@ class MroError(ResolveError): def __init__( self, message: str, - mros: list[nodes.ClassDef], + mros: Iterable[Iterable[nodes.ClassDef]], cls: nodes.ClassDef, context: InferenceContext | None = None, **kws: Any, @@ -416,11 +412,6 @@ def __init__(self, target: nodes.NodeNG) -> None: ) -# Backwards-compatibility aliases -OperationError = util.BadOperationMessage -UnaryOperationError = util.BadUnaryOperationMessage -BinaryOperationError = util.BadBinaryOperationMessage - SuperArgumentTypeError = SuperError UnresolvableName = NameInferenceError NotFoundError = AttributeInferenceError diff --git a/astroid/filter_statements.py b/astroid/filter_statements.py index 7f040dd4ed..acca676170 100644 --- a/astroid/filter_statements.py +++ b/astroid/filter_statements.py @@ -10,10 +10,14 @@ from __future__ import annotations +from typing import TYPE_CHECKING + from astroid import nodes -from astroid.nodes import node_classes from astroid.typing import SuccessfulInferenceResult +if TYPE_CHECKING: + from astroid.nodes import _base_nodes + def _get_filtered_node_statements( base_node: nodes.NodeNG, stmt_nodes: list[nodes.NodeNG] @@ -44,7 +48,7 
@@ def _get_if_statement_ancestor(node: nodes.NodeNG) -> nodes.If | None: def _filter_stmts( - base_node: node_classes.LookupMixIn, + base_node: _base_nodes.LookupMixIn, stmts: list[SuccessfulInferenceResult], frame: nodes.LocalsDictNodeNG, offset: int, diff --git a/astroid/helpers.py b/astroid/helpers.py index ab5ada3715..244612146f 100644 --- a/astroid/helpers.py +++ b/astroid/helpers.py @@ -6,6 +6,7 @@ from __future__ import annotations +import warnings from collections.abc import Generator from astroid import bases, manager, nodes, objects, raw_building, util @@ -19,6 +20,20 @@ ) from astroid.nodes import scoped_nodes from astroid.typing import InferenceResult +from astroid.util import safe_infer as real_safe_infer + + +def safe_infer( + node: nodes.NodeNG | bases.Proxy | util.UninferableBase, + context: InferenceContext | None = None, +) -> InferenceResult | None: + # When removing, also remove the real_safe_infer alias + warnings.warn( + "Import safe_infer from astroid.util; this shim in astroid.helpers will be removed.", + DeprecationWarning, + stacklevel=2, + ) + return real_safe_infer(node, context=context) def _build_proxy_class(cls_name: str, builtins: nodes.Module) -> nodes.ClassDef: @@ -93,7 +108,7 @@ def object_type( return util.Uninferable if len(types) > 1 or not types: return util.Uninferable - return list(types)[0] + return next(iter(types)) def _object_type_is_subclass( @@ -155,31 +170,6 @@ def object_issubclass( return _object_type_is_subclass(node, class_or_seq, context=context) -def safe_infer( - node: nodes.NodeNG | bases.Proxy | util.UninferableBase, - context: InferenceContext | None = None, -) -> InferenceResult | None: - """Return the inferred value for the given node. - - Return None if inference failed or if there is some ambiguity (more than - one node has been inferred). - """ - if isinstance(node, util.UninferableBase): - return node - try: - inferit = node.infer(context=context) - value = next(inferit) - except (InferenceError, StopIteration): - return None - try: - next(inferit) - return None # None if there is ambiguity on the inferred node - except InferenceError: - return None # there is some kind of ambiguity - except StopIteration: - return value - - def has_known_bases(klass, context: InferenceContext | None = None) -> bool: """Return whether all base classes of a class could be inferred.""" try: @@ -187,7 +177,7 @@ def has_known_bases(klass, context: InferenceContext | None = None) -> bool: except AttributeError: pass for base in klass.bases: - result = safe_infer(base, context=context) + result = real_safe_infer(base, context=context) # TODO: check for A->B->A->B pattern in class structure too? if ( not isinstance(result, scoped_nodes.ClassDef) @@ -262,7 +252,7 @@ def object_len(node, context: InferenceContext | None = None): # pylint: disable=import-outside-toplevel; circular import from astroid.objects import FrozenSet - inferred_node = safe_infer(node, context=context) + inferred_node = real_safe_infer(node, context=context) # prevent self referential length calls from causing a recursion error # see https://github.com/pylint-dev/astroid/issues/777 @@ -324,3 +314,25 @@ def object_len(node, context: InferenceContext | None = None): raise AstroidTypeError( f"'{result_of_len}' object cannot be interpreted as an integer" ) + + +def _higher_function_scope(node: nodes.NodeNG) -> nodes.FunctionDef | None: + """Search for the first function which encloses the given + scope. 
+ + This can be used for looking up in that function's + scope, in case looking up in a lower scope for a particular + name fails. + + :param node: A scope node. + :returns: + ``None``, if no parent function scope was found, + otherwise an instance of :class:`astroid.nodes.scoped_nodes.Function`, + which encloses the given node. + """ + current = node + while current.parent and not isinstance(current.parent, nodes.FunctionDef): + current = current.parent + if current and current.parent: + return current.parent + return None diff --git a/astroid/inference.py b/astroid/inference.py deleted file mode 100644 index 0b84d64bdd..0000000000 --- a/astroid/inference.py +++ /dev/null @@ -1,1289 +0,0 @@ -# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html -# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE -# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt - -"""This module contains a set of functions to handle inference on astroid trees.""" - -from __future__ import annotations - -import ast -import functools -import itertools -import operator -import typing -from collections.abc import Callable, Generator, Iterable, Iterator -from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union - -from astroid import ( - bases, - constraint, - decorators, - helpers, - nodes, - objects, - protocols, - util, -) -from astroid.const import PY310_PLUS -from astroid.context import ( - CallContext, - InferenceContext, - bind_context_to_node, - copy_context, -) -from astroid.exceptions import ( - AstroidBuildingError, - AstroidError, - AstroidIndexError, - AstroidTypeError, - AstroidValueError, - AttributeInferenceError, - InferenceError, - NameInferenceError, - _NonDeducibleTypeHierarchy, -) -from astroid.interpreter import dunder_lookup -from astroid.manager import AstroidManager -from astroid.typing import ( - InferenceErrorInfo, - InferenceResult, - SuccessfulInferenceResult, -) - -if TYPE_CHECKING: - from astroid.objects import Property - - -_T = TypeVar("_T") -_BaseContainerT = TypeVar("_BaseContainerT", bound=nodes.BaseContainer) -_FunctionDefT = TypeVar("_FunctionDefT", bound=nodes.FunctionDef) - -GetFlowFactory = typing.Callable[ - [ - InferenceResult, - Optional[InferenceResult], - Union[nodes.AugAssign, nodes.BinOp], - InferenceResult, - Optional[InferenceResult], - InferenceContext, - InferenceContext, - ], - "list[functools.partial[Generator[InferenceResult, None, None]]]", -] - -# .infer method ############################################################### - - -def infer_end( - self: _T, context: InferenceContext | None = None, **kwargs: Any -) -> Iterator[_T]: - """Inference's end for nodes that yield themselves on inference. - - These are objects for which inference does not have any semantic, - such as Module or Consts. 
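Returning to the `astroid/helpers.py` hunk above: `safe_infer` is now imported from `astroid.util`, and `astroid.helpers.safe_infer` survives only as a shim that emits a `DeprecationWarning` when called. A short sketch of the new import path, assuming this patch is applied:

```python
# safe_infer now lives in astroid.util; the astroid.helpers name is a deprecated shim.
import astroid
from astroid.util import safe_infer

node = astroid.extract_node("1 + 1")
inferred = safe_infer(node)  # a Const node on success, None if inference fails or is ambiguous
print(inferred)

# Calling astroid.helpers.safe_infer still works, but the shim above emits a
# DeprecationWarning pointing to astroid.util.
```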
- """ - yield self - - -# We add ignores to all assignments to methods -# See https://github.com/python/mypy/issues/2427 -nodes.Module._infer = infer_end -nodes.ClassDef._infer = infer_end -nodes.Lambda._infer = infer_end # type: ignore[assignment] -nodes.Const._infer = infer_end # type: ignore[assignment] -nodes.Slice._infer = infer_end # type: ignore[assignment] - - -def _infer_sequence_helper( - node: _BaseContainerT, context: InferenceContext | None = None -) -> list[SuccessfulInferenceResult]: - """Infer all values based on _BaseContainer.elts.""" - values = [] - - for elt in node.elts: - if isinstance(elt, nodes.Starred): - starred = helpers.safe_infer(elt.value, context) - if not starred: - raise InferenceError(node=node, context=context) - if not hasattr(starred, "elts"): - raise InferenceError(node=node, context=context) - values.extend(_infer_sequence_helper(starred)) - elif isinstance(elt, nodes.NamedExpr): - value = helpers.safe_infer(elt.value, context) - if not value: - raise InferenceError(node=node, context=context) - values.append(value) - else: - values.append(elt) - return values - - -@decorators.raise_if_nothing_inferred -def infer_sequence( - self: _BaseContainerT, - context: InferenceContext | None = None, - **kwargs: Any, -) -> Iterator[_BaseContainerT]: - has_starred_named_expr = any( - isinstance(e, (nodes.Starred, nodes.NamedExpr)) for e in self.elts - ) - if has_starred_named_expr: - values = _infer_sequence_helper(self, context) - new_seq = type(self)( - lineno=self.lineno, - col_offset=self.col_offset, - parent=self.parent, - end_lineno=self.end_lineno, - end_col_offset=self.end_col_offset, - ) - new_seq.postinit(values) - - yield new_seq - else: - yield self - - -nodes.List._infer = infer_sequence # type: ignore[assignment] -nodes.Tuple._infer = infer_sequence # type: ignore[assignment] -nodes.Set._infer = infer_sequence # type: ignore[assignment] - - -def infer_map( - self: nodes.Dict, context: InferenceContext | None = None -) -> Iterator[nodes.Dict]: - if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items): - yield self - else: - items = _infer_map(self, context) - new_seq = type(self)( - self.lineno, - self.col_offset, - self.parent, - end_lineno=self.end_lineno, - end_col_offset=self.end_col_offset, - ) - new_seq.postinit(list(items.items())) - yield new_seq - - -def _update_with_replacement( - lhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult], - rhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult], -) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]: - """Delete nodes that equate to duplicate keys. 
- - Since an astroid node doesn't 'equal' another node with the same value, - this function uses the as_string method to make sure duplicate keys - don't get through - - Note that both the key and the value are astroid nodes - - Fixes issue with DictUnpack causing duplicate keys - in inferred Dict items - - :param lhs_dict: Dictionary to 'merge' nodes into - :param rhs_dict: Dictionary with nodes to pull from - :return : merged dictionary of nodes - """ - combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items()) - # Overwrite keys which have the same string values - string_map = {key.as_string(): (key, value) for key, value in combined_dict} - # Return to dictionary - return dict(string_map.values()) - - -def _infer_map( - node: nodes.Dict, context: InferenceContext | None -) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]: - """Infer all values based on Dict.items.""" - values: dict[SuccessfulInferenceResult, SuccessfulInferenceResult] = {} - for name, value in node.items: - if isinstance(name, nodes.DictUnpack): - double_starred = helpers.safe_infer(value, context) - if not double_starred: - raise InferenceError - if not isinstance(double_starred, nodes.Dict): - raise InferenceError(node=node, context=context) - unpack_items = _infer_map(double_starred, context) - values = _update_with_replacement(values, unpack_items) - else: - key = helpers.safe_infer(name, context=context) - safe_value = helpers.safe_infer(value, context=context) - if any(not elem for elem in (key, safe_value)): - raise InferenceError(node=node, context=context) - # safe_value is SuccessfulInferenceResult as bool(Uninferable) == False - values = _update_with_replacement(values, {key: safe_value}) - return values - - -nodes.Dict._infer = infer_map # type: ignore[assignment] - - -def _higher_function_scope(node: nodes.NodeNG) -> nodes.FunctionDef | None: - """Search for the first function which encloses the given - scope. This can be used for looking up in that function's - scope, in case looking up in a lower scope for a particular - name fails. - - :param node: A scope node. - :returns: - ``None``, if no parent function scope was found, - otherwise an instance of :class:`astroid.nodes.scoped_nodes.Function`, - which encloses the given node. - """ - current = node - while current.parent and not isinstance(current.parent, nodes.FunctionDef): - current = current.parent - if current and current.parent: - return current.parent # type: ignore[no-any-return] - return None - - -def infer_name( - self: nodes.Name | nodes.AssignName, - context: InferenceContext | None = None, - **kwargs: Any, -) -> Generator[InferenceResult, None, None]: - """Infer a Name: use name lookup rules.""" - frame, stmts = self.lookup(self.name) - if not stmts: - # Try to see if the name is enclosed in a nested function - # and use the higher (first function) scope for searching. 
- parent_function = _higher_function_scope(self.scope()) - if parent_function: - _, stmts = parent_function.lookup(self.name) - - if not stmts: - raise NameInferenceError( - name=self.name, scope=self.scope(), context=context - ) - context = copy_context(context) - context.lookupname = self.name - context.constraints[self.name] = constraint.get_constraints(self, frame) - - return bases._infer_stmts(stmts, context, frame) - - -# The order of the decorators here is important -# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5 -nodes.Name._infer = decorators.raise_if_nothing_inferred( - decorators.path_wrapper(infer_name) -) -nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_call( - self: nodes.Call, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, InferenceErrorInfo]: - """Infer a Call node by trying to guess what the function returns.""" - callcontext = copy_context(context) - callcontext.boundnode = None - if context is not None: - callcontext.extra_context = _populate_context_lookup(self, context.clone()) - - for callee in self.func.infer(context): - if isinstance(callee, util.UninferableBase): - yield callee - continue - try: - if hasattr(callee, "infer_call_result"): - callcontext.callcontext = CallContext( - args=self.args, keywords=self.keywords, callee=callee - ) - yield from callee.infer_call_result(caller=self, context=callcontext) - except InferenceError: - continue - return InferenceErrorInfo(node=self, context=context) - - -nodes.Call._infer = infer_call # type: ignore[assignment] - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_import( - self: nodes.Import, - context: InferenceContext | None = None, - asname: bool = True, - **kwargs: Any, -) -> Generator[nodes.Module, None, None]: - """Infer an Import node: return the imported module/object.""" - context = context or InferenceContext() - name = context.lookupname - if name is None: - raise InferenceError(node=self, context=context) - - try: - if asname: - yield self.do_import_module(self.real_name(name)) - else: - yield self.do_import_module(name) - except AstroidBuildingError as exc: - raise InferenceError(node=self, context=context) from exc - - -nodes.Import._infer = infer_import - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_import_from( - self: nodes.ImportFrom, - context: InferenceContext | None = None, - asname: bool = True, - **kwargs: Any, -) -> Generator[InferenceResult, None, None]: - """Infer a ImportFrom node: return the imported module/object.""" - context = context or InferenceContext() - name = context.lookupname - if name is None: - raise InferenceError(node=self, context=context) - if asname: - try: - name = self.real_name(name) - except AttributeInferenceError as exc: - # See https://github.com/pylint-dev/pylint/issues/4692 - raise InferenceError(node=self, context=context) from exc - try: - module = self.do_import_module() - except AstroidBuildingError as exc: - raise InferenceError(node=self, context=context) from exc - - try: - context = copy_context(context) - context.lookupname = name - stmts = module.getattr(name, ignore_locals=module is self.root()) - return bases._infer_stmts(stmts, context) - except AttributeInferenceError as error: - raise InferenceError( - str(error), target=self, attribute=name, context=context - ) from error - - 
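Most of the remainder of this deletion follows a single pattern, visible in the `nodes.ImportFrom._infer = infer_import_from` assignment just below: each inference routine was a module-level function monkey-patched onto its node class. A compressed, illustrative sketch of that (now removed) wiring style; the real implementations are the ones being deleted in this diff:

```python
# Illustration only: the removed astroid/inference.py defined free functions and
# attached them to node classes as their _infer implementation.
from astroid import nodes


def infer_end(self, context=None, **kwargs):
    """Nodes with no richer inference semantics (Module, Const, ...) yield themselves."""
    yield self


nodes.Module._infer = infer_end
nodes.Const._infer = infer_end  # type: ignore[assignment]
```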
-nodes.ImportFrom._infer = infer_import_from # type: ignore[assignment] - - -def infer_attribute( - self: nodes.Attribute | nodes.AssignAttr, - context: InferenceContext | None = None, - **kwargs: Any, -) -> Generator[InferenceResult, None, InferenceErrorInfo]: - """Infer an Attribute node by using getattr on the associated object.""" - for owner in self.expr.infer(context): - if isinstance(owner, util.UninferableBase): - yield owner - continue - - context = copy_context(context) - old_boundnode = context.boundnode - try: - context.boundnode = owner - if isinstance(owner, (nodes.ClassDef, bases.Instance)): - frame = owner if isinstance(owner, nodes.ClassDef) else owner._proxied - context.constraints[self.attrname] = constraint.get_constraints( - self, frame=frame - ) - yield from owner.igetattr(self.attrname, context) - except ( - AttributeInferenceError, - InferenceError, - AttributeError, - ): - pass - finally: - context.boundnode = old_boundnode - return InferenceErrorInfo(node=self, context=context) - - -# The order of the decorators here is important -# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5 -nodes.Attribute._infer = decorators.raise_if_nothing_inferred( - decorators.path_wrapper(infer_attribute) -) -# won't work with a path wrapper -nodes.AssignAttr.infer_lhs = decorators.raise_if_nothing_inferred(infer_attribute) - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_global( - self: nodes.Global, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, None]: - if context is None or context.lookupname is None: - raise InferenceError(node=self, context=context) - try: - return bases._infer_stmts(self.root().getattr(context.lookupname), context) - except AttributeInferenceError as error: - raise InferenceError( - str(error), target=self, attribute=context.lookupname, context=context - ) from error - - -nodes.Global._infer = infer_global # type: ignore[assignment] - - -_SUBSCRIPT_SENTINEL = object() - - -def infer_subscript( - self: nodes.Subscript, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: - """Inference for subscripts. - - We're understanding if the index is a Const - or a slice, passing the result of inference - to the value's `getitem` method, which should - handle each supported index type accordingly. - """ - - found_one = False - for value in self.value.infer(context): - if isinstance(value, util.UninferableBase): - yield util.Uninferable - return None - for index in self.slice.infer(context): - if isinstance(index, util.UninferableBase): - yield util.Uninferable - return None - - # Try to deduce the index value. - index_value = _SUBSCRIPT_SENTINEL - if value.__class__ == bases.Instance: - index_value = index - elif index.__class__ == bases.Instance: - instance_as_index = helpers.class_instance_as_index(index) - if instance_as_index: - index_value = instance_as_index - else: - index_value = index - - if index_value is _SUBSCRIPT_SENTINEL: - raise InferenceError(node=self, context=context) - - try: - assigned = value.getitem(index_value, context) - except ( - AstroidTypeError, - AstroidIndexError, - AstroidValueError, - AttributeInferenceError, - AttributeError, - ) as exc: - raise InferenceError(node=self, context=context) from exc - - # Prevent inferring if the inferred subscript - # is the same as the original subscripted object. 
- if self is assigned or isinstance(assigned, util.UninferableBase): - yield util.Uninferable - return None - yield from assigned.infer(context) - found_one = True - - if found_one: - return InferenceErrorInfo(node=self, context=context) - return None - - -# The order of the decorators here is important -# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5 -nodes.Subscript._infer = decorators.raise_if_nothing_inferred( # type: ignore[assignment] - decorators.path_wrapper(infer_subscript) -) -nodes.Subscript.infer_lhs = decorators.raise_if_nothing_inferred(infer_subscript) - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def _infer_boolop( - self: nodes.BoolOp, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: - """Infer a boolean operation (and / or / not). - - The function will calculate the boolean operation - for all pairs generated through inference for each component - node. - """ - values = self.values - if self.op == "or": - predicate = operator.truth - else: - predicate = operator.not_ - - try: - inferred_values = [value.infer(context=context) for value in values] - except InferenceError: - yield util.Uninferable - return None - - for pair in itertools.product(*inferred_values): - if any(isinstance(item, util.UninferableBase) for item in pair): - # Can't infer the final result, just yield Uninferable. - yield util.Uninferable - continue - - bool_values = [item.bool_value() for item in pair] - if any(isinstance(item, util.UninferableBase) for item in bool_values): - # Can't infer the final result, just yield Uninferable. - yield util.Uninferable - continue - - # Since the boolean operations are short circuited operations, - # this code yields the first value for which the predicate is True - # and if no value respected the predicate, then the last value will - # be returned (or Uninferable if there was no last value). - # This is conforming to the semantics of `and` and `or`: - # 1 and 0 -> 1 - # 0 and 1 -> 0 - # 1 or 0 -> 1 - # 0 or 1 -> 1 - value = util.Uninferable - for value, bool_value in zip(pair, bool_values): - if predicate(bool_value): - yield value - break - else: - yield value - - return InferenceErrorInfo(node=self, context=context) - - -nodes.BoolOp._infer = _infer_boolop - - -# UnaryOp, BinOp and AugAssign inferences - - -def _filter_operation_errors( - self: _T, - infer_callable: Callable[ - [_T, InferenceContext | None], - Generator[InferenceResult | util.BadOperationMessage, None, None], - ], - context: InferenceContext | None, - error: type[util.BadOperationMessage], -) -> Generator[InferenceResult, None, None]: - for result in infer_callable(self, context): - if isinstance(result, error): - # For the sake of .infer(), we don't care about operation - # errors, which is the job of pylint. So return something - # which shows that we can't infer the result. - yield util.Uninferable - else: - yield result - - -def _infer_unaryop( - self: nodes.UnaryOp, context: InferenceContext | None = None -) -> Generator[InferenceResult | util.BadUnaryOperationMessage, None, None]: - """Infer what an UnaryOp should return when evaluated.""" - for operand in self.operand.infer(context): - try: - yield operand.infer_unary_op(self.op) - except TypeError as exc: - # The operand doesn't support this operation. 
- yield util.BadUnaryOperationMessage(operand, self.op, exc) - except AttributeError as exc: - meth = protocols.UNARY_OP_METHOD[self.op] - if meth is None: - # `not node`. Determine node's boolean - # value and negate its result, unless it is - # Uninferable, which will be returned as is. - bool_value = operand.bool_value() - if not isinstance(bool_value, util.UninferableBase): - yield nodes.const_factory(not bool_value) - else: - yield util.Uninferable - else: - if not isinstance(operand, (bases.Instance, nodes.ClassDef)): - # The operation was used on something which - # doesn't support it. - yield util.BadUnaryOperationMessage(operand, self.op, exc) - continue - - try: - try: - methods = dunder_lookup.lookup(operand, meth) - except AttributeInferenceError: - yield util.BadUnaryOperationMessage(operand, self.op, exc) - continue - - meth = methods[0] - inferred = next(meth.infer(context=context), None) - if ( - isinstance(inferred, util.UninferableBase) - or not inferred.callable() - ): - continue - - context = copy_context(context) - context.boundnode = operand - context.callcontext = CallContext(args=[], callee=inferred) - - call_results = inferred.infer_call_result(self, context=context) - result = next(call_results, None) - if result is None: - # Failed to infer, return the same type. - yield operand - else: - yield result - except AttributeInferenceError as inner_exc: - # The unary operation special method was not found. - yield util.BadUnaryOperationMessage(operand, self.op, inner_exc) - except InferenceError: - yield util.Uninferable - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_unaryop( - self: nodes.UnaryOp, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, InferenceErrorInfo]: - """Infer what an UnaryOp should return when evaluated.""" - yield from _filter_operation_errors( - self, _infer_unaryop, context, util.BadUnaryOperationMessage - ) - return InferenceErrorInfo(node=self, context=context) - - -nodes.UnaryOp._infer_unaryop = _infer_unaryop -nodes.UnaryOp._infer = infer_unaryop - - -def _is_not_implemented(const) -> bool: - """Check if the given const node is NotImplemented.""" - return isinstance(const, nodes.Const) and const.value is NotImplemented - - -def _infer_old_style_string_formatting( - instance: nodes.Const, other: nodes.NodeNG, context: InferenceContext -) -> tuple[util.UninferableBase | nodes.Const]: - """Infer the result of '"string" % ...'. - - TODO: Instead of returning Uninferable we should rely - on the call to '%' to see if the result is actually uninferable. 
- """ - if isinstance(other, nodes.Tuple): - if util.Uninferable in other.elts: - return (util.Uninferable,) - inferred_positional = [helpers.safe_infer(i, context) for i in other.elts] - if all(isinstance(i, nodes.Const) for i in inferred_positional): - values = tuple(i.value for i in inferred_positional) - else: - values = None - elif isinstance(other, nodes.Dict): - values: dict[Any, Any] = {} - for pair in other.items: - key = helpers.safe_infer(pair[0], context) - if not isinstance(key, nodes.Const): - return (util.Uninferable,) - value = helpers.safe_infer(pair[1], context) - if not isinstance(value, nodes.Const): - return (util.Uninferable,) - values[key.value] = value.value - elif isinstance(other, nodes.Const): - values = other.value - else: - return (util.Uninferable,) - - try: - return (nodes.const_factory(instance.value % values),) - except (TypeError, KeyError, ValueError): - return (util.Uninferable,) - - -def _invoke_binop_inference( - instance: InferenceResult, - opnode: nodes.AugAssign | nodes.BinOp, - op: str, - other: InferenceResult, - context: InferenceContext, - method_name: str, -) -> Generator[InferenceResult, None, None]: - """Invoke binary operation inference on the given instance.""" - methods = dunder_lookup.lookup(instance, method_name) - context = bind_context_to_node(context, instance) - method = methods[0] - context.callcontext.callee = method - - if ( - isinstance(instance, nodes.Const) - and isinstance(instance.value, str) - and op == "%" - ): - return iter(_infer_old_style_string_formatting(instance, other, context)) - - try: - inferred = next(method.infer(context=context)) - except StopIteration as e: - raise InferenceError(node=method, context=context) from e - if isinstance(inferred, util.UninferableBase): - raise InferenceError - if not isinstance( - instance, (nodes.Const, nodes.Tuple, nodes.List, nodes.ClassDef, bases.Instance) - ): - raise InferenceError # pragma: no cover # Used as a failsafe - return instance.infer_binary_op(opnode, op, other, context, inferred) - - -def _aug_op( - instance: InferenceResult, - opnode: nodes.AugAssign, - op: str, - other: InferenceResult, - context: InferenceContext, - reverse: bool = False, -) -> functools.partial[Generator[InferenceResult, None, None]]: - """Get an inference callable for an augmented binary operation.""" - method_name = protocols.AUGMENTED_OP_METHOD[op] - return functools.partial( - _invoke_binop_inference, - instance=instance, - op=op, - opnode=opnode, - other=other, - context=context, - method_name=method_name, - ) - - -def _bin_op( - instance: InferenceResult, - opnode: nodes.AugAssign | nodes.BinOp, - op: str, - other: InferenceResult, - context: InferenceContext, - reverse: bool = False, -) -> functools.partial[Generator[InferenceResult, None, None]]: - """Get an inference callable for a normal binary operation. - - If *reverse* is True, then the reflected method will be used instead. - """ - if reverse: - method_name = protocols.REFLECTED_BIN_OP_METHOD[op] - else: - method_name = protocols.BIN_OP_METHOD[op] - return functools.partial( - _invoke_binop_inference, - instance=instance, - op=op, - opnode=opnode, - other=other, - context=context, - method_name=method_name, - ) - - -def _bin_op_or_union_type( - left: bases.UnionType | nodes.ClassDef | nodes.Const, - right: bases.UnionType | nodes.ClassDef | nodes.Const, -) -> Generator[InferenceResult, None, None]: - """Create a new UnionType instance for binary or, e.g. 
int | str.""" - yield bases.UnionType(left, right) - - -def _get_binop_contexts(context, left, right): - """Get contexts for binary operations. - - This will return two inference contexts, the first one - for x.__op__(y), the other one for y.__rop__(x), where - only the arguments are inversed. - """ - # The order is important, since the first one should be - # left.__op__(right). - for arg in (right, left): - new_context = context.clone() - new_context.callcontext = CallContext(args=[arg]) - new_context.boundnode = None - yield new_context - - -def _same_type(type1, type2) -> bool: - """Check if type1 is the same as type2.""" - return type1.qname() == type2.qname() - - -def _get_binop_flow( - left: InferenceResult, - left_type: InferenceResult | None, - binary_opnode: nodes.AugAssign | nodes.BinOp, - right: InferenceResult, - right_type: InferenceResult | None, - context: InferenceContext, - reverse_context: InferenceContext, -) -> list[functools.partial[Generator[InferenceResult, None, None]]]: - """Get the flow for binary operations. - - The rules are a bit messy: - - * if left and right have the same type, then only one - method will be called, left.__op__(right) - * if left and right are unrelated typewise, then first - left.__op__(right) is tried and if this does not exist - or returns NotImplemented, then right.__rop__(left) is tried. - * if left is a subtype of right, then only left.__op__(right) - is tried. - * if left is a supertype of right, then right.__rop__(left) - is first tried and then left.__op__(right) - """ - op = binary_opnode.op - if _same_type(left_type, right_type): - methods = [_bin_op(left, binary_opnode, op, right, context)] - elif helpers.is_subtype(left_type, right_type): - methods = [_bin_op(left, binary_opnode, op, right, context)] - elif helpers.is_supertype(left_type, right_type): - methods = [ - _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True), - _bin_op(left, binary_opnode, op, right, context), - ] - else: - methods = [ - _bin_op(left, binary_opnode, op, right, context), - _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True), - ] - - if ( - PY310_PLUS - and op == "|" - and ( - isinstance(left, (bases.UnionType, nodes.ClassDef)) - or isinstance(left, nodes.Const) - and left.value is None - ) - and ( - isinstance(right, (bases.UnionType, nodes.ClassDef)) - or isinstance(right, nodes.Const) - and right.value is None - ) - ): - methods.extend([functools.partial(_bin_op_or_union_type, left, right)]) - return methods - - -def _get_aug_flow( - left: InferenceResult, - left_type: InferenceResult | None, - aug_opnode: nodes.AugAssign, - right: InferenceResult, - right_type: InferenceResult | None, - context: InferenceContext, - reverse_context: InferenceContext, -) -> list[functools.partial[Generator[InferenceResult, None, None]]]: - """Get the flow for augmented binary operations. - - The rules are a bit messy: - - * if left and right have the same type, then left.__augop__(right) - is first tried and then left.__op__(right). - * if left and right are unrelated typewise, then - left.__augop__(right) is tried, then left.__op__(right) - is tried and then right.__rop__(left) is tried. - * if left is a subtype of right, then left.__augop__(right) - is tried and then left.__op__(right). 
- * if left is a supertype of right, then left.__augop__(right) - is tried, then right.__rop__(left) and then - left.__op__(right) - """ - bin_op = aug_opnode.op.strip("=") - aug_op = aug_opnode.op - if _same_type(left_type, right_type): - methods = [ - _aug_op(left, aug_opnode, aug_op, right, context), - _bin_op(left, aug_opnode, bin_op, right, context), - ] - elif helpers.is_subtype(left_type, right_type): - methods = [ - _aug_op(left, aug_opnode, aug_op, right, context), - _bin_op(left, aug_opnode, bin_op, right, context), - ] - elif helpers.is_supertype(left_type, right_type): - methods = [ - _aug_op(left, aug_opnode, aug_op, right, context), - _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True), - _bin_op(left, aug_opnode, bin_op, right, context), - ] - else: - methods = [ - _aug_op(left, aug_opnode, aug_op, right, context), - _bin_op(left, aug_opnode, bin_op, right, context), - _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True), - ] - return methods - - -def _infer_binary_operation( - left: InferenceResult, - right: InferenceResult, - binary_opnode: nodes.AugAssign | nodes.BinOp, - context: InferenceContext, - flow_factory: GetFlowFactory, -) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]: - """Infer a binary operation between a left operand and a right operand. - - This is used by both normal binary operations and augmented binary - operations, the only difference is the flow factory used. - """ - - context, reverse_context = _get_binop_contexts(context, left, right) - left_type = helpers.object_type(left) - right_type = helpers.object_type(right) - methods = flow_factory( - left, left_type, binary_opnode, right, right_type, context, reverse_context - ) - for method in methods: - try: - results = list(method()) - except AttributeError: - continue - except AttributeInferenceError: - continue - except InferenceError: - yield util.Uninferable - return - else: - if any(isinstance(result, util.UninferableBase) for result in results): - yield util.Uninferable - return - - if all(map(_is_not_implemented, results)): - continue - not_implemented = sum( - 1 for result in results if _is_not_implemented(result) - ) - if not_implemented and not_implemented != len(results): - # Can't infer yet what this is. - yield util.Uninferable - return - - yield from results - return - # The operation doesn't seem to be supported so let the caller know about it - yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type) - - -def _infer_binop( - self: nodes.BinOp, context: InferenceContext | None = None -) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]: - """Binary operation inference logic.""" - left = self.left - right = self.right - - # we use two separate contexts for evaluating lhs and rhs because - # 1. evaluating lhs may leave some undesired entries in context.path - # which may not let us infer right value of rhs - context = context or InferenceContext() - lhs_context = copy_context(context) - rhs_context = copy_context(context) - lhs_iter = left.infer(context=lhs_context) - rhs_iter = right.infer(context=rhs_context) - for lhs, rhs in itertools.product(lhs_iter, rhs_iter): - if any(isinstance(value, util.UninferableBase) for value in (rhs, lhs)): - # Don't know how to process this. 
- yield util.Uninferable - return - - try: - yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow) - except _NonDeducibleTypeHierarchy: - yield util.Uninferable - - -@decorators.yes_if_nothing_inferred -@decorators.path_wrapper -def infer_binop( - self: nodes.BinOp, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, None]: - return _filter_operation_errors( - self, _infer_binop, context, util.BadBinaryOperationMessage - ) - - -nodes.BinOp._infer_binop = _infer_binop -nodes.BinOp._infer = infer_binop - -COMPARE_OPS: dict[str, Callable[[Any, Any], bool]] = { - "==": operator.eq, - "!=": operator.ne, - "<": operator.lt, - "<=": operator.le, - ">": operator.gt, - ">=": operator.ge, - "in": lambda a, b: a in b, - "not in": lambda a, b: a not in b, -} -UNINFERABLE_OPS = { - "is", - "is not", -} - - -def _to_literal(node: SuccessfulInferenceResult) -> Any: - # Can raise SyntaxError or ValueError from ast.literal_eval - # Can raise AttributeError from node.as_string() as not all nodes have a visitor - # Is this the stupidest idea or the simplest idea? - return ast.literal_eval(node.as_string()) - - -def _do_compare( - left_iter: Iterable[InferenceResult], op: str, right_iter: Iterable[InferenceResult] -) -> bool | util.UninferableBase: - """ - If all possible combinations are either True or False, return that: - >>> _do_compare([1, 2], '<=', [3, 4]) - True - >>> _do_compare([1, 2], '==', [3, 4]) - False - - If any item is uninferable, or if some combinations are True and some - are False, return Uninferable: - >>> _do_compare([1, 3], '<=', [2, 4]) - util.Uninferable - """ - retval: bool | None = None - if op in UNINFERABLE_OPS: - return util.Uninferable - op_func = COMPARE_OPS[op] - - for left, right in itertools.product(left_iter, right_iter): - if isinstance(left, util.UninferableBase) or isinstance( - right, util.UninferableBase - ): - return util.Uninferable - - try: - left, right = _to_literal(left), _to_literal(right) - except (SyntaxError, ValueError, AttributeError): - return util.Uninferable - - try: - expr = op_func(left, right) - except TypeError as exc: - raise AstroidTypeError from exc - - if retval is None: - retval = expr - elif retval != expr: - return util.Uninferable - # (or both, but "True | False" is basically the same) - - assert retval is not None - return retval # it was all the same value - - -def _infer_compare( - self: nodes.Compare, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[nodes.Const | util.UninferableBase, None, None]: - """Chained comparison inference logic.""" - retval: bool | util.UninferableBase = True - - ops = self.ops - left_node = self.left - lhs = list(left_node.infer(context=context)) - # should we break early if first element is uninferable? 
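The _do_compare docstring above compresses the key idea: a comparison between two sets of possible values is only a constant when every combination agrees. A minimal standalone sketch of that rule (illustrative only, not astroid's implementation, which additionally handles Uninferable operands and literal conversion):

import itertools
import operator

def do_compare(left_values, op, right_values):
    """Collapse every lhs/rhs pairing into one boolean, or give up."""
    op_func = {"==": operator.eq, "<=": operator.le}[op]  # trimmed table for the sketch
    outcomes = {
        op_func(lhs, rhs)
        for lhs, rhs in itertools.product(left_values, right_values)
    }
    return outcomes.pop() if len(outcomes) == 1 else "Uninferable"

print(do_compare([1, 2], "<=", [3, 4]))  # True: every pairing holds
print(do_compare([1, 2], "==", [3, 4]))  # False: no pairing holds
print(do_compare([1, 3], "<=", [2, 4]))  # 'Uninferable': 3 <= 2 disagrees with the rest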
- for op, right_node in ops: - # eagerly evaluate rhs so that values can be re-used as lhs - rhs = list(right_node.infer(context=context)) - try: - retval = _do_compare(lhs, op, rhs) - except AstroidTypeError: - retval = util.Uninferable - break - if retval is not True: - break # short-circuit - lhs = rhs # continue - if retval is util.Uninferable: - yield retval # type: ignore[misc] - else: - yield nodes.Const(retval) - - -nodes.Compare._infer = _infer_compare # type: ignore[assignment] - - -def _infer_augassign( - self: nodes.AugAssign, context: InferenceContext | None = None -) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]: - """Inference logic for augmented binary operations.""" - context = context or InferenceContext() - - rhs_context = context.clone() - - lhs_iter = self.target.infer_lhs(context=context) - rhs_iter = self.value.infer(context=rhs_context) - for lhs, rhs in itertools.product(lhs_iter, rhs_iter): - if any(isinstance(value, util.UninferableBase) for value in (rhs, lhs)): - # Don't know how to process this. - yield util.Uninferable - return - - try: - yield from _infer_binary_operation( - left=lhs, - right=rhs, - binary_opnode=self, - context=context, - flow_factory=_get_aug_flow, - ) - except _NonDeducibleTypeHierarchy: - yield util.Uninferable - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_augassign( - self: nodes.AugAssign, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, None]: - return _filter_operation_errors( - self, _infer_augassign, context, util.BadBinaryOperationMessage - ) - - -nodes.AugAssign._infer_augassign = _infer_augassign -nodes.AugAssign._infer = infer_augassign - -# End of binary operation inference. - - -@decorators.raise_if_nothing_inferred -def infer_arguments( - self: nodes.Arguments, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, None]: - if context is None or context.lookupname is None: - raise InferenceError(node=self, context=context) - return protocols._arguments_infer_argname(self, context.lookupname, context) - - -nodes.Arguments._infer = infer_arguments # type: ignore[assignment] - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_assign( - self: nodes.AssignName | nodes.AssignAttr, - context: InferenceContext | None = None, - **kwargs: Any, -) -> Generator[InferenceResult, None, None]: - """Infer a AssignName/AssignAttr: need to inspect the RHS part of the - assign node. 
- """ - if isinstance(self.parent, nodes.AugAssign): - return self.parent.infer(context) - - stmts = list(self.assigned_stmts(context=context)) - return bases._infer_stmts(stmts, context) - - -nodes.AssignName._infer = infer_assign -nodes.AssignAttr._infer = infer_assign - - -@decorators.raise_if_nothing_inferred -@decorators.path_wrapper -def infer_empty_node( - self: nodes.EmptyNode, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, None]: - if not self.has_underlying_object(): - yield util.Uninferable - else: - try: - yield from AstroidManager().infer_ast_from_something( - self.object, context=context - ) - except AstroidError: - yield util.Uninferable - - -nodes.EmptyNode._infer = infer_empty_node # type: ignore[assignment] - - -def _populate_context_lookup(call: nodes.Call, context: InferenceContext | None): - # Allows context to be saved for later - # for inference inside a function - context_lookup: dict[InferenceResult, InferenceContext] = {} - if context is None: - return context_lookup - for arg in call.args: - if isinstance(arg, nodes.Starred): - context_lookup[arg.value] = context - else: - context_lookup[arg] = context - keywords = call.keywords if call.keywords is not None else [] - for keyword in keywords: - context_lookup[keyword.value] = context - return context_lookup - - -@decorators.raise_if_nothing_inferred -def infer_ifexp( - self: nodes.IfExp, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[InferenceResult, None, None]: - """Support IfExp inference. - - If we can't infer the truthiness of the condition, we default - to inferring both branches. Otherwise, we infer either branch - depending on the condition. - """ - both_branches = False - # We use two separate contexts for evaluating lhs and rhs because - # evaluating lhs may leave some undesired entries in context.path - # which may not let us infer right value of rhs. - - context = context or InferenceContext() - lhs_context = copy_context(context) - rhs_context = copy_context(context) - try: - test = next(self.test.infer(context=context.clone())) - except (InferenceError, StopIteration): - both_branches = True - else: - if not isinstance(test, util.UninferableBase): - if test.bool_value(): - yield from self.body.infer(context=lhs_context) - else: - yield from self.orelse.infer(context=rhs_context) - else: - both_branches = True - if both_branches: - yield from self.body.infer(context=lhs_context) - yield from self.orelse.infer(context=rhs_context) - - -nodes.IfExp._infer = infer_ifexp # type: ignore[assignment] - - -def infer_functiondef( - self: _FunctionDefT, context: InferenceContext | None = None, **kwargs: Any -) -> Generator[Property | _FunctionDefT, None, InferenceErrorInfo]: - if not self.decorators or not bases._is_property(self): - yield self - return InferenceErrorInfo(node=self, context=context) - - # When inferring a property, we instantiate a new `objects.Property` object, - # which in turn, because it inherits from `FunctionDef`, sets itself in the locals - # of the wrapping frame. This means that every time we infer a property, the locals - # are mutated with a new instance of the property. To avoid this, we detect this - # scenario and avoid passing the `parent` argument to the constructor. 
- parent_frame = self.parent.frame() - property_already_in_parent_locals = self.name in parent_frame.locals and any( - isinstance(val, objects.Property) for val in parent_frame.locals[self.name] - ) - # We also don't want to pass parent if the definition is within a Try node - if isinstance(self.parent, (nodes.TryExcept, nodes.TryFinally, nodes.If)): - property_already_in_parent_locals = True - - prop_func = objects.Property( - function=self, - name=self.name, - lineno=self.lineno, - parent=self.parent if not property_already_in_parent_locals else None, - col_offset=self.col_offset, - ) - if property_already_in_parent_locals: - prop_func.parent = self.parent - prop_func.postinit(body=[], args=self.args, doc_node=self.doc_node) - yield prop_func - return InferenceErrorInfo(node=self, context=context) - - -nodes.FunctionDef._infer = infer_functiondef diff --git a/astroid/interpreter/_import/util.py b/astroid/interpreter/_import/util.py index 06afd19267..a8af9ec6ae 100644 --- a/astroid/interpreter/_import/util.py +++ b/astroid/interpreter/_import/util.py @@ -78,12 +78,8 @@ def is_namespace(modname: str) -> bool: # Repair last_submodule_search_locations if last_submodule_search_locations: - # TODO: py38: remove except - try: - # pylint: disable=unsubscriptable-object - last_item = last_submodule_search_locations[-1] - except TypeError: - last_item = last_submodule_search_locations._recalculate()[-1] + # pylint: disable=unsubscriptable-object + last_item = last_submodule_search_locations[-1] # e.g. for failure example above, add 'a/b' and keep going # so that find_spec('a.b.c', path=['a', 'a/b']) succeeds assumed_location = pathlib.Path(last_item) / component diff --git a/astroid/manager.py b/astroid/manager.py index 2df270f1ac..c499fe5598 100644 --- a/astroid/manager.py +++ b/astroid/manager.py @@ -14,11 +14,9 @@ import types import zipimport from collections.abc import Callable, Iterator, Sequence -from importlib.util import find_spec, module_from_spec from typing import Any, ClassVar from astroid import nodes -from astroid.const import BRAIN_MODULES_DIRECTORY from astroid.context import InferenceContext, _invalidate_cache from astroid.exceptions import AstroidBuildingError, AstroidImportError from astroid.interpreter._import import spec, util @@ -53,16 +51,16 @@ class AstroidManager: """ name = "astroid loader" - brain: AstroidManagerBrain = { + brain: ClassVar[AstroidManagerBrain] = { "astroid_cache": {}, "_mod_file_cache": {}, "_failed_import_hooks": [], "always_load_extensions": False, "optimize_ast": False, + "max_inferable_values": 100, "extension_package_whitelist": set(), "_transform": TransformVisitor(), } - max_inferable_values: ClassVar[int] = 100 def __init__(self) -> None: # NOTE: cache entries are added by the [re]builder @@ -90,6 +88,14 @@ def optimize_ast(self) -> bool: def optimize_ast(self, value: bool) -> None: AstroidManager.brain["optimize_ast"] = value + @property + def max_inferable_values(self) -> int: + return AstroidManager.brain["max_inferable_values"] + + @max_inferable_values.setter + def max_inferable_values(self, value: int) -> None: + AstroidManager.brain["max_inferable_values"] = value + @property def register_transform(self): # This and unregister_transform below are exported for convenience @@ -432,9 +438,10 @@ def clear_cache(self) -> None: """ # import here because of cyclic imports # pylint: disable=import-outside-toplevel + from astroid.brain.helpers import register_all_brains from astroid.inference_tip import clear_inference_tip_cache from 
astroid.interpreter.objectmodel import ObjectModel - from astroid.nodes.node_classes import LookupMixIn + from astroid.nodes._base_nodes import LookupMixIn from astroid.nodes.scoped_nodes import ClassDef clear_inference_tip_cache() @@ -455,11 +462,5 @@ def clear_cache(self) -> None: self.bootstrap() - # Reload brain plugins. During initialisation this is done in astroid.__init__.py - for module in BRAIN_MODULES_DIRECTORY.iterdir(): - if module.suffix == ".py": - module_spec = find_spec(f"astroid.brain.{module.stem}") - assert module_spec - module_object = module_from_spec(module_spec) - assert module_spec.loader - module_spec.loader.exec_module(module_object) + # Reload brain plugins. During initialisation this is done in astroid.manager.py + register_all_brains(self) diff --git a/astroid/mixins.py b/astroid/mixins.py deleted file mode 100644 index 09ae075f86..0000000000 --- a/astroid/mixins.py +++ /dev/null @@ -1,31 +0,0 @@ -# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html -# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE -# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt - -"""This module contains some mixins for the different nodes.""" - -import warnings - -from astroid.nodes._base_nodes import AssignTypeNode as AssignTypeMixin -from astroid.nodes._base_nodes import FilterStmtsBaseNode as FilterStmtsMixin -from astroid.nodes._base_nodes import ImportNode as ImportFromMixin -from astroid.nodes._base_nodes import MultiLineBlockNode as MultiLineBlockMixin -from astroid.nodes._base_nodes import MultiLineWithElseBlockNode as BlockRangeMixIn -from astroid.nodes._base_nodes import NoChildrenNode as NoChildrenMixin -from astroid.nodes._base_nodes import ParentAssignNode as ParentAssignTypeMixin - -__all__ = ( - "AssignTypeMixin", - "BlockRangeMixIn", - "FilterStmtsMixin", - "ImportFromMixin", - "MultiLineBlockMixin", - "NoChildrenMixin", - "ParentAssignTypeMixin", -) - -warnings.warn( - "The 'astroid.mixins' module is deprecated and will become private in astroid 3.0.0", - DeprecationWarning, - stacklevel=2, -) diff --git a/astroid/node_classes.py b/astroid/node_classes.py deleted file mode 100644 index 980fa0a90b..0000000000 --- a/astroid/node_classes.py +++ /dev/null @@ -1,97 +0,0 @@ -# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html -# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE -# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt - -# pylint: disable=unused-import - -import warnings - -from astroid.nodes.node_classes import ( - CONST_CLS, - AnnAssign, - Arguments, - Assert, - Assign, - AssignAttr, - AssignName, - AsyncFor, - AsyncWith, - Attribute, - AugAssign, - Await, - BaseContainer, - BinOp, - BoolOp, - Break, - Call, - Compare, - Comprehension, - Const, - Continue, - Decorators, - DelAttr, - Delete, - DelName, - Dict, - DictUnpack, - EmptyNode, - EvaluatedObject, - ExceptHandler, - Expr, - For, - FormattedValue, - Global, - If, - IfExp, - Import, - ImportFrom, - JoinedStr, - Keyword, - List, - LookupMixIn, - Match, - MatchAs, - MatchCase, - MatchClass, - MatchMapping, - MatchOr, - MatchSequence, - MatchSingleton, - MatchStar, - MatchValue, - Name, - NamedExpr, - NodeNG, - Nonlocal, - Pass, - Pattern, - Raise, - Return, - Set, - Slice, - Starred, - Subscript, - TryExcept, - TryFinally, - TryStar, - Tuple, - UnaryOp, - Unknown, - While, - With, - Yield, - YieldFrom, - are_exclusive, - const_factory, - 
unpack_infer, -) - -# We cannot create a __all__ here because it would create a circular import -# Please remove astroid/scoped_nodes.py|astroid/node_classes.py in autoflake -# exclude when removing this file. -warnings.warn( - "The 'astroid.node_classes' module is deprecated and will be replaced by " - "'astroid.nodes' in astroid 3.0.0", - DeprecationWarning, - stacklevel=2, -) diff --git a/astroid/nodes/__init__.py b/astroid/nodes/__init__.py index f677ff509b..769cf278e4 100644 --- a/astroid/nodes/__init__.py +++ b/astroid/nodes/__init__.py @@ -11,10 +11,6 @@ """ # Nodes not present in the builtin ast module: DictUnpack, Unknown, and EvaluatedObject. - -# This is the only node we re-export from the private _base_nodes module. This -# is because it was originally part of the public API and hasn't been deprecated. -from astroid.nodes._base_nodes import Statement from astroid.nodes.node_classes import ( CONST_CLS, AnnAssign, @@ -71,6 +67,7 @@ NamedExpr, NodeNG, Nonlocal, + ParamSpec, Pass, Pattern, Raise, @@ -79,10 +76,12 @@ Slice, Starred, Subscript, - TryExcept, - TryFinally, + Try, TryStar, Tuple, + TypeAlias, + TypeVar, + TypeVarTuple, UnaryOp, Unknown, While, @@ -111,10 +110,7 @@ ) from astroid.nodes.utils import Position -_BaseContainer = BaseContainer # TODO Remove for astroid 3.0 - ALL_NODE_CLASSES = ( - _BaseContainer, BaseContainer, AnnAssign, Arguments, @@ -180,6 +176,7 @@ NamedExpr, NodeNG, Nonlocal, + ParamSpec, Pass, Pattern, Raise, @@ -189,10 +186,12 @@ Slice, Starred, Subscript, - TryExcept, - TryFinally, + Try, TryStar, Tuple, + TypeAlias, + TypeVar, + TypeVarTuple, UnaryOp, Unknown, While, @@ -215,6 +214,7 @@ "Attribute", "AugAssign", "Await", + "BaseContainer", "BinOp", "BoolOp", "Break", @@ -271,6 +271,7 @@ "NamedExpr", "NodeNG", "Nonlocal", + "ParamSpec", "Pass", "Position", "Raise", @@ -279,12 +280,13 @@ "SetComp", "Slice", "Starred", - "Statement", "Subscript", - "TryExcept", - "TryFinally", + "Try", "TryStar", "Tuple", + "TypeAlias", + "TypeVar", + "TypeVarTuple", "UnaryOp", "Unknown", "unpack_infer", diff --git a/astroid/nodes/_base_nodes.py b/astroid/nodes/_base_nodes.py index 15cc6a9ad1..ddcac994c6 100644 --- a/astroid/nodes/_base_nodes.py +++ b/astroid/nodes/_base_nodes.py @@ -10,15 +10,40 @@ from __future__ import annotations import itertools -from collections.abc import Iterator -from functools import cached_property -from typing import TYPE_CHECKING, ClassVar - -from astroid.exceptions import AttributeInferenceError +from collections.abc import Generator, Iterator +from functools import cached_property, lru_cache, partial +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union + +from astroid import bases, nodes, util +from astroid.const import PY310_PLUS +from astroid.context import ( + CallContext, + InferenceContext, + bind_context_to_node, +) +from astroid.exceptions import ( + AttributeInferenceError, + InferenceError, +) +from astroid.interpreter import dunder_lookup from astroid.nodes.node_ng import NodeNG +from astroid.typing import InferenceResult if TYPE_CHECKING: - from astroid import nodes + from astroid.nodes.node_classes import LocalsDictNodeNG + + GetFlowFactory = Callable[ + [ + InferenceResult, + Optional[InferenceResult], + Union[nodes.AugAssign, nodes.BinOp], + InferenceResult, + Optional[InferenceResult], + InferenceContext, + InferenceContext, + ], + list[partial[Generator[InferenceResult, None, None]]], + ] class Statement(NodeNG): @@ -186,6 +211,13 @@ def _get_return_nodes_skip_functions(self): continue yield from 
child_node._get_return_nodes_skip_functions() + def _get_yield_nodes_skip_functions(self): + for block in self._multi_line_blocks: + for child_node in block: + if child_node.is_function: + continue + yield from child_node._get_yield_nodes_skip_functions() + def _get_yield_nodes_skip_lambdas(self): for block in self._multi_line_blocks: for child_node in block: @@ -223,3 +255,421 @@ def _elsed_block_range( return lineno, orelse[-1].tolineno return lineno, orelse[0].fromlineno - 1 return lineno, last or self.tolineno + + +class LookupMixIn(NodeNG): + """Mixin to look up a name in the right scope.""" + + @lru_cache # noqa + def lookup(self, name: str) -> tuple[LocalsDictNodeNG, list[NodeNG]]: + """Lookup where the given variable is assigned. + + The lookup starts from self's scope. If self is not a frame itself + and the name is found in the inner frame locals, statements will be + filtered to remove ignorable statements according to self's location. + + :param name: The name of the variable to find assignments for. + + :returns: The scope node and the list of assignments associated to the + given name according to the scope where it has been found (locals, + globals or builtin). + """ + return self.scope().scope_lookup(self, name) + + def ilookup(self, name): + """Lookup the inferred values of the given variable. + + :param name: The variable name to find values for. + :type name: str + + :returns: The inferred values of the statements returned from + :meth:`lookup`. + :rtype: iterable + """ + frame, stmts = self.lookup(name) + context = InferenceContext() + return bases._infer_stmts(stmts, context, frame) + + +def _reflected_name(name) -> str: + return "__r" + name[2:] + + +def _augmented_name(name) -> str: + return "__i" + name[2:] + + +BIN_OP_METHOD = { + "+": "__add__", + "-": "__sub__", + "/": "__truediv__", + "//": "__floordiv__", + "*": "__mul__", + "**": "__pow__", + "%": "__mod__", + "&": "__and__", + "|": "__or__", + "^": "__xor__", + "<<": "__lshift__", + ">>": "__rshift__", + "@": "__matmul__", +} + +REFLECTED_BIN_OP_METHOD = { + key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items() +} +AUGMENTED_OP_METHOD = { + key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items() +} + + +class OperatorNode(NodeNG): + @staticmethod + def _filter_operation_errors( + infer_callable: Callable[ + [InferenceContext | None], + Generator[InferenceResult | util.BadOperationMessage, None, None], + ], + context: InferenceContext | None, + error: type[util.BadOperationMessage], + ) -> Generator[InferenceResult, None, None]: + for result in infer_callable(context): + if isinstance(result, error): + # For the sake of .infer(), we don't care about operation + # errors, which is the job of a linter. So return something + # which shows that we can't infer the result. + yield util.Uninferable + else: + yield result + + @staticmethod + def _is_not_implemented(const) -> bool: + """Check if the given const node is NotImplemented.""" + return isinstance(const, nodes.Const) and const.value is NotImplemented + + @staticmethod + def _infer_old_style_string_formatting( + instance: nodes.Const, other: nodes.NodeNG, context: InferenceContext + ) -> tuple[util.UninferableBase | nodes.Const]: + """Infer the result of '"string" % ...'. + + TODO: Instead of returning Uninferable we should rely + on the call to '%' to see if the result is actually uninferable. 
+ """ + if isinstance(other, nodes.Tuple): + if util.Uninferable in other.elts: + return (util.Uninferable,) + inferred_positional = [util.safe_infer(i, context) for i in other.elts] + if all(isinstance(i, nodes.Const) for i in inferred_positional): + values = tuple(i.value for i in inferred_positional) + else: + values = None + elif isinstance(other, nodes.Dict): + values: dict[Any, Any] = {} + for pair in other.items: + key = util.safe_infer(pair[0], context) + if not isinstance(key, nodes.Const): + return (util.Uninferable,) + value = util.safe_infer(pair[1], context) + if not isinstance(value, nodes.Const): + return (util.Uninferable,) + values[key.value] = value.value + elif isinstance(other, nodes.Const): + values = other.value + else: + return (util.Uninferable,) + + try: + return (nodes.const_factory(instance.value % values),) + except (TypeError, KeyError, ValueError): + return (util.Uninferable,) + + @staticmethod + def _invoke_binop_inference( + instance: InferenceResult, + opnode: nodes.AugAssign | nodes.BinOp, + op: str, + other: InferenceResult, + context: InferenceContext, + method_name: str, + ) -> Generator[InferenceResult, None, None]: + """Invoke binary operation inference on the given instance.""" + methods = dunder_lookup.lookup(instance, method_name) + context = bind_context_to_node(context, instance) + method = methods[0] + context.callcontext.callee = method + + if ( + isinstance(instance, nodes.Const) + and isinstance(instance.value, str) + and op == "%" + ): + return iter( + OperatorNode._infer_old_style_string_formatting( + instance, other, context + ) + ) + + try: + inferred = next(method.infer(context=context)) + except StopIteration as e: + raise InferenceError(node=method, context=context) from e + if isinstance(inferred, util.UninferableBase): + raise InferenceError + if not isinstance( + instance, + (nodes.Const, nodes.Tuple, nodes.List, nodes.ClassDef, bases.Instance), + ): + raise InferenceError # pragma: no cover # Used as a failsafe + return instance.infer_binary_op(opnode, op, other, context, inferred) + + @staticmethod + def _aug_op( + instance: InferenceResult, + opnode: nodes.AugAssign, + op: str, + other: InferenceResult, + context: InferenceContext, + reverse: bool = False, + ) -> partial[Generator[InferenceResult, None, None]]: + """Get an inference callable for an augmented binary operation.""" + method_name = AUGMENTED_OP_METHOD[op] + return partial( + OperatorNode._invoke_binop_inference, + instance=instance, + op=op, + opnode=opnode, + other=other, + context=context, + method_name=method_name, + ) + + @staticmethod + def _bin_op( + instance: InferenceResult, + opnode: nodes.AugAssign | nodes.BinOp, + op: str, + other: InferenceResult, + context: InferenceContext, + reverse: bool = False, + ) -> partial[Generator[InferenceResult, None, None]]: + """Get an inference callable for a normal binary operation. + + If *reverse* is True, then the reflected method will be used instead. + """ + if reverse: + method_name = REFLECTED_BIN_OP_METHOD[op] + else: + method_name = BIN_OP_METHOD[op] + return partial( + OperatorNode._invoke_binop_inference, + instance=instance, + op=op, + opnode=opnode, + other=other, + context=context, + method_name=method_name, + ) + + @staticmethod + def _bin_op_or_union_type( + left: bases.UnionType | nodes.ClassDef | nodes.Const, + right: bases.UnionType | nodes.ClassDef | nodes.Const, + ) -> Generator[InferenceResult, None, None]: + """Create a new UnionType instance for binary or, e.g. 
int | str.""" + yield bases.UnionType(left, right) + + @staticmethod + def _get_binop_contexts(context, left, right): + """Get contexts for binary operations. + + This will return two inference contexts, the first one + for x.__op__(y), the other one for y.__rop__(x), where + only the arguments are inversed. + """ + # The order is important, since the first one should be + # left.__op__(right). + for arg in (right, left): + new_context = context.clone() + new_context.callcontext = CallContext(args=[arg]) + new_context.boundnode = None + yield new_context + + @staticmethod + def _same_type(type1, type2) -> bool: + """Check if type1 is the same as type2.""" + return type1.qname() == type2.qname() + + @staticmethod + def _get_aug_flow( + left: InferenceResult, + left_type: InferenceResult | None, + aug_opnode: nodes.AugAssign, + right: InferenceResult, + right_type: InferenceResult | None, + context: InferenceContext, + reverse_context: InferenceContext, + ) -> list[partial[Generator[InferenceResult, None, None]]]: + """Get the flow for augmented binary operations. + + The rules are a bit messy: + + * if left and right have the same type, then left.__augop__(right) + is first tried and then left.__op__(right). + * if left and right are unrelated typewise, then + left.__augop__(right) is tried, then left.__op__(right) + is tried and then right.__rop__(left) is tried. + * if left is a subtype of right, then left.__augop__(right) + is tried and then left.__op__(right). + * if left is a supertype of right, then left.__augop__(right) + is tried, then right.__rop__(left) and then + left.__op__(right) + """ + from astroid import helpers # pylint: disable=import-outside-toplevel + + bin_op = aug_opnode.op.strip("=") + aug_op = aug_opnode.op + if OperatorNode._same_type(left_type, right_type): + methods = [ + OperatorNode._aug_op(left, aug_opnode, aug_op, right, context), + OperatorNode._bin_op(left, aug_opnode, bin_op, right, context), + ] + elif helpers.is_subtype(left_type, right_type): + methods = [ + OperatorNode._aug_op(left, aug_opnode, aug_op, right, context), + OperatorNode._bin_op(left, aug_opnode, bin_op, right, context), + ] + elif helpers.is_supertype(left_type, right_type): + methods = [ + OperatorNode._aug_op(left, aug_opnode, aug_op, right, context), + OperatorNode._bin_op( + right, aug_opnode, bin_op, left, reverse_context, reverse=True + ), + OperatorNode._bin_op(left, aug_opnode, bin_op, right, context), + ] + else: + methods = [ + OperatorNode._aug_op(left, aug_opnode, aug_op, right, context), + OperatorNode._bin_op(left, aug_opnode, bin_op, right, context), + OperatorNode._bin_op( + right, aug_opnode, bin_op, left, reverse_context, reverse=True + ), + ] + return methods + + @staticmethod + def _get_binop_flow( + left: InferenceResult, + left_type: InferenceResult | None, + binary_opnode: nodes.AugAssign | nodes.BinOp, + right: InferenceResult, + right_type: InferenceResult | None, + context: InferenceContext, + reverse_context: InferenceContext, + ) -> list[partial[Generator[InferenceResult, None, None]]]: + """Get the flow for binary operations. + + The rules are a bit messy: + + * if left and right have the same type, then only one + method will be called, left.__op__(right) + * if left and right are unrelated typewise, then first + left.__op__(right) is tried and if this does not exist + or returns NotImplemented, then right.__rop__(left) is tried. + * if left is a subtype of right, then only left.__op__(right) + is tried. 
+ * if left is a supertype of right, then right.__rop__(left) + is first tried and then left.__op__(right) + """ + from astroid import helpers # pylint: disable=import-outside-toplevel + + op = binary_opnode.op + if OperatorNode._same_type(left_type, right_type): + methods = [OperatorNode._bin_op(left, binary_opnode, op, right, context)] + elif helpers.is_subtype(left_type, right_type): + methods = [OperatorNode._bin_op(left, binary_opnode, op, right, context)] + elif helpers.is_supertype(left_type, right_type): + methods = [ + OperatorNode._bin_op( + right, binary_opnode, op, left, reverse_context, reverse=True + ), + OperatorNode._bin_op(left, binary_opnode, op, right, context), + ] + else: + methods = [ + OperatorNode._bin_op(left, binary_opnode, op, right, context), + OperatorNode._bin_op( + right, binary_opnode, op, left, reverse_context, reverse=True + ), + ] + + if ( + PY310_PLUS + and op == "|" + and ( + isinstance(left, (bases.UnionType, nodes.ClassDef)) + or isinstance(left, nodes.Const) + and left.value is None + ) + and ( + isinstance(right, (bases.UnionType, nodes.ClassDef)) + or isinstance(right, nodes.Const) + and right.value is None + ) + ): + methods.extend([partial(OperatorNode._bin_op_or_union_type, left, right)]) + return methods + + @staticmethod + def _infer_binary_operation( + left: InferenceResult, + right: InferenceResult, + binary_opnode: nodes.AugAssign | nodes.BinOp, + context: InferenceContext, + flow_factory: GetFlowFactory, + ) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]: + """Infer a binary operation between a left operand and a right operand. + + This is used by both normal binary operations and augmented binary + operations, the only difference is the flow factory used. + """ + from astroid import helpers # pylint: disable=import-outside-toplevel + + context, reverse_context = OperatorNode._get_binop_contexts( + context, left, right + ) + left_type = helpers.object_type(left) + right_type = helpers.object_type(right) + methods = flow_factory( + left, left_type, binary_opnode, right, right_type, context, reverse_context + ) + for method in methods: + try: + results = list(method()) + except AttributeError: + continue + except AttributeInferenceError: + continue + except InferenceError: + yield util.Uninferable + return + else: + if any(isinstance(result, util.UninferableBase) for result in results): + yield util.Uninferable + return + + if all(map(OperatorNode._is_not_implemented, results)): + continue + not_implemented = sum( + 1 for result in results if OperatorNode._is_not_implemented(result) + ) + if not_implemented and not_implemented != len(results): + # Can't infer yet what this is. 
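A hedged sketch of the PY310_PLUS special case in _get_binop_flow: on Python 3.10+, a `|` between classes (or None) can be inferred as an astroid UnionType instead of being reported as an unsupported operation.

import astroid

node = astroid.extract_node("int | str")
for result in node.infer():
    # On Python 3.10+ the results are expected to include a UnionType
    # (astroid.bases.UnionType) standing in for `int | str`.
    print(result)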
+ yield util.Uninferable + return + + yield from results + return + + # The operation doesn't seem to be supported so let the caller know about it + yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type) diff --git a/astroid/nodes/as_string.py b/astroid/nodes/as_string.py index 49ef1b77e3..4ef10b49e6 100644 --- a/astroid/nodes/as_string.py +++ b/astroid/nodes/as_string.py @@ -12,6 +12,7 @@ from astroid import nodes if TYPE_CHECKING: + from astroid import objects from astroid.nodes import Const from astroid.nodes.node_classes import ( Match, @@ -178,6 +179,7 @@ def visit_classdef(self, node) -> str: args += [n.accept(self) for n in node.keywords] args_str = f"({', '.join(args)})" if args else "" docs = self._docs_dedent(node.doc_node) + # TODO: handle type_params return "\n\n{}class {}{}:{}\n{}\n".format( decorate, node.name, args_str, docs, self._stmt_list(node.body) ) @@ -322,7 +324,7 @@ def visit_formattedvalue(self, node) -> str: result += ":" + node.format_spec.accept(self)[2:-1] return "{%s}" % result - def handle_functiondef(self, node, keyword) -> str: + def handle_functiondef(self, node: nodes.FunctionDef, keyword: str) -> str: """return a (possibly async) function definition node as string""" decorate = node.decorators.accept(self) if node.decorators else "" docs = self._docs_dedent(node.doc_node) @@ -330,6 +332,7 @@ def handle_functiondef(self, node, keyword) -> str: if node.returns: return_annotation = " -> " + node.returns.as_string() trailer = return_annotation + ":" + # TODO: handle type_params def_format = "\n%s%s %s(%s)%s%s\n%s" return def_format % ( decorate, @@ -341,11 +344,11 @@ def handle_functiondef(self, node, keyword) -> str: self._stmt_list(node.body), ) - def visit_functiondef(self, node) -> str: + def visit_functiondef(self, node: nodes.FunctionDef) -> str: """return an astroid.FunctionDef node as string""" return self.handle_functiondef(node, "def") - def visit_asyncfunctiondef(self, node) -> str: + def visit_asyncfunctiondef(self, node: nodes.AsyncFunctionDef) -> str: """return an astroid.AsyncFunction node as string""" return self.handle_functiondef(node, "async def") @@ -431,10 +434,18 @@ def visit_nonlocal(self, node) -> str: """return an astroid.Nonlocal node as string""" return f"nonlocal {', '.join(node.names)}" + def visit_paramspec(self, node: nodes.ParamSpec) -> str: + """return an astroid.ParamSpec node as string""" + return node.name.accept(self) + def visit_pass(self, node) -> str: """return an astroid.Pass node as string""" return "pass" + def visit_partialfunction(self, node: objects.PartialFunction) -> str: + """Return an objects.PartialFunction as string.""" + return self.visit_functiondef(node) + def visit_raise(self, node) -> str: """return an astroid.Raise node as string""" if node.exc: @@ -485,21 +496,17 @@ def visit_subscript(self, node) -> str: idxstr = idxstr[1:-1] return f"{self._precedence_parens(node, node.value)}[{idxstr}]" - def visit_tryexcept(self, node) -> str: - """return an astroid.TryExcept node as string""" + def visit_try(self, node) -> str: + """return an astroid.Try node as string""" trys = [f"try:\n{self._stmt_list(node.body)}"] for handler in node.handlers: trys.append(handler.accept(self)) if node.orelse: trys.append(f"else:\n{self._stmt_list(node.orelse)}") + if node.finalbody: + trys.append(f"finally:\n{self._stmt_list(node.finalbody)}") return "\n".join(trys) - def visit_tryfinally(self, node) -> str: - """return an astroid.TryFinally node as string""" - return "try:\n{}\nfinally:\n{}".format( - 
self._stmt_list(node.body), self._stmt_list(node.finalbody) - ) - def visit_trystar(self, node) -> str: """return an astroid.TryStar node as string""" trys = [f"try:\n{self._stmt_list(node.body)}"] @@ -517,6 +524,18 @@ def visit_tuple(self, node) -> str: return f"({node.elts[0].accept(self)}, )" return f"({', '.join(child.accept(self) for child in node.elts)})" + def visit_typealias(self, node: nodes.TypeAlias) -> str: + """return an astroid.TypeAlias node as string""" + return node.name.accept(self) if node.name else "_" + + def visit_typevar(self, node: nodes.TypeVar) -> str: + """return an astroid.TypeVar node as string""" + return node.name.accept(self) if node.name else "_" + + def visit_typevartuple(self, node: nodes.TypeVarTuple) -> str: + """return an astroid.TypeVarTuple node as string""" + return "*" + node.name.accept(self) if node.name else "" + def visit_unaryop(self, node) -> str: """return an astroid.UnaryOp node as string""" if node.op == "not": diff --git a/astroid/nodes/node_classes.py b/astroid/nodes/node_classes.py index 84d8cbe15d..59e4fc8a20 100644 --- a/astroid/nodes/node_classes.py +++ b/astroid/nodes/node_classes.py @@ -7,12 +7,14 @@ from __future__ import annotations import abc +import ast import itertools +import operator import sys import typing import warnings from collections.abc import Generator, Iterable, Iterator, Mapping -from functools import cached_property, lru_cache +from functools import cached_property from typing import ( TYPE_CHECKING, Any, @@ -20,29 +22,33 @@ ClassVar, Literal, Optional, - TypeVar, Union, ) -from astroid import decorators, util +from astroid import decorators, protocols, util from astroid.bases import Instance, _infer_stmts from astroid.const import _EMPTY_OBJECT_MARKER, Context -from astroid.context import InferenceContext +from astroid.context import CallContext, InferenceContext, copy_context from astroid.exceptions import ( + AstroidBuildingError, + AstroidError, AstroidIndexError, AstroidTypeError, AstroidValueError, + AttributeInferenceError, InferenceError, + NameInferenceError, NoDefault, ParentMissingError, + _NonDeducibleTypeHierarchy, ) +from astroid.interpreter import dunder_lookup from astroid.manager import AstroidManager from astroid.nodes import _base_nodes from astroid.nodes.const import OP_PRECEDENCE from astroid.nodes.node_ng import NodeNG from astroid.typing import ( ConstFactoryResult, - InferBinaryOp, InferenceErrorInfo, InferenceResult, SuccessfulInferenceResult, @@ -53,7 +59,6 @@ else: from typing_extensions import Self - if TYPE_CHECKING: from astroid import nodes from astroid.nodes import LocalsDictNodeNG @@ -63,8 +68,8 @@ def _is_const(value) -> bool: return isinstance(value, tuple(CONST_CLS)) -_NodesT = TypeVar("_NodesT", bound=NodeNG) -_BadOpMessageT = TypeVar("_BadOpMessageT", bound=util.BadOperationMessage) +_NodesT = typing.TypeVar("_NodesT", bound=NodeNG) +_BadOpMessageT = typing.TypeVar("_BadOpMessageT", bound=util.BadOperationMessage) AssignedStmtsPossibleNode = Union["List", "Tuple", "AssignName", "AssignAttr", None] AssignedStmtsCall = Callable[ @@ -124,7 +129,7 @@ def are_exclusive(stmt1, stmt2, exceptions: list[str] | None = None) -> bool: algorithm : 1) index stmt1's parents 2) climb among stmt2's parents until we find a common parent - 3) if the common parent is a If or TryExcept statement, look if nodes are + 3) if the common parent is a If or Try statement, look if nodes are in exclusive branches """ # index stmt1's parents @@ -139,7 +144,7 @@ def are_exclusive(stmt1, stmt2, exceptions: 
list[str] | None = None) -> bool: previous = stmt2 for node in stmt2.node_ancestors(): if node in stmt1_parents: - # if the common parent is a If or TryExcept statement, look if + # if the common parent is a If or Try statement, look if # nodes are in exclusive branches if isinstance(node, If) and exceptions is None: c2attr, c2node = node.locate_child(previous) @@ -151,7 +156,7 @@ def are_exclusive(stmt1, stmt2, exceptions: list[str] | None = None) -> bool: if c1attr != c2attr: # different `If` branches (`If.body` and `If.orelse`) return True - elif isinstance(node, TryExcept): + elif isinstance(node, Try): c2attr, c2node = node.locate_child(previous) c1attr, c1node = node.locate_child(children[node]) if c1node is not c2node: @@ -338,46 +343,63 @@ def pytype(self) -> str: def get_children(self): yield from self.elts + @decorators.raise_if_nothing_inferred + def _infer( + self, + context: InferenceContext | None = None, + **kwargs: Any, + ) -> Iterator[Self]: + has_starred_named_expr = any( + isinstance(e, (Starred, NamedExpr)) for e in self.elts + ) + if has_starred_named_expr: + values = self._infer_sequence_helper(context) + new_seq = type(self)( + lineno=self.lineno, + col_offset=self.col_offset, + parent=self.parent, + end_lineno=self.end_lineno, + end_col_offset=self.end_col_offset, + ) + new_seq.postinit(values) -# TODO: Move into _base_nodes. Blocked by import of _infer_stmts from bases. -class LookupMixIn(NodeNG): - """Mixin to look up a name in the right scope.""" - - @lru_cache # noqa - def lookup(self, name: str) -> tuple[LocalsDictNodeNG, list[NodeNG]]: - """Lookup where the given variable is assigned. - - The lookup starts from self's scope. If self is not a frame itself - and the name is found in the inner frame locals, statements will be - filtered to remove ignorable statements according to self's location. - - :param name: The name of the variable to find assignments for. - - :returns: The scope node and the list of assignments associated to the - given name according to the scope where it has been found (locals, - globals or builtin). - """ - return self.scope().scope_lookup(self, name) - - def ilookup(self, name): - """Lookup the inferred values of the given variable. - - :param name: The variable name to find values for. - :type name: str - - :returns: The inferred values of the statements returned from - :meth:`lookup`. - :rtype: iterable - """ - frame, stmts = self.lookup(name) - context = InferenceContext() - return _infer_stmts(stmts, context, frame) + yield new_seq + else: + yield self + + def _infer_sequence_helper( + self, context: InferenceContext | None = None + ) -> list[SuccessfulInferenceResult]: + """Infer all values based on BaseContainer.elts.""" + values = [] + + for elt in self.elts: + if isinstance(elt, Starred): + starred = util.safe_infer(elt.value, context) + if not starred: + raise InferenceError(node=self, context=context) + if not hasattr(starred, "elts"): + raise InferenceError(node=self, context=context) + # TODO: fresh context? 
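Roughly, the BaseContainer._infer / _infer_sequence_helper path added above rebuilds a container whose elements include Starred or NamedExpr nodes, so the inferred sequence holds plain constants. A small, indicative example:

import astroid

node = astroid.extract_node("[1, *[2, 3], (y := 4)]")
inferred = next(node.infer())
print(inferred.as_string())  # expected: [1, 2, 3, 4]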
+ values.extend(starred._infer_sequence_helper(context)) + elif isinstance(elt, NamedExpr): + value = util.safe_infer(elt.value, context) + if not value: + raise InferenceError(node=self, context=context) + values.append(value) + else: + values.append(elt) + return values # Name classes -class AssignName(_base_nodes.NoChildrenNode, LookupMixIn, _base_nodes.ParentAssignNode): +class AssignName( + _base_nodes.NoChildrenNode, + _base_nodes.LookupMixIn, + _base_nodes.ParentAssignNode, +): """Variation of :class:`ast.Assign` representing assignment to a name. An :class:`AssignName` is the name of something that is assigned to. @@ -395,8 +417,6 @@ class AssignName(_base_nodes.NoChildrenNode, LookupMixIn, _base_nodes.ParentAssi _other_fields = ("name",) - infer_lhs: ClassVar[InferLHS[AssignName]] - def __init__( self, name: str, @@ -418,13 +438,58 @@ def __init__( parent=parent, ) - assigned_stmts: ClassVar[AssignedStmtsCall[AssignName]] + assigned_stmts = protocols.assend_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + """Infer an AssignName: need to inspect the RHS part of the + assign node. + """ + if isinstance(self.parent, AugAssign): + return self.parent.infer(context) + + stmts = list(self.assigned_stmts(context=context)) + return _infer_stmts(stmts, context) -class DelName(_base_nodes.NoChildrenNode, LookupMixIn, _base_nodes.ParentAssignNode): + @decorators.raise_if_nothing_inferred + def infer_lhs( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + """Infer a Name: use name lookup rules. + + Same implementation as Name._infer.""" + # pylint: disable=import-outside-toplevel + from astroid.constraint import get_constraints + from astroid.helpers import _higher_function_scope + + frame, stmts = self.lookup(self.name) + if not stmts: + # Try to see if the name is enclosed in a nested function + # and use the higher (first function) scope for searching. + parent_function = _higher_function_scope(self.scope()) + if parent_function: + _, stmts = parent_function.lookup(self.name) + + if not stmts: + raise NameInferenceError( + name=self.name, scope=self.scope(), context=context + ) + context = copy_context(context) + context.lookupname = self.name + context.constraints[self.name] = get_constraints(self, frame) + + return _infer_stmts(stmts, context, frame) + + +class DelName( + _base_nodes.NoChildrenNode, _base_nodes.LookupMixIn, _base_nodes.ParentAssignNode +): """Variation of :class:`ast.Delete` representing deletion of a name. A :class:`DelName` is the name of something that is deleted. @@ -461,7 +526,7 @@ def __init__( ) -class Name(_base_nodes.NoChildrenNode, LookupMixIn): +class Name(_base_nodes.LookupMixIn, _base_nodes.NoChildrenNode): """Class representing an :class:`ast.Name` node. 
A :class:`Name` node is something that is named, but not covered by @@ -506,11 +571,43 @@ def _get_name_nodes(self): for child_node in self.get_children(): yield from child_node._get_name_nodes() + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + """Infer a Name: use name lookup rules + + Same implementation as AssignName._infer_lhs.""" + # pylint: disable=import-outside-toplevel + from astroid.constraint import get_constraints + from astroid.helpers import _higher_function_scope + + frame, stmts = self.lookup(self.name) + if not stmts: + # Try to see if the name is enclosed in a nested function + # and use the higher (first function) scope for searching. + parent_function = _higher_function_scope(self.scope()) + if parent_function: + _, stmts = parent_function.lookup(self.name) + + if not stmts: + raise NameInferenceError( + name=self.name, scope=self.scope(), context=context + ) + context = copy_context(context) + context.lookupname = self.name + context.constraints[self.name] = get_constraints(self, frame) + + return _infer_stmts(stmts, context, frame) -DEPRECATED_ARGUMENT_DEFAULT = object() +DEPRECATED_ARGUMENT_DEFAULT = "DEPRECATED_ARGUMENT_DEFAULT" -class Arguments(_base_nodes.AssignTypeNode): + +class Arguments( + _base_nodes.AssignTypeNode +): # pylint: disable=too-many-instance-attributes """Class representing an :class:`ast.arguments` node. An :class:`Arguments` node represents that arguments in a @@ -609,7 +706,20 @@ class Arguments(_base_nodes.AssignTypeNode): kwargannotation: NodeNG | None """The type annotation for the variable length keyword arguments.""" - def __init__(self, vararg: str | None, kwarg: str | None, parent: NodeNG) -> None: + vararg_node: AssignName | None + """The node for variable length arguments""" + + kwarg_node: AssignName | None + """The node for variable keyword arguments""" + + def __init__( + self, + vararg: str | None, + kwarg: str | None, + parent: NodeNG, + vararg_node: AssignName | None = None, + kwarg_node: AssignName | None = None, + ) -> None: """Almost all attributes can be None for living objects where introspection failed.""" super().__init__( parent=parent, @@ -625,6 +735,9 @@ def __init__(self, vararg: str | None, kwarg: str | None, parent: NodeNG) -> Non self.kwarg = kwarg """The name of the variable length keyword arguments.""" + self.vararg_node = vararg_node + self.kwarg_node = kwarg_node + # pylint: disable=too-many-arguments def postinit( self, @@ -664,7 +777,7 @@ def postinit( type_comment_posonlyargs = [] self.type_comment_posonlyargs = type_comment_posonlyargs - assigned_stmts: ClassVar[AssignedStmtsCall[Arguments]] + assigned_stmts = protocols.arguments_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -685,8 +798,21 @@ def fromlineno(self) -> int: @cached_property def arguments(self): - """Get all the arguments for this node, including positional only and positional and keyword""" - return list(itertools.chain((self.posonlyargs or ()), self.args or ())) + """Get all the arguments for this node. 
This includes: + * Positional only arguments + * Positional arguments + * Keyword arguments + * Variable arguments (.e.g *args) + * Variable keyword arguments (e.g **kwargs) + """ + retval = list(itertools.chain((self.posonlyargs or ()), (self.args or ()))) + if self.vararg_node: + retval.append(self.vararg_node) + retval += self.kwonlyargs or () + if self.kwarg_node: + retval.append(self.kwarg_node) + + return retval def format_args(self, *, skippable_names: set[str] | None = None) -> str: """Get the arguments formatted as string. @@ -816,15 +942,22 @@ def default_value(self, argname): :raises NoDefault: If there is no default value defined for the given argument. """ - args = self.arguments + args = [ + arg for arg in self.arguments if arg.name not in [self.vararg, self.kwarg] + ] + + index = _find_arg(argname, self.kwonlyargs)[0] + if (index is not None) and (len(self.kw_defaults) > index): + if self.kw_defaults[index] is not None: + return self.kw_defaults[index] + raise NoDefault(func=self.parent, name=argname) + index = _find_arg(argname, args)[0] if index is not None: - idx = index - (len(args) - len(self.defaults)) + idx = index - (len(args) - len(self.defaults) - len(self.kw_defaults)) if idx >= 0: return self.defaults[idx] - index = _find_arg(argname, self.kwonlyargs)[0] - if index is not None and self.kw_defaults[index] is not None: - return self.kw_defaults[index] + raise NoDefault(func=self.parent, name=argname) def is_argument(self, name) -> bool: @@ -839,11 +972,7 @@ def is_argument(self, name) -> bool: return True if name == self.kwarg: return True - return ( - self.find_argname(name)[1] is not None - or self.kwonlyargs - and _find_arg(name, self.kwonlyargs)[1] is not None - ) + return self.find_argname(name)[1] is not None def find_argname(self, argname, rec=DEPRECATED_ARGUMENT_DEFAULT): """Get the index and :class:`AssignName` node for given name. @@ -854,14 +983,16 @@ def find_argname(self, argname, rec=DEPRECATED_ARGUMENT_DEFAULT): :returns: The index and node for the argument. 
:rtype: tuple(str or None, AssignName or None) """ - if rec is not DEPRECATED_ARGUMENT_DEFAULT: # pragma: no cover + if rec != DEPRECATED_ARGUMENT_DEFAULT: # pragma: no cover warnings.warn( "The rec argument will be removed in astroid 3.1.", DeprecationWarning, stacklevel=2, ) if self.arguments: - return _find_arg(argname, self.arguments) + index, argument = _find_arg(argname, self.arguments) + if argument: + return index, argument return None, None def get_children(self): @@ -895,6 +1026,17 @@ def get_children(self): if elt is not None: yield elt + @decorators.raise_if_nothing_inferred + def _infer( + self: nodes.Arguments, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, None]: + # pylint: disable-next=import-outside-toplevel + from astroid.protocols import _arguments_infer_argname + + if context is None or context.lookupname is None: + raise InferenceError(node=self, context=context) + return _arguments_infer_argname(self, context.lookupname, context) + def _find_arg(argname, args): for i, arg in enumerate(args): @@ -935,7 +1077,46 @@ def _format_args( return ", ".join(values) -class AssignAttr(_base_nodes.ParentAssignNode): +def _infer_attribute( + node: nodes.AssignAttr | nodes.Attribute, + context: InferenceContext | None = None, + **kwargs: Any, +) -> Generator[InferenceResult, None, InferenceErrorInfo]: + """Infer an AssignAttr/Attribute node by using getattr on the associated object.""" + # pylint: disable=import-outside-toplevel + from astroid.constraint import get_constraints + from astroid.nodes import ClassDef + + for owner in node.expr.infer(context): + if isinstance(owner, util.UninferableBase): + yield owner + continue + + context = copy_context(context) + old_boundnode = context.boundnode + try: + context.boundnode = owner + if isinstance(owner, (ClassDef, Instance)): + frame = owner if isinstance(owner, ClassDef) else owner._proxied + context.constraints[node.attrname] = get_constraints(node, frame=frame) + if node.attrname == "argv" and owner.name == "sys": + # sys.argv will never be inferable during static analysis + # It's value would be the args passed to the linter itself + yield util.Uninferable + else: + yield from owner.igetattr(node.attrname, context) + except ( + AttributeInferenceError, + InferenceError, + AttributeError, + ): + pass + finally: + context.boundnode = old_boundnode + return InferenceErrorInfo(node=node, context=context) + + +class AssignAttr(_base_nodes.LookupMixIn, _base_nodes.ParentAssignNode): """Variation of :class:`ast.Assign` representing assignment to an attribute. >>> import astroid @@ -948,14 +1129,11 @@ class AssignAttr(_base_nodes.ParentAssignNode): 'self.attribute' """ + expr: NodeNG + _astroid_fields = ("expr",) _other_fields = ("attrname",) - infer_lhs: ClassVar[InferLHS[AssignAttr]] - - expr: NodeNG - """What has the attribute that is being assigned to.""" - def __init__( self, attrname: str, @@ -980,7 +1158,7 @@ def __init__( def postinit(self, expr: NodeNG) -> None: self.expr = expr - assigned_stmts: ClassVar[AssignedStmtsCall[AssignAttr]] + assigned_stmts = protocols.assend_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. 
""" @@ -988,6 +1166,27 @@ def postinit(self, expr: NodeNG) -> None: def get_children(self): yield self.expr + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + """Infer an AssignAttr: need to inspect the RHS part of the + assign node. + """ + if isinstance(self.parent, AugAssign): + return self.parent.infer(context) + + stmts = list(self.assigned_stmts(context=context)) + return _infer_stmts(stmts, context) + + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def infer_lhs( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + return _infer_attribute(self, context, **kwargs) + class Assert(_base_nodes.Statement): """Class representing an :class:`ast.Assert` node. @@ -1053,7 +1252,7 @@ def postinit( self.value = value self.type_annotation = type_annotation - assigned_stmts: ClassVar[AssignedStmtsCall[Assign]] + assigned_stmts = protocols.assign_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -1067,6 +1266,9 @@ def get_children(self): def _assign_nodes_in_scope(self) -> list[nodes.Assign]: return [self, *self.value._assign_nodes_in_scope] + def _get_yield_nodes_skip_functions(self): + yield from self.value._get_yield_nodes_skip_functions() + def _get_yield_nodes_skip_lambdas(self): yield from self.value._get_yield_nodes_skip_lambdas() @@ -1109,7 +1311,7 @@ def postinit( self.value = value self.simple = simple - assigned_stmts: ClassVar[AssignedStmtsCall[AnnAssign]] + assigned_stmts = protocols.assign_annassigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -1122,7 +1324,9 @@ def get_children(self): yield self.value -class AugAssign(_base_nodes.AssignTypeNode, _base_nodes.Statement): +class AugAssign( + _base_nodes.AssignTypeNode, _base_nodes.OperatorNode, _base_nodes.Statement +): """Class representing an :class:`ast.AugAssign` node. An :class:`AugAssign` is an assignment paired with an operator. @@ -1170,16 +1374,11 @@ def postinit(self, target: Name | Attribute | Subscript, value: NodeNG) -> None: self.target = target self.value = value - assigned_stmts: ClassVar[AssignedStmtsCall[AugAssign]] + assigned_stmts = protocols.assign_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ - # This is set by inference.py - _infer_augassign: ClassVar[ - InferBinaryOperation[AugAssign, util.BadBinaryOperationMessage] - ] - def type_errors(self, context: InferenceContext | None = None): """Get a list of type errors which can occur during inference. 
@@ -1203,13 +1402,55 @@ def get_children(self): yield self.target yield self.value + def _get_yield_nodes_skip_functions(self): + """An AugAssign node can contain a Yield node in the value""" + yield from self.value._get_yield_nodes_skip_functions() + yield from super()._get_yield_nodes_skip_functions() + def _get_yield_nodes_skip_lambdas(self): """An AugAssign node can contain a Yield node in the value""" yield from self.value._get_yield_nodes_skip_lambdas() yield from super()._get_yield_nodes_skip_lambdas() + def _infer_augassign( + self, context: InferenceContext | None = None + ) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]: + """Inference logic for augmented binary operations.""" + context = context or InferenceContext() + + rhs_context = context.clone() + + lhs_iter = self.target.infer_lhs(context=context) + rhs_iter = self.value.infer(context=rhs_context) + + for lhs, rhs in itertools.product(lhs_iter, rhs_iter): + if any(isinstance(value, util.UninferableBase) for value in (rhs, lhs)): + # Don't know how to process this. + yield util.Uninferable + return + + try: + yield from self._infer_binary_operation( + left=lhs, + right=rhs, + binary_opnode=self, + context=context, + flow_factory=self._get_aug_flow, + ) + except _NonDeducibleTypeHierarchy: + yield util.Uninferable + + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self: nodes.AugAssign, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, None]: + return self._filter_operation_errors( + self._infer_augassign, context, util.BadBinaryOperationMessage + ) + -class BinOp(NodeNG): +class BinOp(_base_nodes.OperatorNode): """Class representing an :class:`ast.BinOp` node. A :class:`BinOp` node is an application of a binary operator. @@ -1254,9 +1495,6 @@ def postinit(self, left: NodeNG, right: NodeNG) -> None: self.left = left self.right = right - # This is set by inference.py - _infer_binop: ClassVar[InferBinaryOperation[BinOp, util.BadBinaryOperationMessage]] - def type_errors(self, context: InferenceContext | None = None): """Get a list of type errors which can occur during inference. @@ -1287,6 +1525,43 @@ def op_left_associative(self) -> bool: # 2**3**4 == 2**(3**4) return self.op != "**" + def _infer_binop( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, None]: + """Binary operation inference logic.""" + left = self.left + right = self.right + + # we use two separate contexts for evaluating lhs and rhs because + # 1. evaluating lhs may leave some undesired entries in context.path + # which may not let us infer right value of rhs + context = context or InferenceContext() + lhs_context = copy_context(context) + rhs_context = copy_context(context) + lhs_iter = left.infer(context=lhs_context) + rhs_iter = right.infer(context=rhs_context) + for lhs, rhs in itertools.product(lhs_iter, rhs_iter): + if any(isinstance(value, util.UninferableBase) for value in (rhs, lhs)): + # Don't know how to process this. 
+ yield util.Uninferable + return + + try: + yield from self._infer_binary_operation( + lhs, rhs, self, context, self._get_binop_flow + ) + except _NonDeducibleTypeHierarchy: + yield util.Uninferable + + @decorators.yes_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self: nodes.BinOp, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, None]: + return self._filter_operation_errors( + self._infer_binop, context, util.BadBinaryOperationMessage + ) + class BoolOp(NodeNG): """Class representing an :class:`ast.BoolOp` node. @@ -1356,6 +1631,60 @@ def get_children(self): def op_precedence(self): return OP_PRECEDENCE[self.op] + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self: nodes.BoolOp, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + """Infer a boolean operation (and / or / not). + + The function will calculate the boolean operation + for all pairs generated through inference for each component + node. + """ + values = self.values + if self.op == "or": + predicate = operator.truth + else: + predicate = operator.not_ + + try: + inferred_values = [value.infer(context=context) for value in values] + except InferenceError: + yield util.Uninferable + return None + + for pair in itertools.product(*inferred_values): + if any(isinstance(item, util.UninferableBase) for item in pair): + # Can't infer the final result, just yield Uninferable. + yield util.Uninferable + continue + + bool_values = [item.bool_value() for item in pair] + if any(isinstance(item, util.UninferableBase) for item in bool_values): + # Can't infer the final result, just yield Uninferable. + yield util.Uninferable + continue + + # Since the boolean operations are short circuited operations, + # this code yields the first value for which the predicate is True + # and if no value respected the predicate, then the last value will + # be returned (or Uninferable if there was no last value). + # This is conforming to the semantics of `and` and `or`: + # 1 and 0 -> 1 + # 0 and 1 -> 0 + # 1 or 0 -> 1 + # 0 or 1 -> 1 + value = util.Uninferable + for value, bool_value in zip(pair, bool_values): + if predicate(bool_value): + yield value + break + else: + yield value + + return InferenceErrorInfo(node=self, context=context) + class Break(_base_nodes.NoChildrenNode, _base_nodes.Statement): """Class representing an :class:`ast.Break` node. 
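
The operator nodes reworked above (AugAssign, BinOp, BoolOp) can be exercised end to end; a minimal usage sketch under the same assumption that this astroid build is importable.

import astroid

aug = astroid.extract_node("""
total = 1
total += 2
total  #@
""")
# The previous value of the target and the RHS go through the augmented binary flow:
print(next(aug.infer()))                                      # roughly Const.int(3)

print(next(astroid.extract_node("2 ** 3 ** 2").infer()))      # Const.int(512): '**' is right-associative
print(next(astroid.extract_node("0 or 'default'").infer()))   # Const.str('default')
print(next(astroid.extract_node("1 and []").infer()))         # the empty List node, the last operand evaluated
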
@@ -1413,6 +1742,64 @@ def get_children(self): yield from self.keywords + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo]: + """Infer a Call node by trying to guess what the function returns.""" + callcontext = copy_context(context) + callcontext.boundnode = None + if context is not None: + callcontext.extra_context = self._populate_context_lookup(context.clone()) + + for callee in self.func.infer(context): + if isinstance(callee, util.UninferableBase): + yield callee + continue + try: + if hasattr(callee, "infer_call_result"): + callcontext.callcontext = CallContext( + args=self.args, keywords=self.keywords, callee=callee + ) + yield from callee.infer_call_result( + caller=self, context=callcontext + ) + except InferenceError: + continue + return InferenceErrorInfo(node=self, context=context) + + def _populate_context_lookup(self, context: InferenceContext | None): + """Allows context to be saved for later for inference inside a function.""" + context_lookup: dict[InferenceResult, InferenceContext] = {} + if context is None: + return context_lookup + for arg in self.args: + if isinstance(arg, Starred): + context_lookup[arg.value] = context + else: + context_lookup[arg] = context + keywords = self.keywords if self.keywords is not None else [] + for keyword in keywords: + context_lookup[keyword.value] = context + return context_lookup + + +COMPARE_OPS: dict[str, Callable[[Any, Any], bool]] = { + "==": operator.eq, + "!=": operator.ne, + "<": operator.lt, + "<=": operator.le, + ">": operator.gt, + ">=": operator.ge, + "in": lambda a, b: a in b, + "not in": lambda a, b: a not in b, +} +UNINFERABLE_OPS = { + "is", + "is not", +} + class Compare(NodeNG): """Class representing an :class:`ast.Compare` node. @@ -1462,6 +1849,88 @@ def last_child(self): return self.ops[-1][1] # return self.left + # TODO: move to util? + @staticmethod + def _to_literal(node: SuccessfulInferenceResult) -> Any: + # Can raise SyntaxError or ValueError from ast.literal_eval + # Can raise AttributeError from node.as_string() as not all nodes have a visitor + # Is this the stupidest idea or the simplest idea? 
+ return ast.literal_eval(node.as_string()) + + def _do_compare( + self, + left_iter: Iterable[InferenceResult], + op: str, + right_iter: Iterable[InferenceResult], + ) -> bool | util.UninferableBase: + """ + If all possible combinations are either True or False, return that: + >>> _do_compare([1, 2], '<=', [3, 4]) + True + >>> _do_compare([1, 2], '==', [3, 4]) + False + + If any item is uninferable, or if some combinations are True and some + are False, return Uninferable: + >>> _do_compare([1, 3], '<=', [2, 4]) + util.Uninferable + """ + retval: bool | None = None + if op in UNINFERABLE_OPS: + return util.Uninferable + op_func = COMPARE_OPS[op] + + for left, right in itertools.product(left_iter, right_iter): + if isinstance(left, util.UninferableBase) or isinstance( + right, util.UninferableBase + ): + return util.Uninferable + + try: + left, right = self._to_literal(left), self._to_literal(right) + except (SyntaxError, ValueError, AttributeError): + return util.Uninferable + + try: + expr = op_func(left, right) + except TypeError as exc: + raise AstroidTypeError from exc + + if retval is None: + retval = expr + elif retval != expr: + return util.Uninferable + # (or both, but "True | False" is basically the same) + + assert retval is not None + return retval # it was all the same value + + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[nodes.Const | util.UninferableBase, None, None]: + """Chained comparison inference logic.""" + retval: bool | util.UninferableBase = True + + ops = self.ops + left_node = self.left + lhs = list(left_node.infer(context=context)) + # should we break early if first element is uninferable? + for op, right_node in ops: + # eagerly evaluate rhs so that values can be re-used as lhs + rhs = list(right_node.infer(context=context)) + try: + retval = self._do_compare(lhs, op, rhs) + except AstroidTypeError: + retval = util.Uninferable + break + if retval is not True: + break # short-circuit + lhs = rhs # continue + if retval is util.Uninferable: + yield retval # type: ignore[misc] + else: + yield Const(retval) + class Comprehension(NodeNG): """Class representing an :class:`ast.comprehension` node. @@ -1507,7 +1976,7 @@ def postinit( self.ifs = ifs self.is_async = is_async - assigned_stmts: ClassVar[AssignedStmtsCall[Comprehension]] + assigned_stmts = protocols.for_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -1604,8 +2073,8 @@ def __init__( Instance.__init__(self, None) - infer_unary_op: ClassVar[InferUnaryOp[Const]] - infer_binary_op: ClassVar[InferBinaryOp[Const]] + infer_unary_op = protocols.const_infer_unary_op + infer_binary_op = protocols.const_infer_binary_op def __getattr__(self, name): # This is needed because of Proxy's __getattr__ method. @@ -1693,6 +2162,11 @@ def bool_value(self, context: InferenceContext | None = None): """ return bool(self.value) + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator[Const]: + yield self + class Continue(_base_nodes.NoChildrenNode, _base_nodes.Statement): """Class representing an :class:`ast.Continue` node. @@ -1865,16 +2339,14 @@ def __init__( parent=parent, ) - def postinit( - self, items: list[tuple[SuccessfulInferenceResult, SuccessfulInferenceResult]] - ) -> None: + def postinit(self, items: list[tuple[InferenceResult, InferenceResult]]) -> None: """Do some setup after initialisation. 
:param items: The key-value pairs contained in the dictionary. """ self.items = items - infer_unary_op: ClassVar[InferUnaryOp[Dict]] + infer_unary_op = protocols.dict_infer_unary_op def pytype(self) -> Literal["builtins.dict"]: """Get the name of the type that this node represents. @@ -1926,13 +2398,10 @@ def getitem( :raises AstroidIndexError: If the given index does not exist in the dictionary. """ - # pylint: disable-next=import-outside-toplevel; circular import - from astroid.helpers import safe_infer - for key, value in self.items: # TODO(cpopa): no support for overriding yet, {1:2, **{1: 3}}. if isinstance(key, DictUnpack): - inferred_value = safe_infer(value, context) + inferred_value = util.safe_infer(value, context) if not isinstance(inferred_value, Dict): continue @@ -1958,6 +2427,72 @@ def bool_value(self, context: InferenceContext | None = None): """ return bool(self.items) + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator[nodes.Dict]: + if not any(isinstance(k, DictUnpack) for k, _ in self.items): + yield self + else: + items = self._infer_map(context) + new_seq = type(self)( + lineno=self.lineno, + col_offset=self.col_offset, + parent=self.parent, + end_lineno=self.end_lineno, + end_col_offset=self.end_col_offset, + ) + new_seq.postinit(list(items.items())) + yield new_seq + + @staticmethod + def _update_with_replacement( + lhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult], + rhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult], + ) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]: + """Delete nodes that equate to duplicate keys. + + Since an astroid node doesn't 'equal' another node with the same value, + this function uses the as_string method to make sure duplicate keys + don't get through + + Note that both the key and the value are astroid nodes + + Fixes issue with DictUnpack causing duplicate keys + in inferred Dict items + + :param lhs_dict: Dictionary to 'merge' nodes into + :param rhs_dict: Dictionary with nodes to pull from + :return : merged dictionary of nodes + """ + combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items()) + # Overwrite keys which have the same string values + string_map = {key.as_string(): (key, value) for key, value in combined_dict} + # Return to dictionary + return dict(string_map.values()) + + def _infer_map( + self, context: InferenceContext | None + ) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]: + """Infer all values based on Dict.items.""" + values: dict[SuccessfulInferenceResult, SuccessfulInferenceResult] = {} + for name, value in self.items: + if isinstance(name, DictUnpack): + double_starred = util.safe_infer(value, context) + if not double_starred: + raise InferenceError + if not isinstance(double_starred, Dict): + raise InferenceError(node=self, context=context) + unpack_items = double_starred._infer_map(context) + values = self._update_with_replacement(values, unpack_items) + else: + key = util.safe_infer(name, context=context) + safe_value = util.safe_infer(value, context=context) + if any(not elem for elem in (key, safe_value)): + raise InferenceError(node=self, context=context) + # safe_value is SuccessfulInferenceResult as bool(Uninferable) == False + values = self._update_with_replacement(values, {key: safe_value}) + return values + class Expr(_base_nodes.Statement): """Class representing an :class:`ast.Expr` node. 
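
Call results, chained comparisons and dict displays with ``**`` unpacking are the remaining inference paths added in the hunks above; a short usage sketch (the helper name ``answer`` is made up for the example).

import astroid

call = astroid.extract_node("""
def answer():
    return 42

answer()  #@
""")
print(next(call.infer()))    # Const.int(42), via infer_call_result on the inferred callee

print(next(astroid.extract_node("1 < 2 < 3").infer()))       # Const.bool(True)
print(next(astroid.extract_node("None is None").infer()))    # Uninferable: 'is' is in UNINFERABLE_OPS

merged = next(astroid.extract_node("{**{'a': 1, 'b': 2}, 'a': 3}").infer())
print({k.value: v.value for k, v in merged.items})            # {'a': 3, 'b': 2}: later keys win in _infer_map
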
@@ -1984,6 +2519,10 @@ def postinit(self, value: NodeNG) -> None: def get_children(self): yield self.value + def _get_yield_nodes_skip_functions(self): + if not self.value.is_function: + yield from self.value._get_yield_nodes_skip_functions() + def _get_yield_nodes_skip_lambdas(self): if not self.value.is_lambda: yield from self.value._get_yield_nodes_skip_lambdas() @@ -2014,6 +2553,21 @@ def __init__( def has_underlying_object(self) -> bool: return self.object is not None and self.object is not _EMPTY_OBJECT_MARKER + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, None]: + if not self.has_underlying_object(): + yield util.Uninferable + else: + try: + yield from AstroidManager().infer_ast_from_something( + self.object, context=context + ) + except AstroidError: + yield util.Uninferable + class ExceptHandler( _base_nodes.MultiLineBlockNode, _base_nodes.AssignTypeNode, _base_nodes.Statement @@ -2030,7 +2584,7 @@ class ExceptHandler( print("Error!") ''') >>> node - + >>> node.handlers [] """ @@ -2047,7 +2601,7 @@ class ExceptHandler( body: list[NodeNG] """The contents of the block.""" - assigned_stmts: ClassVar[AssignedStmtsCall[ExceptHandler]] + assigned_stmts = protocols.excepthandler_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -2145,7 +2699,7 @@ def postinit( self.orelse = orelse self.type_annotation = type_annotation - assigned_stmts: ClassVar[AssignedStmtsCall[For]] + assigned_stmts = protocols.for_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. 
""" @@ -2286,16 +2840,49 @@ def __init__( parent=parent, ) + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, + context: InferenceContext | None = None, + asname: bool = True, + **kwargs: Any, + ) -> Generator[InferenceResult, None, None]: + """Infer a ImportFrom node: return the imported module/object.""" + context = context or InferenceContext() + name = context.lookupname + if name is None: + raise InferenceError(node=self, context=context) + if asname: + try: + name = self.real_name(name) + except AttributeInferenceError as exc: + # See https://github.com/pylint-dev/pylint/issues/4692 + raise InferenceError(node=self, context=context) from exc + try: + module = self.do_import_module() + except AstroidBuildingError as exc: + raise InferenceError(node=self, context=context) from exc + + try: + context = copy_context(context) + context.lookupname = name + stmts = module.getattr(name, ignore_locals=module is self.root()) + return _infer_stmts(stmts, context) + except AttributeInferenceError as error: + raise InferenceError( + str(error), target=self, attribute=name, context=context + ) from error + class Attribute(NodeNG): """Class representing an :class:`ast.Attribute` node.""" + expr: NodeNG + _astroid_fields = ("expr",) _other_fields = ("attrname",) - expr: NodeNG - """The name that this node represents.""" - def __init__( self, attrname: str, @@ -2323,6 +2910,13 @@ def postinit(self, expr: NodeNG) -> None: def get_children(self): yield self.expr + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo]: + return _infer_attribute(self, context, **kwargs) + class Global(_base_nodes.NoChildrenNode, _base_nodes.Statement): """Class representing an :class:`ast.Global` node. @@ -2374,6 +2968,21 @@ def __init__( def _infer_name(self, frame, name): return name + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, None]: + if context is None or context.lookupname is None: + raise InferenceError(node=self, context=context) + try: + # pylint: disable-next=no-member + return _infer_stmts(self.root().getattr(context.lookupname), context) + except AttributeInferenceError as error: + raise InferenceError( + str(error), target=self, attribute=context.lookupname, context=context + ) from error + class If(_base_nodes.MultiLineWithElseBlockNode, _base_nodes.Statement): """Class representing an :class:`ast.If` node. @@ -2432,6 +3041,11 @@ def get_children(self): def has_elif_block(self): return len(self.orelse) == 1 and isinstance(self.orelse[0], If) + def _get_yield_nodes_skip_functions(self): + """An If node can contain a Yield node in the test""" + yield from self.test._get_yield_nodes_skip_functions() + yield from super()._get_yield_nodes_skip_functions() + def _get_yield_nodes_skip_lambdas(self): """An If node can contain a Yield node in the test""" yield from self.test._get_yield_nodes_skip_lambdas() @@ -2472,6 +3086,40 @@ def op_left_associative(self) -> Literal[False]: # `1 if True else (2 if False else 3)` return False + @decorators.raise_if_nothing_inferred + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, None]: + """Support IfExp inference. 
+ + If we can't infer the truthiness of the condition, we default + to inferring both branches. Otherwise, we infer either branch + depending on the condition. + """ + both_branches = False + # We use two separate contexts for evaluating lhs and rhs because + # evaluating lhs may leave some undesired entries in context.path + # which may not let us infer right value of rhs. + + context = context or InferenceContext() + lhs_context = copy_context(context) + rhs_context = copy_context(context) + try: + test = next(self.test.infer(context=context.clone())) + except (InferenceError, StopIteration): + both_branches = True + else: + if not isinstance(test, util.UninferableBase): + if test.bool_value(): + yield from self.body.infer(context=lhs_context) + else: + yield from self.orelse.infer(context=rhs_context) + else: + both_branches = True + if both_branches: + yield from self.body.infer(context=lhs_context) + yield from self.orelse.infer(context=rhs_context) + class Import(_base_nodes.ImportNode): """Class representing an :class:`ast.Import` node. @@ -2524,6 +3172,28 @@ def __init__( parent=parent, ) + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self, + context: InferenceContext | None = None, + asname: bool = True, + **kwargs: Any, + ) -> Generator[nodes.Module, None, None]: + """Infer an Import node: return the imported module/object.""" + context = context or InferenceContext() + name = context.lookupname + if name is None: + raise InferenceError(node=self, context=context) + + try: + if asname: + yield self.do_import_module(self.real_name(name)) + else: + yield self.do_import_module(name) + except AstroidBuildingError as exc: + raise InferenceError(node=self, context=context) from exc + class Keyword(NodeNG): """Class representing an :class:`ast.keyword` node. @@ -2617,13 +3287,13 @@ def __init__( parent=parent, ) - assigned_stmts: ClassVar[AssignedStmtsCall[List]] + assigned_stmts = protocols.sequence_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ - infer_unary_op: ClassVar[InferUnaryOp[List]] - infer_binary_op: ClassVar[InferBinaryOp[List]] + infer_unary_op = protocols.list_infer_unary_op + infer_binary_op = protocols.tl_infer_binary_op def pytype(self) -> Literal["builtins.list"]: """Get the name of the type that this node represents. @@ -2697,6 +3367,45 @@ def _infer_name(self, frame, name): return name +class ParamSpec(_base_nodes.AssignTypeNode): + """Class representing a :class:`ast.ParamSpec` node. + + >>> import astroid + >>> node = astroid.extract_node('type Alias[**P] = Callable[P, int]') + >>> node.type_params[0] + + """ + + _astroid_fields = ("name",) + + name: AssignName + + def __init__( + self, + lineno: int, + col_offset: int, + parent: NodeNG, + *, + end_lineno: int, + end_col_offset: int, + ) -> None: + super().__init__( + lineno=lineno, + col_offset=col_offset, + end_lineno=end_lineno, + end_col_offset=end_col_offset, + parent=parent, + ) + + def postinit(self, *, name: AssignName) -> None: + self.name = name + + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator[ParamSpec]: + yield self + + class Pass(_base_nodes.NoChildrenNode, _base_nodes.Statement): """Class representing an :class:`ast.Pass` node. 
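
Imports, exception-handler bindings and conditional expressions from the hunks above, as a brief usage sketch under the same assumptions.

import astroid

path_name = astroid.extract_node("""
from os import path
path  #@
""")
print(next(path_name.infer()))     # a Module node for os.path (posixpath on POSIX systems)

err = astroid.extract_node("""
try:
    pass
except ValueError as error:
    error  #@
""")
print(next(err.infer()))           # an exception Instance of builtins.ValueError

ternary = astroid.extract_node("'yes' if unknown_flag else 'no'")
print([const.value for const in ternary.inferred()])   # ['yes', 'no']: the test is uninferable, so both branches are yielded
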
@@ -2788,7 +3497,7 @@ class Set(BaseContainer): """ - infer_unary_op: ClassVar[InferUnaryOp[Set]] + infer_unary_op = protocols.set_infer_unary_op def pytype(self) -> Literal["builtins.set"]: """Get the name of the type that this node represents. @@ -2881,6 +3590,11 @@ def get_children(self): if self.step is not None: yield self.step + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator[Slice]: + yield self + class Starred(_base_nodes.ParentAssignNode): """Class representing an :class:`ast.Starred` node. @@ -2921,7 +3635,7 @@ def __init__( def postinit(self, value: NodeNG) -> None: self.value = value - assigned_stmts: ClassVar[AssignedStmtsCall[Starred]] + assigned_stmts = protocols.starred_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -2939,11 +3653,10 @@ class Subscript(NodeNG): """ + _SUBSCRIPT_SENTINEL = object() _astroid_fields = ("value", "slice") _other_fields = ("ctx",) - infer_lhs: ClassVar[InferLHS[Subscript]] - value: NodeNG """What is being indexed.""" @@ -2980,98 +3693,102 @@ def get_children(self): yield self.value yield self.slice + def _infer_subscript( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo | None]: + """Inference for subscripts. -class TryExcept(_base_nodes.MultiLineWithElseBlockNode, _base_nodes.Statement): - """Class representing an :class:`ast.TryExcept` node. - - >>> import astroid - >>> node = astroid.extract_node(''' - try: - do_something() - except Exception as error: - print("Error!") - ''') - >>> node - - """ - - _astroid_fields = ("body", "handlers", "orelse") - _multi_line_block_fields = ("body", "handlers", "orelse") - - body: list[NodeNG] - """The contents of the block to catch exceptions from.""" - - handlers: list[ExceptHandler] - """The exception handlers.""" - - orelse: list[NodeNG] - """The contents of the ``else`` block.""" - - def postinit( - self, - body: list[NodeNG], - handlers: list[ExceptHandler], - orelse: list[NodeNG], - ) -> None: - self.body = body - self.handlers = handlers - self.orelse = orelse - - def _infer_name(self, frame, name): - return name - - def block_range(self, lineno: int) -> tuple[int, int]: - """Get a range from the given line number to where this node ends. - - :param lineno: The line number to start the range at. - - :returns: The range of line numbers that this node belongs to, - starting at the given line number. + We're understanding if the index is a Const + or a slice, passing the result of inference + to the value's `getitem` method, which should + handle each supported index type accordingly. """ - last = None - for exhandler in self.handlers: - if exhandler.type and lineno == exhandler.type.fromlineno: - return lineno, lineno - if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno: - return lineno, exhandler.body[-1].tolineno - if last is None: - last = exhandler.body[0].fromlineno - 1 - return self._elsed_block_range(lineno, self.orelse, last) + from astroid import helpers # pylint: disable=import-outside-toplevel + + found_one = False + for value in self.value.infer(context): + if isinstance(value, util.UninferableBase): + yield util.Uninferable + return None + for index in self.slice.infer(context): + if isinstance(index, util.UninferableBase): + yield util.Uninferable + return None + + # Try to deduce the index value. 
+ index_value = self._SUBSCRIPT_SENTINEL + if value.__class__ == Instance: + index_value = index + elif index.__class__ == Instance: + instance_as_index = helpers.class_instance_as_index(index) + if instance_as_index: + index_value = instance_as_index + else: + index_value = index + + if index_value is self._SUBSCRIPT_SENTINEL: + raise InferenceError(node=self, context=context) - def get_children(self): - yield from self.body + try: + assigned = value.getitem(index_value, context) + except ( + AstroidTypeError, + AstroidIndexError, + AstroidValueError, + AttributeInferenceError, + AttributeError, + ) as exc: + raise InferenceError(node=self, context=context) from exc + + # Prevent inferring if the inferred subscript + # is the same as the original subscripted object. + if self is assigned or isinstance(assigned, util.UninferableBase): + yield util.Uninferable + return None + yield from assigned.infer(context) + found_one = True + + if found_one: + return InferenceErrorInfo(node=self, context=context) + return None - yield from self.handlers or () - yield from self.orelse or () + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer(self, context: InferenceContext | None = None, **kwargs: Any): + return self._infer_subscript(context, **kwargs) + @decorators.raise_if_nothing_inferred + def infer_lhs(self, context: InferenceContext | None = None, **kwargs: Any): + return self._infer_subscript(context, **kwargs) -class TryFinally(_base_nodes.MultiLineWithElseBlockNode, _base_nodes.Statement): - """Class representing an :class:`ast.TryFinally` node. + +class Try(_base_nodes.MultiLineWithElseBlockNode, _base_nodes.Statement): + """Class representing a :class:`ast.Try` node. >>> import astroid >>> node = astroid.extract_node(''' - try: - do_something() - except Exception as error: - print("Error!") - finally: - print("Cleanup!") - ''') + try: + do_something() + except Exception as error: + print("Error!") + finally: + print("Cleanup!") + ''') >>> node - + """ - _astroid_fields = ("body", "finalbody") - _multi_line_block_fields = ("body", "finalbody") + _astroid_fields = ("body", "handlers", "orelse", "finalbody") + _multi_line_block_fields = ("body", "handlers", "orelse", "finalbody") def __init__( self, - lineno: int | None = None, - col_offset: int | None = None, - parent: NodeNG | None = None, *, - end_lineno: int | None = None, - end_col_offset: int | None = None, + lineno: int, + col_offset: int, + end_lineno: int, + end_col_offset: int, + parent: NodeNG, ) -> None: """ :param lineno: The line that this node appears on in the source code. @@ -3086,8 +3803,14 @@ def __init__( :param end_col_offset: The end column this node appears on in the source code. Note: This is after the last symbol. """ - self.body: list[NodeNG | TryExcept] = [] - """The try-except that the finally is attached to.""" + self.body: list[NodeNG] = [] + """The contents of the block to catch exceptions from.""" + + self.handlers: list[ExceptHandler] = [] + """The exception handlers.""" + + self.orelse: list[NodeNG] = [] + """The contents of the ``else`` block.""" self.finalbody: list[NodeNG] = [] """The contents of the ``finally`` block.""" @@ -3102,40 +3825,58 @@ def __init__( def postinit( self, - body: list[NodeNG | TryExcept] | None = None, - finalbody: list[NodeNG] | None = None, + *, + body: list[NodeNG], + handlers: list[ExceptHandler], + orelse: list[NodeNG], + finalbody: list[NodeNG], ) -> None: """Do some setup after initialisation. 
- :param body: The try-except that the finally is attached to. + :param body: The contents of the block to catch exceptions from. + + :param handlers: The exception handlers. + + :param orelse: The contents of the ``else`` block. :param finalbody: The contents of the ``finally`` block. """ - if body is not None: - self.body = body - if finalbody is not None: - self.finalbody = finalbody - - def block_range(self, lineno: int) -> tuple[int, int]: - """Get a range from the given line number to where this node ends. + self.body = body + self.handlers = handlers + self.orelse = orelse + self.finalbody = finalbody - :param lineno: The line number to start the range at. + def _infer_name(self, frame, name): + return name - :returns: The range of line numbers that this node belongs to, - starting at the given line number. - """ - child = self.body[0] - # py2.5 try: except: finally: - if ( - isinstance(child, TryExcept) - and child.fromlineno == self.fromlineno - and child.tolineno >= lineno > self.fromlineno - ): - return child.block_range(lineno) - return self._elsed_block_range(lineno, self.finalbody) + def block_range(self, lineno: int) -> tuple[int, int]: + """Get a range from a given line number to where this node ends.""" + if lineno == self.fromlineno: + return lineno, lineno + if self.body and self.body[0].fromlineno <= lineno <= self.body[-1].tolineno: + # Inside try body - return from lineno till end of try body + return lineno, self.body[-1].tolineno + for exhandler in self.handlers: + if exhandler.type and lineno == exhandler.type.fromlineno: + return lineno, lineno + if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno: + return lineno, exhandler.body[-1].tolineno + if self.orelse: + if self.orelse[0].fromlineno - 1 == lineno: + return lineno, lineno + if self.orelse[0].fromlineno <= lineno <= self.orelse[-1].tolineno: + return lineno, self.orelse[-1].tolineno + if self.finalbody: + if self.finalbody[0].fromlineno - 1 == lineno: + return lineno, lineno + if self.finalbody[0].fromlineno <= lineno <= self.finalbody[-1].tolineno: + return lineno, self.finalbody[-1].tolineno + return lineno, self.tolineno def get_children(self): yield from self.body + yield from self.handlers + yield from self.orelse yield from self.finalbody @@ -3287,13 +4028,13 @@ def __init__( parent=parent, ) - assigned_stmts: ClassVar[AssignedStmtsCall[Tuple]] + assigned_stmts = protocols.sequence_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ - infer_unary_op: ClassVar[InferUnaryOp[Tuple]] - infer_binary_op: ClassVar[InferBinaryOp[Tuple]] + infer_unary_op = protocols.tuple_infer_unary_op + infer_binary_op = protocols.tl_infer_binary_op def pytype(self) -> Literal["builtins.tuple"]: """Get the name of the type that this node represents. @@ -3311,7 +4052,156 @@ def getitem(self, index, context: InferenceContext | None = None): return _container_getitem(self, self.elts, index, context=context) -class UnaryOp(NodeNG): +class TypeAlias(_base_nodes.AssignTypeNode, _base_nodes.Statement): + """Class representing a :class:`ast.TypeAlias` node. 
+ + >>> import astroid + >>> node = astroid.extract_node('type Point = tuple[float, float]') + >>> node + + """ + + _astroid_fields = ("name", "type_params", "value") + + name: AssignName + type_params: list[TypeVar | ParamSpec | TypeVarTuple] + value: NodeNG + + def __init__( + self, + lineno: int, + col_offset: int, + parent: NodeNG, + *, + end_lineno: int, + end_col_offset: int, + ) -> None: + super().__init__( + lineno=lineno, + col_offset=col_offset, + end_lineno=end_lineno, + end_col_offset=end_col_offset, + parent=parent, + ) + + def postinit( + self, + *, + name: AssignName, + type_params: list[TypeVar | ParamSpec | TypeVarTuple], + value: NodeNG, + ) -> None: + self.name = name + self.type_params = type_params + self.value = value + + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator[TypeAlias]: + yield self + + assigned_stmts: ClassVar[ + Callable[ + [ + TypeAlias, + AssignName, + InferenceContext | None, + None, + ], + Generator[NodeNG, None, None], + ] + ] = protocols.assign_assigned_stmts + + +class TypeVar(_base_nodes.AssignTypeNode): + """Class representing a :class:`ast.TypeVar` node. + + >>> import astroid + >>> node = astroid.extract_node('type Point[T] = tuple[float, float]') + >>> node.type_params[0] + + """ + + _astroid_fields = ("name", "bound") + + name: AssignName + bound: NodeNG | None + + def __init__( + self, + lineno: int, + col_offset: int, + parent: NodeNG, + *, + end_lineno: int, + end_col_offset: int, + ) -> None: + super().__init__( + lineno=lineno, + col_offset=col_offset, + end_lineno=end_lineno, + end_col_offset=end_col_offset, + parent=parent, + ) + + def postinit(self, *, name: AssignName, bound: NodeNG | None) -> None: + self.name = name + self.bound = bound + + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator[TypeVar]: + yield self + + +class TypeVarTuple(_base_nodes.AssignTypeNode): + """Class representing a :class:`ast.TypeVarTuple` node. + + >>> import astroid + >>> node = astroid.extract_node('type Alias[*Ts] = tuple[*Ts]') + >>> node.type_params[0] + + """ + + _astroid_fields = ("name",) + + name: AssignName + + def __init__( + self, + lineno: int, + col_offset: int, + parent: NodeNG, + *, + end_lineno: int, + end_col_offset: int, + ) -> None: + super().__init__( + lineno=lineno, + col_offset=col_offset, + end_lineno=end_lineno, + end_col_offset=end_col_offset, + parent=parent, + ) + + def postinit(self, *, name: AssignName) -> None: + self.name = name + + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Iterator[TypeVarTuple]: + yield self + + +UNARY_OP_METHOD = { + "+": "__pos__", + "-": "__neg__", + "~": "__invert__", + "not": None, # XXX not '__nonzero__' +} + + +class UnaryOp(_base_nodes.OperatorNode): """Class representing an :class:`ast.UnaryOp` node. >>> import astroid @@ -3350,19 +4240,14 @@ def __init__( def postinit(self, operand: NodeNG) -> None: self.operand = operand - # This is set by inference.py - _infer_unaryop: ClassVar[ - InferBinaryOperation[UnaryOp, util.BadUnaryOperationMessage] - ] - def type_errors(self, context: InferenceContext | None = None): """Get a list of type errors which can occur during inference. - Each TypeError is represented by a :class:`BadBinaryOperationMessage`, + Each TypeError is represented by a :class:`BadUnaryOperationMessage`, which holds the original exception. :returns: The list of possible type errors. 
- :rtype: list(BadBinaryOperationMessage) + :rtype: list(BadUnaryOperationMessage) """ try: results = self._infer_unaryop(context=context) @@ -3383,6 +4268,81 @@ def op_precedence(self): return super().op_precedence() + def _infer_unaryop( + self: nodes.UnaryOp, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[ + InferenceResult | util.BadUnaryOperationMessage, None, InferenceErrorInfo + ]: + """Infer what an UnaryOp should return when evaluated.""" + from astroid.nodes import ClassDef # pylint: disable=import-outside-toplevel + + for operand in self.operand.infer(context): + try: + yield operand.infer_unary_op(self.op) + except TypeError as exc: + # The operand doesn't support this operation. + yield util.BadUnaryOperationMessage(operand, self.op, exc) + except AttributeError as exc: + meth = UNARY_OP_METHOD[self.op] + if meth is None: + # `not node`. Determine node's boolean + # value and negate its result, unless it is + # Uninferable, which will be returned as is. + bool_value = operand.bool_value() + if not isinstance(bool_value, util.UninferableBase): + yield const_factory(not bool_value) + else: + yield util.Uninferable + else: + if not isinstance(operand, (Instance, ClassDef)): + # The operation was used on something which + # doesn't support it. + yield util.BadUnaryOperationMessage(operand, self.op, exc) + continue + + try: + try: + methods = dunder_lookup.lookup(operand, meth) + except AttributeInferenceError: + yield util.BadUnaryOperationMessage(operand, self.op, exc) + continue + + meth = methods[0] + inferred = next(meth.infer(context=context), None) + if ( + isinstance(inferred, util.UninferableBase) + or not inferred.callable() + ): + continue + + context = copy_context(context) + context.boundnode = operand + context.callcontext = CallContext(args=[], callee=inferred) + + call_results = inferred.infer_call_result(self, context=context) + result = next(call_results, None) + if result is None: + # Failed to infer, return the same type. + yield operand + else: + yield result + except AttributeInferenceError as inner_exc: + # The unary operation special method was not found. + yield util.BadUnaryOperationMessage(operand, self.op, inner_exc) + except InferenceError: + yield util.Uninferable + + @decorators.raise_if_nothing_inferred + @decorators.path_wrapper + def _infer( + self: nodes.UnaryOp, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[InferenceResult, None, InferenceErrorInfo]: + """Infer what an UnaryOp should return when evaluated.""" + yield from self._filter_operation_errors( + self._infer_unaryop, context, util.BadUnaryOperationMessage + ) + return InferenceErrorInfo(node=self, context=context) + class While(_base_nodes.MultiLineWithElseBlockNode, _base_nodes.Statement): """Class representing an :class:`ast.While` node. 
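
A few usage sketches for the nodes reworked above: subscripts, the merged Try node, the new PEP 695 type-parameter nodes (parsing them requires running on Python 3.12+), and unary operations; names such as ``do_something`` are placeholders.

import astroid

print(next(astroid.extract_node("('a', 'b', 'c')[1]").infer()))   # Const.str('b')
sliced = next(astroid.extract_node("[1, 2, 3][:2]").infer())
print([elt.value for elt in sliced.elts])                          # [1, 2]

tried = astroid.extract_node("""
try:
    do_something()
except ValueError as error:
    print(error)
else:
    print("ok")
finally:
    print("cleanup")
""")
# A single Try node now carries all four blocks:
print(len(tried.body), len(tried.handlers), len(tried.orelse), len(tried.finalbody))   # 1 1 1 1

# Python 3.12+ only: the 'type' statement produces the new nodes.
alias = astroid.extract_node("type Pairs[T, *Ts, **P] = tuple[T, *Ts]")
print([type(param).__name__ for param in alias.type_params])   # ['TypeVar', 'TypeVarTuple', 'ParamSpec']

print(next(astroid.extract_node("-3").infer()))              # Const.int(-3)
print(next(astroid.extract_node("not []").infer()))          # Const.bool(True)
print(len(astroid.extract_node("-'text'").type_errors()))    # 1: a BadUnaryOperationMessage for the str operand
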
@@ -3442,6 +4402,11 @@ def get_children(self): yield from self.body yield from self.orelse + def _get_yield_nodes_skip_functions(self): + """A While node can contain a Yield node in the test""" + yield from self.test._get_yield_nodes_skip_functions() + yield from super()._get_yield_nodes_skip_functions() + def _get_yield_nodes_skip_lambdas(self): """A While node can contain a Yield node in the test""" yield from self.test._get_yield_nodes_skip_lambdas() @@ -3526,7 +4491,7 @@ def postinit( self.body = body self.type_annotation = type_annotation - assigned_stmts: ClassVar[AssignedStmtsCall[With]] + assigned_stmts = protocols.with_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -3577,6 +4542,9 @@ def get_children(self): if self.value is not None: yield self.value + def _get_yield_nodes_skip_functions(self): + yield self + def _get_yield_nodes_skip_lambdas(self): yield self @@ -3799,7 +4767,7 @@ def postinit(self, target: NodeNG, value: NodeNG) -> None: self.target = target self.value = value - assigned_stmts: ClassVar[AssignedStmtsCall[NamedExpr]] + assigned_stmts = protocols.named_expr_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -3911,11 +4879,13 @@ class EvaluatedObject(NodeNG): _astroid_fields = ("original",) _other_fields = ("value",) - def __init__(self, original: NodeNG, value: NodeNG | util.UninferableBase) -> None: - self.original: NodeNG = original + def __init__( + self, original: SuccessfulInferenceResult, value: InferenceResult + ) -> None: + self.original: SuccessfulInferenceResult = original """The original node that has already been evaluated""" - self.value: NodeNG | util.UninferableBase = value + self.value: InferenceResult = value """The inferred value""" super().__init__( @@ -4198,17 +5168,7 @@ def postinit( self.patterns = patterns self.rest = rest - assigned_stmts: ClassVar[ - Callable[ - [ - MatchMapping, - AssignName, - InferenceContext | None, - None, - ], - Generator[NodeNG, None, None], - ] - ] + assigned_stmts = protocols.match_mapping_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -4305,17 +5265,7 @@ def __init__( def postinit(self, *, name: AssignName | None) -> None: self.name = name - assigned_stmts: ClassVar[ - Callable[ - [ - MatchStar, - AssignName, - InferenceContext | None, - None, - ], - Generator[NodeNG, None, None], - ] - ] + assigned_stmts = protocols.match_star_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ @@ -4376,17 +5326,7 @@ def postinit( self.pattern = pattern self.name = name - assigned_stmts: ClassVar[ - Callable[ - [ - MatchAs, - AssignName, - InferenceContext | None, - None, - ], - Generator[NodeNG, None, None], - ] - ] + assigned_stmts = protocols.match_as_assigned_stmts """Returns the assigned statement (non inferred) according to the assignment type. See astroid/protocols.py for actual implementation. """ diff --git a/astroid/nodes/node_ng.py b/astroid/nodes/node_ng.py index 08860e3621..a86cbb1044 100644 --- a/astroid/nodes/node_ng.py +++ b/astroid/nodes/node_ng.py @@ -138,18 +138,13 @@ def infer( :returns: The inferred values. 
:rtype: iterable """ - if context is not None: + if context is None: + context = InferenceContext() + else: context = context.extra_context.get(self, context) if self._explicit_inference is not None: # explicit_inference is not bound, give it self explicitly try: - if context is None: - yield from self._explicit_inference( - self, # type: ignore[arg-type] - context, - **kwargs, - ) - return for result in self._explicit_inference( self, # type: ignore[arg-type] context, @@ -161,11 +156,6 @@ def infer( except UseInferenceDefault: pass - if not context: - # nodes_inferred? - yield from self._infer(context=context, **kwargs) - return - key = (self, context.lookupname, context.callcontext, context.boundnode) if key in context.inferred: yield from context.inferred[key] @@ -175,7 +165,7 @@ def infer( # Limit inference amount to help with performance issues with # exponentially exploding possible results. - limit = AstroidManager.max_inferable_values + limit = AstroidManager().max_inferable_values for i, result in enumerate(self._infer(context=context, **kwargs)): if i >= limit or (context.nodes_inferred > context.max_inferred): results.append(util.Uninferable) @@ -243,7 +233,7 @@ def __repr__(self) -> str: "id": id(self), } - def accept(self, visitor): + def accept(self, visitor: AsStringVisitor) -> str: """Visit this node using the given visitor.""" func = getattr(visitor, "visit_" + self.__class__.__name__.lower()) return func(self) @@ -342,9 +332,12 @@ def root(self) -> nodes.Module: :returns: The root node. """ - if self.parent: - return self.parent.root() - return self # type: ignore[return-value] # Only 'Module' does not have a parent node. + if not (parent := self.parent): + return self # type: ignore[return-value] # Only 'Module' does not have a parent node. + + while parent.parent: + parent = parent.parent + return parent # type: ignore[return-value] # Only 'Module' does not have a parent node. def child_sequence(self, child): """Search for the sequence that contains this child. 
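
The rewritten NodeNG.infer() always works with an InferenceContext, caches results per (node, lookupname, callcontext, boundnode) key and truncates runaway inference at the manager's max_inferable_values, while root() now climbs parents iteratively. A small sketch; the walrus example also exercises named_expr_assigned_stmts from the previous hunk.

import astroid
from astroid.manager import AstroidManager

walrus = astroid.extract_node("""
if (count := 10) > 5:
    count  #@
""")
print(next(walrus.infer()))           # Const.int(10), the value bound by the NamedExpr
print(type(walrus.root()).__name__)   # Module, reached without recursion

# The truncation limit consulted by infer() lives on the (singleton) manager instance:
print(AstroidManager().max_inferable_values)
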
@@ -566,11 +559,14 @@ def _get_name_nodes(self): def _get_return_nodes_skip_functions(self): yield from () + def _get_yield_nodes_skip_functions(self): + yield from () + def _get_yield_nodes_skip_lambdas(self): yield from () def _infer_name(self, frame, name): - # overridden for ImportFrom, Import, Global, TryExcept, TryStar and Arguments + # overridden for ImportFrom, Import, Global, Try, TryStar and Arguments pass def _infer( diff --git a/astroid/nodes/scoped_nodes/mixin.py b/astroid/nodes/scoped_nodes/mixin.py index fa6aad412e..78608fe189 100644 --- a/astroid/nodes/scoped_nodes/mixin.py +++ b/astroid/nodes/scoped_nodes/mixin.py @@ -8,8 +8,9 @@ from typing import TYPE_CHECKING, TypeVar, overload +from astroid.exceptions import ParentMissingError from astroid.filter_statements import _filter_stmts -from astroid.nodes import node_classes, scoped_nodes +from astroid.nodes import _base_nodes, scoped_nodes from astroid.nodes.scoped_nodes.utils import builtin_lookup from astroid.typing import InferenceResult, SuccessfulInferenceResult @@ -19,15 +20,14 @@ _T = TypeVar("_T") -class LocalsDictNodeNG(node_classes.LookupMixIn): +class LocalsDictNodeNG(_base_nodes.LookupMixIn): """this class provides locals handling common to Module, FunctionDef and ClassDef nodes, including a dict like interface for direct access to locals information """ # attributes below are set by the builder module or by raw factories - - locals: dict[str, list[InferenceResult]] = {} + locals: dict[str, list[InferenceResult]] """A map of the name of a local variable to the node defining the local.""" def qname(self) -> str: @@ -39,9 +39,12 @@ def qname(self) -> str: :rtype: str """ # pylint: disable=no-member; github.com/pylint-dev/astroid/issues/278 - if self.parent is None or isinstance(self.parent, node_classes.Unknown): + if self.parent is None: + return self.name + try: + return f"{self.parent.frame().qname()}.{self.name}" + except ParentMissingError: return self.name - return f"{self.parent.frame().qname()}.{self.name}" def scope(self: _T) -> _T: """The first parent node defining a new scope. @@ -52,7 +55,7 @@ def scope(self: _T) -> _T: return self def scope_lookup( - self, node: node_classes.LookupMixIn, name: str, offset: int = 0 + self, node: _base_nodes.LookupMixIn, name: str, offset: int = 0 ) -> tuple[LocalsDictNodeNG, list[nodes.NodeNG]]: """Lookup where the given variable is assigned. 
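
qname() builds a dotted path from the enclosing frames and, with the change above, falls back to the bare name when a frame has no parent; a usage sketch with an invented module name.

import astroid

module = astroid.parse(
    """
class Outer:
    def method(self):
        pass
""",
    module_name="mypkg.mymod",
)
outer = module.body[0]
print(outer.qname())                       # 'mypkg.mymod.Outer'
print(outer.locals["method"][0].qname())   # 'mypkg.mymod.Outer.method'
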
@@ -70,7 +73,7 @@ def scope_lookup( raise NotImplementedError def _scope_lookup( - self, node: node_classes.LookupMixIn, name: str, offset: int = 0 + self, node: _base_nodes.LookupMixIn, name: str, offset: int = 0 ) -> tuple[LocalsDictNodeNG, list[nodes.NodeNG]]: """XXX method for interfacing the scope lookup""" try: diff --git a/astroid/nodes/scoped_nodes/scoped_nodes.py b/astroid/nodes/scoped_nodes/scoped_nodes.py index 0ae2141741..21bad2fecc 100644 --- a/astroid/nodes/scoped_nodes/scoped_nodes.py +++ b/astroid/nodes/scoped_nodes/scoped_nodes.py @@ -16,9 +16,9 @@ import warnings from collections.abc import Generator, Iterable, Iterator, Sequence from functools import cached_property, lru_cache -from typing import TYPE_CHECKING, ClassVar, Literal, NoReturn, TypeVar +from typing import TYPE_CHECKING, Any, ClassVar, Literal, NoReturn, TypeVar -from astroid import bases, util +from astroid import bases, protocols, util from astroid.const import IS_PYPY, PY38, PY39_PLUS, PYPY_7_3_11_PLUS from astroid.context import ( CallContext, @@ -34,6 +34,7 @@ InconsistentMroError, InferenceError, MroError, + ParentMissingError, StatementMissing, TooManyLevelsError, ) @@ -44,10 +45,16 @@ from astroid.nodes.scoped_nodes.mixin import ComprehensionScope, LocalsDictNodeNG from astroid.nodes.scoped_nodes.utils import builtin_lookup from astroid.nodes.utils import Position -from astroid.typing import InferBinaryOp, InferenceResult, SuccessfulInferenceResult +from astroid.typing import ( + InferBinaryOp, + InferenceErrorInfo, + InferenceResult, + SuccessfulInferenceResult, +) if TYPE_CHECKING: - from astroid import nodes + from astroid import nodes, objects + from astroid.nodes._base_nodes import LookupMixIn ITER_METHODS = ("__iter__", "__getitem__") @@ -134,10 +141,10 @@ def clean_typing_generic_mro(sequences: list[list[ClassDef]]) -> None: def clean_duplicates_mro( - sequences: Iterable[Iterable[ClassDef]], + sequences: list[list[ClassDef]], cls: ClassDef, context: InferenceContext | None, -) -> Iterable[Iterable[ClassDef]]: +) -> list[list[ClassDef]]: for sequence in sequences: seen = set() for node in sequence: @@ -196,7 +203,13 @@ class Module(LocalsDictNodeNG): """The names of special attributes that this module has.""" # names of module attributes available through the global scope - scope_attrs = {"__name__", "__doc__", "__file__", "__path__", "__package__"} + scope_attrs: ClassVar[set[str]] = { + "__name__", + "__doc__", + "__file__", + "__path__", + "__package__", + } """The names of module attributes available through the global scope.""" _other_fields = ( @@ -235,7 +248,7 @@ def __init__( self.pure_python = pure_python """Whether the ast was built from source.""" - self.globals: dict[str, list[SuccessfulInferenceResult]] + self.globals: dict[str, list[InferenceResult]] """A map of the name of a global variable to the node defining the global.""" self.locals = self.globals = {} @@ -285,7 +298,7 @@ def block_range(self, lineno: int) -> tuple[int, int]: return self.fromlineno, self.tolineno def scope_lookup( - self, node: node_classes.LookupMixIn, name: str, offset: int = 0 + self, node: LookupMixIn, name: str, offset: int = 0 ) -> tuple[LocalsDictNodeNG, list[node_classes.NodeNG]]: """Lookup where the given variable is assigned. @@ -445,7 +458,11 @@ def import_module( # skip here if relative_only: raise - return AstroidManager().ast_from_module_name(modname) + # Don't repeat the same operation, e.g. for missing modules + # like "_winapi" or "nt" on POSIX systems. 
+ if modname == absmodname: + raise + return AstroidManager().ast_from_module_name(modname, use_cache=use_cache) def relative_to_absolute_name(self, modname: str, level: int | None) -> str: """Get the absolute module name for a relative import. @@ -574,6 +591,11 @@ def frame(self: _T, *, future: Literal[None, True] = None) -> _T: """ return self + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[Module, None, None]: + yield self + class GeneratorExp(ComprehensionScope): """Class representing an :class:`ast.GeneratorExp` node. @@ -872,7 +894,7 @@ def type(self) -> Literal["method", "function"]: :returns: 'method' if this is a method, 'function' otherwise. """ if self.args.arguments and self.args.arguments[0].name == "self": - if isinstance(self.parent.scope(), ClassDef): + if self.parent and isinstance(self.parent.scope(), ClassDef): return "method" return "function" @@ -941,11 +963,7 @@ def argnames(self) -> list[str]: names = [elt.name for elt in self.args.arguments] else: names = [] - if self.args.vararg: - names.append(self.args.vararg) - names += [elt.name for elt in self.args.kwonlyargs] - if self.args.kwarg: - names.append(self.args.kwarg) + return names def infer_call_result( @@ -957,7 +975,7 @@ def infer_call_result( return self.body.infer(context) def scope_lookup( - self, node: node_classes.LookupMixIn, name: str, offset: int = 0 + self, node: LookupMixIn, name: str, offset: int = 0 ) -> tuple[LocalsDictNodeNG, list[NodeNG]]: """Lookup where the given names is assigned. @@ -975,6 +993,8 @@ def scope_lookup( if (self.args.defaults and node in self.args.defaults) or ( self.args.kw_defaults and node in self.args.kw_defaults ): + if not self.parent: + raise ParentMissingError(target=self) frame = self.parent.frame() # line offset to avoid that def func(f=func) resolve the default # value to the defined function @@ -1021,6 +1041,15 @@ def getattr( return found_attrs raise AttributeInferenceError(target=self, attribute=name) + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[Lambda, None, None]: + yield self + + def _get_yield_nodes_skip_functions(self): + """A Lambda node can contain a Yield node in the body.""" + yield from self.body._get_yield_nodes_skip_functions() + class FunctionDef( _base_nodes.MultiLineBlockNode, @@ -1039,7 +1068,14 @@ class FunctionDef( """ - _astroid_fields = ("decorators", "args", "returns", "doc_node", "body") + _astroid_fields = ( + "decorators", + "args", + "returns", + "type_params", + "doc_node", + "body", + ) _multi_line_block_fields = ("body",) returns = None @@ -1083,8 +1119,6 @@ class FunctionDef( name = "" - is_lambda = True - special_attributes = FunctionModel() """The names of special attributes that this function has.""" @@ -1107,6 +1141,11 @@ def __init__( self.body: list[NodeNG] = [] """The contents of the function body.""" + self.type_params: list[ + nodes.TypeVar | nodes.ParamSpec | nodes.TypeVarTuple + ] = [] + """PEP 695 (Python 3.12+) type params, e.g. first 'T' in def func[T]() -> T: ...""" + self.instance_attrs: dict[str, list[NodeNG]] = {} super().__init__( @@ -1131,6 +1170,8 @@ def postinit( *, position: Position | None = None, doc_node: Const | None = None, + type_params: list[nodes.TypeVar | nodes.ParamSpec | nodes.TypeVarTuple] + | None = None, ): """Do some setup after initialisation. @@ -1148,6 +1189,8 @@ def postinit( Position of function keyword(s) and name. :param doc_node: The doc node associated with this node. 
+ :param type_params: + The type_params associated with this node. """ self.args = args self.body = body @@ -1157,6 +1200,7 @@ def postinit( self.type_comment_args = type_comment_args self.position = position self.doc_node = doc_node + self.type_params = type_params or [] @cached_property def extra_decorators(self) -> list[node_classes.Call]: @@ -1167,8 +1211,7 @@ def extra_decorators(self) -> list[node_classes.Call]: The property will return all the callables that are used for decoration. """ - frame = self.parent.frame() - if not isinstance(frame, ClassDef): + if not self.parent or not isinstance(frame := self.parent.frame(), ClassDef): return [] decorators: list[node_classes.Call] = [] @@ -1233,11 +1276,7 @@ def argnames(self) -> list[str]: names = [elt.name for elt in self.args.arguments] else: names = [] - if self.args.vararg: - names.append(self.args.vararg) - names += [elt.name for elt in self.args.kwonlyargs] - if self.args.kwarg: - names.append(self.args.kwarg) + return names def getattr( @@ -1265,6 +1304,9 @@ def type(self) -> str: # pylint: disable=too-many-return-statements # noqa: C90 if decorator.func.name in BUILTIN_DESCRIPTORS: return decorator.func.name + if not self.parent: + raise ParentMissingError(target=self) + frame = self.parent.frame() type_name = "function" if isinstance(frame, ClassDef): @@ -1379,7 +1421,11 @@ def is_method(self) -> bool: """ # check we are defined in a ClassDef, because this is usually expected # (e.g. pylint...) when is_method() return True - return self.type != "function" and isinstance(self.parent.frame(), ClassDef) + return ( + self.type != "function" + and self.parent is not None + and isinstance(self.parent.frame(), ClassDef) + ) def decoratornames(self, context: InferenceContext | None = None) -> set[str]: """Get the qualified names of each of the decorators on this function. @@ -1449,7 +1495,50 @@ def is_generator(self) -> bool: :returns: Whether this is a generator function. """ - return bool(next(self._get_yield_nodes_skip_lambdas(), False)) + yields_without_lambdas = set(self._get_yield_nodes_skip_lambdas()) + yields_without_functions = set(self._get_yield_nodes_skip_functions()) + # Want an intersecting member that is neither in a lambda nor a function + return bool(yields_without_lambdas & yields_without_functions) + + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[objects.Property | FunctionDef, None, InferenceErrorInfo]: + from astroid import objects # pylint: disable=import-outside-toplevel + + if not self.decorators or not bases._is_property(self): + yield self + return InferenceErrorInfo(node=self, context=context) + + # When inferring a property, we instantiate a new `objects.Property` object, + # which in turn, because it inherits from `FunctionDef`, sets itself in the locals + # of the wrapping frame. This means that every time we infer a property, the locals + # are mutated with a new instance of the property. To avoid this, we detect this + # scenario and avoid passing the `parent` argument to the constructor. 
+ if not self.parent: + raise ParentMissingError(target=self) + parent_frame = self.parent.frame() + property_already_in_parent_locals = self.name in parent_frame.locals and any( + isinstance(val, objects.Property) for val in parent_frame.locals[self.name] + ) + # We also don't want to pass parent if the definition is within a Try node + if isinstance( + self.parent, + (node_classes.Try, node_classes.If), + ): + property_already_in_parent_locals = True + + prop_func = objects.Property( + function=self, + name=self.name, + lineno=self.lineno, + parent=self.parent if not property_already_in_parent_locals else None, + col_offset=self.col_offset, + ) + if property_already_in_parent_locals: + prop_func.parent = self.parent + prop_func.postinit(body=[], args=self.args, doc_node=self.doc_node) + yield prop_func + return InferenceErrorInfo(node=self, context=context) def infer_yield_result(self, context: InferenceContext | None = None): """Infer what the function yields when called @@ -1582,7 +1671,7 @@ def get_children(self): yield from self.body def scope_lookup( - self, node: node_classes.LookupMixIn, name: str, offset: int = 0 + self, node: LookupMixIn, name: str, offset: int = 0 ) -> tuple[LocalsDictNodeNG, list[nodes.NodeNG]]: """Lookup where the given name is assigned.""" if name == "__class__": @@ -1590,13 +1679,14 @@ def scope_lookup( # if any methods in a class body refer to either __class__ or super. # In our case, we want to be able to look it up in the current scope # when `__class__` is being used. - frame = self.parent.frame() - if isinstance(frame, ClassDef): + if self.parent and isinstance(frame := self.parent.frame(), ClassDef): return self, [frame] if (self.args.defaults and node in self.args.defaults) or ( self.args.kw_defaults and node in self.args.kw_defaults ): + if not self.parent: + raise ParentMissingError(target=self) frame = self.parent.frame() # line offset to avoid that def func(f=func) resolve the default # value to the defined function @@ -1721,7 +1811,7 @@ def get_wrapping_class(node): return klass -class ClassDef( +class ClassDef( # pylint: disable=too-many-instance-attributes _base_nodes.FilterStmtsBaseNode, LocalsDictNodeNG, _base_nodes.Statement ): """Class representing an :class:`ast.ClassDef` node. @@ -1740,7 +1830,14 @@ def my_meth(self, arg): # by a raw factories # a dictionary of class instances attributes - _astroid_fields = ("decorators", "bases", "keywords", "doc_node", "body") # name + _astroid_fields = ( + "decorators", + "bases", + "keywords", + "doc_node", + "body", + "type_params", + ) # name decorators = None """The decorators that are applied to this class. @@ -1807,6 +1904,11 @@ def __init__( self.is_dataclass: bool = False """Whether this class is a dataclass.""" + self.type_params: list[ + nodes.TypeVar | nodes.ParamSpec | nodes.TypeVarTuple + ] = [] + """PEP 695 (Python 3.12+) type params, e.g. 
class MyClass[T]: ...""" + super().__init__( lineno=lineno, col_offset=col_offset, @@ -1820,7 +1922,9 @@ def __init__( for local_name, node in self.implicit_locals(): self.add_local_node(node, local_name) - infer_binary_op: ClassVar[InferBinaryOp[ClassDef]] + infer_binary_op: ClassVar[ + InferBinaryOp[ClassDef] + ] = protocols.instance_class_infer_binary_op def implicit_parameters(self) -> Literal[1]: return 1 @@ -1848,6 +1952,8 @@ def postinit( *, position: Position | None = None, doc_node: Const | None = None, + type_params: list[nodes.TypeVar | nodes.ParamSpec | nodes.TypeVarTuple] + | None = None, ) -> None: if keywords is not None: self.keywords = keywords @@ -1858,6 +1964,7 @@ def postinit( self._metaclass = metaclass self.position = position self.doc_node = doc_node + self.type_params = type_params or [] def _newstyle_impl(self, context: InferenceContext | None = None): if context is None: @@ -2050,7 +2157,7 @@ def infer_call_result( yield self.instantiate_class() def scope_lookup( - self, node: node_classes.LookupMixIn, name: str, offset: int = 0 + self, node: LookupMixIn, name: str, offset: int = 0 ) -> tuple[LocalsDictNodeNG, list[nodes.NodeNG]]: """Lookup where the given name is assigned. @@ -2089,7 +2196,8 @@ def scope_lookup( # import name # class A(name.Name): # def name(self): ... - + if not self.parent: + raise ParentMissingError(target=self) frame = self.parent.frame() # line offset to avoid that class A(A) resolve the ancestor to # the defined class @@ -2123,7 +2231,8 @@ def ancestors( if context is None: context = InferenceContext() if not self.bases and self.qname() != "builtins.object": - yield builtin_lookup("object")[1][0] + # This should always be a ClassDef (which we don't assert for) + yield builtin_lookup("object")[1][0] # type: ignore[misc] return for stmt in self.bases: @@ -2263,7 +2372,7 @@ def getattr( name: str, context: InferenceContext | None = None, class_context: bool = True, - ) -> list[SuccessfulInferenceResult]: + ) -> list[InferenceResult]: """Get an attribute from this class, using Python's attribute semantic. This method doesn't look in the :attr:`instance_attrs` dictionary @@ -2290,7 +2399,7 @@ def getattr( raise AttributeInferenceError(target=self, attribute=name, context=context) # don't modify the list in self.locals! - values: list[SuccessfulInferenceResult] = list(self.locals.get(name, [])) + values: list[InferenceResult] = list(self.locals.get(name, [])) for classnode in self.ancestors(recurs=True, context=context): values += classnode.locals.get(name, []) @@ -2797,7 +2906,7 @@ def _compute_mro(self, context: InferenceContext | None = None): ancestors = list(base.ancestors(context=context)) bases_mro.append(ancestors) - unmerged_mro = [[self], *bases_mro, inferred_bases] + unmerged_mro: list[list[ClassDef]] = [[self], *bases_mro, inferred_bases] unmerged_mro = clean_duplicates_mro(unmerged_mro, self, context) clean_typing_generic_mro(unmerged_mro) return _c3_merge(unmerged_mro, self, context) @@ -2845,3 +2954,8 @@ def frame(self: _T, *, future: Literal[None, True] = None) -> _T: :returns: The node itself. 
""" return self + + def _infer( + self, context: InferenceContext | None = None, **kwargs: Any + ) -> Generator[ClassDef, None, None]: + yield self diff --git a/astroid/protocols.py b/astroid/protocols.py index e3b89b7ef7..e69ab5d6da 100644 --- a/astroid/protocols.py +++ b/astroid/protocols.py @@ -12,9 +12,9 @@ import itertools import operator as operator_mod from collections.abc import Callable, Generator, Iterator, Sequence -from typing import Any, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar -from astroid import arguments, bases, decorators, helpers, nodes, objects, util +from astroid import bases, decorators, nodes, util from astroid.const import Context from astroid.context import InferenceContext, copy_context from astroid.exceptions import ( @@ -31,47 +31,11 @@ SuccessfulInferenceResult, ) -_TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List) - - -def _reflected_name(name) -> str: - return "__r" + name[2:] - - -def _augmented_name(name) -> str: - return "__i" + name[2:] - +if TYPE_CHECKING: + _TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List) _CONTEXTLIB_MGR = "contextlib.contextmanager" -BIN_OP_METHOD = { - "+": "__add__", - "-": "__sub__", - "/": "__truediv__", - "//": "__floordiv__", - "*": "__mul__", - "**": "__pow__", - "%": "__mod__", - "&": "__and__", - "|": "__or__", - "^": "__xor__", - "<<": "__lshift__", - ">>": "__rshift__", - "@": "__matmul__", -} - -REFLECTED_BIN_OP_METHOD = { - key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items() -} -AUGMENTED_OP_METHOD = { - key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items() -} -UNARY_OP_METHOD = { - "+": "__pos__", - "-": "__neg__", - "~": "__invert__", - "not": None, # XXX not '__nonzero__' -} _UNARY_OPERATORS: dict[str, Callable[[Any], Any]] = { "+": operator_mod.pos, "-": operator_mod.neg, @@ -93,11 +57,25 @@ def _infer_unary_op(obj: Any, op: str) -> ConstFactoryResult: return nodes.const_factory(value) -nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op) -nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op) -nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op) -nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op) -nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op) +def tuple_infer_unary_op(self, op): + return _infer_unary_op(tuple(self.elts), op) + + +def list_infer_unary_op(self, op): + return _infer_unary_op(self.elts, op) + + +def set_infer_unary_op(self, op): + return _infer_unary_op(set(self.elts), op) + + +def const_infer_unary_op(self, op): + return _infer_unary_op(self.value, op) + + +def dict_infer_unary_op(self, op): + return _infer_unary_op(dict(self.items), op) + # Binary operations @@ -157,22 +135,22 @@ def const_infer_binary_op( yield not_implemented -nodes.Const.infer_binary_op = const_infer_binary_op - - def _multiply_seq_by_int( self: _TupleListNodeT, opnode: nodes.AugAssign | nodes.BinOp, - other: nodes.Const, + value: int, context: InferenceContext, ) -> _TupleListNodeT: node = self.__class__(parent=opnode) + if value > 1e8: + node.elts = [util.Uninferable] + return node filtered_elts = ( - helpers.safe_infer(elt, context) or util.Uninferable + util.safe_infer(elt, context) or util.Uninferable for elt in self.elts if not isinstance(elt, util.UninferableBase) ) - node.elts = list(filtered_elts) * other.value + node.elts = list(filtered_elts) * value return node @@ -205,6 +183,8 @@ def 
tl_infer_binary_op( or list. This refers to the left-hand side of the operation, so: 'tuple() + 1' or '[] + A()' """ + from astroid import helpers # pylint: disable=import-outside-toplevel + # For tuples and list the boundnode is no longer the tuple or list instance context.boundnode = None not_implemented = nodes.Const(NotImplemented) @@ -221,25 +201,24 @@ def tl_infer_binary_op( if not isinstance(other.value, int): yield not_implemented return - yield _multiply_seq_by_int(self, opnode, other, context) + yield _multiply_seq_by_int(self, opnode, other.value, context) elif isinstance(other, bases.Instance) and operator == "*": # Verify if the instance supports __index__. as_index = helpers.class_instance_as_index(other) if not as_index: yield util.Uninferable + elif not isinstance(as_index.value, int): # pragma: no cover + # already checked by class_instance_as_index() but faster than casting + raise AssertionError("Please open a bug report.") else: - yield _multiply_seq_by_int(self, opnode, as_index, context) + yield _multiply_seq_by_int(self, opnode, as_index.value, context) else: yield not_implemented -nodes.Tuple.infer_binary_op = tl_infer_binary_op -nodes.List.infer_binary_op = tl_infer_binary_op - - @decorators.yes_if_nothing_inferred def instance_class_infer_binary_op( - self: bases.Instance | nodes.ClassDef, + self: nodes.ClassDef, opnode: nodes.AugAssign | nodes.BinOp, operator: str, other: InferenceResult, @@ -249,12 +228,8 @@ def instance_class_infer_binary_op( return method.infer_call_result(self, context) -bases.Instance.infer_binary_op = instance_class_infer_binary_op -nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op - - # assignment ################################################################## - +# pylint: disable-next=pointless-string-statement """The assigned_stmts method is responsible to return the assigned statement (e.g. not inferred) according to the assignment type. 
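As background for the protocol hunks that follow, here is a minimal sketch of what assigned_stmts returns; it is illustrative only, not part of the patch, and assumes a recent astroid release:

    # Illustrative only -- assigned_stmts returns the assigned statement as written,
    # not an inferred value.
    import astroid

    assign = astroid.extract_node("x = [1, 2, 3]")
    target = assign.targets[0]                # the AssignName node for 'x'
    assigned = list(target.assigned_stmts())
    # assigned[0] is the List node for [1, 2, 3], i.e. the right-hand side as written;
    # target.inferred() would instead run the full inference machinery on it.
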
@@ -337,10 +312,6 @@ def for_assigned_stmts( } -nodes.For.assigned_stmts = for_assigned_stmts -nodes.Comprehension.assigned_stmts = for_assigned_stmts - - def sequence_assigned_stmts( self: nodes.Tuple | nodes.List, node: node_classes.AssignedStmtsPossibleNode = None, @@ -365,10 +336,6 @@ def sequence_assigned_stmts( ) -nodes.Tuple.assigned_stmts = sequence_assigned_stmts -nodes.List.assigned_stmts = sequence_assigned_stmts - - def assend_assigned_stmts( self: nodes.AssignName | nodes.AssignAttr, node: node_classes.AssignedStmtsPossibleNode = None, @@ -378,23 +345,22 @@ def assend_assigned_stmts( return self.parent.assigned_stmts(node=self, context=context) -nodes.AssignName.assigned_stmts = assend_assigned_stmts -nodes.AssignAttr.assigned_stmts = assend_assigned_stmts - - def _arguments_infer_argname( self, name: str | None, context: InferenceContext ) -> Generator[InferenceResult, None, None]: # arguments information may be missing, in which case we can't do anything # more - if not (self.arguments or self.vararg or self.kwarg): + from astroid import arguments # pylint: disable=import-outside-toplevel + + if not self.arguments: yield util.Uninferable return + args = [arg for arg in self.arguments if arg.name not in [self.vararg, self.kwarg]] functype = self.parent.type # first argument of instance/class method if ( - self.arguments + args and getattr(self.arguments[0], "name", None) == name and functype != "staticmethod" ): @@ -423,7 +389,7 @@ def _arguments_infer_argname( if name == self.vararg: vararg = nodes.const_factory(()) vararg.parent = self - if not self.arguments and self.parent.name == "__init__": + if not args and self.parent.name == "__init__": cls = self.parent.parent.scope() vararg.elts = [cls.instantiate_class()] yield vararg @@ -449,6 +415,8 @@ def arguments_assigned_stmts( context: InferenceContext | None = None, assign_path: list[int] | None = None, ) -> Any: + from astroid import arguments # pylint: disable=import-outside-toplevel + try: node_name = node.name # type: ignore[union-attr] except AttributeError: @@ -472,12 +440,9 @@ def arguments_assigned_stmts( return _arguments_infer_argname(self, node_name, context) -nodes.Arguments.assigned_stmts = arguments_assigned_stmts - - @decorators.raise_if_nothing_inferred def assign_assigned_stmts( - self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign, + self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign | nodes.TypeAlias, node: node_classes.AssignedStmtsPossibleNode = None, context: InferenceContext | None = None, assign_path: list[int] | None = None, @@ -510,11 +475,6 @@ def assign_annassigned_stmts( yield inferred -nodes.Assign.assigned_stmts = assign_assigned_stmts -nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts -nodes.AugAssign.assigned_stmts = assign_assigned_stmts - - def _resolve_assignment_parts(parts, assign_path, context): """Recursive function to resolve multiple assignments.""" assign_path = assign_path[:] @@ -562,6 +522,8 @@ def excepthandler_assigned_stmts( context: InferenceContext | None = None, assign_path: list[int] | None = None, ) -> Any: + from astroid import objects # pylint: disable=import-outside-toplevel + for assigned in node_classes.unpack_infer(self.type): if isinstance(assigned, nodes.ClassDef): assigned = objects.ExceptionInstance(assigned) @@ -575,9 +537,6 @@ def excepthandler_assigned_stmts( } -nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts - - def _infer_context_manager(self, mgr, context): try: inferred = next(mgr.infer(context=context)) @@ -696,9 +655,6 
@@ def __enter__(self): } -nodes.With.assigned_stmts = with_assigned_stmts - - @decorators.raise_if_nothing_inferred def named_expr_assigned_stmts( self: nodes.NamedExpr, @@ -718,9 +674,6 @@ def named_expr_assigned_stmts( ) -nodes.NamedExpr.assigned_stmts = named_expr_assigned_stmts - - @decorators.yes_if_nothing_inferred def starred_assigned_stmts( # noqa: C901 self: nodes.Starred, @@ -918,9 +871,6 @@ def _determine_starred_iteration_lookups( yield util.Uninferable -nodes.Starred.assigned_stmts = starred_assigned_stmts - - @decorators.yes_if_nothing_inferred def match_mapping_assigned_stmts( self: nodes.MatchMapping, @@ -935,9 +885,6 @@ def match_mapping_assigned_stmts( yield -nodes.MatchMapping.assigned_stmts = match_mapping_assigned_stmts - - @decorators.yes_if_nothing_inferred def match_star_assigned_stmts( self: nodes.MatchStar, @@ -952,9 +899,6 @@ def match_star_assigned_stmts( yield -nodes.MatchStar.assigned_stmts = match_star_assigned_stmts - - @decorators.yes_if_nothing_inferred def match_as_assigned_stmts( self: nodes.MatchAs, @@ -971,6 +915,3 @@ def match_as_assigned_stmts( and self.pattern is None ): yield self.parent.parent.subject - - -nodes.MatchAs.assigned_stmts = match_as_assigned_stmts diff --git a/astroid/raw_building.py b/astroid/raw_building.py index bf07028e2b..ba7a60712a 100644 --- a/astroid/raw_building.py +++ b/astroid/raw_building.py @@ -39,7 +39,6 @@ # the keys of CONST_CLS eg python builtin types _CONSTANTS = tuple(node_classes.CONST_CLS) -_BUILTINS = vars(builtins) TYPE_NONE = type(None) TYPE_NOTIMPLEMENTED = type(NotImplemented) TYPE_ELLIPSIS = type(...) diff --git a/astroid/rebuilder.py b/astroid/rebuilder.py index 64c1c12362..17a6ffe57f 100644 --- a/astroid/rebuilder.py +++ b/astroid/rebuilder.py @@ -18,11 +18,12 @@ from astroid import nodes from astroid._ast import ParserModule, get_parser_module, parse_function_type_comment -from astroid.const import IS_PYPY, PY38, PY39_PLUS, Context +from astroid.const import IS_PYPY, PY38, PY39_PLUS, PY312_PLUS, Context from astroid.manager import AstroidManager from astroid.nodes import NodeNG +from astroid.nodes.node_classes import AssignName from astroid.nodes.utils import Position -from astroid.typing import SuccessfulInferenceResult +from astroid.typing import InferenceResult REDIRECT: Final[dict[str, str]] = { "arguments": "Arguments", @@ -167,9 +168,8 @@ def _reset_end_lineno(self, newnode: nodes.NodeNG) -> None: - ClassDef - For - FunctionDef - While - Call - If - - Decorators - TryExcept - - With - TryFinally - - Assign + - Decorators - Try + - With - Assign """ newnode.end_lineno = None newnode.end_col_offset = None @@ -384,6 +384,12 @@ def visit(self, node: ast.Nonlocal, parent: NodeNG) -> nodes.Nonlocal: def visit(self, node: ast.Constant, parent: NodeNG) -> nodes.Const: ... + if sys.version_info >= (3, 12): + + @overload + def visit(self, node: ast.ParamSpec, parent: NodeNG) -> nodes.ParamSpec: + ... + @overload def visit(self, node: ast.Pass, parent: NodeNG) -> nodes.Pass: ... @@ -417,9 +423,7 @@ def visit(self, node: ast.Starred, parent: NodeNG) -> nodes.Starred: ... @overload - def visit( - self, node: ast.Try, parent: NodeNG - ) -> nodes.TryExcept | nodes.TryFinally: + def visit(self, node: ast.Try, parent: NodeNG) -> nodes.Try: ... if sys.version_info >= (3, 11): @@ -432,6 +436,22 @@ def visit(self, node: ast.TryStar, parent: NodeNG) -> nodes.TryStar: def visit(self, node: ast.Tuple, parent: NodeNG) -> nodes.Tuple: ... 
+ if sys.version_info >= (3, 12): + + @overload + def visit(self, node: ast.TypeAlias, parent: NodeNG) -> nodes.TypeAlias: + ... + + @overload + def visit(self, node: ast.TypeVar, parent: NodeNG) -> nodes.TypeVar: + ... + + @overload + def visit( + self, node: ast.TypeVarTuple, parent: NodeNG + ) -> nodes.TypeVarTuple: + ... + @overload def visit(self, node: ast.UnaryOp, parent: NodeNG) -> nodes.UnaryOp: ... @@ -542,10 +562,33 @@ def visit_arguments(self, node: ast.arguments, parent: NodeNG) -> nodes.Argument """Visit an Arguments node by returning a fresh instance of it.""" vararg: str | None = None kwarg: str | None = None + vararg_node = node.vararg + kwarg_node = node.kwarg + newnode = nodes.Arguments( node.vararg.arg if node.vararg else None, node.kwarg.arg if node.kwarg else None, parent, + AssignName( + vararg_node.arg, + vararg_node.lineno, + vararg_node.col_offset, + parent, + end_lineno=vararg_node.end_lineno, + end_col_offset=vararg_node.end_col_offset, + ) + if vararg_node + else None, + AssignName( + kwarg_node.arg, + kwarg_node.lineno, + kwarg_node.col_offset, + parent, + end_lineno=kwarg_node.end_lineno, + end_col_offset=kwarg_node.end_col_offset, + ) + if kwarg_node + else None, ) args = [self.visit(child, newnode) for child in node.args] defaults = [self.visit(child, newnode) for child in node.defaults] @@ -870,6 +913,9 @@ def visit_classdef( ], position=self._get_position_info(node, newnode), doc_node=self.visit(doc_ast_node, newnode), + type_params=[self.visit(param, newnode) for param in node.type_params] + if PY312_PLUS + else [], ) return newnode @@ -994,7 +1040,7 @@ def visit_dict(self, node: ast.Dict, parent: NodeNG) -> nodes.Dict: end_col_offset=node.end_col_offset, parent=parent, ) - items: list[tuple[SuccessfulInferenceResult, SuccessfulInferenceResult]] = list( + items: list[tuple[InferenceResult, InferenceResult]] = list( self._visit_dict_items(node, parent, newnode) ) newnode.postinit(items) @@ -1170,6 +1216,9 @@ def _visit_functiondef( type_comment_args=type_comment_args, position=self._get_position_info(node, newnode), doc_node=self.visit(doc_ast_node, newnode), + type_params=[self.visit(param, newnode) for param in node.type_params] + if PY312_PLUS + else [], ) self._global_names.pop() return newnode @@ -1477,6 +1526,20 @@ def visit_constant(self, node: ast.Constant, parent: NodeNG) -> nodes.Const: parent=parent, ) + def visit_paramspec(self, node: ast.ParamSpec, parent: NodeNG) -> nodes.ParamSpec: + """Visit a ParamSpec node by returning a fresh instance of it.""" + newnode = nodes.ParamSpec( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + # Add AssignName node for 'node.name' + # https://bugs.python.org/issue43994 + newnode.postinit(name=self.visit_assignname(node, newnode, node.name)) + return newnode + def visit_pass(self, node: ast.Pass, parent: NodeNG) -> nodes.Pass: """Visit a Pass node by returning a fresh instance of it.""" return nodes.Pass( @@ -1589,56 +1652,23 @@ def visit_starred(self, node: ast.Starred, parent: NodeNG) -> nodes.Starred: newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_tryexcept(self, node: ast.Try, parent: NodeNG) -> nodes.TryExcept: - """Visit a TryExcept node by returning a fresh instance of it.""" - # TryExcept excludes the 'finally' but that will be included in the - # end_lineno from 'node'. Therefore, we check all non 'finally' - # children to find the correct end_lineno and column. 
- end_lineno = node.end_lineno - end_col_offset = node.end_col_offset - all_children: list[ast.AST] = [*node.body, *node.handlers, *node.orelse] - for child in reversed(all_children): - end_lineno = child.end_lineno - end_col_offset = child.end_col_offset - break - newnode = nodes.TryExcept( + def visit_try(self, node: ast.Try, parent: NodeNG) -> nodes.Try: + """Visit a Try node by returning a fresh instance of it""" + newnode = nodes.Try( lineno=node.lineno, col_offset=node.col_offset, - end_lineno=end_lineno, - end_col_offset=end_col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, parent=parent, ) newnode.postinit( - [self.visit(child, newnode) for child in node.body], - [self.visit(child, newnode) for child in node.handlers], - [self.visit(child, newnode) for child in node.orelse], + body=[self.visit(child, newnode) for child in node.body], + handlers=[self.visit(child, newnode) for child in node.handlers], + orelse=[self.visit(child, newnode) for child in node.orelse], + finalbody=[self.visit(child, newnode) for child in node.finalbody], ) return newnode - def visit_try( - self, node: ast.Try, parent: NodeNG - ) -> nodes.TryExcept | nodes.TryFinally | None: - # python 3.3 introduce a new Try node replacing - # TryFinally/TryExcept nodes - if node.finalbody: - newnode = nodes.TryFinally( - lineno=node.lineno, - col_offset=node.col_offset, - end_lineno=node.end_lineno, - end_col_offset=node.end_col_offset, - parent=parent, - ) - body: list[NodeNG | nodes.TryExcept] - if node.handlers: - body = [self.visit_tryexcept(node, newnode)] - else: - body = [self.visit(child, newnode) for child in node.body] - newnode.postinit(body, [self.visit(n, newnode) for n in node.finalbody]) - return newnode - if node.handlers: - return self.visit_tryexcept(node, parent) - return None - def visit_trystar(self, node: ast.TryStar, parent: NodeNG) -> nodes.TryStar: newnode = nodes.TryStar( lineno=node.lineno, @@ -1669,6 +1699,55 @@ def visit_tuple(self, node: ast.Tuple, parent: NodeNG) -> nodes.Tuple: newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode + def visit_typealias(self, node: ast.TypeAlias, parent: NodeNG) -> nodes.TypeAlias: + """Visit a TypeAlias node by returning a fresh instance of it.""" + newnode = nodes.TypeAlias( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + newnode.postinit( + name=self.visit(node.name, newnode), + type_params=[self.visit(p, newnode) for p in node.type_params], + value=self.visit(node.value, newnode), + ) + return newnode + + def visit_typevar(self, node: ast.TypeVar, parent: NodeNG) -> nodes.TypeVar: + """Visit a TypeVar node by returning a fresh instance of it.""" + newnode = nodes.TypeVar( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + # Add AssignName node for 'node.name' + # https://bugs.python.org/issue43994 + newnode.postinit( + name=self.visit_assignname(node, newnode, node.name), + bound=self.visit(node.bound, newnode), + ) + return newnode + + def visit_typevartuple( + self, node: ast.TypeVarTuple, parent: NodeNG + ) -> nodes.TypeVarTuple: + """Visit a TypeVarTuple node by returning a fresh instance of it.""" + newnode = nodes.TypeVarTuple( + lineno=node.lineno, + col_offset=node.col_offset, + end_lineno=node.end_lineno, + end_col_offset=node.end_col_offset, + parent=parent, + ) + # Add AssignName node for 
'node.name' + # https://bugs.python.org/issue43994 + newnode.postinit(name=self.visit_assignname(node, newnode, node.name)) + return newnode + def visit_unaryop(self, node: ast.UnaryOp, parent: NodeNG) -> nodes.UnaryOp: """Visit a UnaryOp node by returning a fresh instance of it.""" newnode = nodes.UnaryOp( diff --git a/astroid/scoped_nodes.py b/astroid/scoped_nodes.py deleted file mode 100644 index da780f6f7b..0000000000 --- a/astroid/scoped_nodes.py +++ /dev/null @@ -1,35 +0,0 @@ -# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html -# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE -# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt - -# pylint: disable=unused-import - -import warnings - -from astroid.nodes.scoped_nodes import ( - AsyncFunctionDef, - ClassDef, - ComprehensionScope, - DictComp, - FunctionDef, - GeneratorExp, - Lambda, - ListComp, - LocalsDictNodeNG, - Module, - SetComp, - _is_metaclass, - builtin_lookup, - function_to_method, - get_wrapping_class, -) - -# We cannot create a __all__ here because it would create a circular import -# Please remove astroid/scoped_nodes.py|astroid/node_classes.py in autoflake -# exclude when removing this file. -warnings.warn( - "The 'astroid.scoped_nodes' module is deprecated and will be replaced by " - "'astroid.nodes' in astroid 3.0.0", - DeprecationWarning, - stacklevel=2, -) diff --git a/astroid/typing.py b/astroid/typing.py index 0ae30fcc28..acb5418fd5 100644 --- a/astroid/typing.py +++ b/astroid/typing.py @@ -17,6 +17,8 @@ ) if TYPE_CHECKING: + from collections.abc import Iterator + from astroid import bases, exceptions, nodes, transforms, util from astroid.context import InferenceContext from astroid.interpreter._import import spec @@ -41,6 +43,7 @@ class AstroidManagerBrain(TypedDict): _failed_import_hooks: list[Callable[[str], nodes.Module]] always_load_extensions: bool optimize_ast: bool + max_inferable_values: int extension_package_whitelist: set[str] _transform: transforms.TransformVisitor @@ -84,7 +87,7 @@ def __call__( node: _SuccessfulInferenceResultT_contra, context: InferenceContext | None = None, **kwargs: Any, - ) -> Generator[InferenceResult, None, None]: + ) -> Iterator[InferenceResult]: ... # pragma: no cover diff --git a/astroid/util.py b/astroid/util.py index 50ca336a86..510b81cc13 100644 --- a/astroid/util.py +++ b/astroid/util.py @@ -6,7 +6,14 @@ from __future__ import annotations import warnings -from typing import Any, Final, Literal +from typing import TYPE_CHECKING, Any, Final, Literal + +from astroid.exceptions import InferenceError + +if TYPE_CHECKING: + from astroid import bases, nodes + from astroid.context import InferenceContext + from astroid.typing import InferenceResult class UninferableBase: @@ -125,3 +132,28 @@ def check_warnings_filter() -> bool: and filter[3] != "__main__" for filter in warnings.filters ) + + +def safe_infer( + node: nodes.NodeNG | bases.Proxy | UninferableBase, + context: InferenceContext | None = None, +) -> InferenceResult | None: + """Return the inferred value for the given node. + + Return None if inference failed or if there is some ambiguity (more than + one node has been inferred). 
+ """ + if isinstance(node, UninferableBase): + return node + try: + inferit = node.infer(context=context) + value = next(inferit) + except (InferenceError, StopIteration): + return None + try: + next(inferit) + return None # None if there is ambiguity on the inferred node + except InferenceError: + return None # there is some kind of ambiguity + except StopIteration: + return value diff --git a/doc/Makefile b/doc/Makefile index f90e635fc3..62d227732d 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -1,5 +1,4 @@ # Makefile for Sphinx documentation -# # You can set these variables from the command line. SPHINXOPTS = @@ -8,30 +7,14 @@ PAPER = BUILDDIR = build # Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) -n . -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest +.PHONY: help clean html linkcheck help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* @@ -41,90 +24,8 @@ html: @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astroid.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astroid.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." 
- @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/Astroid" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astroid" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." diff --git a/doc/_templates/autosummary_class.rst b/doc/_templates/autosummary_class.rst new file mode 100644 index 0000000000..2cfc6ca803 --- /dev/null +++ b/doc/_templates/autosummary_class.rst @@ -0,0 +1,5 @@ +{{ name | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} diff --git a/doc/api/astroid.exceptions.rst b/doc/api/astroid.exceptions.rst index 65abeaf817..995a3c2354 100644 --- a/doc/api/astroid.exceptions.rst +++ b/doc/api/astroid.exceptions.rst @@ -3,14 +3,6 @@ Exceptions .. automodule:: astroid.exceptions - .. rubric:: Classes - - .. autosummary:: - - BinaryOperationError - OperationError - UnaryOperationError - .. rubric:: Exceptions .. autosummary:: diff --git a/doc/api/astroid.nodes.rst b/doc/api/astroid.nodes.rst index 7783b45d3d..3fff0c307b 100644 --- a/doc/api/astroid.nodes.rst +++ b/doc/api/astroid.nodes.rst @@ -1,13 +1,13 @@ Nodes ===== -For a list of available nodes see :ref:`nodes`. - .. _nodes: Nodes ----- .. autosummary:: + :toctree: nodes + :template: autosummary_class.rst astroid.nodes.AnnAssign astroid.nodes.Arguments @@ -67,6 +67,7 @@ Nodes astroid.nodes.Module astroid.nodes.Name astroid.nodes.Nonlocal + astroid.nodes.ParamSpec astroid.nodes.Pass astroid.nodes.Raise astroid.nodes.Return @@ -75,165 +76,15 @@ Nodes astroid.nodes.Slice astroid.nodes.Starred astroid.nodes.Subscript - astroid.nodes.TryExcept - astroid.nodes.TryFinally + astroid.nodes.Try astroid.nodes.TryStar astroid.nodes.Tuple + astroid.nodes.TypeAlias + astroid.nodes.TypeVar + astroid.nodes.TypeVarTuple astroid.nodes.UnaryOp astroid.nodes.Unknown astroid.nodes.While astroid.nodes.With astroid.nodes.Yield astroid.nodes.YieldFrom - -.. autoclass:: astroid.nodes.AnnAssign - -.. autoclass:: astroid.nodes.Arguments - -.. autoclass:: astroid.nodes.Assert - -.. autoclass:: astroid.nodes.Assign - -.. autoclass:: astroid.nodes.AssignAttr - -.. 
autoclass:: astroid.nodes.AssignName - -.. autoclass:: astroid.nodes.AsyncFor - -.. autoclass:: astroid.nodes.AsyncFunctionDef - -.. autoclass:: astroid.nodes.AsyncWith - -.. autoclass:: astroid.nodes.Attribute - -.. autoclass:: astroid.nodes.AugAssign - -.. autoclass:: astroid.nodes.Await - -.. autoclass:: astroid.nodes.BinOp - -.. autoclass:: astroid.nodes.BoolOp - -.. autoclass:: astroid.nodes.Break - -.. autoclass:: astroid.nodes.Call - -.. autoclass:: astroid.nodes.ClassDef - -.. autoclass:: astroid.nodes.Compare - -.. autoclass:: astroid.nodes.Comprehension - -.. autoclass:: astroid.nodes.Const - -.. autoclass:: astroid.nodes.Continue - -.. autoclass:: astroid.nodes.Decorators - -.. autoclass:: astroid.nodes.DelAttr - -.. autoclass:: astroid.nodes.DelName - -.. autoclass:: astroid.nodes.Delete - -.. autoclass:: astroid.nodes.Dict - -.. autoclass:: astroid.nodes.DictComp - -.. autoclass:: astroid.nodes.DictUnpack - -.. autoclass:: astroid.nodes.EmptyNode - -.. autoclass:: astroid.nodes.ExceptHandler - -.. autoclass:: astroid.nodes.Expr - -.. autoclass:: astroid.nodes.For - -.. autoclass:: astroid.nodes.FormattedValue - -.. autoclass:: astroid.nodes.FunctionDef - -.. autoclass:: astroid.nodes.GeneratorExp - -.. autoclass:: astroid.nodes.Global - -.. autoclass:: astroid.nodes.If - -.. autoclass:: astroid.nodes.IfExp - -.. autoclass:: astroid.nodes.Import - -.. autoclass:: astroid.nodes.ImportFrom - -.. autoclass:: astroid.nodes.JoinedStr - -.. autoclass:: astroid.nodes.Keyword - -.. autoclass:: astroid.nodes.Lambda - -.. autoclass:: astroid.nodes.List - -.. autoclass:: astroid.nodes.ListComp - -.. autoclass:: astroid.nodes.Match - -.. autoclass:: astroid.nodes.MatchAs - -.. autoclass:: astroid.nodes.MatchCase - -.. autoclass:: astroid.nodes.MatchClass - -.. autoclass:: astroid.nodes.MatchMapping - -.. autoclass:: astroid.nodes.MatchOr - -.. autoclass:: astroid.nodes.MatchSequence - -.. autoclass:: astroid.nodes.MatchSingleton - -.. autoclass:: astroid.nodes.MatchStar - -.. autoclass:: astroid.nodes.MatchValue - -.. autoclass:: astroid.nodes.Module - -.. autoclass:: astroid.nodes.Name - -.. autoclass:: astroid.nodes.Nonlocal - -.. autoclass:: astroid.nodes.Pass - -.. autoclass:: astroid.nodes.Raise - -.. autoclass:: astroid.nodes.Return - -.. autoclass:: astroid.nodes.Set - -.. autoclass:: astroid.nodes.SetComp - -.. autoclass:: astroid.nodes.Slice - -.. autoclass:: astroid.nodes.Starred - -.. autoclass:: astroid.nodes.Subscript - -.. autoclass:: astroid.nodes.TryExcept - -.. autoclass:: astroid.nodes.TryFinally - -.. autoclass:: astroid.nodes.TryStar - -.. autoclass:: astroid.nodes.Tuple - -.. autoclass:: astroid.nodes.UnaryOp - -.. autoclass:: astroid.nodes.Unknown - -.. autoclass:: astroid.nodes.While - -.. autoclass:: astroid.nodes.With - -.. autoclass:: astroid.nodes.Yield - -.. autoclass:: astroid.nodes.YieldFrom diff --git a/doc/api/base_nodes.rst b/doc/api/base_nodes.rst index 6253ce5ce5..d8c60acf20 100644 --- a/doc/api/base_nodes.rst +++ b/doc/api/base_nodes.rst @@ -4,41 +4,11 @@ Base Nodes These are abstract node classes that :ref:`other nodes ` inherit from. .. 
autosummary:: + :toctree: base_nodes + :template: autosummary_class.rst - astroid.nodes._base_nodes.AssignTypeNode astroid.nodes.BaseContainer - astroid.nodes._base_nodes.MultiLineWithElseBlockNode astroid.nodes.ComprehensionScope - astroid.nodes._base_nodes.FilterStmtsBaseNode - astroid.nodes._base_nodes.ImportNode - astroid.nodes.LocalsDictNodeNG - astroid.nodes.node_classes.LookupMixIn + astroid.nodes.LocalsDictNodeNG astroid.nodes.NodeNG - astroid.nodes._base_nodes.ParentAssignNode - astroid.nodes.Statement astroid.nodes.Pattern - - -.. autoclass:: astroid.nodes._base_nodes.AssignTypeNode - -.. autoclass:: astroid.nodes.BaseContainer - -.. autoclass:: astroid.nodes._base_nodes.MultiLineWithElseBlockNode - -.. autoclass:: astroid.nodes.ComprehensionScope - -.. autoclass:: astroid.nodes._base_nodes.FilterStmtsBaseNode - -.. autoclass:: astroid.nodes._base_nodes.ImportNode - -.. autoclass:: astroid.nodes.LocalsDictNodeNG - -.. autoclass:: astroid.nodes.node_classes.LookupMixIn - -.. autoclass:: astroid.nodes.NodeNG - -.. autoclass:: astroid.nodes._base_nodes.ParentAssignNode - -.. autoclass:: astroid.nodes.Statement - -.. autoclass:: astroid.nodes.Pattern diff --git a/doc/ast_objects.inv b/doc/ast_objects.inv deleted file mode 100644 index 15683ac47e..0000000000 --- a/doc/ast_objects.inv +++ /dev/null @@ -1,11 +0,0 @@ -# Sphinx inventory version 2 -# Project: Green Tree Snakes -# Version: 1.0 -# The remainder of this file is compressed using zlib. -xn8Fz - t]f)4tIKM"UJR"1 s/_q?^-/ŝc6pOS"aҥG )9a=,ܕԿfk,'xiqX+ESV/Pq% No\,Kfbd`@CXaGkpg$ -ԑy!sd+9K7]-ζRXpH -K)9Are&׈J/~Bkd$2B97_n@q̑=KJ6„UX\ETvǽ\D$o{cC5@B8eM f/k(]u^P8U# <&tt(P :nu f$[W8Y6E[{4g#'y@'H.-GnJFTz+;a G'ȃmÙw -˔l Y+ lC{ˈJ/Ws.3Zg7eV -.r$[Tܓ"|y ie%,yb|\[i˕ȥCp٭j>ZYr/skZζf zYka'{n$.=؝x:܅Wgb^%dݕ߬;\]Y?(2I~&F&\| BJla\f#mi NhsMXpVvJN|7cQe2[(přwaF9|5 |b٨l ͼH}d1\UyxMY߇ϨǛC_l+1ܪ: -- "*?M7rc)Z ':\lO,v;e?Ѝ@yz36Bivdb=aK`zec;>>*'mNY1&\?7,|QMVwQ \ No newline at end of file diff --git a/doc/conf.py b/doc/conf.py index 9cdd52dd14..43c2c0f98b 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -2,18 +2,6 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt -# -# Astroid documentation build configuration file, created by -# sphinx-quickstart on Wed Jun 26 15:00:40 2013. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - import os import sys from datetime import datetime @@ -25,19 +13,14 @@ # -- General configuration ----------------------------------------------------- -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", - "sphinx.ext.doctest", "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.viewcode", "sphinx.ext.napoleon", + "sphinx.ext.viewcode", ] # Add any paths that contain templates here, relative to this directory. @@ -46,11 +29,8 @@ # The suffix of source filenames. source_suffix = ".rst" -# The encoding of source files. -# source_encoding = 'utf-8-sig' - # The master toctree document. 
-master_doc = "index" +root_doc = "index" # General information about the project. project = "Astroid" @@ -58,205 +38,40 @@ contributors = "Logilab, and astroid contributors" copyright = f"2003-{current_year}, {contributors}" -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. from astroid.__pkginfo__ import __version__ # noqa -# The full version, including alpha/beta/rc tags. release = __version__ -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ["_build"] -# The reST default role (used for this markup: `text`) to use for all documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - - -# -- Customization -- - -primary_domain = "py" -todo_include_todos = True - # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "nature" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None +html_theme = "furo" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["media"] -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. 
-# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - # Output file base name for HTML help builder. htmlhelp_basename = "Pylintdoc" - -# -- Options for LaTeX output -------------------------------------------------- - -# The paper size ('letter' or 'a4'). -# latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -# latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ( - "index", - "Astroid.tex", - "Astroid Documentation", - contributors, - "manual", - ), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Additional stuff for the LaTeX preamble. -# latex_preamble = '' - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - "index", - "astroid", - "Astroid Documentation", - [contributors], - 1, - ) -] +# -- Options for Autodoc ------------------------------------------------------- autodoc_default_options = { "members": True, - "undoc-members": True, "show-inheritance": True, + "undoc-members": True, } -autoclass_content = "both" -autodoc_member_order = "groupwise" -autodoc_typehints = "description" intersphinx_mapping = { - "green_tree_snakes": ( - "http://greentreesnakes.readthedocs.io/en/latest/", - "ast_objects.inv", - ), + # Use dev so that the documentation builds when we are adding support for + # upcoming Python versions. + "python": ("https://docs.python.org/dev", None), } diff --git a/doc/index.rst b/doc/index.rst index bf01378d6e..c923b27830 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -59,11 +59,10 @@ tools. .. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-astroid?utm_source=pypi-astroid&utm_medium=referral&utm_campaign=readme -More information ----------------- .. 
toctree:: :maxdepth: 2 + :hidden: inference @@ -73,10 +72,10 @@ More information whatsnew +.. toctree:: + :hidden: + :caption: Indices -Indices and tables -================== + genindex -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` + modindex diff --git a/doc/inference.rst b/doc/inference.rst index d66ea5ea19..ef8340c950 100644 --- a/doc/inference.rst +++ b/doc/inference.rst @@ -34,7 +34,7 @@ inferred to be an instance of some known class. Crash course into astroid's inference -------------------------------------- -Let's see some examples on how the inference might work in in ``astroid``. +Let's see some examples on how the inference might work in ``astroid``. First we'll need to do a detour through some of the ``astroid``'s APIs. diff --git a/doc/requirements.txt b/doc/requirements.txt index 1f40d93473..7b7a9fe8cc 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,2 +1,3 @@ -e . -sphinx~=6.2 +sphinx~=7.2 +furo==2023.9.10 diff --git a/pyproject.toml b/pyproject.toml index 9b3d42723c..5b12875359 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Python Modules", @@ -45,14 +46,6 @@ license-files = ["LICENSE", "CONTRIBUTORS.txt"] # Keep in sync with setup.cfg [tool.setuptools.packages.find] include = ["astroid*"] -[tool.setuptools.package-data] -"*" = [ - "../requirements*.txt", - "../tox.ini", - "../tests/__init__.py", - "../tests/resources.py", -] - [tool.setuptools.dynamic] version = {attr = "astroid.__pkginfo__.__version__"} @@ -115,5 +108,9 @@ fixable = [ "I", # isort "RUF", # ruff ] -unfixable = [] -target-version = "py37" +unfixable = ["RUF001"] +target-version = "py38" + +[tool.ruff.per-file-ignores] +# Ruff is autofixing a tests with a voluntarily sneaky unicode +"tests/test_regrtest.py" = ["RUF001"] diff --git a/requirements_minimal.txt b/requirements_minimal.txt index 8b0d4d556d..e49b791661 100644 --- a/requirements_minimal.txt +++ b/requirements_minimal.txt @@ -1,8 +1,8 @@ # Tools used when releasing contributors-txt>=0.7.4 -tbump~=6.10 +tbump~=6.11 # Tools used to run tests -coverage~=7.2 +coverage~=7.3 pytest pytest-cov~=4.1 diff --git a/script/.contributors_aliases.json b/script/.contributors_aliases.json index fedcc682a5..73e9e0db14 100644 --- a/script/.contributors_aliases.json +++ b/script/.contributors_aliases.json @@ -30,6 +30,10 @@ "mails": ["androwiiid@gmail.com"], "name": "Paligot Gérard" }, + "antonio@zoftko.com": { + "mails": ["antonio@zoftko.com", "antonioglez-23@hotmail.com"], + "name": "Antonio" + }, "areveny@protonmail.com": { "mails": ["areveny@protonmail.com", "self@areveny.com"], "name": "Areveny", diff --git a/tbump.toml b/tbump.toml index b5bc137a90..73d2164a98 100644 --- a/tbump.toml +++ b/tbump.toml @@ -1,7 +1,7 @@ github_url = "https://github.com/pylint-dev/astroid" [version] -current = "3.0.0a6-dev0" +current = "3.0.0a10-dev0" regex = ''' ^(?P0|[1-9]\d*) \. 
diff --git a/tests/brain/numpy/test_core_numerictypes.py b/tests/brain/numpy/test_core_numerictypes.py index 17dd83f3c9..819cc7d34f 100644 --- a/tests/brain/numpy/test_core_numerictypes.py +++ b/tests/brain/numpy/test_core_numerictypes.py @@ -2,7 +2,10 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from __future__ import annotations + import unittest +from typing import ClassVar try: import numpy # pylint: disable=unused-import @@ -23,7 +26,7 @@ class NumpyBrainCoreNumericTypesTest(unittest.TestCase): """Test of all the missing types defined in numerictypes module.""" - all_types = [ + all_types: ClassVar[list[str]] = [ "uint16", "uint32", "uint64", diff --git a/tests/brain/numpy/test_random_mtrand.py b/tests/brain/numpy/test_random_mtrand.py index d2f3a2e89d..7a3c3240fd 100644 --- a/tests/brain/numpy/test_random_mtrand.py +++ b/tests/brain/numpy/test_random_mtrand.py @@ -2,7 +2,10 @@ # For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE # Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt +from __future__ import annotations + import unittest +from typing import ClassVar try: import numpy # pylint: disable=unused-import @@ -19,7 +22,7 @@ class NumpyBrainRandomMtrandTest(unittest.TestCase): """Test of all the functions of numpy.random.mtrand module.""" # Map between functions names and arguments names and default values - all_mtrand = { + all_mtrand: ClassVar[dict[str, tuple]] = { "beta": (["a", "b", "size"], [None]), "binomial": (["n", "p", "size"], [None]), "bytes": (["length"], []), diff --git a/tests/brain/test_brain.py b/tests/brain/test_brain.py index 632a93284e..eecd7716d3 100644 --- a/tests/brain/test_brain.py +++ b/tests/brain/test_brain.py @@ -15,6 +15,7 @@ from astroid import MANAGER, builder, nodes, objects, test_utils, util from astroid.bases import Instance from astroid.brain.brain_namedtuple_enum import _get_namedtuple_fields +from astroid.const import PY312_PLUS from astroid.exceptions import ( AttributeInferenceError, InferenceError, @@ -186,9 +187,16 @@ def test_builtin_subscriptable(self): def check_metaclass_is_abc(node: nodes.ClassDef): - meta = node.metaclass() - assert isinstance(meta, nodes.ClassDef) - assert meta.name == "ABCMeta" + if PY312_PLUS and node.name == "ByteString": + # .metaclass() finds the first metaclass in the mro(), + # which, from 3.12, is _DeprecateByteStringMeta (unhelpful) + # until ByteString is removed in 3.14. + # Jump over the first two ByteString classes in the mro(). 
+ check_metaclass_is_abc(node.mro()[2]) + else: + meta = node.metaclass() + assert isinstance(meta, nodes.ClassDef) + assert meta.name == "ABCMeta" class CollectionsBrain(unittest.TestCase): @@ -323,7 +331,7 @@ def test_collections_object_not_yet_subscriptable_2(self): @test_utils.require_version(minver="3.9") def test_collections_object_subscriptable_3(self): - """With Python 3.9 the ByteString class of the collections module is subscritable + """With Python 3.9 the ByteString class of the collections module is subscriptable (but not the same class from typing module)""" right_node = builder.extract_node( """ @@ -1779,6 +1787,7 @@ def test(a, b): assert len(inferred) == 1 partial = inferred[0] assert isinstance(partial, objects.PartialFunction) + assert isinstance(partial.as_string(), str) assert isinstance(partial.doc_node, nodes.Const) assert partial.doc_node.value == "Docstring" assert partial.lineno == 3 diff --git a/tests/brain/test_builtin.py b/tests/brain/test_builtin.py index c2a9de9001..cf413f16cd 100644 --- a/tests/brain/test_builtin.py +++ b/tests/brain/test_builtin.py @@ -22,7 +22,7 @@ def getter(): asd = property(getter) #@ """ ) - inferred_property = list(class_with_property.value.infer())[0] + inferred_property = next(iter(class_with_property.value.infer())) self.assertTrue(isinstance(inferred_property, objects.Property)) class_parent = inferred_property.parent.parent.parent self.assertIsInstance(class_parent, nodes.ClassDef) diff --git a/tests/brain/test_dateutil.py b/tests/brain/test_dateutil.py index a31128f34c..68cf640f8a 100644 --- a/tests/brain/test_dateutil.py +++ b/tests/brain/test_dateutil.py @@ -26,4 +26,4 @@ def test_parser(self): """ ) d_type = next(module["d"].infer()) - self.assertEqual(d_type.qname(), "datetime.datetime") + self.assertIn(d_type.qname(), {"_pydatetime.datetime", "datetime.datetime"}) diff --git a/tests/brain/test_enum.py b/tests/brain/test_enum.py index 3ca09f2ebf..910c81f680 100644 --- a/tests/brain/test_enum.py +++ b/tests/brain/test_enum.py @@ -493,3 +493,109 @@ def pear(self): for node in (attribute_nodes[1], name_nodes[1]): with pytest.raises(InferenceError): node.inferred() + + def test_enum_members_uppercase_only(self) -> None: + """Originally reported in https://github.com/pylint-dev/pylint/issues/7402. + ``nodes.AnnAssign`` nodes with no assigned values do not appear inside ``__members__``. + + Test that only enum members `MARS` and `radius` appear in the `__members__` container while + the attribute `mass` does not. + """ + enum_class = astroid.extract_node( + """ + from enum import Enum + class Planet(Enum): #@ + MARS = (1, 2) + radius: int = 1 + mass: int + + def __init__(self, mass, radius): + self.mass = mass + self.radius = radius + + Planet.MARS.value + """ + ) + enum_members = next(enum_class.igetattr("__members__")) + assert len(enum_members.items) == 2 + mars, radius = enum_members.items + assert mars[1].name == "MARS" + assert radius[1].name == "radius" + + def test_local_enum_child_class_inference(self) -> None: + """Originally reported in https://github.com/pylint-dev/pylint/issues/8897 + + Test that a user-defined enum class is inferred when it subclasses + another user-defined enum class. 
+ """ + enum_class_node, enum_member_value_node = astroid.extract_node( + """ + import sys + + from enum import Enum + + if sys.version_info >= (3, 11): + from enum import StrEnum + else: + class StrEnum(str, Enum): + pass + + + class Color(StrEnum): #@ + RED = "red" + + + Color.RED.value #@ + """ + ) + assert "RED" in enum_class_node.locals + + enum_members = enum_class_node.locals["__members__"][0].items + assert len(enum_members) == 1 + _, name = enum_members[0] + assert name.name == "RED" + + inferred_enum_member_value_node = next(enum_member_value_node.infer()) + assert inferred_enum_member_value_node.value == "red" + + def test_enum_with_ignore(self) -> None: + """Exclude ``_ignore_`` from the ``__members__`` container + Originally reported in https://github.com/pylint-dev/pylint/issues/9015 + """ + + ast_node: nodes.Attribute = builder.extract_node( + """ + import enum + + + class MyEnum(enum.Enum): + FOO = enum.auto() + BAR = enum.auto() + _ignore_ = ["BAZ"] + BAZ = 42 + MyEnum.__members__ + """ + ) + inferred = next(ast_node.infer()) + members_names = [const_node.value for const_node, name_obj in inferred.items] + assert members_names == ["FOO", "BAR", "BAZ"] + + def test_enum_sunder_names(self) -> None: + """Test that both `_name_` and `_value_` sunder names exist""" + + sunder_name, sunder_value = builder.extract_node( + """ + import enum + + + class MyEnum(enum.Enum): + APPLE = 42 + MyEnum.APPLE._name_ #@ + MyEnum.APPLE._value_ #@ + """ + ) + inferred_name = next(sunder_name.infer()) + assert inferred_name.value == "APPLE" + + inferred_value = next(sunder_value.infer()) + assert inferred_value.value == 42 diff --git a/tests/brain/test_qt.py b/tests/brain/test_qt.py index 9f778355fb..6e66c630f1 100644 --- a/tests/brain/test_qt.py +++ b/tests/brain/test_qt.py @@ -8,6 +8,7 @@ from astroid import Uninferable, extract_node from astroid.bases import UnboundMethod +from astroid.const import PY312_PLUS from astroid.manager import AstroidManager from astroid.nodes import FunctionDef @@ -15,8 +16,10 @@ @pytest.mark.skipif(HAS_PYQT6 is None, reason="These tests require the PyQt6 library.") +# TODO: enable for Python 3.12 as soon as PyQt6 release is compatible +@pytest.mark.skipif(PY312_PLUS, reason="This test was segfaulting with Python 3.12.") class TestBrainQt: - AstroidManager.brain["extension_package_whitelist"] = {"PyQt6"} + AstroidManager.brain["extension_package_whitelist"] = {"PyQt6"} # noqa: RUF012 @staticmethod def test_value_of_lambda_instance_attrs_is_list(): diff --git a/tests/brain/test_typing.py b/tests/brain/test_typing.py new file mode 100644 index 0000000000..8d75708d6d --- /dev/null +++ b/tests/brain/test_typing.py @@ -0,0 +1,24 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt + +from astroid import builder, nodes + + +def test_infer_typevar() -> None: + """ + Regression test for: https://github.com/pylint-dev/pylint/issues/8802 + + Test that an inferred `typing.TypeVar()` call produces a `nodes.ClassDef` + node. 
+ """ + assign_node = builder.extract_node( + """ + from typing import TypeVar + MyType = TypeVar('My.Type') + """ + ) + call = assign_node.value + inferred = next(call.infer()) + assert isinstance(inferred, nodes.ClassDef) + assert inferred.name == "My.Type" diff --git a/tests/test_group_exceptions.py b/tests/test_group_exceptions.py index ce1f142a53..2ee4143fc7 100644 --- a/tests/test_group_exceptions.py +++ b/tests/test_group_exceptions.py @@ -10,7 +10,7 @@ ExceptHandler, For, Name, - TryExcept, + Try, Uninferable, bases, extract_node, @@ -35,10 +35,9 @@ def test_group_exceptions() -> None: print("Handling TypeError")""" ) ) - assert isinstance(node, TryExcept) + assert isinstance(node, Try) handler = node.handlers[0] - exception_group_block_range = (1, 4) - assert node.block_range(lineno=1) == exception_group_block_range + assert node.block_range(lineno=1) == (1, 9) assert node.block_range(lineno=2) == (2, 2) assert node.block_range(lineno=5) == (5, 9) assert isinstance(handler, ExceptHandler) @@ -47,7 +46,7 @@ def test_group_exceptions() -> None: assert len(children) == 3 exception_group, short_name, for_loop = children assert isinstance(exception_group, Name) - assert exception_group.block_range(1) == exception_group_block_range + assert exception_group.block_range(1) == (1, 4) assert isinstance(short_name, AssignName) assert isinstance(for_loop, For) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index aaf45c7413..1e57ac0777 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -263,4 +263,14 @@ class A(type): #@ def test_uninferable_for_safe_infer() -> None: uninfer = util.Uninferable - assert helpers.safe_infer(util.Uninferable) == uninfer + assert util.safe_infer(util.Uninferable) == uninfer + + +def test_safe_infer_shim() -> None: + with pytest.warns(DeprecationWarning) as records: + helpers.safe_infer(nodes.Unknown()) + + assert ( + "Import safe_infer from astroid.util; this shim in astroid.helpers will be removed." + in records[0].message.args[0] + ) diff --git a/tests/test_inference.py b/tests/test_inference.py index 22b75e9924..ffd78fe035 100644 --- a/tests/test_inference.py +++ b/tests/test_inference.py @@ -6,6 +6,7 @@ from __future__ import annotations +import sys import textwrap import unittest from abc import ABCMeta @@ -21,7 +22,6 @@ Slice, Uninferable, arguments, - helpers, manager, nodes, objects, @@ -32,15 +32,15 @@ from astroid.arguments import CallSite from astroid.bases import BoundMethod, Instance, UnboundMethod, UnionType from astroid.builder import AstroidBuilder, _extract_single_node, extract_node, parse -from astroid.const import IS_PYPY, PY39_PLUS, PY310_PLUS +from astroid.const import IS_PYPY, PY39_PLUS, PY310_PLUS, PY312_PLUS from astroid.context import CallContext, InferenceContext from astroid.exceptions import ( AstroidTypeError, AttributeInferenceError, InferenceError, + NoDefault, NotFoundError, ) -from astroid.inference import infer_end as inference_infer_end from astroid.objects import ExceptionInstance from . 
import resources @@ -59,8 +59,6 @@ def get_node_of_class(start_from: nodes.FunctionDef, klass: type) -> nodes.Attri builder = AstroidBuilder() -EXC_MODULE = "builtins" -BOOL_SPECIAL_METHOD = "__bool__" DATA_DIR = Path(__file__).parent / "testdata" / "python3" / "data" @@ -70,7 +68,7 @@ def infer_default(self: Any, *args: InferenceContext) -> None: raise InferenceError infer_default = decoratorsmod.path_wrapper(infer_default) - infer_end = decoratorsmod.path_wrapper(inference_infer_end) + infer_end = decoratorsmod.path_wrapper(Slice._infer) with self.assertRaises(InferenceError): next(infer_default(1)) self.assertEqual(next(infer_end(1)), 1) @@ -149,6 +147,21 @@ def meth3(self, d=attr): ast = parse(CODE, __name__) + def test_arg_keyword_no_default_value(self): + node = extract_node( + """ + class Sensor: + def __init__(self, *, description): #@ + self._id = description.key + """ + ) + with self.assertRaises(NoDefault): + node.args.default_value("description") + + node = extract_node("def apple(color, *args, name: str, **kwargs): ...") + with self.assertRaises(NoDefault): + node.args.default_value("name") + def test_infer_abstract_property_return_values(self) -> None: module = parse( """ @@ -200,7 +213,7 @@ def test_tupleassign_name_inference(self) -> None: exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, "Exception") - self.assertEqual(exc.root().name, EXC_MODULE) + self.assertEqual(exc.root().name, "builtins") self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast["b"].infer() const = next(inferred) @@ -218,7 +231,7 @@ def test_listassign_name_inference(self) -> None: exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, "Exception") - self.assertEqual(exc.root().name, EXC_MODULE) + self.assertEqual(exc.root().name, "builtins") self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast["e"].infer() const = next(inferred) @@ -269,7 +282,7 @@ def test_swap_assign_inference(self) -> None: exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, "Exception") - self.assertEqual(exc.root().name, EXC_MODULE) + self.assertEqual(exc.root().name, "builtins") self.assertRaises(StopIteration, partial(next, inferred)) def test_getattr_inference1(self) -> None: @@ -277,7 +290,7 @@ def test_getattr_inference1(self) -> None: exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, "Exception") - self.assertEqual(exc.root().name, EXC_MODULE) + self.assertEqual(exc.root().name, "builtins") self.assertRaises(StopIteration, partial(next, inferred)) def test_getattr_inference2(self) -> None: @@ -537,13 +550,13 @@ class Warning(Warning): ancestors = w.ancestors() ancestor = next(ancestors) self.assertEqual(ancestor.name, "Warning") - self.assertEqual(ancestor.root().name, EXC_MODULE) + self.assertEqual(ancestor.root().name, "builtins") ancestor = next(ancestors) self.assertEqual(ancestor.name, "Exception") - self.assertEqual(ancestor.root().name, EXC_MODULE) + self.assertEqual(ancestor.root().name, "builtins") ancestor = next(ancestors) self.assertEqual(ancestor.name, "BaseException") - self.assertEqual(ancestor.root().name, EXC_MODULE) + self.assertEqual(ancestor.root().name, "builtins") ancestor = next(ancestors) self.assertEqual(ancestor.name, "object") self.assertEqual(ancestor.root().name, "builtins") @@ -988,7 +1001,12 @@ def test_import_as(self) -> None: self.assertIsInstance(inferred[0], nodes.Module) self.assertEqual(inferred[0].name, 
"os.path") inferred = list(ast.igetattr("e")) - self.assertEqual(len(inferred), 1) + if PY312_PLUS and sys.platform.startswith("win"): + # There are two os.path.exists exported, likely due to + # https://github.com/python/cpython/pull/101324 + self.assertEqual(len(inferred), 2) + else: + self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], nodes.FunctionDef) self.assertEqual(inferred[0].name, "exists") @@ -1466,6 +1484,13 @@ def get_context_data(self, **kwargs): assert len(results) == 2 assert all(isinstance(result, nodes.Dict) for result in results) + def test_name_repeat_inference(self) -> None: + node = extract_node("print") + context = InferenceContext() + _ = next(node.infer(context=context)) + with pytest.raises(InferenceError): + next(node.infer(context=context)) + def test_python25_no_relative_import(self) -> None: ast = resources.build_file("data/package/absimport.py") self.assertTrue(ast.absolute_import_activated(), True) @@ -2878,12 +2903,12 @@ def true_value(): def test_bool_value_instances(self) -> None: instances = extract_node( - f""" + """ class FalseBoolInstance(object): - def {BOOL_SPECIAL_METHOD}(self): + def __bool__(self): return False class TrueBoolInstance(object): - def {BOOL_SPECIAL_METHOD}(self): + def __bool__(self): return True class FalseLenInstance(object): def __len__(self): @@ -2916,11 +2941,11 @@ class NonMethods(object): def test_bool_value_variable(self) -> None: instance = extract_node( - f""" + """ class VariableBoolInstance(object): def __init__(self, value): self.value = value - def {BOOL_SPECIAL_METHOD}(self): + def __bool__(self): return self.value not VariableBoolInstance(True) @@ -4024,7 +4049,7 @@ def __getitem__(self, name): flow['app']['config']['doffing'] = AttributeDict() #@ """ ) - self.assertIsInstance(helpers.safe_infer(ast_node.targets[0]), Instance) + self.assertIsInstance(util.safe_infer(ast_node.targets[0]), Instance) def test_classmethod_inferred_by_context(self) -> None: ast_node = extract_node( @@ -4845,20 +4870,20 @@ def test_bool(self) -> None: def test_bool_bool_special_method(self) -> None: ast_nodes = extract_node( - f""" + """ class FalseClass: - def {BOOL_SPECIAL_METHOD}(self): + def __bool__(self): return False class TrueClass: - def {BOOL_SPECIAL_METHOD}(self): + def __bool__(self): return True class C(object): def __call__(self): return False class B(object): - {BOOL_SPECIAL_METHOD} = C() + __bool__ = C() class LambdaBoolFalse(object): - {BOOL_SPECIAL_METHOD} = lambda self: self.foo + __bool__ = lambda self: self.foo @property def foo(self): return 0 class FalseBoolLen(object): @@ -4881,9 +4906,9 @@ def foo(self): return 0 def test_bool_instance_not_callable(self) -> None: ast_nodes = extract_node( - f""" + """ class BoolInvalid(object): - {BOOL_SPECIAL_METHOD} = 42 + __bool__ = 42 class LenInvalid(object): __len__ = "a" bool(BoolInvalid()) #@ @@ -4919,9 +4944,12 @@ def __class_getitem__(self, value): """ klass = extract_node(code) context = InferenceContext() + # For this test, we want a fresh inference, rather than a cache hit on + # the inference done at brain time in _is_enum_subclass() + context.lookupname = "Fresh lookup!" 
_ = klass.getitem(0, context=context) - assert list(context.path)[0][0].name == "Parent" + assert next(iter(context.path))[0].name == "Parent" class TestType(unittest.TestCase): @@ -6016,8 +6044,7 @@ def test_exception_lookup_last_except_handler_wins() -> None: assert isinstance(inferred_exc, Instance) assert inferred_exc.name == "OSError" - # Check that two except handlers on the same TryExcept works the same as separate - # TryExcepts + # Two except handlers on the same Try work the same as separate node = extract_node( """ try: @@ -6149,6 +6176,24 @@ class InferMeTwice: assert util.Uninferable not in instance.igetattr("item", context_to_be_used_twice) +@patch("astroid.nodes.Call._infer") +def test_cache_usage_without_explicit_context(mock) -> None: + code = """ + class InferMeTwice: + item = 10 + + InferMeTwice() + """ + call = extract_node(code) + mock.return_value = [Uninferable] + + # no explicit InferenceContext + call.inferred() + call.inferred() + + mock.assert_called_once() + + def test_infer_context_manager_with_unknown_args() -> None: code = """ class client_log(object): @@ -6315,6 +6360,20 @@ def __init__(self, index): assert isinstance(index[0], nodes.AssignAttr) +def test_infer_assign_attr() -> None: + code = """ + class Counter: + def __init__(self): + self.count = 0 + + def increment(self): + self.count += 1 #@ + """ + node = extract_node(code) + inferred = next(node.infer()) + assert inferred.value == 1 + + @pytest.mark.parametrize( "code,instance_name", [ @@ -6493,7 +6552,7 @@ class Foo: inst.a = b """ ) - helpers.safe_infer(node.targets[0]) + util.safe_infer(node.targets[0]) def test_inferaugassign_picking_parent_instead_of_stmt() -> None: @@ -6645,7 +6704,7 @@ class ProxyConfig: """ node = extract_node(code) # Reproduces only with safe_infer() - assert helpers.safe_infer(node) is None + assert util.safe_infer(node) is None @pytest.mark.skipif( @@ -6668,7 +6727,7 @@ class ProxyConfig: replace(a, **test_dict['proxy']) # This fails """ node = extract_node(code) - infer_val = helpers.safe_infer(node) + infer_val = util.safe_infer(node) assert isinstance(infer_val, Instance) assert infer_val.pytype() == ".ProxyConfig" @@ -7182,3 +7241,18 @@ def test_old_style_string_formatting_with_specs(self) -> None: inferred = next(node.infer()) assert isinstance(inferred, nodes.Const) assert inferred.value == "My name is Daniel, I'm 12.00" + + +def test_sys_argv_uninferable() -> None: + """Regression test for https://github.com/pylint-dev/pylint/issues/7710.""" + a: nodes.List = extract_node( + textwrap.dedent( + """ + import sys + + sys.argv""" + ) + ) + sys_argv_value = list(a._infer()) + assert len(sys_argv_value) == 1 + assert sys_argv_value[0] is Uninferable diff --git a/tests/test_lookup.py b/tests/test_lookup.py index 475516b60f..a19f287637 100644 --- a/tests/test_lookup.py +++ b/tests/test_lookup.py @@ -370,9 +370,9 @@ def initialize(linter): ''', "data.__init__", ) - path = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "__path__"][ - 0 - ] + path = next( + n for n in astroid.nodes_of_class(nodes.Name) if n.name == "__path__" + ) self.assertEqual(len(path.lookup("__path__")[1]), 1) def test_builtin_lookup(self) -> None: @@ -477,7 +477,7 @@ def test_consecutive_assign(self) -> None: print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) 
self.assertEqual(stmts[0].lineno, 3) @@ -489,7 +489,7 @@ def test_assign_after_use(self) -> None: x = 10 """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 0) @@ -501,7 +501,7 @@ def test_del_removes_prior(self) -> None: print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 0) @@ -514,7 +514,7 @@ def test_del_no_effect_after(self) -> None: print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 4) @@ -531,7 +531,7 @@ def f(b): print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 2) self.assertCountEqual([stmt.lineno for stmt in stmts], [3, 5]) @@ -549,7 +549,7 @@ def f(b): print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 2) self.assertCountEqual([stmt.lineno for stmt in stmts], [3, 6]) @@ -571,7 +571,7 @@ def f(b): print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 4) self.assertCountEqual([stmt.lineno for stmt in stmts], [3, 6, 8, 10]) @@ -594,7 +594,7 @@ def f(b): print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 3) @@ -618,7 +618,7 @@ def f(b): x = 5 """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 10) @@ -640,7 +640,7 @@ def f(b): print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 3) self.assertCountEqual([stmt.lineno for stmt in stmts], [3, 5, 7]) @@ -710,7 +710,7 @@ def f(b): print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 9) @@ -730,7 +730,7 @@ def f(b): print(x) """ astroid = builder.parse(code) - 
x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 3) @@ -829,14 +829,14 @@ def f(*args, **kwargs): print(args, kwargs) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "args"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "args") _, stmts1 = x_name.lookup("args") self.assertEqual(len(stmts1), 1) self.assertEqual(stmts1[0].lineno, 3) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "kwargs"][ - 0 - ] + x_name = next( + n for n in astroid.nodes_of_class(nodes.Name) if n.name == "kwargs" + ) _, stmts2 = x_name.lookup("kwargs") self.assertEqual(len(stmts2), 1) self.assertEqual(stmts2[0].lineno, 4) @@ -852,7 +852,7 @@ def test_except_var_in_block(self) -> None: print(e) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e") _, stmts = x_name.lookup("e") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 4) @@ -870,7 +870,7 @@ def test_except_var_in_block_overwrites(self) -> None: print(e) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e") _, stmts = x_name.lookup("e") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 5) @@ -912,7 +912,7 @@ def test_except_var_after_block_single(self) -> None: print(e) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e") _, stmts = x_name.lookup("e") self.assertEqual(len(stmts), 0) @@ -930,7 +930,7 @@ def test_except_var_after_block_multiple(self) -> None: print(e) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "e") _, stmts = x_name.lookup("e") self.assertEqual(len(stmts), 0) @@ -946,7 +946,7 @@ def test_except_assign_in_block(self) -> None: print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 5) @@ -965,7 +965,7 @@ def test_except_assign_in_block_multiple(self) -> None: print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 7) @@ -984,7 +984,7 @@ def test_except_assign_after_block(self) -> None: print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 2) self.assertCountEqual([stmt.lineno for stmt in stmts], [5, 7]) @@ -1004,7 +1004,7 @@ def test_except_assign_after_block_overwritten(self) -> 
None: print(x) """ astroid = builder.parse(code) - x_name = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x"][0] + x_name = next(n for n in astroid.nodes_of_class(nodes.Name) if n.name == "x") _, stmts = x_name.lookup("x") self.assertEqual(len(stmts), 1) self.assertEqual(stmts[0].lineno, 8) diff --git a/tests/test_manager.py b/tests/test_manager.py index 56b09945ba..a55fae1932 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -137,7 +137,7 @@ def test_module_is_not_namespace(self) -> None: self.assertFalse(util.is_namespace("tests.testdata.python3.data.all")) self.assertFalse(util.is_namespace("__main__")) self.assertFalse( - util.is_namespace(list(EXT_LIB_DIRS)[0].rsplit("/", maxsplit=1)[-1]), + util.is_namespace(next(iter(EXT_LIB_DIRS)).rsplit("/", maxsplit=1)[-1]), ) self.assertFalse(util.is_namespace("importlib._bootstrap")) @@ -405,11 +405,24 @@ def test_borg(self) -> None: second_built = second_manager.ast_from_module_name("builtins") self.assertIs(built, second_built) + def test_max_inferable_values(self) -> None: + mgr = manager.AstroidManager() + original_limit = mgr.max_inferable_values + + def reset_limit(): + nonlocal original_limit + manager.AstroidManager().max_inferable_values = original_limit + + self.addCleanup(reset_limit) + + mgr.max_inferable_values = 4 + self.assertEqual(manager.AstroidManager.brain["max_inferable_values"], 4) + class ClearCacheTest(unittest.TestCase): def test_clear_cache_clears_other_lru_caches(self) -> None: lrus = ( - astroid.nodes.node_classes.LookupMixIn.lookup, + astroid.nodes._base_nodes.LookupMixIn.lookup, astroid.modutils._cache_normalize_path_, util.is_namespace, astroid.interpreter.objectmodel.ObjectModel.attributes, diff --git a/tests/test_nodes.py b/tests/test_nodes.py index 2a34d50455..6ea25fd846 100644 --- a/tests/test_nodes.py +++ b/tests/test_nodes.py @@ -22,13 +22,14 @@ Uninferable, bases, builder, + extract_node, nodes, parse, test_utils, transforms, util, ) -from astroid.const import PY310_PLUS, Context +from astroid.const import PY310_PLUS, PY312_PLUS, Context from astroid.context import InferenceContext from astroid.exceptions import ( AstroidBuildingError, @@ -279,6 +280,33 @@ def test_as_string_unknown() -> None: assert nodes.Unknown(lineno=1, col_offset=0).as_string() == "Unknown.Unknown()" +@pytest.mark.skipif(not PY312_PLUS, reason="Uses 3.12 type param nodes") +class AsStringTypeParamNodes(unittest.TestCase): + @staticmethod + def test_as_string_type_alias() -> None: + ast = abuilder.string_build("type Point = tuple[float, float]") + type_alias = ast.body[0] + assert type_alias.as_string().strip() == "Point" + + @staticmethod + def test_as_string_type_var() -> None: + ast = abuilder.string_build("type Point[T] = tuple[float, float]") + type_var = ast.body[0].type_params[0] + assert type_var.as_string().strip() == "T" + + @staticmethod + def test_as_string_type_var_tuple() -> None: + ast = abuilder.string_build("type Alias[*Ts] = tuple[*Ts]") + type_var_tuple = ast.body[0].type_params[0] + assert type_var_tuple.as_string().strip() == "*Ts" + + @staticmethod + def test_as_string_param_spec() -> None: + ast = abuilder.string_build("type Alias[**P] = Callable[P, int]") + param_spec = ast.body[0].type_params[0] + assert param_spec.as_string().strip() == "P" + + class _NodeTest(unittest.TestCase): """Test transformation of If Node.""" @@ -341,6 +369,35 @@ def test_block_range(self) -> None: self.assertEqual(self.astroid.body[1].orelse[0].block_range(8), (8, 8)) +class TryNodeTest(_NodeTest): + CODE 
= """ + try: # L2 + print("Hello") + except IOError: + pass + except UnicodeError: + pass + else: + print() + finally: + print() + """ + + def test_block_range(self) -> None: + try_node = self.astroid.body[0] + assert try_node.block_range(1) == (1, 11) + assert try_node.block_range(2) == (2, 2) + assert try_node.block_range(3) == (3, 3) + assert try_node.block_range(4) == (4, 4) + assert try_node.block_range(5) == (5, 5) + assert try_node.block_range(6) == (6, 6) + assert try_node.block_range(7) == (7, 7) + assert try_node.block_range(8) == (8, 8) + assert try_node.block_range(9) == (9, 9) + assert try_node.block_range(10) == (10, 10) + assert try_node.block_range(11) == (11, 11) + + class TryExceptNodeTest(_NodeTest): CODE = """ try: @@ -355,14 +412,15 @@ class TryExceptNodeTest(_NodeTest): def test_block_range(self) -> None: # XXX ensure expected values - self.assertEqual(self.astroid.body[0].block_range(1), (1, 8)) + self.assertEqual(self.astroid.body[0].block_range(1), (1, 9)) self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) - self.assertEqual(self.astroid.body[0].block_range(3), (3, 8)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 3)) self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) self.assertEqual(self.astroid.body[0].block_range(7), (7, 7)) self.assertEqual(self.astroid.body[0].block_range(8), (8, 8)) + self.assertEqual(self.astroid.body[0].block_range(9), (9, 9)) class TryFinallyNodeTest(_NodeTest): @@ -375,10 +433,11 @@ class TryFinallyNodeTest(_NodeTest): def test_block_range(self) -> None: # XXX ensure expected values - self.assertEqual(self.astroid.body[0].block_range(1), (1, 4)) + self.assertEqual(self.astroid.body[0].block_range(1), (1, 5)) self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) - self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 3)) self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) + self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) class TryExceptFinallyNodeTest(_NodeTest): @@ -393,12 +452,13 @@ class TryExceptFinallyNodeTest(_NodeTest): def test_block_range(self) -> None: # XXX ensure expected values - self.assertEqual(self.astroid.body[0].block_range(1), (1, 6)) + self.assertEqual(self.astroid.body[0].block_range(1), (1, 7)) self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) - self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 3)) self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) + self.assertEqual(self.astroid.body[0].block_range(7), (7, 7)) class ImportNodeTest(resources.SysPathSetup, unittest.TestCase): @@ -1889,8 +1949,7 @@ def return_from_match(x): [ node for node in astroid.nodes.ALL_NODE_CLASSES - if node.__name__ - not in ["_BaseContainer", "BaseContainer", "NodeNG", "const_factory"] + if node.__name__ not in ["BaseContainer", "NodeNG", "const_factory"] ], ) @pytest.mark.filterwarnings("error") @@ -1904,7 +1963,10 @@ def test_str_repr_no_warnings(node): if "int" in param_type.annotation: args[name] = random.randint(0, 50) - elif "NodeNG" in param_type.annotation: + elif ( + "NodeNG" in param_type.annotation + or "SuccessfulInferenceResult" in param_type.annotation + ): 
args[name] = nodes.Unknown() elif "str" in param_type.annotation: args[name] = "" @@ -1914,3 +1976,38 @@ def test_str_repr_no_warnings(node): test_node = node(**args) str(test_node) repr(test_node) + + +def test_arguments_contains_all(): + """Ensure Arguments.arguments actually returns all available arguments""" + + def manually_get_args(arg_node) -> set: + names = set() + if arg_node.args.vararg: + names.add(arg_node.args.vararg) + if arg_node.args.kwarg: + names.add(arg_node.args.kwarg) + + names.update([x.name for x in arg_node.args.args]) + names.update([x.name for x in arg_node.args.kwonlyargs]) + + return names + + node = extract_node("""def a(fruit: str, *args, b=None, c=None, **kwargs): ...""") + assert manually_get_args(node) == {x.name for x in node.args.arguments} + + node = extract_node("""def a(mango: int, b="banana", c=None, **kwargs): ...""") + assert manually_get_args(node) == {x.name for x in node.args.arguments} + + node = extract_node("""def a(self, num = 10, *args): ...""") + assert manually_get_args(node) == {x.name for x in node.args.arguments} + + +def test_arguments_default_value(): + node = extract_node( + "def fruit(eat='please', *, peel='no', trim='yes', **kwargs): ..." + ) + assert node.args.default_value("eat").value == "please" + + node = extract_node("def fruit(seeds, flavor='good', *, peel='maybe'): ...") + assert node.args.default_value("flavor").value == "good" diff --git a/tests/test_nodes_lineno.py b/tests/test_nodes_lineno.py index 126655df52..b0cdb9850b 100644 --- a/tests/test_nodes_lineno.py +++ b/tests/test_nodes_lineno.py @@ -8,7 +8,7 @@ import astroid from astroid import builder, nodes -from astroid.const import IS_PYPY, PY38, PY39_PLUS, PY310_PLUS +from astroid.const import IS_PYPY, PY38, PY39_PLUS, PY310_PLUS, PY312_PLUS @pytest.mark.skipif( @@ -737,7 +737,7 @@ def test_end_lineno_dict() -> None: @staticmethod def test_end_lineno_try() -> None: - """TryExcept, TryFinally, ExceptHandler.""" + """Try, ExceptHandler.""" code = textwrap.dedent( """ try: #@ @@ -763,7 +763,7 @@ def test_end_lineno_try() -> None: assert isinstance(ast_nodes, list) and len(ast_nodes) == 2 t1 = ast_nodes[0] - assert isinstance(t1, nodes.TryExcept) + assert isinstance(t1, nodes.Try) assert isinstance(t1.body[0], nodes.Pass) assert isinstance(t1.orelse[0], nodes.Pass) assert (t1.lineno, t1.col_offset) == (1, 0) @@ -789,13 +789,12 @@ def test_end_lineno_try() -> None: assert (t2.body[0].end_lineno, t2.body[0].end_col_offset) == (4, 8) t3 = ast_nodes[1] - assert isinstance(t3, nodes.TryFinally) - assert isinstance(t3.body[0], nodes.TryExcept) + assert isinstance(t3, nodes.Try) assert isinstance(t3.finalbody[0], nodes.Pass) assert (t3.lineno, t3.col_offset) == (10, 0) assert (t3.end_lineno, t3.end_col_offset) == (17, 8) - assert (t3.body[0].lineno, t3.body[0].col_offset) == (10, 0) - assert (t3.body[0].end_lineno, t3.body[0].end_col_offset) == (15, 8) + assert (t3.body[0].lineno, t3.body[0].col_offset) == (11, 4) + assert (t3.body[0].end_lineno, t3.body[0].end_col_offset) == (11, 8) assert (t3.finalbody[0].lineno, t3.finalbody[0].col_offset) == (17, 4) assert (t3.finalbody[0].end_lineno, t3.finalbody[0].end_col_offset) == (17, 8) @@ -977,13 +976,24 @@ def test_end_lineno_string() -> None: assert isinstance(s1.values[0], nodes.Const) assert (s1.lineno, s1.col_offset) == (1, 0) assert (s1.end_lineno, s1.end_col_offset) == (1, 29) - assert (s1.values[0].lineno, s1.values[0].col_offset) == (1, 0) - assert (s1.values[0].end_lineno, s1.values[0].end_col_offset) == (1, 29) + if 
PY312_PLUS: + assert (s1.values[0].lineno, s1.values[0].col_offset) == (1, 2) + assert (s1.values[0].end_lineno, s1.values[0].end_col_offset) == (1, 15) + else: + # Bug in Python 3.11 + # https://github.com/python/cpython/issues/81639 + assert (s1.values[0].lineno, s1.values[0].col_offset) == (1, 0) + assert (s1.values[0].end_lineno, s1.values[0].end_col_offset) == (1, 29) s2 = s1.values[1] assert isinstance(s2, nodes.FormattedValue) - assert (s2.lineno, s2.col_offset) == (1, 0) - assert (s2.end_lineno, s2.end_col_offset) == (1, 29) + if PY312_PLUS: + assert (s2.lineno, s2.col_offset) == (1, 15) + assert (s2.end_lineno, s2.end_col_offset) == (1, 28) + else: + assert (s2.lineno, s2.col_offset) == (1, 0) + assert (s2.end_lineno, s2.end_col_offset) == (1, 29) + assert isinstance(s2.value, nodes.Const) # 42.1234 if PY39_PLUS: assert (s2.value.lineno, s2.value.col_offset) == (1, 16) @@ -993,22 +1003,35 @@ def test_end_lineno_string() -> None: # https://bugs.python.org/issue44885 assert (s2.value.lineno, s2.value.col_offset) == (1, 1) assert (s2.value.end_lineno, s2.value.end_col_offset) == (1, 8) - assert isinstance(s2.format_spec, nodes.JoinedStr) # '02d' - assert (s2.format_spec.lineno, s2.format_spec.col_offset) == (1, 0) - assert (s2.format_spec.end_lineno, s2.format_spec.end_col_offset) == (1, 29) + assert isinstance(s2.format_spec, nodes.JoinedStr) # ':02d' + if PY312_PLUS: + assert (s2.format_spec.lineno, s2.format_spec.col_offset) == (1, 23) + assert (s2.format_spec.end_lineno, s2.format_spec.end_col_offset) == (1, 27) + else: + assert (s2.format_spec.lineno, s2.format_spec.col_offset) == (1, 0) + assert (s2.format_spec.end_lineno, s2.format_spec.end_col_offset) == (1, 29) s3 = ast_nodes[1] assert isinstance(s3, nodes.JoinedStr) assert isinstance(s3.values[0], nodes.Const) assert (s3.lineno, s3.col_offset) == (2, 0) assert (s3.end_lineno, s3.end_col_offset) == (2, 17) - assert (s3.values[0].lineno, s3.values[0].col_offset) == (2, 0) - assert (s3.values[0].end_lineno, s3.values[0].end_col_offset) == (2, 17) + if PY312_PLUS: + assert (s3.values[0].lineno, s3.values[0].col_offset) == (2, 2) + assert (s3.values[0].end_lineno, s3.values[0].end_col_offset) == (2, 15) + else: + assert (s3.values[0].lineno, s3.values[0].col_offset) == (2, 0) + assert (s3.values[0].end_lineno, s3.values[0].end_col_offset) == (2, 17) s4 = s3.values[1] assert isinstance(s4, nodes.FormattedValue) - assert (s4.lineno, s4.col_offset) == (2, 0) - assert (s4.end_lineno, s4.end_col_offset) == (2, 17) + if PY312_PLUS: + assert (s4.lineno, s4.col_offset) == (2, 9) + assert (s4.end_lineno, s4.end_col_offset) == (2, 16) + else: + assert (s4.lineno, s4.col_offset) == (2, 0) + assert (s4.end_lineno, s4.end_col_offset) == (2, 17) + assert isinstance(s4.value, nodes.Name) # 'name' if PY39_PLUS: assert (s4.value.lineno, s4.value.col_offset) == (2, 10) diff --git a/tests/test_object_model.py b/tests/test_object_model.py index 3acb17af74..530b9c351a 100644 --- a/tests/test_object_model.py +++ b/tests/test_object_model.py @@ -589,6 +589,10 @@ def test(a: 1, b: 2, /, c: 3): pass self.assertEqual(annotations.getitem(astroid.Const("b")).value, 2) self.assertEqual(annotations.getitem(astroid.Const("c")).value, 3) + def test_is_not_lambda(self): + ast_node = builder.extract_node("def func(): pass") + self.assertIs(ast_node.is_lambda, False) + class TestContextManagerModel: def test_model(self) -> None: @@ -829,13 +833,13 @@ def test_str_argument_not_required(self) -> None: assert not args.elts -class 
LruCacheModelTest(unittest.TestCase): - def test_lru_cache(self) -> None: - ast_nodes = builder.extract_node( - """ +@pytest.mark.parametrize("parentheses", (True, False)) +def test_lru_cache(parentheses) -> None: + ast_nodes = builder.extract_node( + f""" import functools class Foo(object): - @functools.lru_cache() + @functools.lru_cache{"()" if parentheses else ""} def foo(): pass f = Foo() @@ -843,12 +847,12 @@ def foo(): f.foo.__wrapped__ #@ f.foo.cache_info() #@ """ - ) - assert isinstance(ast_nodes, list) - cache_clear = next(ast_nodes[0].infer()) - self.assertIsInstance(cache_clear, astroid.BoundMethod) - wrapped = next(ast_nodes[1].infer()) - self.assertIsInstance(wrapped, astroid.FunctionDef) - self.assertEqual(wrapped.name, "foo") - cache_info = next(ast_nodes[2].infer()) - self.assertIsInstance(cache_info, astroid.Instance) + ) + assert isinstance(ast_nodes, list) + cache_clear = next(ast_nodes[0].infer()) + assert isinstance(cache_clear, astroid.BoundMethod) + wrapped = next(ast_nodes[1].infer()) + assert isinstance(wrapped, astroid.FunctionDef) + assert wrapped.name == "foo" + cache_info = next(ast_nodes[2].infer()) + assert isinstance(cache_info, astroid.Instance) diff --git a/tests/test_protocols.py b/tests/test_protocols.py index d24659ba4f..8c318252b3 100644 --- a/tests/test_protocols.py +++ b/tests/test_protocols.py @@ -279,6 +279,13 @@ def test_uninferable_exponents() -> None: parsed = extract_node("None ** 2") assert parsed.inferred() == [Uninferable] + @staticmethod + def test_uninferable_list_multiplication() -> None: + """Attempting to calculate the result is prohibitively expensive.""" + parsed = extract_node("[0] * 123456789") + element = parsed.inferred()[0].elts[0] + assert element.value is Uninferable + def test_named_expr_inference() -> None: code = """ diff --git a/tests/test_raw_building.py b/tests/test_raw_building.py index 093e003cc0..d206022b8f 100644 --- a/tests/test_raw_building.py +++ b/tests/test_raw_building.py @@ -24,7 +24,7 @@ import tests.testdata.python3.data.fake_module_with_broken_getattr as fm_getattr import tests.testdata.python3.data.fake_module_with_warnings as fm from astroid.builder import AstroidBuilder -from astroid.const import IS_PYPY +from astroid.const import IS_PYPY, PY312_PLUS from astroid.raw_building import ( attach_dummy_node, build_class, @@ -86,7 +86,7 @@ def test_build_from_import(self) -> None: @unittest.skipIf(IS_PYPY, "Only affects CPython") def test_io_is__io(self): - # _io module calls itself io. This leads + # _io module calls itself io before Python 3.12. This leads # to cyclic dependencies when astroid tries to resolve # what io.BufferedReader is. 
The code that handles this # is in astroid.raw_building.imported_member, which verifies @@ -94,7 +94,8 @@ def test_io_is__io(self): builder = AstroidBuilder() module = builder.inspect_build(_io) buffered_reader = module.getattr("BufferedReader")[0] - self.assertEqual(buffered_reader.root().name, "io") + expected = "_io" if PY312_PLUS else "io" + self.assertEqual(buffered_reader.root().name, expected) def test_build_function_deepinspect_deprecation(self) -> None: # Tests https://github.com/pylint-dev/astroid/issues/1717 diff --git a/tests/test_regrtest.py b/tests/test_regrtest.py index f525451a2e..67ccca630f 100644 --- a/tests/test_regrtest.py +++ b/tests/test_regrtest.py @@ -143,7 +143,7 @@ def run(): classes = astroid.nodes_of_class(nodes.ClassDef) for klass in classes: # triggers the _is_metaclass call - klass.type # pylint: disable=pointless-statement # noqa[B018] + klass.type # pylint: disable=pointless-statement # noqa: B018 def test_decorator_callchain_issue42(self) -> None: builder = AstroidBuilder() @@ -283,7 +283,7 @@ def method(self): "With unicode : {'’'} " instance = MyClass() - """ # noqa[RUF001] + """ ) next(node.value.infer()).as_string() diff --git a/tests/test_scoped_nodes.py b/tests/test_scoped_nodes.py index 462494e853..1bc5af78b6 100644 --- a/tests/test_scoped_nodes.py +++ b/tests/test_scoped_nodes.py @@ -8,13 +8,14 @@ from __future__ import annotations -import datetime +import difflib import os import sys import textwrap import unittest from functools import partial from typing import Any +from unittest.mock import patch import pytest @@ -29,8 +30,9 @@ util, ) from astroid.bases import BoundMethod, Generator, Instance, UnboundMethod -from astroid.const import IS_PYPY, PY38 +from astroid.const import IS_PYPY, PY38, WIN32 from astroid.exceptions import ( + AstroidBuildingError, AttributeInferenceError, DuplicateBasesError, InconsistentMroError, @@ -244,6 +246,19 @@ def test_import_2(self) -> None: finally: del sys.path[0] + @patch( + "astroid.nodes.scoped_nodes.scoped_nodes.AstroidManager.ast_from_module_name" + ) + def test_import_unavailable_module(self, mock) -> None: + unavailable_modname = "posixpath" if WIN32 else "ntpath" + module = builder.parse(f"import {unavailable_modname}") + mock.side_effect = AstroidBuildingError + + with pytest.raises(AstroidBuildingError): + module.import_module(unavailable_modname) + + mock.assert_called_once() + def test_file_stream_in_memory(self) -> None: data = """irrelevant_variable is irrelevant""" astroid = builder.parse(data, "in_memory") @@ -262,7 +277,7 @@ def test_file_stream_api(self) -> None: file_build = builder.AstroidBuilder().file_build(path, "all") with self.assertRaises(AttributeError): # pylint: disable=pointless-statement, no-member - file_build.file_stream # noqa[B018] + file_build.file_stream # noqa: B018 def test_stream_api(self) -> None: path = resources.find("data/all.py") @@ -479,7 +494,7 @@ def f(): g = lambda: None """ astroid = builder.parse(data) - g = list(astroid["f"].ilookup("g"))[0] + g = next(iter(astroid["f"].ilookup("g"))) self.assertEqual(g.pytype(), "builtins.function") def test_lambda_qname(self) -> None: @@ -2141,8 +2156,8 @@ class ParentGetattr(Getattr): # Test that objects analyzed through the live introspection # aren't considered to have dynamic getattr implemented. 
astroid_builder = builder.AstroidBuilder() - module = astroid_builder.module_build(datetime) - self.assertFalse(module["timedelta"].has_dynamic_getattr()) + module = astroid_builder.module_build(difflib) + self.assertFalse(module["SequenceMatcher"].has_dynamic_getattr()) def test_duplicate_bases_namedtuple(self) -> None: module = builder.parse( diff --git a/tests/test_type_params.py b/tests/test_type_params.py new file mode 100644 index 0000000000..afc38b14bc --- /dev/null +++ b/tests/test_type_params.py @@ -0,0 +1,73 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt + +import pytest + +from astroid import extract_node +from astroid.const import PY312_PLUS +from astroid.nodes import ( + AssignName, + ParamSpec, + Subscript, + TypeAlias, + TypeVar, + TypeVarTuple, +) + +if not PY312_PLUS: + pytest.skip("Requires Python 3.12 or higher", allow_module_level=True) + + +def test_type_alias() -> None: + node = extract_node("type Point[T] = list[float, float]") + assert isinstance(node, TypeAlias) + assert isinstance(node.type_params[0], TypeVar) + assert isinstance(node.type_params[0].name, AssignName) + assert node.type_params[0].name.name == "T" + assert node.type_params[0].bound is None + + assert isinstance(node.value, Subscript) + assert node.value.value.name == "list" + assert node.value.slice.name == "tuple" + assert all(elt.name == "float" for elt in node.value.slice.elts) + + assert node.inferred()[0] is node + assert node.type_params[0].inferred()[0] is node.type_params[0] + + assert node.statement() is node + + assigned = next(node.assigned_stmts()) + assert assigned is node.value + + +def test_type_param_spec() -> None: + node = extract_node("type Alias[**P] = Callable[P, int]") + params = node.type_params[0] + assert isinstance(params, ParamSpec) + assert isinstance(params.name, AssignName) + assert params.name.name == "P" + + assert node.inferred()[0] is node + + +def test_type_var_tuple() -> None: + node = extract_node("type Alias[*Ts] = tuple[*Ts]") + params = node.type_params[0] + assert isinstance(params, TypeVarTuple) + assert isinstance(params.name, AssignName) + assert params.name.name == "Ts" + + assert node.inferred()[0] is node + + +def test_type_param() -> None: + func_node = extract_node("def func[T]() -> T: ...") + assert isinstance(func_node.type_params[0], TypeVar) + assert func_node.type_params[0].name.name == "T" + assert func_node.type_params[0].bound is None + + class_node = extract_node("class MyClass[T]: ...") + assert isinstance(class_node.type_params[0], TypeVar) + assert class_node.type_params[0].name.name == "T" + assert class_node.type_params[0].bound is None