1 Commits

Author SHA1 Message Date
Joffrey Bluthé
e6913a4c6b Merge 0315e6ee8f into ce109a8cdf 2024-12-28 15:43:21 +00:00
40 changed files with 210 additions and 1024 deletions

View File

@@ -6,9 +6,9 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-24.04, windows-2022]
python-version: ["3.13", "3.12", "3.11", "3.10", "3.9", "3.8"]
environment: ['3.8', '3.13', '3.12', '3.11', '3.10', '3.9', 'interpreter']
os: [ubuntu-20.04, windows-2019]
python-version: ["3.13", "3.12", "3.11", "3.10", "3.9", "3.8", "3.7", "3.6"]
environment: ['3.8', '3.13', '3.12', '3.11', '3.10', '3.9', '3.7', '3.6', 'interpreter']
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -35,7 +35,7 @@ jobs:
JEDI_TEST_ENVIRONMENT: ${{ matrix.environment }}
code-quality:
runs-on: ubuntu-24.04
runs-on: ubuntu-20.04
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -51,7 +51,7 @@ jobs:
python -m mypy jedi sith.py setup.py
coverage:
runs-on: ubuntu-24.04
runs-on: ubuntu-20.04
steps:
- name: Checkout code

View File

@@ -10,9 +10,6 @@ python:
submodules:
include: all
sphinx:
configuration: docs/conf.py
build:
os: ubuntu-22.04
tools:

View File

@@ -2,9 +2,6 @@
Jedi - an awesome autocompletion, static analysis and refactoring library for Python
####################################################################################
**I released the successor to Jedi: A
Mypy-Compatible Python Language Server Built in Rust** - `Zuban <https://github.com/zubanls/zuban>`_
.. image:: http://isitmaintained.com/badge/open/davidhalter/jedi.svg
:target: https://github.com/davidhalter/jedi/issues
:alt: The percentage of open issues and pull requests
@@ -13,7 +10,7 @@ Mypy-Compatible Python Language Server Built in Rust** - `Zuban <https://github.
:target: https://github.com/davidhalter/jedi/issues
:alt: The resolution time is the median time an issue or pull request stays open.
.. image:: https://github.com/davidhalter/jedi/actions/workflows/ci.yml/badge.svg?branch=master
.. image:: https://github.com/davidhalter/jedi/workflows/ci/badge.svg?branch=master
:target: https://github.com/davidhalter/jedi/actions
:alt: Tests
@@ -102,7 +99,7 @@ Features and Limitations
Jedi's features are listed here:
`Features <https://jedi.readthedocs.org/en/latest/docs/features.html>`_.
You can run Jedi on Python 3.8+ but it should also
You can run Jedi on Python 3.6+ but it should also
understand code that is older than those versions. Additionally you should be
able to use `Virtualenvs <https://jedi.readthedocs.org/en/latest/docs/api.html#environments>`_
very well.

View File

@@ -156,14 +156,6 @@ def jedi_path():
return os.path.dirname(__file__)
@pytest.fixture()
def skip_pre_python311(environment):
if environment.version_info < (3, 11):
# This if is just needed to avoid that tests ever skip way more than
# they should for all Python versions.
pytest.skip()
@pytest.fixture()
def skip_pre_python38(environment):
if environment.version_info < (3, 8):

View File

@@ -16,7 +16,7 @@ Jedi's main API calls and features are:
Basic Features
--------------
- Python 3.8+ support
- Python 3.6+ support
- Ignores syntax errors and wrong indentation
- Can deal with complex module / function / class structures
- Great ``virtualenv``/``venv`` support

View File

@@ -38,7 +38,7 @@ using pip::
If you want to install the current development version (master branch)::
sudo pip install -e git+https://github.com/davidhalter/jedi.git#egg=jedi
sudo pip install -e git://github.com/davidhalter/jedi.git#egg=jedi
System-wide installation via a package manager

View File

@@ -483,7 +483,7 @@ class Script:
module_context = self._get_module_context()
n = leaf.search_ancestor('funcdef', 'classdef')
n = tree.search_ancestor(leaf, 'funcdef', 'classdef')
if n is not None and n.start_pos < pos <= n.children[-1].start_pos:
# This is a bit of a special case. The context of a function/class
# name/param/keyword is always it's parent context, not the

View File

@@ -17,6 +17,8 @@ import re
from pathlib import Path
from typing import Optional
from parso.tree import search_ancestor
from jedi import settings
from jedi import debug
from jedi.inference.utils import unite
@@ -507,7 +509,7 @@ class BaseName:
# - param: The parent_context of a param is not its function but
# e.g. the outer class or module.
cls_or_func_node = self._name.tree_name.get_definition()
parent = cls_or_func_node.search_ancestor('funcdef', 'classdef', 'file_input')
parent = search_ancestor(cls_or_func_node, 'funcdef', 'classdef', 'file_input')
context = self._get_module_context().create_value(parent).as_context()
else:
context = self._name.parent_context

View File

@@ -4,7 +4,7 @@ from inspect import Parameter
from parso.python.token import PythonTokenTypes
from parso.python import tree
from parso.tree import Leaf
from parso.tree import search_ancestor, Leaf
from parso import split_lines
from jedi import debug
@@ -244,8 +244,8 @@ class Completion:
if previous_leaf is not None:
stmt = previous_leaf
while True:
stmt = stmt.search_ancestor(
'if_stmt', 'for_stmt', 'while_stmt', 'try_stmt',
stmt = search_ancestor(
stmt, 'if_stmt', 'for_stmt', 'while_stmt', 'try_stmt',
'error_node',
)
if stmt is None:
@@ -356,7 +356,7 @@ class Completion:
stack_node = self.stack[-3]
if stack_node.nonterminal == 'funcdef':
context = get_user_context(self._module_context, self._position)
node = leaf.search_ancestor('error_node', 'funcdef')
node = search_ancestor(leaf, 'error_node', 'funcdef')
if node is not None:
if node.type == 'error_node':
n = node.children[0]
@@ -426,7 +426,7 @@ class Completion:
Autocomplete inherited methods when overriding in child class.
"""
leaf = self._module_node.get_leaf_for_position(self._position, include_prefixes=True)
cls = leaf.search_ancestor('classdef')
cls = tree.search_ancestor(leaf, 'classdef')
if cls is None:
return

View File

@@ -22,7 +22,7 @@ if TYPE_CHECKING:
_VersionInfo = namedtuple('VersionInfo', 'major minor micro') # type: ignore[name-match]
_SUPPORTED_PYTHONS = ['3.13', '3.12', '3.11', '3.10', '3.9', '3.8']
_SUPPORTED_PYTHONS = ['3.13', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7', '3.6']
_SAFE_PATHS = ['/usr/bin', '/usr/local/bin']
_CONDA_VAR = 'CONDA_PREFIX'
_CURRENT_VERSION = '%s.%s' % (sys.version_info.major, sys.version_info.minor)

View File

@@ -28,7 +28,7 @@ def clear_time_caches(delete_all: bool = False) -> None:
:param delete_all: Deletes also the cache that is normally not deleted,
like parser cache, which is important for faster parsing.
"""
global _time_caches # noqa: F824
global _time_caches
if delete_all:
for cache in _time_caches.values():

View File

@@ -21,7 +21,7 @@ try:
raise ImportError
else:
# Use colorama for nicer console output.
from colorama import Fore, init # type: ignore[import, unused-ignore]
from colorama import Fore, init # type: ignore[import]
from colorama import initialise
def _lazy_colorama_init(): # noqa: F811

View File

@@ -122,14 +122,14 @@ class InferenceState:
return value_set
# mypy doesn't support decorated properties (https://github.com/python/mypy/issues/1362)
@property
@property # type: ignore[misc]
@inference_state_function_cache()
def builtins_module(self):
module_name = 'builtins'
builtins_module, = self.import_module((module_name,), sys_path=[])
return builtins_module
@property
@property # type: ignore[misc]
@inference_state_function_cache()
def typing_module(self):
typing_module, = self.import_module(('typing',))

View File

@@ -184,7 +184,7 @@ class DirectObjectAccess:
def py__file__(self) -> Optional[Path]:
try:
return Path(self._obj.__file__)
except (AttributeError, TypeError):
except AttributeError:
return None
def py__doc__(self):

View File

@@ -3,6 +3,10 @@ import sys
from importlib.abc import MetaPathFinder
from importlib.machinery import PathFinder
# Remove the first entry, because it's simply a directory entry that equals
# this directory.
del sys.path[0]
def _get_paths():
# Get the path to jedi.

View File

@@ -3,6 +3,7 @@ from contextlib import contextmanager
from pathlib import Path
from typing import Optional
from parso.tree import search_ancestor
from parso.python.tree import Name
from jedi.inference.filters import ParserTreeFilter, MergedFilter, \
@@ -289,7 +290,7 @@ class TreeContextMixin:
def create_name(self, tree_name):
definition = tree_name.get_definition()
if definition and definition.type == 'param' and definition.name == tree_name:
funcdef = definition.search_ancestor('funcdef', 'lambdef')
funcdef = search_ancestor(definition, 'funcdef', 'lambdef')
func = self.create_value(funcdef)
return AnonymousParamName(func, tree_name)
else:
@@ -415,13 +416,13 @@ def _get_global_filters_for_name(context, name_or_none, position):
# function and get inferred in the value before the function. So
# make sure to exclude the function/class name.
if name_or_none is not None:
ancestor = name_or_none.search_ancestor('funcdef', 'classdef', 'lambdef')
ancestor = search_ancestor(name_or_none, 'funcdef', 'classdef', 'lambdef')
lambdef = None
if ancestor == 'lambdef':
# For lambdas it's even more complicated since parts will
# be inferred later.
lambdef = ancestor
ancestor = name_or_none.search_ancestor('funcdef', 'classdef')
ancestor = search_ancestor(name_or_none, 'funcdef', 'classdef')
if ancestor is not None:
colon = ancestor.children[-2]
if position is not None and position < colon.start_pos:

View File

@@ -48,7 +48,7 @@ def _get_numpy_doc_string_cls():
global _numpy_doc_string_cache
if isinstance(_numpy_doc_string_cache, (ImportError, SyntaxError)):
raise _numpy_doc_string_cache
from numpydoc.docscrape import NumpyDocString # type: ignore[import, unused-ignore]
from numpydoc.docscrape import NumpyDocString # type: ignore[import]
_numpy_doc_string_cache = NumpyDocString
return _numpy_doc_string_cache
@@ -109,7 +109,7 @@ def _expand_typestr(type_str):
yield type_str.split('of')[0]
# Check if type has is a set of valid literal values eg: {'C', 'F', 'A'}
elif type_str.startswith('{'):
node = parse(type_str, version='3.13').children[0]
node = parse(type_str, version='3.7').children[0]
if node.type == 'atom':
for leaf in getattr(node.children[1], "children", []):
if leaf.type == 'number':

View File

@@ -6,6 +6,7 @@ from abc import abstractmethod
from typing import List, MutableMapping, Type
import weakref
from parso.tree import search_ancestor
from parso.python.tree import Name, UsedNamesMapping
from jedi.inference import flow_analysis
@@ -180,7 +181,7 @@ class _FunctionExecutionFilter(ParserTreeFilter):
@to_list
def _convert_names(self, names):
for name in names:
param = name.search_ancestor('param')
param = search_ancestor(name, 'param')
# Here we don't need to check if the param is a default/annotation,
# because those are not definitions and never make it to this
# point.

View File

@@ -15,6 +15,7 @@ Unfortunately every other thing is being ignored (e.g. a == '' would be easy to
check for -> a is a string). There's big potential in these checks.
"""
from parso.tree import search_ancestor
from parso.python.tree import Name
from jedi import settings
@@ -75,7 +76,7 @@ def check_flow_information(value, flow, search_name, pos):
])
for name in names:
ass = name.search_ancestor('assert_stmt')
ass = search_ancestor(name, 'assert_stmt')
if ass is not None:
result = _check_isinstance_type(value, ass.assertion, search_name)
if result is not None:

View File

@@ -12,6 +12,7 @@ import os
from pathlib import Path
from parso.python import tree
from parso.tree import search_ancestor
from jedi import debug
from jedi import settings
@@ -94,7 +95,7 @@ def goto_import(context, tree_name):
def _prepare_infer_import(module_context, tree_name):
import_node = tree_name.search_ancestor('import_name', 'import_from')
import_node = search_ancestor(tree_name, 'import_name', 'import_from')
import_path = import_node.get_path_for_name(tree_name)
from_import_name = None
try:
@@ -479,7 +480,7 @@ def _load_builtin_module(inference_state, import_names=None, sys_path=None):
if sys_path is None:
sys_path = inference_state.get_sys_path()
if not project._load_unsafe_extensions:
safe_paths = set(project._get_base_sys_path(inference_state))
safe_paths = project._get_base_sys_path(inference_state)
sys_path = [p for p in sys_path if p in safe_paths]
dotted_name = '.'.join(import_names)
@@ -548,7 +549,7 @@ def load_namespace_from_path(inference_state, folder_io):
def follow_error_node_imports_if_possible(context, name):
error_node = name.search_ancestor('error_node')
error_node = tree.search_ancestor(name, 'error_node')
if error_node is not None:
# Get the first command start of a started simple_stmt. The error
# node is sometimes a small_stmt and sometimes a simple_stmt. Check

View File

@@ -2,6 +2,8 @@ from abc import abstractmethod
from inspect import Parameter
from typing import Optional, Tuple
from parso.tree import search_ancestor
from jedi.parser_utils import find_statement_documentation, clean_scope_docstring
from jedi.inference.utils import unite
from jedi.inference.base_value import ValueSet, NO_VALUES
@@ -110,7 +112,7 @@ class AbstractTreeName(AbstractNameDefinition):
self.tree_name = tree_name
def get_qualified_names(self, include_module_names=False):
import_node = self.tree_name.search_ancestor('import_name', 'import_from')
import_node = search_ancestor(self.tree_name, 'import_name', 'import_from')
# For import nodes we cannot just have names, because it's very unclear
# how they would look like. For now we just ignore them in most cases.
# In case of level == 1, it works always, because it's like a submodule
@@ -203,13 +205,15 @@ class AbstractTreeName(AbstractNameDefinition):
values = infer_call_of_leaf(context, name, cut_own_trailer=True)
return values.goto(name, name_context=context)
else:
stmt = name.search_ancestor('expr_stmt', 'lambdef') or name
stmt = search_ancestor(
name, 'expr_stmt', 'lambdef'
) or name
if stmt.type == 'lambdef':
stmt = name
return context.goto(name, position=stmt.start_pos)
def is_import(self):
imp = self.tree_name.search_ancestor('import_from', 'import_name')
imp = search_ancestor(self.tree_name, 'import_from', 'import_name')
return imp is not None
@property
@@ -447,7 +451,7 @@ class _ActualTreeParamName(BaseTreeParamName):
self.function_value = function_value
def _get_param_node(self):
return self.tree_name.search_ancestor('param')
return search_ancestor(self.tree_name, 'param')
@property
def annotation_node(self):

View File

@@ -12,12 +12,15 @@ The signature here for bar should be `bar(b, c)` instead of bar(*args).
"""
from inspect import Parameter
from parso import tree
from jedi.inference.utils import to_list
from jedi.inference.names import ParamNameWrapper
from jedi.inference.helpers import is_big_annoying_library
def _iter_nodes_for_param(param_name):
from parso.python.tree import search_ancestor
from jedi.inference.arguments import TreeArguments
execution_context = param_name.parent_context
@@ -25,7 +28,7 @@ def _iter_nodes_for_param(param_name):
# tree rather than going via the execution context so that we're agnostic of
# the specific scope we're evaluating within (i.e: module or function,
# etc.).
function_node = param_name.tree_name.search_ancestor('funcdef', 'lambdef')
function_node = tree.search_ancestor(param_name.tree_name, 'funcdef', 'lambdef')
module_node = function_node.get_root_node()
start = function_node.children[-1].start_pos
end = function_node.children[-1].end_pos
@@ -35,7 +38,7 @@ def _iter_nodes_for_param(param_name):
argument = name.parent
if argument.type == 'argument' \
and argument.children[0] == '*' * param_name.star_count:
trailer = argument.search_ancestor('trailer')
trailer = search_ancestor(argument, 'trailer')
if trailer is not None: # Make sure we're in a function
context = execution_context.create_context(trailer)
if _goes_to_param_name(param_name, context, name):

View File

@@ -251,8 +251,6 @@ def _infer_node(context, element):
return NO_VALUES
elif typ == 'namedexpr_test':
return context.infer_node(element.children[2])
elif typ == 'star_expr':
return NO_VALUES
else:
return infer_or_test(context, element)
@@ -290,7 +288,7 @@ def infer_atom(context, atom):
state = context.inference_state
if atom.type == 'name':
# This is the first global lookup.
stmt = atom.search_ancestor('expr_stmt', 'lambdef', 'if_stmt') or atom
stmt = tree.search_ancestor(atom, 'expr_stmt', 'lambdef', 'if_stmt') or atom
if stmt.type == 'if_stmt':
if not any(n.start_pos <= atom.start_pos < n.end_pos for n in stmt.get_test_nodes()):
stmt = atom
@@ -436,7 +434,7 @@ def _infer_expr_stmt(context, stmt, seek_name=None):
else:
operator = copy.copy(first_operator)
operator.value = operator.value[:-1]
for_stmt = stmt.search_ancestor('for_stmt')
for_stmt = tree.search_ancestor(stmt, 'for_stmt')
if for_stmt is not None and for_stmt.type == 'for_stmt' and value_set \
and parser_utils.for_stmt_defines_one_name(for_stmt):
# Iterate through result and add the values, that's possible
@@ -549,7 +547,7 @@ def _infer_comparison(context, left_values, operator, right_values):
def _is_annotation_name(name):
ancestor = name.search_ancestor('param', 'funcdef', 'expr_stmt')
ancestor = tree.search_ancestor(name, 'param', 'funcdef', 'expr_stmt')
if ancestor is None:
return False

View File

@@ -1,3 +1,5 @@
from parso.python import tree
from jedi import debug
from jedi.inference.cache import inference_state_method_cache, CachedMetaClass
from jedi.inference import compiled
@@ -260,8 +262,8 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
@recursion.execution_recursion_decorator(default=iter([]))
def get_yield_lazy_values(self, is_async=False):
# TODO: if is_async, wrap yield statements in Awaitable/async_generator_asend
for_parents = [(y, y.search_ancestor('for_stmt', 'funcdef',
'while_stmt', 'if_stmt'))
for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef',
'while_stmt', 'if_stmt'))
for y in get_yield_exprs(self.inference_state, self.tree_node)]
# Calculate if the yields are placed within the same for loop.

View File

@@ -1,5 +1,7 @@
from abc import abstractproperty
from parso.tree import search_ancestor
from jedi import debug
from jedi import settings
from jedi.inference import compiled
@@ -227,7 +229,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
new = node
while True:
func_node = new
new = new.search_ancestor('funcdef', 'classdef')
new = search_ancestor(new, 'funcdef', 'classdef')
if class_context.tree_node is new:
func = FunctionValue.from_context(class_context, func_node)
bound_method = BoundMethod(self, class_context, func)
@@ -496,7 +498,7 @@ class SelfName(TreeNameDefinition):
return self._instance
def infer(self):
stmt = self.tree_name.search_ancestor('expr_stmt')
stmt = search_ancestor(self.tree_name, 'expr_stmt')
if stmt is not None:
if stmt.children[1].type == "annassign":
from jedi.inference.gradual.annotation import infer_annotation

View File

@@ -36,10 +36,6 @@ py__doc__() Returns the docstring for a value.
====================================== ========================================
"""
from __future__ import annotations
from typing import List, Optional, Tuple
from jedi import debug
from jedi.parser_utils import get_cached_parent_scope, expr_is_dotted, \
function_is_property
@@ -51,15 +47,11 @@ from jedi.inference.filters import ParserTreeFilter
from jedi.inference.names import TreeNameDefinition, ValueName
from jedi.inference.arguments import unpack_arglist, ValuesArguments
from jedi.inference.base_value import ValueSet, iterator_to_value_set, \
NO_VALUES, ValueWrapper
NO_VALUES
from jedi.inference.context import ClassContext
from jedi.inference.value.function import FunctionAndClassBase, FunctionMixin
from jedi.inference.value.decorator import Decoratee
from jedi.inference.value.function import FunctionAndClassBase
from jedi.inference.gradual.generics import LazyGenericManager, TupleGenericManager
from jedi.plugins import plugin_manager
from inspect import Parameter
from jedi.inference.names import BaseTreeParamName
from jedi.inference.signature import AbstractSignature
class ClassName(TreeNameDefinition):
@@ -137,65 +129,6 @@ class ClassFilter(ParserTreeFilter):
return [name for name in names if self._access_possible(name)]
def init_param_value(arg_nodes) -> Optional[bool]:
"""
Returns:
- ``True`` if ``@dataclass(init=True)``
- ``False`` if ``@dataclass(init=False)``
- ``None`` if not specified ``@dataclass()``
"""
for arg_node in arg_nodes:
if (
arg_node.type == "argument"
and arg_node.children[0].value == "init"
):
if arg_node.children[2].value == "False":
return False
elif arg_node.children[2].value == "True":
return True
return None
def get_dataclass_param_names(cls) -> List[DataclassParamName]:
"""
``cls`` is a :class:`ClassMixin`. The type is only documented as mypy would
complain that some fields are missing.
.. code:: python
@dataclass
class A:
a: int
b: str = "toto"
For the previous example, the param names would be ``a`` and ``b``.
"""
param_names = []
filter_ = cls.as_context().get_global_filter()
for name in sorted(filter_.values(), key=lambda name: name.start_pos):
d = name.tree_name.get_definition()
annassign = d.children[1]
if d.type == 'expr_stmt' and annassign.type == 'annassign':
node = annassign.children[1]
if node.type == "atom_expr" and node.children[0].value == "ClassVar":
continue
if len(annassign.children) < 4:
default = None
else:
default = annassign.children[3]
param_names.append(DataclassParamName(
parent_context=cls.parent_context,
tree_name=name.tree_name,
annotation_node=annassign.children[1],
default_node=default,
))
return param_names
class ClassMixin:
def is_class(self):
return True
@@ -288,73 +221,6 @@ class ClassMixin:
assert x is not None
yield x
def _has_dataclass_transform_metaclasses(self) -> Tuple[bool, Optional[bool]]:
for meta in self.get_metaclasses(): # type: ignore[attr-defined]
if (
isinstance(meta, Decoratee)
# Internal leakage :|
and isinstance(meta._wrapped_value, DataclassTransformer)
):
return True, meta._wrapped_value.init_mode_from_new()
return False, None
def _get_dataclass_transform_signatures(self) -> List[DataclassSignature]:
"""
Returns: A non-empty list if the class has dataclass semantics else an
empty list.
The dataclass-like semantics will be assumed for any class that directly
or indirectly derives from the decorated class or uses the decorated
class as a metaclass.
"""
param_names = []
is_dataclass_transform = False
default_init_mode: Optional[bool] = None
for cls in reversed(list(self.py__mro__())):
if not is_dataclass_transform:
# If dataclass_transform is applied to a class, dataclass-like semantics
# will be assumed for any class that directly or indirectly derives from
# the decorated class or uses the decorated class as a metaclass.
if (
isinstance(cls, DataclassTransformer)
and cls.init_mode_from_init_subclass
):
is_dataclass_transform = True
default_init_mode = cls.init_mode_from_init_subclass
elif (
# Some object like CompiledValues would not be compatible
isinstance(cls, ClassMixin)
):
is_dataclass_transform, default_init_mode = (
cls._has_dataclass_transform_metaclasses()
)
# Attributes on the decorated class and its base classes are not
# considered to be fields.
if is_dataclass_transform:
continue
# All inherited classes behave like dataclass semantics
if (
is_dataclass_transform
and isinstance(cls, ClassValue)
and (
cls.init_param_mode()
or (cls.init_param_mode() is None and default_init_mode)
)
):
param_names.extend(
get_dataclass_param_names(cls)
)
if is_dataclass_transform:
return [DataclassSignature(cls, param_names)]
else:
return []
def get_signatures(self):
# Since calling staticmethod without a function is illegal, the Jedi
# plugin doesn't return anything. Therefore call directly and get what
@@ -366,12 +232,7 @@ class ClassMixin:
return sigs
args = ValuesArguments([])
init_funcs = self.py__call__(args).py__getattribute__('__init__')
dataclass_sigs = self._get_dataclass_transform_signatures()
if dataclass_sigs:
return dataclass_sigs
else:
return [sig.bind(self) for sig in init_funcs.get_signatures()]
return [sig.bind(self) for sig in init_funcs.get_signatures()]
def _as_context(self):
return ClassContext(self)
@@ -458,158 +319,6 @@ class ClassMixin:
return ValueSet({self})
class DataclassParamName(BaseTreeParamName):
"""
Represent a field declaration on a class with dataclass semantics.
"""
def __init__(self, parent_context, tree_name, annotation_node, default_node):
super().__init__(parent_context, tree_name)
self.annotation_node = annotation_node
self.default_node = default_node
def get_kind(self):
return Parameter.POSITIONAL_OR_KEYWORD
def infer(self):
if self.annotation_node is None:
return NO_VALUES
else:
return self.parent_context.infer_node(self.annotation_node)
class DataclassSignature(AbstractSignature):
"""
It represents the ``__init__`` signature of a class with dataclass semantics.
.. code:: python
"""
def __init__(self, value, param_names):
super().__init__(value)
self._param_names = param_names
def get_param_names(self, resolve_stars=False):
return self._param_names
class DataclassDecorator(ValueWrapper, FunctionMixin):
"""
A dataclass(-like) decorator with custom parameters.
.. code:: python
@dataclass(init=True) # this
class A: ...
@dataclass_transform
def create_model(*, init=False): pass
@create_model(init=False) # or this
class B: ...
"""
def __init__(self, function, arguments, default_init: bool = True):
"""
Args:
function: Decoratee | function
arguments: The parameters to the dataclass function decorator
default_init: Boolean to indicate the default init value
"""
super().__init__(function)
argument_init = self._init_param_value(arguments)
self.init_param_mode = (
argument_init if argument_init is not None else default_init
)
def _init_param_value(self, arguments) -> Optional[bool]:
if not arguments.argument_node:
return None
arg_nodes = (
arguments.argument_node.children
if arguments.argument_node.type == "arglist"
else [arguments.argument_node]
)
return init_param_value(arg_nodes)
class DataclassTransformer(ValueWrapper, ClassMixin):
"""
A class decorated with the ``dataclass_transform`` decorator. dataclass-like
semantics will be assumed for any class that directly or indirectly derives
from the decorated class or uses the decorated class as a metaclass.
Attributes on the decorated class and its base classes are not considered to
be fields.
"""
def __init__(self, wrapped_value):
super().__init__(wrapped_value)
def init_mode_from_new(self) -> bool:
"""Default value if missing is ``True``"""
new_methods = self._wrapped_value.py__getattribute__("__new__")
if not new_methods:
return True
new_method = list(new_methods)[0]
for param in new_method.get_param_names():
if (
param.string_name == "init"
and param.default_node
and param.default_node.type == "keyword"
):
if param.default_node.value == "False":
return False
elif param.default_node.value == "True":
return True
return True
@property
def init_mode_from_init_subclass(self) -> Optional[bool]:
# def __init_subclass__(cls) -> None: ... is hardcoded in the typeshed
# so the extra parameters can not be inferred.
return True
class DataclassWrapper(ValueWrapper, ClassMixin):
"""
A class with dataclass semantics from a decorator. The init parameters are
only from the current class and parent classes decorated where the ``init``
parameter was ``True``.
.. code:: python
@dataclass
class A: ... # this
@dataclass_transform
def create_model(): pass
@create_model()
class B: ... # or this
"""
def __init__(
self, wrapped_value, should_generate_init: bool
):
super().__init__(wrapped_value)
self.should_generate_init = should_generate_init
def get_signatures(self):
param_names = []
for cls in reversed(list(self.py__mro__())):
if (
isinstance(cls, DataclassWrapper)
and cls.should_generate_init
):
param_names.extend(get_dataclass_param_names(cls))
return [DataclassSignature(cls, param_names)]
class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
api_type = 'class'
@@ -676,19 +385,6 @@ class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
return values
return NO_VALUES
def init_param_mode(self) -> Optional[bool]:
"""
It returns ``True`` if ``class X(init=False):`` else ``False``.
"""
bases_arguments = self._get_bases_arguments()
if bases_arguments.argument_node.type != "arglist":
# If it is not inheriting from the base model and having
# extra parameters, then init behavior is not changed.
return None
return init_param_value(bases_arguments.argument_node.children)
@plugin_manager.decorate()
def get_metaclass_signatures(self, metaclasses):
return []

View File

@@ -80,7 +80,7 @@ class ModuleMixin(SubModuleDictMixin):
def is_stub(self):
return False
@property
@property # type: ignore[misc]
@inference_state_method_cache()
def name(self):
return self._module_name_class(self, self.string_names[-1])
@@ -138,7 +138,7 @@ class ModuleValue(ModuleMixin, TreeValue):
api_type = 'module'
def __init__(self, inference_state, module_node, code_lines, file_io=None,
string_names=None, is_package=False) -> None:
string_names=None, is_package=False):
super().__init__(
inference_state,
parent_context=None,
@@ -149,7 +149,7 @@ class ModuleValue(ModuleMixin, TreeValue):
self._path: Optional[Path] = None
else:
self._path = file_io.path
self.string_names: Optional[tuple[str, ...]] = string_names
self.string_names = string_names # Optional[Tuple[str, ...]]
self.code_lines = code_lines
self._is_package = is_package

View File

@@ -38,7 +38,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
def get_qualified_names(self):
return ()
@property
@property # type: ignore[misc]
@inference_state_method_cache()
def name(self):
string_name = self.py__package__()[-1]

View File

@@ -2,6 +2,7 @@ import sys
from typing import List
from pathlib import Path
from parso.tree import search_ancestor
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.imports import goto_import, load_module_from_path
from jedi.inference.filters import ParserTreeFilter
@@ -119,7 +120,7 @@ def _is_a_pytest_param_and_inherited(param_name):
This is a heuristic and will work in most cases.
"""
funcdef = param_name.tree_name.search_ancestor('funcdef')
funcdef = search_ancestor(param_name.tree_name, 'funcdef')
if funcdef is None: # A lambda
return False, False
decorators = funcdef.get_decorators()

View File

@@ -11,6 +11,7 @@ compiled module that returns the types for C-builtins.
"""
import parso
import os
from inspect import Parameter
from jedi import debug
from jedi.inference.utils import safe_property
@@ -24,20 +25,15 @@ from jedi.inference.value.instance import \
from jedi.inference.base_value import ContextualizedNode, \
NO_VALUES, ValueSet, ValueWrapper, LazyValueWrapper
from jedi.inference.value import ClassValue, ModuleValue
from jedi.inference.value.decorator import Decoratee
from jedi.inference.value.klass import (
DataclassWrapper,
DataclassDecorator,
DataclassTransformer,
)
from jedi.inference.value.klass import ClassMixin
from jedi.inference.value.function import FunctionMixin
from jedi.inference.value import iterable
from jedi.inference.lazy_value import LazyTreeValue, LazyKnownValue, \
LazyKnownValues
from jedi.inference.names import ValueName
from jedi.inference.names import ValueName, BaseTreeParamName
from jedi.inference.filters import AttributeOverwrite, publish_method, \
ParserTreeFilter, DictFilter
from jedi.inference.signature import SignatureWrapper
from jedi.inference.signature import AbstractSignature, SignatureWrapper
# Copied from Python 3.6's stdlib.
@@ -595,103 +591,65 @@ def _random_choice(sequences):
def _dataclass(value, arguments, callback):
"""
Decorator entry points for dataclass.
1. dataclass decorator declaration with parameters
2. dataclass semantics on a class from a dataclass(-like) decorator
"""
for c in _follow_param(value.inference_state, arguments, 0):
if c.is_class():
# Declare dataclass semantics on a class from a dataclass decorator
should_generate_init = (
# Customized decorator, init may be disabled
value.init_param_mode
if isinstance(value, DataclassDecorator)
# Bare dataclass decorator, always with init mode
else True
)
return ValueSet([DataclassWrapper(c, should_generate_init)])
return ValueSet([DataclassWrapper(c)])
else:
# @dataclass(init=False)
# dataclass decorator customization
return ValueSet(
[
DataclassDecorator(
value,
arguments=arguments,
default_init=True,
)
]
)
return NO_VALUES
def _dataclass_transform(value, arguments, callback):
"""
Decorator entry points for dataclass_transform.
1. dataclass-like decorator instantiation from a dataclass_transform decorator
2. dataclass_transform decorator declaration with parameters
3. dataclass-like decorator declaration with parameters
4. dataclass-like semantics on a class from a dataclass-like decorator
"""
for c in _follow_param(value.inference_state, arguments, 0):
if c.is_class():
is_dataclass_transform = (
value.name.string_name == "dataclass_transform"
# The decorator function from dataclass_transform acting as the
# dataclass decorator.
and not isinstance(value, Decoratee)
# The decorator function from dataclass_transform acting as the
# dataclass decorator with customized parameters
and not isinstance(value, DataclassDecorator)
)
if is_dataclass_transform:
# Declare base class
return ValueSet([DataclassTransformer(c)])
else:
# Declare dataclass-like semantics on a class from a
# dataclass-like decorator
should_generate_init = value.init_param_mode
return ValueSet([DataclassWrapper(c, should_generate_init)])
elif c.is_function():
# dataclass-like decorator instantiation:
# @dataclass_transform
# def create_model()
return ValueSet(
[
DataclassDecorator(
value,
arguments=arguments,
default_init=True,
)
]
)
elif (
# @dataclass_transform
# def create_model(): pass
# @create_model(init=...)
isinstance(value, Decoratee)
):
# dataclass (or like) decorator customization
return ValueSet(
[
DataclassDecorator(
value,
arguments=arguments,
default_init=value._wrapped_value.init_param_mode,
)
]
)
else:
# dataclass_transform decorator with parameters; nothing impactful
return ValueSet([value])
return NO_VALUES
class DataclassWrapper(ValueWrapper, ClassMixin):
def get_signatures(self):
param_names = []
for cls in reversed(list(self.py__mro__())):
if isinstance(cls, DataclassWrapper):
filter_ = cls.as_context().get_global_filter()
# .values ordering is not guaranteed, at least not in
# Python < 3.6, when dicts where not ordered, which is an
# implementation detail anyway.
for name in sorted(filter_.values(), key=lambda name: name.start_pos):
d = name.tree_name.get_definition()
annassign = d.children[1]
if d.type == 'expr_stmt' and annassign.type == 'annassign':
if len(annassign.children) < 4:
default = None
else:
default = annassign.children[3]
param_names.append(DataclassParamName(
parent_context=cls.parent_context,
tree_name=name.tree_name,
annotation_node=annassign.children[1],
default_node=default,
))
return [DataclassSignature(cls, param_names)]
class DataclassSignature(AbstractSignature):
def __init__(self, value, param_names):
super().__init__(value)
self._param_names = param_names
def get_param_names(self, resolve_stars=False):
return self._param_names
class DataclassParamName(BaseTreeParamName):
def __init__(self, parent_context, tree_name, annotation_node, default_node):
super().__init__(parent_context, tree_name)
self.annotation_node = annotation_node
self.default_node = default_node
def get_kind(self):
return Parameter.POSITIONAL_OR_KEYWORD
def infer(self):
if self.annotation_node is None:
return NO_VALUES
else:
return self.parent_context.infer_node(self.annotation_node)
class ItemGetterCallable(ValueWrapper):
def __init__(self, instance, args_value_set):
super().__init__(instance)
@@ -840,17 +798,22 @@ _implemented = {
# runtime_checkable doesn't really change anything and is just
# adding logs for infering stuff, so we can safely ignore it.
'runtime_checkable': lambda value, arguments, callback: NO_VALUES,
# Python 3.11+
'dataclass_transform': _dataclass_transform,
},
'typing_extensions': {
# Python <3.11
'dataclass_transform': _dataclass_transform,
},
'dataclasses': {
# For now this works at least better than Jedi trying to understand it.
'dataclass': _dataclass
},
# attrs exposes declaration interface roughly compatible with dataclasses
# via attrs.define, attrs.frozen and attrs.mutable
# https://www.attrs.org/en/stable/names.html
'attr': {
'define': _dataclass,
'frozen': _dataclass,
},
'attrs': {
'define': _dataclass,
'frozen': _dataclass,
},
'os.path': {
'dirname': _create_string_input_function(os.path.dirname),
'abspath': _create_string_input_function(os.path.abspath),

View File

@@ -1,32 +0,0 @@
[tool.mypy]
# Exclude our copies of external stubs
exclude = "^jedi/third_party"
show_error_codes = true
enable_error_code = "ignore-without-code"
# Ensure generics are explicit about what they are (e.g: `List[str]` rather than
# just `List`)
disallow_any_generics = true
disallow_subclassing_any = true
# Avoid creating future gotchas emerging from bad typing
warn_redundant_casts = true
warn_unused_ignores = true
warn_return_any = true
warn_unused_configs = true
warn_unreachable = true
# Require values to be explicitly re-exported; this makes things easier for
# Flake8 too and avoids accidentally importing thing from the "wrong" place
# (which helps avoid circular imports)
implicit_reexport = false
strict_equality = true
[[tool.mypy.overrides]]
# Various __init__.py files which contain re-exports we want to implicitly make.
module = ["jedi", "jedi.inference.compiled", "jedi.inference.value", "parso"]
implicit_reexport = true

View File

@@ -31,3 +31,36 @@ exclude =
[pycodestyle]
max-line-length = 100
[mypy]
# Exclude our copies of external stubs
exclude = ^jedi/third_party
show_error_codes = true
enable_error_code = ignore-without-code
# Ensure generics are explicit about what they are (e.g: `List[str]` rather than
# just `List`)
disallow_any_generics = True
disallow_subclassing_any = True
# Avoid creating future gotchas emerging from bad typing
warn_redundant_casts = True
warn_unused_ignores = True
warn_return_any = True
warn_unused_configs = True
warn_unreachable = True
# Require values to be explicitly re-exported; this makes things easier for
# Flake8 too and avoids accidentally importing thing from the "wrong" place
# (which helps avoid circular imports)
implicit_reexport = False
strict_equality = True
[mypy-jedi,jedi.inference.compiled,jedi.inference.value,parso]
# Various __init__.py files which contain re-exports we want to implicitly make.
implicit_reexport = True

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python
from typing import cast
from setuptools import setup, find_packages
from setuptools.depends import get_module_constant
@@ -10,7 +9,7 @@ __AUTHOR__ = 'David Halter'
__AUTHOR_EMAIL__ = 'davidhalter88@gmail.com'
# Get the version from within jedi. It's defined in exactly one place now.
version = cast(str, get_module_constant("jedi", "__version__"))
version = get_module_constant("jedi", "__version__")
readme = open('README.rst').read() + '\n\n' + open('CHANGELOG.rst').read()
@@ -35,9 +34,9 @@ setup(name='jedi',
keywords='python completion refactoring vim',
long_description=readme,
packages=find_packages(exclude=['test', 'test.*']),
python_requires='>=3.8',
python_requires='>=3.6',
# Python 3.13 grammars are added to parso in 0.8.4
install_requires=['parso>=0.8.5,<0.9.0'],
install_requires=['parso>=0.8.4,<0.9.0'],
extras_require={
'testing': [
'pytest<9.0.0',
@@ -47,15 +46,14 @@ setup(name='jedi',
'colorama',
'Django',
'attrs',
'typing_extensions',
],
'qa': [
# latest version on 2025-06-16
'flake8==7.2.0',
# latest version supporting Python 3.6
'mypy==1.16',
'flake8==5.0.4',
# latest version supporting Python 3.6
'mypy==0.971',
# Arbitrary pins, latest at the time of pinning
'types-setuptools==80.9.0.20250529',
'types-setuptools==67.2.0.1',
],
'docs': [
# Just pin all of these.
@@ -96,6 +94,8 @@ setup(name='jedi',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',

View File

@@ -44,7 +44,7 @@ Options:
--pudb Launch pudb when error is raised.
"""
from docopt import docopt # type: ignore[import, unused-ignore]
from docopt import docopt # type: ignore[import]
import json
import os

View File

@@ -527,11 +527,3 @@ lc = [x for a, *x in [(1, '', 1.0)]]
lc[0][0]
#?
lc[0][1]
xy = (1,)
x, y = *xy, None
# whatever it is should not crash
#?
x

View File

@@ -134,7 +134,7 @@ TEST_GOTO = 2
TEST_REFERENCES = 3
grammar313 = parso.load_grammar(version='3.13')
grammar36 = parso.load_grammar(version='3.6')
class BaseTestCase(object):
@@ -238,7 +238,7 @@ class IntegrationTestCase(BaseTestCase):
should_be = set()
for match in re.finditer('(?:[^ ]+)', correct):
string = match.group(0)
parser = grammar313.parse(string, start_symbol='eval_input', error_recovery=False)
parser = grammar36.parse(string, start_symbol='eval_input', error_recovery=False)
parser_utils.move(parser.get_root_node(), self.line_nr)
node = parser.get_root_node()
module_context = script._get_module_context()
@@ -504,7 +504,7 @@ if __name__ == '__main__':
if arguments['--env']:
environment = get_system_environment(arguments['--env'])
else:
# Will be 3.13.
# Will be 3.6.
environment = get_default_environment()
import traceback

View File

@@ -26,7 +26,7 @@ def test_find_system_environments():
@pytest.mark.parametrize(
'version',
['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
['3.6', '3.7', '3.8', '3.9']
)
def test_versions(version):
try:

View File

@@ -16,13 +16,13 @@ def test_on_code():
assert i.infer()
def test_generics_without_definition() -> None:
def test_generics_without_definition():
# Used to raise a recursion error
T = TypeVar('T')
class Stack(Generic[T]):
def __init__(self) -> None:
self.items: List[T] = []
def __init__(self):
self.items = [] # type: List[T]
def push(self, item):
self.items.append(item)

View File

@@ -318,511 +318,40 @@ def test_wraps_signature(Script, code, signature):
@pytest.mark.parametrize(
"start, start_params, include_params",
[
["@dataclass\nclass X:", [], True],
["@dataclass(eq=True)\nclass X:", [], True],
[
dedent(
"""
'start, start_params', [
['@dataclass\nclass X:', []],
['@dataclass(eq=True)\nclass X:', []],
[dedent('''
class Y():
y: int
@dataclass
class X(Y):"""
),
[],
True,
],
[
dedent(
"""
class X(Y):'''), []],
[dedent('''
@dataclass
class Y():
y: int
z = 5
@dataclass
class X(Y):"""
),
["y"],
True,
],
[
dedent(
"""
@dataclass
class Y():
y: int
class Z(Y): # Not included
z = 5
@dataclass
class X(Z):"""
),
["y"],
True,
],
# init=False
[
dedent(
"""
@dataclass(init=False)
class X:"""
),
[],
False,
],
[
dedent(
"""
@dataclass(eq=True, init=False)
class X:"""
),
[],
False,
],
# custom init
[
dedent(
"""
@dataclass()
class X:
def __init__(self, toto: str):
pass
"""
),
["toto"],
False,
],
],
ids=[
"direct_transformed",
"transformed_with_params",
"subclass_transformed",
"both_transformed",
"intermediate_not_transformed",
"init_false",
"init_false_multiple",
"custom_init",
],
class X(Y):'''), ['y']],
]
)
def test_dataclass_signature(
Script, skip_pre_python37, start, start_params, include_params, environment
):
if environment.version_info < (3, 8):
# Final is not yet supported
price_type = "float"
price_type_infer = "float"
else:
price_type = "Final[float]"
price_type_infer = "object"
code = dedent(
f"""
name: str
foo = 3
blob: ClassVar[str]
price: {price_type}
quantity: int = 0.0
X("""
)
code = (
"from dataclasses import dataclass\n"
+ "from typing import ClassVar, Final\n"
+ start
+ code
)
sig, = Script(code).get_signatures()
expected_params = (
[*start_params, "name", "price", "quantity"]
if include_params
else [*start_params]
)
assert [p.name for p in sig.params] == expected_params
if include_params:
quantity, = sig.params[-1].infer()
assert quantity.name == 'int'
price, = sig.params[-2].infer()
assert price.name == price_type_infer
dataclass_transform_cases = [
# Attributes on the decorated class and its base classes
# are not considered to be fields.
# 1/ Declare dataclass transformer
# Base Class
['@dataclass_transform\nclass X:', [], False],
# Base Class with params
['@dataclass_transform(eq_default=True)\nclass X:', [], False],
# Subclass
[dedent('''
class Y():
y: int
@dataclass_transform
class X(Y):'''), [], False],
# 2/ Declare dataclass transformed
# Class based
[dedent('''
@dataclass_transform
class Y():
y: int
z = 5
class X(Y):'''), [], True],
# Class based with params
[dedent('''
@dataclass_transform(eq_default=True)
class Y():
y: int
z = 5
class X(Y):'''), [], True],
# Decorator based
[dedent('''
@dataclass_transform
def create_model():
pass
@create_model
class X:'''), [], True],
[dedent('''
@dataclass_transform
def create_model():
pass
class Y:
y: int
@create_model
class X(Y):'''), [], True],
[dedent('''
@dataclass_transform
def create_model():
pass
@create_model
class Y:
y: int
@create_model
class X(Y):'''), ["y"], True],
[dedent('''
@dataclass_transform
def create_model():
pass
@create_model
class Y:
y: int
class Z(Y):
z: int
@create_model
class X(Z):'''), ["y"], True],
# Metaclass based
[dedent('''
@dataclass_transform
class ModelMeta():
y: int
z = 5
class ModelBase(metaclass=ModelMeta):
t: int
p = 5
class X(ModelBase):'''), [], True],
# 3/ Init custom init
[dedent('''
@dataclass_transform()
class Y():
y: int
z = 5
class X(Y):
def __init__(self, toto: str):
pass
'''), ["toto"], False],
# 4/ init=false
# Class based
# WARNING: Unsupported
# [dedent('''
# @dataclass_transform
# class Y():
# y: int
# z = 5
# def __init_subclass__(
# cls,
# *,
# init: bool = False,
# )
# class X(Y):'''), [], False],
[dedent('''
@dataclass_transform
class Y():
y: int
z = 5
def __init_subclass__(
cls,
*,
init: bool = False,
)
class X(Y, init=True):'''), [], True],
[dedent('''
@dataclass_transform
class Y():
y: int
z = 5
def __init_subclass__(
cls,
*,
init: bool = False,
)
class X(Y, init=False):'''), [], False],
[dedent('''
@dataclass_transform
class Y():
y: int
z = 5
class X(Y, init=False):'''), [], False],
# Decorator based
[dedent('''
@dataclass_transform
def create_model(init=False):
pass
@create_model()
class X:'''), [], False],
[dedent('''
@dataclass_transform
def create_model(init=False):
pass
@create_model(init=True)
class X:'''), [], True],
[dedent('''
@dataclass_transform
def create_model(init=False):
pass
@create_model(init=False)
class X:'''), [], False],
[dedent('''
@dataclass_transform
def create_model():
pass
@create_model(init=False)
class X:'''), [], False],
# Metaclass based
[dedent('''
@dataclass_transform
class ModelMeta():
y: int
z = 5
def __new__(
cls,
name,
bases,
namespace,
*,
init: bool = False,
):
...
class ModelBase(metaclass=ModelMeta):
t: int
p = 5
class X(ModelBase):'''), [], False],
[dedent('''
@dataclass_transform
class ModelMeta():
y: int
z = 5
def __new__(
cls,
name,
bases,
namespace,
*,
init: bool = False,
):
...
class ModelBase(metaclass=ModelMeta):
t: int
p = 5
class X(ModelBase, init=True):'''), [], True],
[dedent('''
@dataclass_transform
class ModelMeta():
y: int
z = 5
def __new__(
cls,
name,
bases,
namespace,
*,
init: bool = False,
):
...
class ModelBase(metaclass=ModelMeta):
t: int
p = 5
class X(ModelBase, init=False):'''), [], False],
[dedent('''
@dataclass_transform
class ModelMeta():
y: int
z = 5
class ModelBase(metaclass=ModelMeta):
t: int
p = 5
class X(ModelBase, init=False):'''), [], False],
# 4/ Other parameters
# Class based
[dedent('''
@dataclass_transform
class Y():
y: int
z = 5
class X(Y, eq=True):'''), [], True],
# Decorator based
[dedent('''
@dataclass_transform
def create_model():
pass
@create_model(eq=True)
class X:'''), [], True],
# Metaclass based
[dedent('''
@dataclass_transform
class ModelMeta():
y: int
z = 5
class ModelBase(metaclass=ModelMeta):
t: int
p = 5
class X(ModelBase, eq=True):'''), [], True],
]
ids = [
"direct_transformer",
"transformer_with_params",
"subclass_transformer",
"base_transformed",
"base_transformed_with_params",
"decorator_transformed_direct",
"decorator_transformed_subclass",
"decorator_transformed_both",
"decorator_transformed_intermediate_not",
"metaclass_transformed",
"custom_init",
# "base_transformed_init_false_dataclass_init_default",
"base_transformed_init_false_dataclass_init_true",
"base_transformed_init_false_dataclass_init_false",
"base_transformed_init_default_dataclass_init_false",
"decorator_transformed_init_false_dataclass_init_default",
"decorator_transformed_init_false_dataclass_init_true",
"decorator_transformed_init_false_dataclass_init_false",
"decorator_transformed_init_default_dataclass_init_false",
"metaclass_transformed_init_false_dataclass_init_default",
"metaclass_transformed_init_false_dataclass_init_true",
"metaclass_transformed_init_false_dataclass_init_false",
"metaclass_transformed_init_default_dataclass_init_false",
"base_transformed_other_parameters",
"decorator_transformed_other_parameters",
"metaclass_transformed_other_parameters",
]
@pytest.mark.parametrize(
'start, start_params, include_params', dataclass_transform_cases, ids=ids
)
def test_extensions_dataclass_transform_signature(
Script, skip_pre_python37, start, start_params, include_params, environment
):
has_typing_ext = bool(Script('import typing_extensions').infer())
if not has_typing_ext:
raise pytest.skip("typing_extensions needed in target environment to run this test")
if environment.version_info < (3, 8):
# Final is not yet supported
price_type = "float"
price_type_infer = "float"
else:
price_type = "Final[float]"
price_type_infer = "object"
code = dedent(
f"""
name: str
foo = 3
blob: ClassVar[str]
price: {price_type}
quantity: int = 0.0
X("""
)
code = (
"from typing_extensions import dataclass_transform\n"
+ "from typing import ClassVar, Final\n"
+ start
+ code
)
(sig,) = Script(code).get_signatures()
expected_params = (
[*start_params, "name", "price", "quantity"]
if include_params
else [*start_params]
)
assert [p.name for p in sig.params] == expected_params
if include_params:
quantity, = sig.params[-1].infer()
assert quantity.name == 'int'
price, = sig.params[-2].infer()
assert price.name == price_type_infer
def test_dataclass_transform_complete(Script):
script = Script('''\
@dataclass_transform
class Y():
y: int
z = 5
class X(Y):
name: str
foo = 3
def f(x: X):
x.na''')
completion, = script.complete()
assert completion.description == 'name: str'
@pytest.mark.parametrize(
"start, start_params, include_params", dataclass_transform_cases, ids=ids
)
def test_dataclass_transform_signature(
Script, skip_pre_python311, start, start_params, include_params
):
def test_dataclass_signature(Script, skip_pre_python37, start, start_params):
code = dedent('''
name: str
foo = 3
blob: ClassVar[str]
price: Final[float]
price: float
quantity: int = 0.0
X(''')
code = (
"from typing import dataclass_transform\n"
+ "from typing import ClassVar, Final\n"
+ start
+ code
)
code = 'from dataclasses import dataclass\n' + start + code
sig, = Script(code).get_signatures()
expected_params = (
[*start_params, "name", "price", "quantity"]
if include_params
else [*start_params]
)
assert [p.name for p in sig.params] == expected_params
if include_params:
quantity, = sig.params[-1].infer()
assert quantity.name == 'int'
price, = sig.params[-2].infer()
assert price.name == 'object'
assert [p.name for p in sig.params] == start_params + ['name', 'price', 'quantity']
quantity, = sig.params[-1].infer()
assert quantity.name == 'int'
price, = sig.params[-2].infer()
assert price.name == 'float'
@pytest.mark.parametrize(
@@ -842,8 +371,7 @@ def test_dataclass_transform_signature(
z = 5
@define
class X(Y):'''), ['y']],
],
ids=["define", "frozen", "define_customized", "define_subclass", "define_both"]
]
)
def test_attrs_signature(Script, skip_pre_python37, start, start_params):
has_attrs = bool(Script('import attrs').infer())

View File

@@ -91,7 +91,7 @@ class TestSetupReadline(unittest.TestCase):
}
# There are quite a few differences, because both Windows and Linux
# (posix and nt) libraries are included.
assert len(difference) < 40
assert len(difference) < 30
def test_local_import(self):
s = 'import test.test_utils'