Merge branch 'master' of github.com:davidhalter/jedi
.gitignore (vendored)
@@ -14,3 +14,4 @@ record.json
/.pytest_cache
/.mypy_cache
/venv/
.nvimrc
@@ -1,4 +1,4 @@
Main Authors
Main Authors
------------

- David Halter (@davidhalter) <davidhalter88@gmail.com>
@@ -62,6 +62,7 @@ Code Contributors
- Andrii Kolomoiets (@muffinmad)
- Leo Ryu (@Leo-Ryu)
- Joseph Birkner (@josephbirkner)
- Márcio Mazza (@marciomazza)

And a few more "anonymous" contributors.
@@ -6,6 +6,10 @@ Changelog
Unreleased
++++++++++

- Added dataclass-equivalent for attrs.define
- Find fixtures from Pytest entrypoints; Examples of pytest plugins installed
  like this are pytest-django, pytest-sugar and Faker.

0.18.1 (2021-11-17)
+++++++++++++++++++
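
To make the two unreleased entries above concrete, here is a minimal, hypothetical sketch (not taken from the repository; it assumes attrs >= 21.3 is installed, and the class and field names are invented) of the kind of attrs-based class the first entry is about:

# Illustration only: with the attrs.define change, jedi is expected to treat
# such classes like dataclasses and infer the generated __init__ signature.
import attrs


@attrs.define
class Order:          # hypothetical class, not from the jedi code base
    name: str
    price: float
    quantity: int = 0


# A signature/completion request at this call site should now offer
# (name, price, quantity), mirroring the existing dataclass behaviour.
order = Order("book", 12.5, quantity=2)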
@@ -23,7 +23,7 @@ class RefactoringError(_JediError):
    Refactorings can fail for various reasons. So if you work with refactorings
    like :meth:`.Script.rename`, :meth:`.Script.inline`,
    :meth:`.Script.extract_variable` and :meth:`.Script.extract_function`, make
    sure to catch these. The descriptions in the errors are ususally valuable
    sure to catch these. The descriptions in the errors are usually valuable
    for end users.

    A typical ``RefactoringError`` would tell the user that inlining is not
@@ -280,7 +280,7 @@ class CallDetails:
    def count_positional_arguments(self):
        count = 0
        for star_count, key_start, had_equal in self._list_arguments()[:-1]:
            if star_count:
            if star_count or key_start:
                break
            count += 1
        return count
@@ -306,7 +306,7 @@ def _iter_arguments(nodes, position):
        first = node.children[0]
        second = node.children[1]
        if second == '=':
            if second.start_pos < position:
            if second.start_pos < position and first.type == 'name':
                yield 0, first.value, True
            else:
                yield 0, remove_after_pos(first), False
@@ -433,7 +433,6 @@ def get_default_project(path=None):
            probable_path = dir

    if probable_path is not None:
        # TODO search for setup.py etc
        return Project(probable_path)

    if first_no_init_file is not None:
@@ -42,11 +42,17 @@ class ChangedFile:
        if self._from_path is None:
            from_p = ''
        else:
            from_p = self._from_path.relative_to(project_path)
            try:
                from_p = self._from_path.relative_to(project_path)
            except ValueError:  # Happens it the path is not on th project_path
                from_p = self._from_path
        if self._to_path is None:
            to_p = ''
        else:
            to_p = self._to_path.relative_to(project_path)
            try:
                to_p = self._to_path.relative_to(project_path)
            except ValueError:
                to_p = self._to_path
        diff = difflib.unified_diff(
            old_lines, new_lines,
            fromfile=str(from_p),
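
The try/except blocks introduced above guard a specific pathlib behaviour. As a standalone reminder (illustrative paths, not repository code), Path.relative_to() raises ValueError whenever the file does not live under the given base directory, which is exactly the situation of a script whose path lies outside the project:

from pathlib import Path

project_path = Path("/home/user/project")

inside = Path("/home/user/project/src/main.py")
print(inside.relative_to(project_path))   # src/main.py

outside = Path("/unknown_dir/file.py")
try:
    outside.relative_to(project_path)
except ValueError:
    # Fall back to the absolute path, as ChangedFile now does.
    print(outside)                        # /unknown_dir/file.py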
@@ -90,7 +90,7 @@ class InferenceState:
        self.compiled_subprocess = environment.get_inference_state_subprocess(self)
        self.grammar = environment.get_grammar()

        self.latest_grammar = parso.load_grammar(version='3.7')
        self.latest_grammar = parso.load_grammar(version='3.10')
        self.memoize_cache = {}  # for memoize decorators
        self.module_cache = imports.ModuleCache()  # does the job of `sys.modules`.
        self.stub_module_cache = {}  # Dict[Tuple[str, ...], Optional[ModuleValue]]
@@ -297,7 +297,7 @@ class Value(HelperValueMixin):
        just the `_T` generic parameter.

        `value_set`: represents the actual argument passed to the parameter
        we're inferrined for, or (for recursive calls) their types. In the
        we're inferred for, or (for recursive calls) their types. In the
        above example this would first be the representation of the list
        `[1]` and then, when recursing, just of `1`.
        """
@@ -34,7 +34,7 @@ class MixedObject(ValueWrapper):

    This combined logic makes it possible to provide more powerful REPL
    completion. It allows side effects that are not noticable with the default
    parser structure to still be completeable.
    parser structure to still be completable.

    The biggest difference from CompiledValue to MixedObject is that we are
    generally dealing with Python code and not with C code. This will generate
@@ -267,7 +267,7 @@ def _find_syntax_node_name(inference_state, python_object):
@inference_state_function_cache()
def _create(inference_state, compiled_value, module_context):
    # TODO accessing this is bad, but it probably doesn't matter that much,
    # because we're working with interpreteters only here.
    # because we're working with interpreters only here.
    python_object = compiled_value.access_handle.access._obj
    result = _find_syntax_node_name(inference_state, python_object)
    if result is None:
@@ -151,7 +151,11 @@ def _find_module(string, path=None, full_name=None, is_global_search=True):

        spec = find_spec(string, p)
        if spec is not None:
            if spec.origin == "frozen":
                continue

            loader = spec.loader

            if loader is None and not spec.has_location:
                # This is a namespace package.
                full_name = string if not path else full_name
@@ -86,6 +86,8 @@ class StubFilter(ParserTreeFilter):
        # Imports in stub files are only public if they have an "as"
        # export.
        definition = name.get_definition()
        if definition is None:
            return False
        if definition.type in ('import_from', 'import_name'):
            if name.parent.type not in ('import_as_name', 'dotted_as_name'):
                return False
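
The comment in this hunk refers to the PEP 484 stub convention: a name imported inside a .pyi file only counts as a public re-export when it is written with a redundant "as" alias. A hypothetical stub sketch (module and names invented, not repository code) of what the filter distinguishes:

# pkg/__init__.pyi (hypothetical stub file)
from ._helpers import internal_thing   # plain import: private, filtered out
from ._api import Client as Client     # "as" form: public re-export, kept
from . import submodule as submodule   # "as" form: public re-export, kept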
@@ -248,7 +248,7 @@ class ValueNameMixin:

    def get_defining_qualified_value(self):
        context = self.parent_context
        if context.is_module() or context.is_class():
        if context is not None and (context.is_module() or context.is_class()):
            return self.parent_context.get_value()  # Might be None
        return None
@@ -180,26 +180,34 @@ def _check_fs(inference_state, file_io, regex):
    return m.as_context()


def gitignored_lines(folder_io, file_io):
    ignored_paths = set()
    ignored_names = set()
def gitignored_paths(folder_io, file_io):
    ignored_paths_abs = set()
    ignored_paths_rel = set()

    for l in file_io.read().splitlines():
        if not l or l.startswith(b'#'):
        if not l or l.startswith(b'#') or l.startswith(b'!') or b'*' in l:
            continue

        p = l.decode('utf-8', 'ignore')
        if p.startswith('/'):
            name = p[1:]
            if name.endswith(os.path.sep):
                name = name[:-1]
            ignored_paths.add(os.path.join(folder_io.path, name))
        p = l.decode('utf-8', 'ignore').rstrip('/')
        if '/' in p:
            name = p.lstrip('/')
            ignored_paths_abs.add(os.path.join(folder_io.path, name))
        else:
            ignored_names.add(p)
    return ignored_paths, ignored_names
            name = p
            ignored_paths_rel.add((folder_io.path, name))

    return ignored_paths_abs, ignored_paths_rel


def expand_relative_ignore_paths(folder_io, relative_paths):
    curr_path = folder_io.path
    return {os.path.join(curr_path, p[1]) for p in relative_paths if curr_path.startswith(p[0])}


def recurse_find_python_folders_and_files(folder_io, except_paths=()):
    except_paths = set(except_paths)
    except_paths_relative = set()

    for root_folder_io, folder_ios, file_ios in folder_io.walk():
        # Delete folders that we don't want to iterate over.
        for file_io in file_ios:
@@ -209,14 +217,21 @@ def recurse_find_python_folders_and_files(folder_io, except_paths=()):
                yield None, file_io

            if path.name == '.gitignore':
                ignored_paths, ignored_names = \
                    gitignored_lines(root_folder_io, file_io)
                except_paths |= ignored_paths
                ignored_paths_abs, ignored_paths_rel = gitignored_paths(
                    root_folder_io, file_io
                )
                except_paths |= ignored_paths_abs
                except_paths_relative |= ignored_paths_rel

        except_paths_relative_expanded = expand_relative_ignore_paths(
            root_folder_io, except_paths_relative
        )

        folder_ios[:] = [
            folder_io
            for folder_io in folder_ios
            if folder_io.path not in except_paths
            and folder_io.path not in except_paths_relative_expanded
            and folder_io.get_base_name() not in _IGNORE_FOLDERS
        ]
        for folder_io in folder_ios:
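
Taken together, the two hunks above split .gitignore handling: entries containing a slash are anchored at the directory of the .gitignore file, bare names are stored relative to that directory and expanded for every folder visited later, and negations or wildcard patterns are skipped entirely. A small standalone sketch of that classification (hypothetical entries, not repository code):

import os

# Hypothetical .gitignore found in /repo (bytes, as jedi reads the file):
gitignore_lines = [b"/build", b"docs/output/", b"venv", b"*.pyc", b"!keep.py"]

anchored, relative = set(), set()
for line in gitignore_lines:
    # Negations and wildcard patterns are skipped, matching the new filter above.
    if not line or line.startswith(b"#") or line.startswith(b"!") or b"*" in line:
        continue
    entry = line.decode("utf-8", "ignore").rstrip("/")
    if "/" in entry:
        anchored.add(os.path.join("/repo", entry.lstrip("/")))
    else:
        relative.add(("/repo", entry))

print(anchored)   # {'/repo/build', '/repo/docs/output'}
print(relative)   # {('/repo', 'venv')} -> expanded later for every walked subfolder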
@@ -16,7 +16,7 @@ settings will stop this process.

It is important to note that:

1. Array modfications work only in the current module.
1. Array modifications work only in the current module.
2. Jedi only checks Array additions; ``list.pop``, etc are ignored.
"""
from jedi import debug
@@ -78,6 +78,8 @@ class ClassName(TreeNameDefinition):
        type_ = super().api_type
        if type_ == 'function':
            definition = self.tree_name.get_definition()
            if definition is None:
                return type_
            if function_is_property(definition):
                # This essentially checks if there is an @property before
                # the function. @property could be something different, but
@@ -118,21 +120,6 @@ class ClassFilter(ParserTreeFilter):
        return False

    def _access_possible(self, name):
        # Filter for ClassVar variables
        # TODO this is not properly done, yet. It just checks for the string
        # ClassVar in the annotation, which can be quite imprecise. If we
        # wanted to do this correct, we would have to infer the ClassVar.
        if not self._is_instance:
            expr_stmt = name.get_definition()
            if expr_stmt is not None and expr_stmt.type == 'expr_stmt':
                annassign = expr_stmt.children[1]
                if annassign.type == 'annassign':
                    # If there is an =, the variable is obviously also
                    # defined on the class.
                    if 'ClassVar' not in annassign.children[1].get_code() \
                            and '=' not in annassign.children:
                        return False

        # Filter for name mangling of private variables like __foo
        return not name.value.startswith('__') or name.value.endswith('__') \
            or self._equals_origin_scope()
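
For context on what the filter in this hunk approximates, the snippet below is an independent illustration (invented class, not repository code) of how typing.ClassVar annotations differ from plain annotations at runtime; this is the distinction jedi tries to mirror when deciding which names to offer on instances versus classes:

import typing


class Config:
    retries: int                            # instance attribute (annotation only)
    timeout: float = 1.5                    # class attribute with a default value
    registry: typing.ClassVar[dict] = {}    # explicitly class-level
    label: typing.ClassVar[str]             # class-level, no assignment


# Only assigned names exist on the class object at runtime; static tools like
# jedi additionally use the ClassVar annotation (and the presence of "=") as hints.
print(hasattr(Config, "timeout"))     # True
print(hasattr(Config, "retries"))     # False
print(Config.__annotations__.keys())  # dict_keys(['retries', 'timeout', 'registry', 'label'])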
@@ -2,7 +2,7 @@ from pathlib import Path

from parso.tree import search_ancestor
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.imports import load_module_from_path
from jedi.inference.imports import goto_import, load_module_from_path
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.base_value import NO_VALUES, ValueSet
from jedi.inference.helpers import infer_call_of_leaf
@@ -131,6 +131,17 @@ def _is_pytest_func(func_name, decorator_nodes):
        or any('fixture' in n.get_code() for n in decorator_nodes)


def _find_pytest_plugin_modules():
    """
    Finds pytest plugin modules hooked by setuptools entry points

    See https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points
    """
    from pkg_resources import iter_entry_points

    return [ep.module_name.split(".") for ep in iter_entry_points(group="pytest11")]


@inference_state_method_cache()
def _iter_pytest_modules(module_context, skip_own_module=False):
    if not skip_own_module:
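
The new helper above relies on the standard "pytest11" setuptools entry-point group. For reference, this is roughly how a pytest plugin registers itself; the setup.py below is a simplified, hypothetical example (package and entry names invented), not the setup of any real plugin:

# Hypothetical setup.py of a pytest plugin; registering under the "pytest11"
# group is what makes iter_entry_points(group="pytest11") return it, and
# therefore what lets jedi import the module and scan it for fixtures.
from setuptools import setup

setup(
    name="my-pytest-plugin",
    packages=["my_pytest_plugin"],
    entry_points={
        "pytest11": [
            "my_plugin = my_pytest_plugin.plugin",
        ],
    },
)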
@@ -159,7 +170,7 @@ def _iter_pytest_modules(module_context, skip_own_module=False):
            break
        last_folder = folder  # keep track of the last found parent name

    for names in _PYTEST_FIXTURE_MODULES:
    for names in _PYTEST_FIXTURE_MODULES + _find_pytest_plugin_modules():
        for module_value in module_context.inference_state.import_module(names):
            yield module_value.as_context()
@@ -167,14 +178,28 @@ def _iter_pytest_modules(module_context, skip_own_module=False):
class FixtureFilter(ParserTreeFilter):
    def _filter(self, names):
        for name in super()._filter(names):
            funcdef = name.parent
            # Class fixtures are not supported
            if funcdef.type == 'funcdef':
                decorated = funcdef.parent
                if decorated.type == 'decorated' and self._is_fixture(decorated):
            # look for fixture definitions of imported names
            if name.parent.type == "import_from":
                imported_names = goto_import(self.parent_context, name)
                if any(
                    self._is_fixture(iname.parent_context, iname.tree_name)
                    for iname in imported_names
                    # discard imports of whole modules, that have no tree_name
                    if iname.tree_name
                ):
                    yield name

    def _is_fixture(self, decorated):
            elif self._is_fixture(self.parent_context, name):
                yield name

    def _is_fixture(self, context, name):
        funcdef = name.parent
        # Class fixtures are not supported
        if funcdef.type != "funcdef":
            return False
        decorated = funcdef.parent
        if decorated.type != "decorated":
            return False
        decorators = decorated.children[0]
        if decorators.type == 'decorators':
            decorators = decorators.children
@@ -191,11 +216,12 @@ class FixtureFilter(ParserTreeFilter):
                last_leaf = last_trailer.get_last_leaf()
                if last_leaf == ')':
                    values = infer_call_of_leaf(
                        self.parent_context, last_leaf, cut_own_trailer=True)
                        context, last_leaf, cut_own_trailer=True
                    )
                else:
                    values = self.parent_context.infer_node(dotted_name)
                    values = context.infer_node(dotted_name)
            else:
                values = self.parent_context.infer_node(dotted_name)
                values = context.infer_node(dotted_name)
            for value in values:
                if value.name.get_qualified_names(include_module_names=True) \
                        == ('_pytest', 'fixtures', 'fixture'):
@@ -803,6 +803,15 @@ _implemented = {
        # For now this works at least better than Jedi trying to understand it.
        'dataclass': _dataclass
    },
    # attrs exposes declaration interface roughly compatible with dataclasses
    # via attrs.define, attrs.frozen and attrs.mutable
    # https://www.attrs.org/en/stable/names.html
    'attr': {
        'define': _dataclass,
    },
    'attrs': {
        'define': _dataclass,
    },
    'os.path': {
        'dirname': _create_string_input_function(os.path.dirname),
        'abspath': _create_string_input_function(os.path.abspath),
setup.py
@@ -41,14 +41,39 @@ setup(name='jedi',
          # coloroma for colored debug output
          'colorama',
          'Django<3.1',  # For now pin this.
          'attrs',
      ],
      'qa': [
          'flake8==3.8.3',
          'mypy==0.782',
      ],
      'docs': [
          'sphinx<2',
          'docutils<0.18',
          # Just pin all of these.
          'Jinja2==2.11.3',
          'MarkupSafe==1.1.1',
          'Pygments==2.8.1',
          'alabaster==0.7.12',
          'babel==2.9.1',
          'chardet==4.0.0',
          'commonmark==0.8.1',
          'docutils==0.17.1',
          'future==0.18.2',
          'idna==2.10',
          'imagesize==1.2.0',
          'mock==1.0.1',
          'packaging==20.9',
          'pyparsing==2.4.7',
          'pytz==2021.1',
          'readthedocs-sphinx-ext==2.1.4',
          'recommonmark==0.5.0',
          'requests==2.25.1',
          'six==1.15.0',
          'snowballstemmer==2.1.0',
          'sphinx==1.8.5',
          'sphinx-rtd-theme==0.4.3',
          'sphinxcontrib-serializinghtml==1.1.4',
          'sphinxcontrib-websupport==1.2.4',
          'urllib3==1.26.4',
      ],
  },
  package_data={'jedi': ['*.pyi', 'third_party/typeshed/LICENSE',
@@ -23,7 +23,7 @@ def inheritance_fixture():


@pytest.fixture
def testdir(testdir):
    #? ['chdir']
    testdir.chdir
    return testdir
def capsysbinary(capsysbinary):
    #? ['close']
    capsysbinary.clos
    return capsysbinary
@@ -5,6 +5,7 @@ import uuid
from django.db import models
from django.contrib.auth.models import User
from django.db.models.query_utils import DeferredAttribute
from django.db.models.manager import BaseManager


class TagManager(models.Manager):
@@ -44,7 +44,7 @@ class Foo():
    baz: typing.ClassVar[str]


#?
#? int()
Foo.bar
#? int()
Foo().bar
@@ -58,6 +58,7 @@ class VarClass:
    var_instance2: float
    var_class1: typing.ClassVar[str] = 1
    var_class2: typing.ClassVar[bytes]
    var_class3 = None

    def __init__(self):
        #? int()
@@ -70,15 +71,21 @@ class VarClass:
        d.var_class2
        #? []
        d.int
        #? ['var_class1', 'var_class2', 'var_instance1', 'var_instance2']
        #? ['var_class1', 'var_class2', 'var_instance1', 'var_instance2', 'var_class3']
        self.var_

class VarClass2(VarClass):
    var_class3: typing.ClassVar[int]

    #? ['var_class1', 'var_class2', 'var_instance1']
    def __init__(self):
        #? int()
        self.var_class3

#? ['var_class1', 'var_class2', 'var_instance1', 'var_class3', 'var_instance2']
VarClass.var_
#? int()
VarClass.var_instance1
#?
#? float()
VarClass.var_instance2
#? str()
VarClass.var_class1
@@ -88,7 +95,7 @@ VarClass.var_class2
VarClass.int

d = VarClass()
#? ['var_class1', 'var_class2', 'var_instance1', 'var_instance2']
#? ['var_class1', 'var_class2', 'var_class3', 'var_instance1', 'var_instance2']
d.var_
#? int()
d.var_instance1
@@ -139,9 +139,6 @@ def test_p(monkeypatch):
#? ['capsysbinary']
def test_p(capsysbin

#? ['tmpdir', 'tmpdir_factory']
def test_p(tmpdi


def close_parens():
    pass
@@ -183,3 +180,28 @@ def with_annot() -> Generator[float, None, None]:
def test_with_annot(inheritance_fixture, with_annot):
    #? float()
    with_annot

# -----------------
# pytest external plugins
# -----------------

#? ['admin_user', 'admin_client']
def test_z(admin

#! 15 ['def admin_client']
def test_p(admin_client):
    #? ['login', 'logout']
    admin_client.log

@pytest.fixture
@some_decorator
#? ['admin_user']
def bla(admin_u
    return

@pytest.fixture
@some_decorator
#! 12 ['def admin_user']
def bla(admin_user):
    pass
@@ -0,0 +1,6 @@
from pytest import fixture


@fixture()
def admin_user():
    pass
test/examples/pytest_plugin_package/pytest_plugin/plugin.py (new file)
@@ -0,0 +1,16 @@
import pytest

from .fixtures import admin_user  # noqa


@pytest.fixture()
def admin_client():
    return Client()


class Client:
    def login(self, **credentials):
        ...

    def logout(self):
        ...
@@ -209,7 +209,7 @@ class IntegrationTestCase(BaseTestCase):
            TEST_REFERENCES: self.run_get_references,
        }
        if (self.path.endswith('pytest.py') or self.path.endswith('conftest.py')) \
                and environment.executable != os.path.realpath(sys.executable):
                and os.path.realpath(environment.executable) != os.path.realpath(sys.executable):
            # It's not guarantueed that pytest is installed in test
            # environments, if we're not running in the same environment that
            # we're already in, so just skip that case.
@@ -650,6 +650,7 @@ def test_cursor_after_signature(Script, column):
        ('abs(chr ( \nclass y: pass', 1, 8, 'abs', 0),
        ('abs(chr ( \nclass y: pass', 1, 9, 'abs', 0),
        ('abs(chr ( \nclass y: pass', 1, 10, 'chr', 0),
        ('abs(foo.bar=3)', 1, 13, 'abs', 0),
    ]
)
def test_base_signatures(Script, code, line, column, name, index):
@@ -739,3 +739,18 @@ def test_param_infer_default():
    param, = abs_sig.params
    assert param.name == 'x'
    assert param.infer_default() == []


@pytest.mark.parametrize(
    'code, expected', [
        ("random.triangular(", ['high=', 'low=', 'mode=']),
        ("random.triangular(low=1, ", ['high=', 'mode=']),
        ("random.triangular(high=1, ", ['low=', 'mode=']),
        ("random.triangular(low=1, high=2, ", ['mode=']),
        ("random.triangular(low=1, mode=2, ", ['high=']),
    ],
)
def test_keyword_param_completion(code, expected):
    import random
    completions = jedi.Interpreter(code, [locals()]).complete()
    assert expected == [c.name for c in completions if c.name.endswith('=')]
@@ -178,7 +178,7 @@ def test_is_potential_project(path, expected):

    if expected is None:
        try:
            expected = _CONTAINS_POTENTIAL_PROJECT in os.listdir(path)
            expected = bool(set(_CONTAINS_POTENTIAL_PROJECT) & set(os.listdir(path)))
        except OSError:
            expected = False
@@ -1,6 +1,7 @@
import os
from textwrap import dedent
from pathlib import Path
import platform

import pytest
@@ -70,3 +71,23 @@ def test_diff_without_ending_newline(Script):
        -a
        +c
        ''')


def test_diff_path_outside_of_project(Script):
    if platform.system().lower() == 'windows':
        abs_path = r'D:\unknown_dir\file.py'
    else:
        abs_path = '/unknown_dir/file.py'
    script = Script(
        code='foo = 1',
        path=abs_path,
        project=jedi.get_default_project()
    )
    diff = script.rename(line=1, column=0, new_name='bar').get_diff()
    assert diff == dedent(f'''\
        --- {abs_path}
        +++ {abs_path}
        @@ -1 +1 @@
        -foo = 1
        +bar = 1
        ''')
@@ -43,6 +43,9 @@ def test_implicit_namespace_package(Script):
        solution = "foo = '%s'" % solution
        assert completion.description == solution

    c, = script_with_path('import pkg').complete()
    assert c.docstring() == ""


def test_implicit_nested_namespace_package(Script):
    code = 'from implicit_nested_namespaces.namespace.pkg.module import CONST'
@@ -297,7 +297,6 @@ def test_os_issues(Script):
    # Github issue #759
    s = 'import os, s'
    assert 'sys' in import_names(s)
    assert 'path' not in import_names(s, column=len(s) - 1)
    assert 'os' in import_names(s, column=len(s) - 3)

    # Some more checks
@@ -355,6 +355,48 @@ def test_dataclass_signature(Script, skip_pre_python37, start, start_params):
    price, = sig.params[-2].infer()
    assert price.name == 'float'

@pytest.mark.parametrize(
    'start, start_params', [
        ['@define\nclass X:', []],
        ['@frozen\nclass X:', []],
        ['@define(eq=True)\nclass X:', []],
        [dedent('''
            class Y():
                y: int
            @define
            class X(Y):'''), []],
        [dedent('''
            @define
            class Y():
                y: int
                z = 5
            @define
            class X(Y):'''), ['y']],
    ]
)
def test_attrs_signature(Script, skip_pre_python37, start, start_params):
    has_attrs = bool(Script('import attrs').infer())
    if not has_attrs:
        raise pytest.skip("attrs needed in target environment to run this test")

    code = dedent('''
        name: str
        foo = 3
        price: float
        quantity: int = 0.0

        X(''')

    # attrs exposes two namespaces
    code = 'from attrs import define, frozen\n' + start + code

    sig, = Script(code).get_signatures()
    assert [p.name for p in sig.params] == start_params + ['name', 'price', 'quantity']
    quantity, = sig.params[-1].infer()
    assert quantity.name == 'int'
    price, = sig.params[-2].infer()
    assert price.name == 'float'


@pytest.mark.parametrize(
    'stmt, expected', [
@@ -1,4 +1,5 @@
import os
from collections import namedtuple

import pytest
@@ -42,6 +43,22 @@ def test_completion(case, monkeypatch, environment, has_django):

    if (not has_django) and case.path.endswith('django.py'):
        pytest.skip('Needs django to be installed to run this test.')

    if case.path.endswith("pytest.py"):
        # to test finding pytest fixtures from external plugins
        # add a stub pytest plugin to the project sys_path...
        pytest_plugin_dir = str(helpers.get_example_dir("pytest_plugin_package"))
        case._project.added_sys_path = [pytest_plugin_dir]

        # ... and mock setuptools entry points to include it
        # see https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points
        def mock_iter_entry_points(group):
            assert group == "pytest11"
            EntryPoint = namedtuple("EntryPoint", ["module_name"])
            return [EntryPoint("pytest_plugin.plugin")]

        monkeypatch.setattr("pkg_resources.iter_entry_points", mock_iter_entry_points)

    repo_root = helpers.root_dir
    monkeypatch.chdir(os.path.join(repo_root, 'jedi'))
    case.run(assert_case_equal, environment)