forked from VimPlug/jedi
Compare commits
38 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 3b7106ae71 |  |
|  | 74116fe2ea |  |
|  | 1233caebdc |  |
|  | 7851dff915 |  |
|  | e4987b3e7a |  |
|  | d1851c369c |  |
|  | d63fbd8624 |  |
|  | b0f664ec94 |  |
|  | 9957374508 |  |
|  | 7f3a7db7e6 |  |
|  | 3ffe8475b8 |  |
|  | 396d7df314 |  |
|  | 0c618a4456 |  |
|  | c4c36d8e2e |  |
|  | a3a9ae1a26 |  |
|  | e41b966283 |  |
|  | 4188526e2d |  |
|  | 804b0f0d06 |  |
|  | 7b15f1736c |  |
|  | 4846848a1e |  |
|  | 344fef1e2f |  |
|  | bc23458164 |  |
|  | 9a54e583e7 |  |
|  | 59ccd2da93 |  |
|  | 737c1e5792 |  |
|  | f72adf0cbc |  |
|  | 5184d0cb9c |  |
|  | 2d0258db1a |  |
|  | f5e6a25542 |  |
|  | bc5a8ddf87 |  |
|  | eabddb9698 |  |
|  | 6fcdc44f3e |  |
|  | 0d1a45ddc1 |  |
|  | f9183bbf64 |  |
|  | 7ec8454fc1 |  |
|  | a3410f124a |  |
|  | 3488f6b61d |  |
|  | 3dad9cac6b |  |
```diff
@@ -6,6 +6,17 @@ Changelog
 Unreleased
 ++++++++++
 
+0.17.2 (2020-07-17)
++++++++++++++++++++
+
+- Added an option to pass environment variables to ``Environment``
+- ``Project(...).path`` exists now
+- Support for Python 3.9
+- A few bugfixes
+
+This will be the last release that supports Python 2 and Python 3.5.
+``0.18.0`` will be Python 3.6+.
+
 0.17.1 (2020-06-20)
 +++++++++++++++++++
```
```diff
@@ -22,6 +22,10 @@ Jedi - an awesome autocompletion, static analysis and refactoring library for Py
     :target: https://coveralls.io/r/davidhalter/jedi
     :alt: Coverage status
 
+.. image:: https://pepy.tech/badge/jedi
+    :target: https://pepy.tech/project/jedi
+    :alt: PyPI Downloads
+
 
 Jedi is a static analysis tool for Python that is typically used in
 IDEs/editors plugins. Jedi has a focus on autocompletion and goto
```
```diff
@@ -27,7 +27,7 @@ git submodule update --init
 tox
 
 # Create tag
-tag=v$(python -c "import $PROJECT_NAME; print($PROJECT_NAME.__version__)")
+tag=v$(python3 -c "import $PROJECT_NAME; print($PROJECT_NAME.__version__)")
 
 master_ref=$(git show-ref -s heads/$BRANCH)
 tag_ref=$(git show-ref -s $tag || true)
@@ -44,7 +44,7 @@ fi
 # Package and upload to PyPI
 #rm -rf dist/ - Not needed anymore, because the folder is never reused.
 echo `pwd`
-python setup.py sdist bdist_wheel
+python3 setup.py sdist bdist_wheel
 # Maybe do a pip install twine before.
 twine upload dist/*
```
```diff
@@ -30,6 +30,10 @@ Jedi - an awesome autocompletion, static analysis and refactoring library for Py
     :target: https://coveralls.io/r/davidhalter/jedi
     :alt: Coverage status
 
+.. image:: https://pepy.tech/badge/jedi
+    :target: https://pepy.tech/project/jedi
+    :alt: PyPI Downloads
+
 
 `Github Repository <https://github.com/davidhalter/jedi>`_
 
 .. automodule:: jedi
```
```diff
@@ -27,7 +27,7 @@ ad
 load
 """
 
-__version__ = '0.17.1'
+__version__ = '0.17.2'
 
 from jedi.api import Script, Interpreter, set_debug_function, \
     preload_module, names
```
```diff
@@ -61,8 +61,9 @@ class Environment(_BaseEnvironment):
     """
     _subprocess = None
 
-    def __init__(self, executable):
+    def __init__(self, executable, env_vars=None):
         self._start_executable = executable
+        self._env_vars = env_vars
         # Initialize the environment
         self._get_subprocess()
@@ -71,7 +72,8 @@ class Environment(_BaseEnvironment):
             return self._subprocess
 
         try:
-            self._subprocess = CompiledSubprocess(self._start_executable)
+            self._subprocess = CompiledSubprocess(self._start_executable,
+                                                  env_vars=self._env_vars)
             info = self._subprocess._send(None, _get_info)
         except Exception as exc:
             raise InvalidPythonEnvironment(
@@ -134,6 +136,7 @@ class _SameEnvironmentMixin(object):
         self._start_executable = self.executable = sys.executable
         self.path = sys.prefix
         self.version_info = _VersionInfo(*sys.version_info[:3])
+        self._env_vars = None
 
 
 class SameEnvironment(_SameEnvironmentMixin, Environment):
@@ -321,7 +324,7 @@ def find_virtualenvs(paths=None, **kwargs):
         return py27_comp(paths, **kwargs)
 
 
-def find_system_environments():
+def find_system_environments(**kwargs):
     """
     Ignores virtualenvs and returns the Python versions that were installed on
     your system. This might return nothing, if you're running Python e.g. from
@@ -333,14 +336,14 @@ def find_system_environments():
     """
     for version_string in _SUPPORTED_PYTHONS:
         try:
-            yield get_system_environment(version_string)
+            yield get_system_environment(version_string, **kwargs)
         except InvalidPythonEnvironment:
             pass
 
 
 # TODO: this function should probably return a list of environments since
 # multiple Python installations can be found on a system for the same version.
-def get_system_environment(version):
+def get_system_environment(version, **kwargs):
     """
     Return the first Python environment found for a string of the form 'X.Y'
     where X and Y are the major and minor versions of Python.
@@ -357,24 +360,30 @@ def get_system_environment(version):
     if os.name == 'nt':
         for exe in _get_executables_from_windows_registry(version):
             try:
-                return Environment(exe)
+                return Environment(exe, **kwargs)
             except InvalidPythonEnvironment:
                 pass
     raise InvalidPythonEnvironment("Cannot find executable python%s." % version)
 
 
-def create_environment(path, safe=True):
+def create_environment(path, safe=True, **kwargs):
     """
     Make it possible to manually create an Environment object by specifying a
-    Virtualenv path or an executable path.
+    Virtualenv path or an executable path and optional environment variables.
 
     :raises: :exc:`.InvalidPythonEnvironment`
     :returns: :class:`.Environment`
+
+    TODO: make env_vars a kwarg when Python 2 is dropped. For now, preserve API
     """
+    return _create_environment(path, safe, **kwargs)
+
+
+def _create_environment(path, safe=True, env_vars=None):
     if os.path.isfile(path):
         _assert_safe(path, safe)
-        return Environment(path)
-    return Environment(_get_executable_path(path, safe=safe))
+        return Environment(path, env_vars=env_vars)
+    return Environment(_get_executable_path(path, safe=safe), env_vars=env_vars)
 
 
 def _get_executable_path(path, safe=True):
```
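The new `env_vars` keyword threads from the public helpers down to the inference subprocess. A minimal usage sketch, not taken from this diff (the interpreter path and the `PYTHONPATH` value are placeholders; passing `env_vars=None` keeps the previous behaviour of inheriting the parent environment):

```python
import jedi
from jedi.api.environment import create_environment

# Start jedi's inference subprocess with an explicit environment.
env = create_environment(
    '/usr/bin/python3',                               # placeholder executable
    env_vars={'PYTHONPATH': '/opt/extra/site-packages'},
)

script = jedi.Script('import json\njson.l', environment=env)
print([c.name for c in script.complete()])
```

The other helpers (`find_system_environments`, `get_system_environment`, `find_virtualenvs`) now accept the same keyword arguments and forward them where they construct `Environment` objects.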
```diff
@@ -36,7 +36,7 @@ def complete_file_name(inference_state, module_context, start_leaf, quote, strin
         is_in_os_path_join = False
     else:
         string = to_be_added + string
-    base_path = os.path.join(inference_state.project._path, string)
+    base_path = os.path.join(inference_state.project.path, string)
     try:
         listed = sorted(scandir(base_path), key=lambda e: e.name)
         # OSError: [Errno 36] File name too long: '...'
```
```diff
@@ -3,6 +3,9 @@ TODO Some parts of this module are still not well documented.
 """
 
 from jedi.inference import compiled
+from jedi.inference.base_value import ValueSet
+from jedi.inference.filters import ParserTreeFilter, MergedFilter
+from jedi.inference.names import TreeNameDefinition
 from jedi.inference.compiled import mixed
 from jedi.inference.compiled.access import create_access_path
 from jedi.inference.context import ModuleContext
@@ -19,10 +22,37 @@ class NamespaceObject(object):
         self.__dict__ = dct
 
 
+class MixedTreeName(TreeNameDefinition):
+    def infer(self):
+        """
+        In IPython notebook it is typical that some parts of the code that is
+        provided was already executed. In that case if something is not properly
+        inferred, it should still infer from the variables it already knows.
+        """
+        inferred = super(MixedTreeName, self).infer()
+        if not inferred:
+            for compiled_value in self.parent_context.mixed_values:
+                for f in compiled_value.get_filters():
+                    values = ValueSet.from_sets(
+                        n.infer() for n in f.get(self.string_name)
+                    )
+                    if values:
+                        return values
+        return inferred
+
+
+class MixedParserTreeFilter(ParserTreeFilter):
+    name_class = MixedTreeName
+
+
 class MixedModuleContext(ModuleContext):
     def __init__(self, tree_module_value, namespaces):
         super(MixedModuleContext, self).__init__(tree_module_value)
-        self._namespace_objects = [NamespaceObject(n) for n in namespaces]
+        self.mixed_values = [
+            self._get_mixed_object(
+                _create(self.inference_state, NamespaceObject(n))
+            ) for n in namespaces
+        ]
 
     def _get_mixed_object(self, compiled_value):
         return mixed.MixedObject(
@@ -30,12 +60,16 @@ class MixedModuleContext(ModuleContext):
             tree_value=self._value
         )
 
-    def get_filters(self, *args, **kwargs):
-        for filter in self._value.as_context().get_filters(*args, **kwargs):
-            yield filter
+    def get_filters(self, until_position=None, origin_scope=None):
+        yield MergedFilter(
+            MixedParserTreeFilter(
+                parent_context=self,
+                until_position=until_position,
+                origin_scope=origin_scope
+            ),
+            self.get_global_filter(),
+        )
 
-        for namespace_obj in self._namespace_objects:
-            compiled_value = _create(self.inference_state, namespace_obj)
-            mixed_object = self._get_mixed_object(compiled_value)
-            for filter in mixed_object.get_filters(*args, **kwargs):
+        for mixed_object in self.mixed_values:
+            for filter in mixed_object.get_filters(until_position, origin_scope):
                 yield filter
```
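With `MixedTreeName` in place, an `Interpreter` namespace can fill in names that static analysis cannot resolve, which is the IPython-notebook scenario described in the docstring above. A small sketch mirroring the `test_name_not_inferred_properly` test added later in this compare:

```python
import jedi

# 'UNDEFINED' cannot be inferred statically, but `x` already exists in the
# provided namespace, so inference falls back to the value it already knows.
x = 1
d, = jedi.Interpreter('x = UNDEFINED; x', [locals()]).infer()
print(d.name)  # 'int'
```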
```diff
@@ -150,6 +150,13 @@ class Project(object):
 
         py2_comp(path, **kwargs)
 
+    @property
+    def path(self):
+        """
+        The base path for this project.
+        """
+        return self._path
+
     @inference_state_as_method_param_cache()
     def _get_base_sys_path(self, inference_state):
         # The sys path has not been set explicitly.
```
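`Project(...).path` is now public, so callers no longer need to reach for the private `_path` attribute (the other hunks in this compare switch internal users over accordingly). A quick sketch, assuming a hypothetical project directory:

```python
from jedi.api.project import Project

project = Project('/home/user/myrepo')  # placeholder project root
print(project.path)                     # '/home/user/myrepo'
```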
```diff
@@ -39,7 +39,7 @@ class ChangedFile(object):
         if new_lines[-1] != '':
             new_lines[-1] += '\n'
 
-        project_path = self._inference_state.project._path
+        project_path = self._inference_state.project.path
         if self._from_path is None:
             from_p = ''
         else:
@@ -112,7 +112,7 @@ class Refactoring(object):
 
     def get_diff(self):
         text = ''
-        project_path = self._inference_state.project._path
+        project_path = self._inference_state.project.path
         for from_, to in self.get_renames():
             text += 'rename from %s\nrename to %s\n' \
                 % (relpath(from_, project_path), relpath(to, project_path))
```
```diff
@@ -97,6 +97,9 @@ class HelperValueMixin(object):
             debug.warning('Tried to run __await__ on value %s', self)
         return await_value_set.execute_with_values()
 
+    def py__name__(self):
+        return self.name.string_name
+
     def iterate(self, contextualized_node=None, is_async=False):
         debug.dbg('iterate %s', self)
         if is_async:
@@ -172,6 +175,9 @@ class Value(HelperValueMixin):
                 message="TypeError: '%s' object is not iterable" % self)
             return iter([])
 
+    def py__next__(self, contextualized_node=None):
+        return self.py__iter__(contextualized_node)
+
     def get_signatures(self):
         return []
 
@@ -262,9 +268,6 @@ class Value(HelperValueMixin):
     def name(self):
         raise NotImplementedError
 
-    def py__name__(self):
-        return self.name.string_name
-
     def get_type_hint(self, add_class_info=True):
         return None
```
```diff
@@ -71,12 +71,22 @@ class MixedObject(ValueWrapper):
         else:
             return self.compiled_value.get_safe_value(default)
 
+    @property
+    def array_type(self):
+        return self.compiled_value.array_type
+
+    def get_key_values(self):
+        return self.compiled_value.get_key_values()
+
     def py__simple_getitem__(self, index):
         python_object = self.compiled_value.access_handle.access._obj
         if type(python_object) in ALLOWED_GETITEM_TYPES:
             return self.compiled_value.py__simple_getitem__(index)
         return self._wrapped_value.py__simple_getitem__(index)
 
+    def negate(self):
+        return self.compiled_value.negate()
+
     def _as_context(self):
         if self.parent_context is None:
             return MixedModuleContext(self)
```
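Forwarding `array_type`, `get_key_values` and `negate` to the compiled value means dict-key completion on dict subclasses and negation of live values now work for objects handed to `Interpreter`. A sketch mirroring the tests added later in this compare (`test_dict_completion` with `Inherited` and `test_negate`):

```python
import jedi

class Inherited(dict):
    pass

inherited = Inherited(blablu=3)
comps = jedi.Interpreter('inherited["', [locals()]).complete()
print([c.name for c in comps])  # expected to include the 'blablu' key

# Negating a live value also resolves now:
x, = jedi.Interpreter('x = -y', [{'y': 3}]).infer(1, 0)
print(x.name)  # 'int'
```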
```diff
@@ -156,8 +156,9 @@ class CompiledSubprocess(object):
     # Start with 2, gets set after _get_info.
     _pickle_protocol = 2
 
-    def __init__(self, executable):
+    def __init__(self, executable, env_vars=None):
         self._executable = executable
+        self._env_vars = env_vars
         self._inference_state_deletion_queue = queue.deque()
         self._cleanup_callable = lambda: None
@@ -181,15 +182,6 @@ class CompiledSubprocess(object):
             os.path.dirname(os.path.dirname(parso_path)),
             '.'.join(str(x) for x in sys.version_info[:3]),
         )
-        # Use explicit envionment to ensure reliable results (#1540)
-        env = {}
-        if os.name == 'nt':
-            # if SYSTEMROOT (or case variant) exists in environment,
-            # ensure it goes to subprocess
-            for k, v in os.environ.items():
-                if 'SYSTEMROOT' == k.upper():
-                    env.update({k: os.environ[k]})
-                    break # don't risk multiple entries
         process = GeneralizedPopen(
             args,
             stdin=subprocess.PIPE,
@@ -198,7 +190,7 @@ class CompiledSubprocess(object):
             # Use system default buffering on Python 2 to improve performance
             # (this is already the case on Python 3).
             bufsize=-1,
-            env=env
+            env=self._env_vars
         )
         self._stderr_queue = Queue()
         self._stderr_thread = t = Thread(
```
```diff
@@ -281,7 +281,8 @@ def infer_return_for_callable(arguments, param_values, result_values):
 
     return ValueSet.from_sets(
         v.define_generics(all_type_vars)
-        if isinstance(v, (DefineGenericBaseClass, TypeVar)) else ValueSet({v})
+        if isinstance(v, (DefineGenericBaseClass, TypeVar))
+        else ValueSet({v})
         for v in result_values
     ).execute_annotation()
```
```diff
@@ -415,6 +415,9 @@ class BaseTypingInstance(LazyValueWrapper):
     def get_annotated_class_object(self):
         return self._class_value
 
+    def get_qualified_names(self):
+        return (self.py__name__(),)
+
     @property
     def name(self):
         return ValueName(self, self._tree_name)
```
```diff
@@ -18,6 +18,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
 
     was_instance = stub_value.is_instance()
     if was_instance:
+        arguments = getattr(stub_value, '_arguments', None)
         stub_value = stub_value.py__class__()
 
     qualified_names = stub_value.get_qualified_names()
@@ -30,11 +31,12 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
         method_name = qualified_names[-1]
         qualified_names = qualified_names[:-1]
         was_instance = True
+        arguments = None
 
     values = _infer_from_stub(stub_module_context, qualified_names, ignore_compiled)
     if was_instance:
         values = ValueSet.from_sets(
-            c.execute_with_values()
+            c.execute_with_values() if arguments is None else c.execute(arguments)
             for c in values
             if c.is_class()
         )
```
```diff
@@ -102,7 +102,7 @@ class TypeVar(BaseTypingValue):
         else:
             if found:
                 return found
-        return self._get_classes() or ValueSet({self})
+        return ValueSet({self})
 
     def execute_annotation(self):
         return self._get_classes().execute_annotation()
```
```diff
@@ -1,6 +1,7 @@
 import os
 import re
 from functools import wraps
+from collections import namedtuple
 
 from jedi import settings
 from jedi.file_io import FileIO
@@ -20,36 +21,38 @@ _IMPORT_MAP = dict(
     _socket='socket',
 )
 
+PathInfo = namedtuple('PathInfo', 'path is_third_party')
+
 
-def _merge_create_stub_map(directories):
+def _merge_create_stub_map(path_infos):
     map_ = {}
-    for directory in directories:
-        map_.update(_create_stub_map(directory))
+    for directory_path_info in path_infos:
+        map_.update(_create_stub_map(directory_path_info))
     return map_
 
 
-def _create_stub_map(directory):
+def _create_stub_map(directory_path_info):
     """
     Create a mapping of an importable name in Python to a stub file.
     """
     def generate():
         try:
-            listed = os.listdir(directory)
+            listed = os.listdir(directory_path_info.path)
         except (FileNotFoundError, OSError):
             # OSError is Python 2
             return
 
         for entry in listed:
             entry = cast_path(entry)
-            path = os.path.join(directory, entry)
+            path = os.path.join(directory_path_info.path, entry)
             if os.path.isdir(path):
                 init = os.path.join(path, '__init__.pyi')
                 if os.path.isfile(init):
-                    yield entry, init
+                    yield entry, PathInfo(init, directory_path_info.is_third_party)
             elif entry.endswith('.pyi') and os.path.isfile(path):
                 name = entry[:-4]
                 if name != '__init__':
-                    yield name, path
+                    yield name, PathInfo(path, directory_path_info.is_third_party)
 
     # Create a dictionary from the tuple generator.
     return dict(generate())
@@ -58,8 +61,8 @@ def _create_stub_map(directory):
 def _get_typeshed_directories(version_info):
     check_version_list = ['2and3', str(version_info.major)]
     for base in ['stdlib', 'third_party']:
-        base = os.path.join(TYPESHED_PATH, base)
-        base_list = os.listdir(base)
+        base_path = os.path.join(TYPESHED_PATH, base)
+        base_list = os.listdir(base_path)
         for base_list_entry in base_list:
             match = re.match(r'(\d+)\.(\d+)$', base_list_entry)
             if match is not None:
@@ -68,7 +71,8 @@ def _get_typeshed_directories(version_info):
                 check_version_list.append(base_list_entry)
 
         for check_version in check_version_list:
-            yield os.path.join(base, check_version)
+            is_third_party = base != 'stdlib'
+            yield PathInfo(os.path.join(base_path, check_version), is_third_party)
 
 
 _version_cache = {}
@@ -175,7 +179,7 @@ def _try_to_load_stub(inference_state, import_names, python_value_set,
         )
         if m is not None:
             return m
-    if import_names[0] == 'django':
+    if import_names[0] == 'django' and python_value_set:
         return _try_to_load_stub_from_file(
             inference_state,
             python_value_set,
@@ -249,16 +253,20 @@ def _load_from_typeshed(inference_state, python_value_set, parent_module_value,
             # Only if it's a package (= a folder) something can be
             # imported.
             return None
-        path = parent_module_value.py__path__()
-        map_ = _merge_create_stub_map(path)
+        paths = parent_module_value.py__path__()
+        # Once the initial package has been loaded, the sub packages will
+        # always be loaded, regardless if they are there or not. This makes
+        # sense, IMO, because stubs take preference, even if the original
+        # library doesn't provide a module (it could be dynamic). ~dave
+        map_ = _merge_create_stub_map([PathInfo(p, is_third_party=False) for p in paths])
 
     if map_ is not None:
-        path = map_.get(import_name)
-        if path is not None:
+        path_info = map_.get(import_name)
+        if path_info is not None and (not path_info.is_third_party or python_value_set):
            return _try_to_load_stub_from_file(
                 inference_state,
                 python_value_set,
-                file_io=FileIO(path),
+                file_io=FileIO(path_info.path),
                 import_names=import_names,
             )
```
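Stub directories are now carried around as `PathInfo(path, is_third_party)` tuples, which is what lets `_load_from_typeshed` skip third-party stubs when the actual package is not importable. These are private helpers, so the following is only an illustration of the data structure (the directory is a placeholder), mirroring the updated typeshed test:

```python
from jedi.inference.gradual import typeshed

# Stub maps are keyed by importable name; each value is a PathInfo.
info = typeshed.PathInfo('/some/stub/dir', is_third_party=False)  # placeholder path
stub_map = typeshed._create_stub_map(info)
for name, path_info in stub_map.items():
    print(name, path_info.path, path_info.is_third_party)
```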
```diff
@@ -192,7 +192,7 @@ class Importer(object):
             import_path = base + tuple(import_path)
         else:
             path = module_context.py__file__()
-            project_path = self._inference_state.project._path
+            project_path = self._inference_state.project.path
             import_path = list(import_path)
             if path is None:
                 # If no path is defined, our best guess is that the current
```
```diff
@@ -90,10 +90,12 @@ class TreeSignature(AbstractSignature):
 
     @memoize_method
     def get_param_names(self, resolve_stars=False):
-        params = super(TreeSignature, self).get_param_names(resolve_stars=False)
+        params = self._function_value.get_param_names()
         if resolve_stars:
             from jedi.inference.star_args import process_params
             params = process_params(params)
         if self.is_bound:
             return params[1:]
         return params
 
     def matches_signature(self, arguments):
```
```diff
@@ -35,6 +35,7 @@ operator_to_magic_method = {
     '+': '__add__',
     '-': '__sub__',
     '*': '__mul__',
+    '@': '__matmul__',
     '/': '__truediv__',
     '//': '__floordiv__',
     '%': '__mod__',
```
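With `'@'` mapped to `__matmul__`, the matrix-multiplication operator can be inferred through a user-defined `__matmul__`. A hypothetical snippet (not from this diff; the `Matrix` class and expected result are assumptions about how the operator inference resolves):

```python
import jedi

source = '''
class Matrix:
    def __matmul__(self, other) -> "Matrix":
        return Matrix()

result = Matrix() @ Matrix()
result'''

# Inferring the name at the end of the source should now follow __matmul__.
d, = jedi.Script(source).infer()
print(d.name)  # expected: 'Matrix'
```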
```diff
@@ -3,7 +3,7 @@ Decorators are not really values, however we need some wrappers to improve
 docstrings and other things around decorators.
 '''
 
-from jedi.inference.base_value import ValueWrapper
+from jedi.inference.base_value import ValueWrapper, ValueSet
 
 
 class Decoratee(ValueWrapper):
@@ -13,3 +13,9 @@ class Decoratee(ValueWrapper):
 
     def py__doc__(self):
         return self._original_value.py__doc__()
+
+    def py__get__(self, instance, class_value):
+        return ValueSet(
+            Decoratee(v, self._original_value)
+            for v in self._wrapped_value.py__get__(instance, class_value)
+        )
```
```diff
@@ -202,7 +202,7 @@ class MethodValue(FunctionValue):
 
 
 class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
-    def _infer_annotations(self):
+    def infer_annotations(self):
         raise NotImplementedError
 
     @inference_state_method_cache(default=NO_VALUES)
@@ -216,7 +216,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
             value_set = NO_VALUES
             returns = get_yield_exprs(self.inference_state, funcdef)
         else:
-            value_set = self._infer_annotations()
+            value_set = self.infer_annotations()
             if value_set:
                 # If there are annotations, prefer them over anything else.
                 # This will make it faster.
@@ -373,7 +373,7 @@ class FunctionExecutionContext(BaseFunctionExecutionContext):
             arguments=self._arguments
         )
 
-    def _infer_annotations(self):
+    def infer_annotations(self):
         from jedi.inference.gradual.annotation import infer_return_types
         return infer_return_types(self._value, self._arguments)
 
@@ -385,7 +385,7 @@ class FunctionExecutionContext(BaseFunctionExecutionContext):
 
 
 class AnonymousFunctionExecution(BaseFunctionExecutionContext):
-    def _infer_annotations(self):
+    def infer_annotations(self):
         # I don't think inferring anonymous executions is a big thing.
         # Anonymous contexts are mostly there for the user to work in. ~ dave
         return NO_VALUES
```
```diff
@@ -256,24 +256,24 @@ class _BaseTreeInstance(AbstractInstanceValue):
 
         def iterate():
             for generator in self.execute_function_slots(iter_slot_names):
-                if generator.is_instance() and not generator.is_compiled():
-                    # `__next__` logic.
-                    if self.inference_state.environment.version_info.major == 2:
-                        name = u'next'
-                    else:
-                        name = u'__next__'
-                    next_slot_names = generator.get_function_slot_names(name)
-                    if next_slot_names:
-                        yield LazyKnownValues(
-                            generator.execute_function_slots(next_slot_names)
-                        )
-                    else:
-                        debug.warning('Instance has no __next__ function in %s.', generator)
-                else:
-                    for lazy_value in generator.py__iter__():
-                        yield lazy_value
+                for lazy_value in generator.py__next__(contextualized_node):
+                    yield lazy_value
         return iterate()
 
+    def py__next__(self, contextualized_node=None):
+        # `__next__` logic.
+        if self.inference_state.environment.version_info.major == 2:
+            name = u'next'
+        else:
+            name = u'__next__'
+        next_slot_names = self.get_function_slot_names(name)
+        if next_slot_names:
+            yield LazyKnownValues(
+                self.execute_function_slots(next_slot_names)
+            )
+        else:
+            debug.warning('Instance has no __next__ function in %s.', self)
+
     def py__call__(self, arguments):
         names = self.get_function_slot_names(u'__call__')
         if not names:
```
```diff
@@ -23,6 +23,9 @@ from jedi.inference.value.dynamic_arrays import check_array_additions
 
 
 class IterableMixin(object):
+    def py__next__(self, contextualized_node=None):
+        return self.py__iter__(contextualized_node)
+
     def py__stop_iteration_returns(self):
         return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
 
@@ -46,13 +49,12 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
     array_type = None
 
     def _get_wrapped_value(self):
-        generator, = self.inference_state.typing_module \
-            .py__getattribute__('Generator') \
-            .execute_annotation()
-        return generator
+        instance, = self._get_cls().execute_annotation()
+        return instance
 
-    def is_instance(self):
-        return False
+    def _get_cls(self):
+        generator, = self.inference_state.typing_module.py__getattribute__('Generator')
+        return generator
 
     def py__bool__(self):
         return True
@@ -64,7 +66,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
     @publish_method('send')
     @publish_method('next', python_version_match=2)
     @publish_method('__next__', python_version_match=3)
-    def py__next__(self, arguments):
+    def _next(self, arguments):
         return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
 
     def py__stop_iteration_returns(self):
@@ -74,6 +76,12 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
     def name(self):
         return compiled.CompiledValueName(self, 'Generator')
 
+    def get_annotated_class_object(self):
+        from jedi.inference.gradual.generics import TupleGenericManager
+        gen_values = self.merge_types_of_iterate().py__class__()
+        gm = TupleGenericManager((gen_values, NO_VALUES, NO_VALUES))
+        return self._get_cls().with_generics(gm)
+
 
 class Generator(GeneratorBase):
     """Handling of `yield` functions."""
@@ -82,6 +90,9 @@ class Generator(GeneratorBase):
         self._func_execution_context = func_execution_context
 
     def py__iter__(self, contextualized_node=None):
+        iterators = self._func_execution_context.infer_annotations()
+        if iterators:
+            return iterators.iterate(contextualized_node)
         return self._func_execution_context.get_yield_lazy_values()
 
     def py__stop_iteration_returns(self):
```
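With `infer_annotations` exposed and `Generator.py__iter__` consulting it, a generator's yield type can follow its return annotation rather than only the `yield` expressions. A small sketch that mirrors the new completion test (`#? float()` for `next(annotation2())`):

```python
import jedi

source = '''
from typing import Iterator

def gen() -> Iterator[float]:
    yield 1

value = next(gen())
value'''

d, = jedi.Script(source).infer()
print(d.name)  # expected: 'float'
```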
```diff
@@ -119,7 +119,9 @@ def execute(callback):
         else:
             return call()
 
-        if value.is_bound_method():
+        if value.is_bound_method() or value.is_instance():
+            # value can be an instance for example if it is a partial
+            # object.
             return call()
 
         # for now we just support builtin functions.
@@ -265,7 +267,7 @@ class ReversedObject(AttributeOverwrite):
 
     @publish_method('next', python_version_match=2)
     @publish_method('__next__', python_version_match=3)
-    def py__next__(self, arguments):
+    def _next(self, arguments):
         return ValueSet.from_sets(
             lazy_value.infer() for lazy_value in self._iter_list
         )
@@ -474,11 +476,10 @@ def collections_namedtuple(value, arguments, callback):
 class PartialObject(ValueWrapper):
     def __init__(self, actual_value, arguments, instance=None):
         super(PartialObject, self).__init__(actual_value)
-        self._actual_value = actual_value
         self._arguments = arguments
         self._instance = instance
 
-    def _get_function(self, unpacked_arguments):
+    def _get_functions(self, unpacked_arguments):
         key, lazy_value = next(unpacked_arguments, (None, None))
         if key is not None or lazy_value is None:
             debug.warning("Partial should have a proper function %s", self._arguments)
@@ -487,8 +488,8 @@ class PartialObject(ValueWrapper):
 
     def get_signatures(self):
         unpacked_arguments = self._arguments.unpack()
-        func = self._get_function(unpacked_arguments)
-        if func is None:
+        funcs = self._get_functions(unpacked_arguments)
+        if funcs is None:
             return []
 
         arg_count = 0
@@ -500,17 +501,30 @@ class PartialObject(ValueWrapper):
                 arg_count += 1
             else:
                 keys.add(key)
-        return [PartialSignature(s, arg_count, keys) for s in func.get_signatures()]
+        return [PartialSignature(s, arg_count, keys) for s in funcs.get_signatures()]
 
     def py__call__(self, arguments):
-        func = self._get_function(self._arguments.unpack())
-        if func is None:
+        funcs = self._get_functions(self._arguments.unpack())
+        if funcs is None:
             return NO_VALUES
 
-        return func.execute(
+        return funcs.execute(
             MergedPartialArguments(self._arguments, arguments, self._instance)
         )
 
+    def py__doc__(self):
+        """
+        In CPython partial does not replace the docstring. However we are still
+        imitating it here, because we want this docstring to be worth something
+        for the user.
+        """
+        callables = self._get_functions(self._arguments.unpack())
+        if callables is None:
+            return ''
+        for callable_ in callables:
+            return callable_.py__doc__()
+        return ''
+
     def py__get__(self, instance, class_value):
         return ValueSet([self])
 
@@ -519,7 +533,7 @@ class PartialMethodObject(PartialObject):
     def py__get__(self, instance, class_value):
         if instance is None:
             return ValueSet([self])
-        return ValueSet([PartialObject(self._actual_value, self._arguments, instance)])
+        return ValueSet([PartialObject(self._wrapped_value, self._arguments, instance)])
 
 
 class PartialSignature(SignatureWrapper):
```
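`PartialObject.py__doc__` forwards to the wrapped function, so `functools.partial` objects report the original docstring, which is exactly what the new `test_partial` checks further down in this compare. A sketch of the observable behaviour:

```python
import jedi
from textwrap import dedent

code = dedent('''
    def foo():
        'x y z'
    from functools import partial
    x = partial(foo)
    x''')

for p in jedi.Script(code).infer():
    print(p.docstring(raw=True))  # 'x y z'
```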
```diff
@@ -292,3 +292,22 @@ def test_in_brackets():
     x = yield from [1]
     #? None
     x
+
+
+# -----------------
+# Annotations
+# -----------------
+
+from typing import Iterator
+
+def annotation1() -> float:
+    yield 1
+
+def annotation2() -> Iterator[float]:
+    yield 1
+
+
+#?
+next(annotation1())
+#? float()
+next(annotation2())
```
```diff
@@ -230,6 +230,87 @@ is_decorated(the_para
 )
 
 
+class class_decorator_factory_plain:
+    def __call__(self, func: T) -> T:
+        ...
+
+#? class_decorator_factory_plain()
+class_decorator_factory_plain()
+
+#?
+class_decorator_factory_plain()()
+
+is_decorated_by_class_decorator_factory = class_decorator_factory_plain()(will_be_decorated)
+
+#? will_be_decorated
+is_decorated_by_class_decorator_factory
+
+#? ['the_param=']
+is_decorated_by_class_decorator_factory(the_par
+)
+
+
+def decorator_factory_plain() -> Callable[[T], T]:
+    pass
+
+#? Callable()
+decorator_factory_plain()
+
+#?
+decorator_factory_plain()()
+
+#? int()
+decorator_factory_plain()(42)
+
+is_decorated_by_plain_factory = decorator_factory_plain()(will_be_decorated)
+
+#? will_be_decorated
+is_decorated_by_plain_factory
+
+#? ['the_param=']
+is_decorated_by_plain_factory(the_par
+)
+
+
+class class_decorator_factory_bound_callable:
+    def __call__(self, func: TCallable) -> TCallable:
+        ...
+
+#? class_decorator_factory_bound_callable()
+class_decorator_factory_bound_callable()
+
+#? Callable()
+class_decorator_factory_bound_callable()()
+
+is_decorated_by_class_bound_factory = class_decorator_factory_bound_callable()(will_be_decorated)
+
+#? will_be_decorated
+is_decorated_by_class_bound_factory
+
+#? ['the_param=']
+is_decorated_by_class_bound_factory(the_par
+)
+
+
+def decorator_factory_bound_callable() -> Callable[[TCallable], TCallable]:
+    pass
+
+#? Callable()
+decorator_factory_bound_callable()
+
+#? Callable()
+decorator_factory_bound_callable()()
+
+is_decorated_by_bound_factory = decorator_factory_bound_callable()(will_be_decorated)
+
+#? will_be_decorated
+is_decorated_by_bound_factory
+
+#? ['the_param=']
+is_decorated_by_bound_factory(the_par
+)
+
+
 class That(Generic[T]):
     def __init__(self, items: List[Tuple[str, T]]) -> None:
         pass
```
```diff
@@ -417,6 +417,7 @@ type_in_out2()
 type_in_out2(float)
 
 def ma(a: typing.Callable[[str], TYPE_VARX]) -> typing.Callable[[str], TYPE_VARX]:
+    #? typing.Callable()
     return a
 
 def mf(s: str) -> int:
```
```diff
@@ -253,12 +253,34 @@ z.read('name').upper
 # -----------------
 # contextlib
 # -----------------
 
 # python > 2.7
+from typing import Iterator
 import contextlib
 with contextlib.closing('asd') as string:
     #? str()
     string
 
+@contextlib.contextmanager
+def cm1() -> Iterator[float]:
+    yield 1
+with cm1() as x:
+    #? float()
+    x
+
+@contextlib.contextmanager
+def cm2() -> float:
+    yield 1
+with cm2() as x:
+    #?
+    x
+
+@contextlib.contextmanager
+def cm3():
+    yield 3
+with cm3() as x:
+    #? int()
+    x
+
 # -----------------
 # operator
 # -----------------
```
```diff
@@ -137,15 +137,7 @@ def test_infer_on_non_name(Script):
 
 def test_infer_on_generator(Script, environment):
     script = Script('def x(): yield 1\ny=x()\ny')
     def_, = script.infer()
-    if environment.version_info >= (3, 9):
-        # The Generator in Python 3.9 is properly inferred, however once it is
-        # converted from stub to Python, the definition is
-        # Generator = _SpecialGenericAlias(collections.abc.Generator, 3)
-        # This is pretty normal for most typing types, like Sequence, List,
-        # etc.
-        assert def_.name == '_SpecialGenericAlias'
-    else:
-        assert def_.name == 'Generator'
+    assert def_.name == 'Generator'
     def_, = script.infer(only_stubs=True)
     assert def_.name == 'Generator'
```
```diff
@@ -594,12 +594,18 @@ def test_param_annotation_completion(class_is_findable):
     ('mixed[Non', 9, ['e']),
 
     ('implicit[10', None, ['00']),
+
+    ('inherited["', None, ['blablu"']),
 ]
 )
 def test_dict_completion(code, column, expected):
     strs = {'asdf': 1, u"""foo""": 2, r'fbar': 3}
     mixed = {1: 2, 1.10: 4, None: 6, r'a\sdf': 8, b'foo': 9}
 
+    class Inherited(dict):
+        pass
+    inherited = Inherited(blablu=3)
+
     namespaces = [locals(), {'implicit': {1000: 3}}]
     comps = jedi.Interpreter(code, namespaces).complete(column=column)
     if Ellipsis in expected:
@@ -681,3 +687,28 @@ def test_string_annotation(annotations, result, code):
     x.__annotations__ = annotations
     defs = jedi.Interpreter(code or 'x()', [locals()]).infer()
     assert [d.name for d in defs] == result
+
+
+def test_name_not_inferred_properly():
+    """
+    In IPython notebook it is typical that some parts of the code that is
+    provided was already executed. In that case if something is not properly
+    inferred, it should still infer from the variables it already knows.
+    """
+    x = 1
+    d, = jedi.Interpreter('x = UNDEFINED; x', [locals()]).infer()
+    assert d.name == 'int'
+
+
+def test_variable_reuse():
+    x = 1
+    d, = jedi.Interpreter('y = x\ny', [locals()]).infer()
+    assert d.name == 'int'
+
+
+def test_negate():
+    code = "x = -y"
+    x, = jedi.Interpreter(code, [{'y': 3}]).infer(1, 0)
+    assert x.name == 'int'
+    value, = x._name.infer()
+    assert value.get_safe_value() == -3
```
```diff
@@ -422,6 +422,39 @@ def test_decorator(Script):
     assert d.docstring(raw=True) == 'Nice docstring'
 
 
+def test_method_decorator(Script, skip_pre_python35):
+    code = dedent('''
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            """wrapper docstring"""
+            return func(*args, **kwargs)
+        return wrapper
+
+    class Foo():
+        @decorator
+        def check_user(self, f):
+            """Nice docstring"""
+            pass
+
+    Foo().check_user''')
+
+    d, = Script(code).infer()
+    assert d.docstring() == 'wrapper(f)\n\nNice docstring'
+
+
+def test_partial(Script, skip_pre_python36):
+    code = dedent('''
+    def foo():
+        'x y z'
+    from functools import partial
+    x = partial(foo)
+    x''')
+
+    for p in Script(code).infer():
+        assert p.docstring(raw=True) == 'x y z'
+
+
 def test_basic_str_init_signature(Script, disable_typeshed):
     # See GH #1414 and GH #1426
     code = dedent('''
```
```diff
@@ -14,8 +14,8 @@ TYPESHED_PYTHON3 = os.path.join(typeshed.TYPESHED_PATH, 'stdlib', '3')
 def test_get_typeshed_directories():
     def get_dirs(version_info):
         return {
-            d.replace(typeshed.TYPESHED_PATH, '').lstrip(os.path.sep)
-            for d in typeshed._get_typeshed_directories(version_info)
+            p.path.replace(typeshed.TYPESHED_PATH, '').lstrip(os.path.sep)
+            for p in typeshed._get_typeshed_directories(version_info)
         }
 
     def transform(set_):
@@ -35,11 +35,8 @@ def test_get_typeshed_directories():
 
 
 def test_get_stub_files():
-    def get_map(version_info):
-        return typeshed._create_stub_map(version_info)
-
-    map_ = typeshed._create_stub_map(TYPESHED_PYTHON3)
-    assert map_['functools'] == os.path.join(TYPESHED_PYTHON3, 'functools.pyi')
+    map_ = typeshed._create_stub_map(typeshed.PathInfo(TYPESHED_PYTHON3, is_third_party=False))
+    assert map_['functools'].path == os.path.join(TYPESHED_PYTHON3, 'functools.pyi')
 
 
 def test_function(Script, environment):
@@ -227,3 +224,25 @@ def test_goto_stubs_on_itself(Script, code, type_):
 
     _assert_is_same(same_definition, definition)
     _assert_is_same(same_definition, same_definition2)
+
+
+def test_module_exists_only_as_stub(Script):
+    try:
+        import redis
+    except ImportError:
+        pass
+    else:
+        pytest.skip('redis is already installed, it should only exist as a stub for this test')
+    redis_path = os.path.join(typeshed.TYPESHED_PATH, 'third_party', '2and3', 'redis')
+    assert os.path.isdir(redis_path)
+    assert not Script('import redis').infer()
+
+
+def test_django_exists_only_as_stub(Script):
+    try:
+        import django
+    except ImportError:
+        pass
+    else:
+        pytest.skip('django is already installed, it should only exist as a stub for this test')
+    assert not Script('import django').infer()
```
tox.ini
```diff
@@ -1,5 +1,5 @@
 [tox]
-envlist = py27, py35, py36, py37, qa
+envlist = py27, py35, py36, py37, py38, qa
 [testenv]
 extras = testing
 deps =
@@ -25,6 +25,7 @@ setenv =
     env35: JEDI_TEST_ENVIRONMENT=35
     env36: JEDI_TEST_ENVIRONMENT=36
    env37: JEDI_TEST_ENVIRONMENT=37
+    env38: JEDI_TEST_ENVIRONMENT=38
     interpreter: JEDI_TEST_ENVIRONMENT=interpreter
 commands =
     pytest {posargs}
```