Mirror of https://github.com/davidhalter/jedi.git (synced 2025-12-06 05:54:25 +08:00)
Merge branch 'master' into deprecations
14  .editorconfig  Normal file
@@ -0,0 +1,14 @@
root = true

[*]
charset = utf-8
end_of_line = lf
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true

[*.py]
indent_size = 4

[*.md]
indent_size = 2
1  .gitignore  vendored
@@ -12,4 +12,5 @@ jedi.egg-info/
record.json
/.cache/
/.pytest_cache
/.mypy_cache
/venv/
2  .readthedocs.yml  Normal file
@@ -0,0 +1,2 @@
python:
   pip_install: true
19  .travis.yml
@@ -8,6 +8,7 @@ python:
|
||||
|
||||
env:
|
||||
- JEDI_TEST_ENVIRONMENT=38
|
||||
- JEDI_TEST_ENVIRONMENT=39
|
||||
- JEDI_TEST_ENVIRONMENT=37
|
||||
- JEDI_TEST_ENVIRONMENT=36
|
||||
- JEDI_TEST_ENVIRONMENT=interpreter
|
||||
@@ -15,7 +16,7 @@ env:
|
||||
matrix:
|
||||
include:
|
||||
- python: 3.8
|
||||
script:
|
||||
script:
|
||||
- 'pip install coverage'
|
||||
- 'coverage run --source jedi -m pytest'
|
||||
- 'coverage report'
|
||||
@@ -30,8 +31,8 @@ matrix:
|
||||
install:
|
||||
- 'pip install .[qa]'
|
||||
script:
|
||||
# Ignore F401, which are unused imports. flake8 is a primitive tool and is sometimes wrong.
|
||||
- 'flake8 --extend-ignore F401 {posargs:jedi}'
|
||||
- 'flake8 jedi setup.py'
|
||||
- 'mypy jedi sith.py'
|
||||
install:
|
||||
- sudo apt-get -y install python3-venv
|
||||
- pip install .[testing]
|
||||
@@ -47,9 +48,15 @@ script:
|
||||
# Only required for JEDI_TEST_ENVIRONMENT=38, because it's not always
|
||||
# available.
|
||||
download_name=python-$test_env_version
|
||||
wget https://s3.amazonaws.com/travis-python-archives/binaries/ubuntu/16.04/x86_64/$download_name.tar.bz2
|
||||
sudo tar xjf $download_name.tar.bz2 --directory / opt/python
|
||||
ln -s "/opt/python/${test_env_version}/bin/python" /home/travis/bin/$python_bin
|
||||
if [ "$JEDI_TEST_ENVIRONMENT" == "39" ]; then
|
||||
wget https://storage.googleapis.com/travis-ci-language-archives/python/binaries/ubuntu/16.04/x86_64/python-3.9-dev.tar.bz2
|
||||
sudo tar xjf python-3.9-dev.tar.bz2 --directory / opt/python
|
||||
ln -s "/opt/python/3.9-dev/bin/python" /home/travis/bin/python3.9
|
||||
else
|
||||
wget https://s3.amazonaws.com/travis-python-archives/binaries/ubuntu/16.04/x86_64/$download_name.tar.bz2
|
||||
sudo tar xjf $download_name.tar.bz2 --directory / opt/python
|
||||
ln -s "/opt/python/${test_env_version}/bin/python" /home/travis/bin/$python_bin
|
||||
fi
|
||||
elif [ "${python_path#/opt/pyenv/shims}" != "$python_path" ]; then
|
||||
# Activate pyenv version (required with JEDI_TEST_ENVIRONMENT=36).
|
||||
pyenv_bin="$(pyenv whence --path "$python_bin" | head -n1)"
|
||||
|
||||
@@ -16,6 +16,8 @@ Unreleased
- Functions with ``@property`` now return ``property`` instead of ``function``
  in ``Name().type``
- Started using annotations
- Better support for the walrus operator
- Project attributes are now read accessible

This is likely going to be the last minor release before 1.0.
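A sketch of the ``@property`` change in the first entry above (the class and attribute names are made up for illustration)::

    import jedi

    source = '''\
    class Config:
        @property
        def path(self):
            return "/tmp"

    Config.path
    '''
    name, = jedi.Script(code=source).infer(line=6, column=8)
    print(name.type)  # expected: 'property' (was 'function' before this change)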
@@ -52,9 +52,16 @@ Jedi can currently be used with the following editors/projects:
- wdb_ - Web Debugger
- `Eric IDE`_ (Available as a plugin)
- `IPython 6.0.0+ <https://ipython.readthedocs.io/en/stable/whatsnew/version6.html>`_
- `xonsh shell <https://xon.sh/contents.html>`_ has `jedi extension <https://xon.sh/xontribs.html#jedi>`_

and many more!

There are a few language servers that use Jedi:

- `jedi-language-server <https://github.com/pappasam/jedi-language-server>`_
- `python-language-server <https://github.com/palantir/python-language-server>`_
- `anakin-language-server <https://github.com/muffinmad/anakin-language-server>`_

Here are some pictures taken from jedi-vim_:

.. image:: https://github.com/davidhalter/jedi/raw/master/docs/_screenshots/screenshot_complete.png
@@ -3,11 +3,21 @@
Using Jedi
==========

|jedi| is can be used with a variety of plugins and software. It is also possible
to use |jedi| in the :ref:`Python shell or with IPython <repl-completion>`.
|jedi| can be used with a variety of :ref:`plugins <editor-plugins>`,
:ref:`language servers <language-servers>` and other software.
It is also possible to use |jedi| in the :ref:`Python shell or with IPython
<repl-completion>`.

Below you can also find a list of :ref:`recipes for type hinting <recipes>`.

.. _language-servers:

Language Servers
----------------

- `jedi-language-server <https://github.com/pappasam/jedi-language-server>`_
- `python-language-server <https://github.com/palantir/python-language-server>`_
- `anakin-language-server <https://github.com/muffinmad/anakin-language-server>`_

.. _editor-plugins:

@@ -83,6 +93,16 @@ Web Debugger

- wdb_

xonsh shell
~~~~~~~~~~~

Jedi is a preinstalled extension in `xonsh shell <https://xon.sh/contents.html>`_.
Run the following command to enable:

::

    xontrib load jedi

and many more!

.. _repl-completion:
@@ -49,7 +49,7 @@ from jedi.inference.utils import to_list
|
||||
sys.setrecursionlimit(3000)
|
||||
|
||||
|
||||
class Script(object):
|
||||
class Script:
|
||||
"""
|
||||
A Script is the base for completions, goto or whatever you want to do with
|
||||
Jedi. The counterpart of this class is :class:`Interpreter`, which works
|
||||
@@ -122,7 +122,7 @@ class Script(object):
|
||||
self._module_node, code = self._inference_state.parse_and_get_code(
|
||||
code=code,
|
||||
path=self.path,
|
||||
use_latest_grammar=path and path.suffix == 'pyi',
|
||||
use_latest_grammar=path and path.suffix == '.pyi',
|
||||
cache=False, # No disk cache, because the current script often changes.
|
||||
diff_cache=settings.fast_parser,
|
||||
cache_path=settings.cache_directory,
|
||||
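For context on the ``'.pyi'`` fix above: ``pathlib.Path.suffix`` includes the leading dot, so the old comparison against ``'pyi'`` could never match. A minimal check::

    from pathlib import Path

    print(Path("typeshed/os.pyi").suffix)           # '.pyi'
    print(Path("typeshed/os.pyi").suffix == 'pyi')  # False (the old, broken check)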
@@ -157,6 +157,7 @@ class Script(object):
|
||||
# We are in a stub file. Try to load the stub properly.
|
||||
stub_module = load_proper_stub_module(
|
||||
self._inference_state,
|
||||
self._inference_state.latest_grammar,
|
||||
file_io,
|
||||
names,
|
||||
self._module_node
|
||||
@@ -234,6 +235,11 @@ class Script(object):
|
||||
leaf = self._module_node.get_leaf_for_position(pos)
|
||||
if leaf is None or leaf.type == 'string':
|
||||
return []
|
||||
if leaf.end_pos == (line, column) and leaf.type == 'operator':
|
||||
next_ = leaf.get_next_leaf()
|
||||
if next_.start_pos == leaf.end_pos \
|
||||
and next_.type in ('number', 'string', 'keyword'):
|
||||
leaf = next_
|
||||
|
||||
context = self._get_module_context().create_context(leaf)
|
||||
|
||||
|
||||
@@ -14,18 +14,18 @@ These classes are the biggest part of the API, because they contain
|
||||
the interesting information about all operations.
|
||||
"""
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from parso.python.tree import search_ancestor
|
||||
from parso.tree import search_ancestor
|
||||
|
||||
from jedi import settings
|
||||
from jedi import debug
|
||||
from jedi.inference.utils import unite
|
||||
from jedi.cache import memoize_method
|
||||
from jedi.inference import imports
|
||||
from jedi.inference.imports import ImportName
|
||||
from jedi.inference.compiled.mixed import MixedName
|
||||
from jedi.inference.gradual.typeshed import StubModuleValue
|
||||
from jedi.inference.names import ImportName, SubModuleName
|
||||
from jedi.inference.gradual.stub_value import StubModuleValue
|
||||
from jedi.inference.gradual.conversion import convert_names, convert_values
|
||||
from jedi.inference.base_value import ValueSet
|
||||
from jedi.api.keywords import KeywordName
|
||||
@@ -53,7 +53,7 @@ def _values_to_definitions(values):
|
||||
return [Name(c.inference_state, c.name) for c in values]
|
||||
|
||||
|
||||
class BaseName(object):
|
||||
class BaseName:
|
||||
"""
|
||||
The base class for all definitions, completions and signatures.
|
||||
"""
|
||||
@@ -92,17 +92,15 @@ class BaseName(object):
|
||||
return self._name.get_root_context()
|
||||
|
||||
@property
|
||||
def module_path(self) -> Optional[str]:
|
||||
def module_path(self) -> Optional[Path]:
|
||||
"""
|
||||
Shows the file path of a module. e.g. ``/usr/lib/python3.9/os.py``
|
||||
|
||||
:rtype: str or None
|
||||
"""
|
||||
module = self._get_module_context()
|
||||
if module.is_stub() or not module.is_compiled():
|
||||
# Compiled modules should not return a module path even if they
|
||||
# have one.
|
||||
path = self._get_module_context().py__file__()
|
||||
path: Optional[Path] = self._get_module_context().py__file__()
|
||||
if path is not None:
|
||||
return path
|
||||
|
||||
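The annotation change above means ``BaseName.module_path`` now yields a ``pathlib.Path`` (or ``None``) instead of a string. A hedged sketch of what API consumers should expect (the inferred module here is only an example)::

    from pathlib import Path
    import jedi

    definition, = jedi.Script(code="import json\njson").infer(line=2, column=2)
    path = definition.module_path  # pathlib.Path or None, no longer a str
    assert path is None or isinstance(path, Path)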
@@ -185,7 +183,7 @@ class BaseName(object):
|
||||
tree_name.is_definition():
|
||||
resolve = True
|
||||
|
||||
if isinstance(self._name, imports.SubModuleName) or resolve:
|
||||
if isinstance(self._name, SubModuleName) or resolve:
|
||||
for value in self._name.infer():
|
||||
return value.api_type
|
||||
return self._name.api_type
|
||||
@@ -720,6 +718,24 @@ class Completion(BaseName):
|
||||
|
||||
return super().type
|
||||
|
||||
def get_completion_prefix_length(self):
|
||||
"""
|
||||
Returns the length of the prefix being completed.
|
||||
For example, completing ``isinstance``::
|
||||
|
||||
isinstan# <-- Cursor is here
|
||||
|
||||
would return 8, because len('isinstan') == 8.
|
||||
|
||||
Assuming the following function definition::
|
||||
|
||||
def foo(param=0):
|
||||
pass
|
||||
|
||||
completing ``foo(par`` would return 3.
|
||||
"""
|
||||
return self._like_name_length
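A short usage sketch of the new method (the snippet being completed is only illustrative)::

    import jedi

    script = jedi.Script(code="import os\nos.path.isfi")
    completion = script.complete(line=2, column=len("os.path.isfi"))[0]
    print(completion.name)                             # e.g. 'isfile'
    print(completion.get_completion_prefix_length())   # 4, because len('isfi') == 4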
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: %s>' % (type(self).__name__, self._name.get_public_name())
|
||||
|
||||
|
||||
@@ -1,7 +1,13 @@
|
||||
_cache = {}
|
||||
from typing import Dict, Tuple, Callable
|
||||
|
||||
CacheValues = Tuple[str, str, str]
|
||||
CacheValuesCallback = Callable[[], CacheValues]
|
||||
|
||||
|
||||
def save_entry(module_name, name, cache):
|
||||
_cache: Dict[str, Dict[str, CacheValues]] = {}
|
||||
|
||||
|
||||
def save_entry(module_name: str, name: str, cache: CacheValues) -> None:
|
||||
try:
|
||||
module_cache = _cache[module_name]
|
||||
except KeyError:
|
||||
@@ -9,8 +15,8 @@ def save_entry(module_name, name, cache):
|
||||
module_cache[name] = cache
|
||||
|
||||
|
||||
def _create_get_from_cache(number):
|
||||
def _get_from_cache(module_name, name, get_cache_values):
|
||||
def _create_get_from_cache(number: int) -> Callable[[str, str, CacheValuesCallback], str]:
|
||||
def _get_from_cache(module_name: str, name: str, get_cache_values: CacheValuesCallback) -> str:
|
||||
try:
|
||||
return _cache[module_name][name][number]
|
||||
except KeyError:
|
||||
|
||||
@@ -30,7 +30,7 @@ class InvalidPythonEnvironment(Exception):
|
||||
"""
|
||||
|
||||
|
||||
class _BaseEnvironment(object):
|
||||
class _BaseEnvironment:
|
||||
@memoize_method
|
||||
def get_grammar(self):
|
||||
version_string = '%s.%s' % (self.version_info.major, self.version_info.minor)
|
||||
@@ -121,7 +121,7 @@ class Environment(_BaseEnvironment):
|
||||
return self._get_subprocess().get_sys_path()
|
||||
|
||||
|
||||
class _SameEnvironmentMixin(object):
|
||||
class _SameEnvironmentMixin:
|
||||
def __init__(self):
|
||||
self._start_executable = self.executable = sys.executable
|
||||
self.path = sys.prefix
|
||||
@@ -384,7 +384,8 @@ def _get_executable_path(path, safe=True):
|
||||
|
||||
|
||||
def _get_executables_from_windows_registry(version):
|
||||
import winreg
|
||||
# https://github.com/python/typeshed/pull/3794 adds winreg
|
||||
import winreg # type: ignore[import]
|
||||
|
||||
# TODO: support Python Anaconda.
|
||||
sub_keys = [
|
||||
|
||||
@@ -8,7 +8,7 @@ def parso_to_jedi_errors(grammar, module_node):
|
||||
return [SyntaxError(e) for e in grammar.iter_errors(module_node)]
|
||||
|
||||
|
||||
class SyntaxError(object):
|
||||
class SyntaxError:
|
||||
"""
|
||||
Syntax errors are generated by :meth:`.Script.get_syntax_errors`.
|
||||
"""
|
||||
|
||||
@@ -203,7 +203,7 @@ def filter_follow_imports(names, follow_builtin_imports=False):
|
||||
yield name
|
||||
|
||||
|
||||
class CallDetails(object):
|
||||
class CallDetails:
|
||||
def __init__(self, bracket_leaf, children, position):
|
||||
['bracket_leaf', 'call_index', 'keyword_name_str']
|
||||
self.bracket_leaf = bracket_leaf
|
||||
|
||||
@@ -17,7 +17,7 @@ def _create(inference_state, obj):
|
||||
)
|
||||
|
||||
|
||||
class NamespaceObject(object):
|
||||
class NamespaceObject:
|
||||
def __init__(self, dct):
|
||||
self.__dict__ = dct
|
||||
|
||||
|
||||
@@ -1,9 +1,16 @@
|
||||
import pydoc
|
||||
from contextlib import suppress
|
||||
from typing import Dict, Optional
|
||||
|
||||
from jedi.inference.names import AbstractArbitraryName
|
||||
|
||||
from pydoc_data import topics as pydoc_topics
|
||||
try:
|
||||
# https://github.com/python/typeshed/pull/4351 adds pydoc_data
|
||||
from pydoc_data import topics # type: ignore[import]
|
||||
pydoc_topics: Optional[Dict[str, str]] = topics.topics
|
||||
except ImportError:
|
||||
# Python 3.6.8 embeddable does not have pydoc_data.
|
||||
pydoc_topics = None
|
||||
|
||||
|
||||
class KeywordName(AbstractArbitraryName):
|
||||
@@ -40,6 +47,6 @@ def imitate_pydoc(string):
|
||||
return ''
|
||||
|
||||
try:
|
||||
return pydoc_topics.topics[label].strip() if pydoc_topics else ''
|
||||
return pydoc_topics[label].strip() if pydoc_topics else ''
|
||||
except KeyError:
|
||||
return ''
|
||||
|
||||
@@ -56,7 +56,7 @@ def _remove_duplicates_from_path(path):
|
||||
yield p
|
||||
|
||||
|
||||
class Project(object):
|
||||
class Project:
|
||||
"""
|
||||
Projects are a simple way to manage Python folders and define how Jedi does
|
||||
import resolution. It is mostly used as a parameter to :class:`.Script`.
|
||||
@@ -152,6 +152,29 @@ class Project(object):
|
||||
"""
|
||||
return self._path
|
||||
|
||||
@property
|
||||
def sys_path(self):
|
||||
"""
|
||||
The sys path provided to this project. This can be None and in that
|
||||
case will be auto generated.
|
||||
"""
|
||||
return self._sys_path
|
||||
|
||||
@property
|
||||
def smart_sys_path(self):
|
||||
"""
|
||||
Whether the sys path will be calculated in a smart way, where
|
||||
additional paths are added.
|
||||
"""
|
||||
return self._smart_sys_path
|
||||
|
||||
@property
|
||||
def load_unsafe_extensions(self):
|
||||
"""
|
||||
Whether the project loads unsafe extensions.
|
||||
"""
|
||||
return self._load_unsafe_extensions
|
||||
|
||||
@inference_state_as_method_param_cache()
|
||||
def _get_base_sys_path(self, inference_state):
|
||||
# The sys path has not been set explicitly.
|
||||
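The newly exposed read-only attributes can be inspected directly; a minimal sketch (the project path is hypothetical)::

    import jedi

    project = jedi.Project('/path/to/repo', smart_sys_path=True)
    print(project.path)                    # /path/to/repo
    print(project.sys_path)                # None here, so it will be auto generated
    print(project.smart_sys_path)          # True
    print(project.load_unsafe_extensions)  # False unless explicitly enabled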
@@ -343,8 +366,11 @@ class Project(object):
|
||||
|
||||
def _is_potential_project(path):
|
||||
for name in _CONTAINS_POTENTIAL_PROJECT:
|
||||
if path.joinpath(name).exists():
|
||||
return True
|
||||
try:
|
||||
if path.joinpath(name).exists():
|
||||
return True
|
||||
except OSError:
|
||||
continue
|
||||
return False
|
||||
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ EXPRESSION_PARTS = (
|
||||
).split()
|
||||
|
||||
|
||||
class ChangedFile(object):
|
||||
class ChangedFile:
|
||||
def __init__(self, inference_state, from_path, to_path,
|
||||
module_node, node_to_str_map):
|
||||
self._inference_state = inference_state
|
||||
@@ -72,7 +72,7 @@ class ChangedFile(object):
|
||||
return '<%s: %s>' % (self.__class__.__name__, self._from_path)
|
||||
|
||||
|
||||
class Refactoring(object):
|
||||
class Refactoring:
|
||||
def __init__(self, inference_state, file_to_node_changes, renames=()):
|
||||
self._inference_state = inference_state
|
||||
self._renames = renames
|
||||
|
||||
@@ -13,14 +13,15 @@ these variables are being cleaned after every API usage.
|
||||
"""
|
||||
import time
|
||||
from functools import wraps
|
||||
from typing import Any, Dict, Tuple
|
||||
|
||||
from jedi import settings
|
||||
from parso.cache import parser_cache
|
||||
|
||||
_time_caches = {}
|
||||
_time_caches: Dict[str, Dict[Any, Tuple[float, Any]]] = {}
|
||||
|
||||
|
||||
def clear_time_caches(delete_all=False):
|
||||
def clear_time_caches(delete_all: bool = False) -> None:
|
||||
""" Jedi caches many things, that should be completed after each completion
|
||||
finishes.
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import os
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
from typing import Callable, Optional
|
||||
|
||||
_inited = False
|
||||
|
||||
@@ -20,7 +21,7 @@ try:
|
||||
raise ImportError
|
||||
else:
|
||||
# Use colorama for nicer console output.
|
||||
from colorama import Fore, init
|
||||
from colorama import Fore, init # type: ignore[import]
|
||||
from colorama import initialise
|
||||
|
||||
def _lazy_colorama_init(): # noqa: F811
|
||||
@@ -45,7 +46,7 @@ try:
|
||||
_inited = True
|
||||
|
||||
except ImportError:
|
||||
class Fore(object):
|
||||
class Fore: # type: ignore[no-redef]
|
||||
RED = ''
|
||||
GREEN = ''
|
||||
YELLOW = ''
|
||||
@@ -62,7 +63,7 @@ enable_warning = False
|
||||
enable_notice = False
|
||||
|
||||
# callback, interface: level, str
|
||||
debug_function = None
|
||||
debug_function: Optional[Callable[[str, str], None]] = None
|
||||
_debug_indent = 0
|
||||
_start_time = time.time()
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import os
|
||||
from parso import file_io
|
||||
|
||||
|
||||
class AbstractFolderIO(object):
|
||||
class AbstractFolderIO:
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
|
||||
@@ -57,7 +57,7 @@ class FolderIO(AbstractFolderIO):
|
||||
del dirs[i]
|
||||
|
||||
|
||||
class FileIOFolderMixin(object):
|
||||
class FileIOFolderMixin:
|
||||
def get_parent_folder(self):
|
||||
return FolderIO(os.path.dirname(self.path))
|
||||
|
||||
|
||||
@@ -81,7 +81,7 @@ from jedi.inference.imports import follow_error_node_imports_if_possible
|
||||
from jedi.plugins import plugin_manager
|
||||
|
||||
|
||||
class InferenceState(object):
|
||||
class InferenceState:
|
||||
def __init__(self, project, environment=None, script_path=None):
|
||||
if environment is None:
|
||||
environment = project.get_environment()
|
||||
@@ -120,14 +120,15 @@ class InferenceState(object):
|
||||
debug.dbg('execute result: %s in %s', value_set, value)
|
||||
return value_set
|
||||
|
||||
@property
|
||||
# mypy doesn't support decorated properties (https://github.com/python/mypy/issues/1362)
|
||||
@property # type: ignore[misc]
|
||||
@inference_state_function_cache()
|
||||
def builtins_module(self):
|
||||
module_name = 'builtins'
|
||||
builtins_module, = self.import_module((module_name,), sys_path=())
|
||||
return builtins_module
|
||||
|
||||
@property
|
||||
@property # type: ignore[misc]
|
||||
@inference_state_function_cache()
|
||||
def typing_module(self):
|
||||
typing_module, = self.import_module(('typing',))
|
||||
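The ``# type: ignore[misc]`` comments above work around mypy's lack of support for decorated properties; a minimal sketch of the same pattern outside Jedi, with ``lru_cache`` standing in for Jedi's own cache decorator::

    from functools import lru_cache


    class Demo:
        @property  # type: ignore[misc]  # mypy cannot type decorated properties
        @lru_cache(maxsize=None)
        def answer(self) -> int:
            return 42


    print(Demo().answer)  # 42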
@@ -169,6 +170,8 @@ class InferenceState(object):
|
||||
return tree_name_to_values(self, context, name)
|
||||
elif type_ == 'param':
|
||||
return context.py__getattribute__(name.value, position=name.end_pos)
|
||||
elif type_ == 'namedexpr_test':
|
||||
return context.infer_node(def_)
|
||||
else:
|
||||
result = follow_error_node_imports_if_possible(context, name)
|
||||
if result is not None:
|
||||
|
||||
@@ -26,7 +26,7 @@ CODES = {
|
||||
}
|
||||
|
||||
|
||||
class Error(object):
|
||||
class Error:
|
||||
def __init__(self, name, module_path, start_pos, message=None):
|
||||
self.path = module_path
|
||||
self._start_pos = start_pos
|
||||
|
||||
@@ -124,7 +124,7 @@ def _parse_argument_clinic(string):
|
||||
allow_kwargs = True
|
||||
|
||||
|
||||
class _AbstractArgumentsMixin(object):
|
||||
class _AbstractArgumentsMixin:
|
||||
def unpack(self, funcdef=None):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ from jedi.cache import memoize_method
|
||||
sentinel = object()
|
||||
|
||||
|
||||
class HelperValueMixin(object):
|
||||
class HelperValueMixin:
|
||||
def get_root_context(self):
|
||||
value = self
|
||||
if value.parent_context is None:
|
||||
@@ -111,7 +111,7 @@ class HelperValueMixin(object):
|
||||
.py__getattribute__('__anext__').execute_with_values()
|
||||
.py__getattribute__('__await__').execute_with_values()
|
||||
.py__stop_iteration_returns()
|
||||
) # noqa
|
||||
) # noqa: E124
|
||||
])
|
||||
return self.py__iter__(contextualized_node)
|
||||
|
||||
@@ -363,7 +363,7 @@ class TreeValue(Value):
|
||||
return '<%s: %s>' % (self.__class__.__name__, self.tree_node)
|
||||
|
||||
|
||||
class ContextualizedNode(object):
|
||||
class ContextualizedNode:
|
||||
def __init__(self, context, node):
|
||||
self.context = context
|
||||
self.node = node
|
||||
@@ -405,7 +405,7 @@ def _getitem(value, index_values, contextualized_node):
|
||||
return result
|
||||
|
||||
|
||||
class ValueSet(object):
|
||||
class ValueSet:
|
||||
def __init__(self, iterable):
|
||||
self._set = frozenset(iterable)
|
||||
for value in iterable:
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
# This file also re-exports symbols for wider use. We configure mypy and flake8
|
||||
# to be aware that this file does this.
|
||||
|
||||
from jedi.inference.compiled.value import CompiledValue, CompiledName, \
|
||||
CompiledValueFilter, CompiledValueName, create_from_access_path
|
||||
from jedi.inference.base_value import LazyValueWrapper
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import inspect
|
||||
import types
|
||||
import traceback
|
||||
import sys
|
||||
import operator as op
|
||||
from collections import namedtuple
|
||||
@@ -117,13 +118,18 @@ def load_module(inference_state, dotted_name, sys_path):
|
||||
__import__(dotted_name)
|
||||
except ImportError:
|
||||
# If a module is "corrupt" or not really a Python module or whatever.
|
||||
print('Module %s not importable in path %s.' % (dotted_name, sys_path), file=sys.stderr)
|
||||
warnings.warn(
|
||||
"Module %s not importable in path %s." % (dotted_name, sys_path),
|
||||
UserWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return None
|
||||
except Exception:
|
||||
# Since __import__ pretty much makes code execution possible, just
|
||||
# catch any error here and print it.
|
||||
import traceback
|
||||
print("Cannot import:\n%s" % traceback.format_exc(), file=sys.stderr)
|
||||
warnings.warn(
|
||||
"Cannot import:\n%s" % traceback.format_exc(), UserWarning, stacklevel=2
|
||||
)
|
||||
return None
|
||||
finally:
|
||||
sys.path = temp
|
||||
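Because these failures are now reported through ``warnings.warn`` rather than printed to stderr, callers can filter or capture them with the standard machinery; a small illustration (the warning text is only an example)::

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always", UserWarning)
        # This mirrors what load_module() now does on an import failure:
        warnings.warn("Module foo not importable in path [].", UserWarning, stacklevel=2)

    print([str(w.message) for w in caught])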
@@ -134,7 +140,7 @@ def load_module(inference_state, dotted_name, sys_path):
|
||||
return create_access_path(inference_state, module)
|
||||
|
||||
|
||||
class AccessPath(object):
|
||||
class AccessPath:
|
||||
def __init__(self, accesses):
|
||||
self.accesses = accesses
|
||||
|
||||
@@ -156,7 +162,7 @@ def get_api_type(obj):
|
||||
return 'instance'
|
||||
|
||||
|
||||
class DirectObjectAccess(object):
|
||||
class DirectObjectAccess:
|
||||
def __init__(self, inference_state, obj):
|
||||
self._inference_state = inference_state
|
||||
self._obj = obj
|
||||
|
||||
@@ -3,7 +3,7 @@ Used only for REPL Completion.
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from jedi.parser_utils import get_cached_code_lines
|
||||
|
||||
@@ -190,8 +190,16 @@ def _find_syntax_node_name(inference_state, python_object):
|
||||
except TypeError:
|
||||
# The type might not be known (e.g. class_with_dict.__weakref__)
|
||||
return None
|
||||
if path is None or not os.path.exists(path):
|
||||
# The path might not exist or be e.g. <stdin>.
|
||||
path = None if path is None else Path(path)
|
||||
try:
|
||||
if path is None or not path.exists():
|
||||
# The path might not exist or be e.g. <stdin>.
|
||||
return None
|
||||
except OSError:
|
||||
# Might raise an OSError on Windows:
|
||||
#
|
||||
# [WinError 123] The filename, directory name, or volume label
|
||||
# syntax is incorrect: '<string>'
|
||||
return None
|
||||
|
||||
file_io = FileIO(path)
|
||||
|
||||
@@ -29,19 +29,19 @@ _MAIN_PATH = os.path.join(os.path.dirname(__file__), '__main__.py')
|
||||
PICKLE_PROTOCOL = 4
|
||||
|
||||
|
||||
class _GeneralizedPopen(subprocess.Popen):
|
||||
def __init__(self, *args, **kwargs):
|
||||
if os.name == 'nt':
|
||||
try:
|
||||
# Was introduced in Python 3.7.
|
||||
CREATE_NO_WINDOW = subprocess.CREATE_NO_WINDOW
|
||||
except AttributeError:
|
||||
CREATE_NO_WINDOW = 0x08000000
|
||||
kwargs['creationflags'] = CREATE_NO_WINDOW
|
||||
# The child process doesn't need file descriptors except 0, 1, 2.
|
||||
# This is unix only.
|
||||
kwargs['close_fds'] = 'posix' in sys.builtin_module_names
|
||||
super().__init__(*args, **kwargs)
|
||||
def _GeneralizedPopen(*args, **kwargs):
|
||||
if os.name == 'nt':
|
||||
try:
|
||||
# Was introduced in Python 3.7.
|
||||
CREATE_NO_WINDOW = subprocess.CREATE_NO_WINDOW
|
||||
except AttributeError:
|
||||
CREATE_NO_WINDOW = 0x08000000
|
||||
kwargs['creationflags'] = CREATE_NO_WINDOW
|
||||
# The child process doesn't need file descriptors except 0, 1, 2.
|
||||
# This is unix only.
|
||||
kwargs['close_fds'] = 'posix' in sys.builtin_module_names
|
||||
|
||||
return subprocess.Popen(*args, **kwargs)
|
||||
|
||||
|
||||
def _enqueue_output(out, queue_):
|
||||
@@ -81,7 +81,7 @@ def _cleanup_process(process, thread):
|
||||
pass
|
||||
|
||||
|
||||
class _InferenceStateProcess(object):
|
||||
class _InferenceStateProcess:
|
||||
def __init__(self, inference_state):
|
||||
self._inference_state_weakref = weakref.ref(inference_state)
|
||||
self._inference_state_id = id(inference_state)
|
||||
@@ -162,7 +162,7 @@ class InferenceStateSubprocess(_InferenceStateProcess):
|
||||
self._compiled_subprocess.delete_inference_state(self._inference_state_id)
|
||||
|
||||
|
||||
class CompiledSubprocess(object):
|
||||
class CompiledSubprocess:
|
||||
is_crashed = False
|
||||
|
||||
def __init__(self, executable, env_vars=None):
|
||||
@@ -280,7 +280,7 @@ class CompiledSubprocess(object):
|
||||
self._inference_state_deletion_queue.append(inference_state_id)
|
||||
|
||||
|
||||
class Listener(object):
|
||||
class Listener:
|
||||
def __init__(self):
|
||||
self._inference_states = {}
|
||||
# TODO refactor so we don't need to process anymore just handle
|
||||
@@ -346,7 +346,7 @@ class Listener(object):
|
||||
pickle_dump(result, stdout, PICKLE_PROTOCOL)
|
||||
|
||||
|
||||
class AccessHandle(object):
|
||||
class AccessHandle:
|
||||
def __init__(self, subprocess, access, id_):
|
||||
self.access = access
|
||||
self._subprocess = subprocess
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import os
|
||||
import sys
|
||||
from importlib.abc import MetaPathFinder
|
||||
from importlib.machinery import PathFinder
|
||||
|
||||
# Remove the first entry, because it's simply a directory entry that equals
|
||||
@@ -16,7 +17,7 @@ def _get_paths():
|
||||
return {'jedi': _jedi_path, 'parso': _parso_path}
|
||||
|
||||
|
||||
class _ExactImporter(object):
|
||||
class _ExactImporter(MetaPathFinder):
|
||||
def __init__(self, path_dct):
|
||||
self._path_dct = path_dct
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ import os
|
||||
import inspect
|
||||
import importlib
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
from zipimport import zipimporter
|
||||
from importlib.machinery import all_suffixes
|
||||
|
||||
@@ -211,7 +212,7 @@ def _from_loader(loader, string):
|
||||
if code is None:
|
||||
return None, is_package
|
||||
if isinstance(loader, zipimporter):
|
||||
return ZipFileIO(module_path, code, cast_path(loader.archive)), is_package
|
||||
return ZipFileIO(module_path, code, Path(cast_path(loader.archive))), is_package
|
||||
|
||||
return KnownContentFileIO(module_path, code), is_package
|
||||
|
||||
@@ -229,7 +230,7 @@ def _get_source(loader, fullname):
|
||||
name=fullname)
|
||||
|
||||
|
||||
class ImplicitNSInfo(object):
|
||||
class ImplicitNSInfo:
|
||||
"""Stores information returned from an implicit namespace spec"""
|
||||
def __init__(self, name, paths):
|
||||
self.name = name
|
||||
|
||||
@@ -22,7 +22,7 @@ from jedi.inference.signature import BuiltinSignature
|
||||
from jedi.inference.context import CompiledContext, CompiledModuleContext
|
||||
|
||||
|
||||
class CheckAttribute(object):
|
||||
class CheckAttribute:
|
||||
"""Raises :exc:`AttributeError` if the attribute X is not available."""
|
||||
def __init__(self, check_name=None):
|
||||
# Remove the py in front of e.g. py__call__.
|
||||
@@ -324,8 +324,7 @@ class CompiledName(AbstractNameDefinition):
|
||||
self.string_name = name
|
||||
|
||||
def py__doc__(self):
|
||||
value, = self.infer()
|
||||
return value.py__doc__()
|
||||
return self.infer_compiled_value().py__doc__()
|
||||
|
||||
def _get_qualified_names(self):
|
||||
parent_qualified_names = self.parent_context.get_qualified_names()
|
||||
@@ -349,16 +348,12 @@ class CompiledName(AbstractNameDefinition):
|
||||
|
||||
@property
|
||||
def api_type(self):
|
||||
api = self.infer()
|
||||
# If we can't find the type, assume it is an instance variable
|
||||
if not api:
|
||||
return "instance"
|
||||
return next(iter(api)).api_type
|
||||
return self.infer_compiled_value().api_type
|
||||
|
||||
@memoize_method
|
||||
def infer(self):
|
||||
return ValueSet([self.infer_compiled_value()])
|
||||
|
||||
@memoize_method
|
||||
def infer_compiled_value(self):
|
||||
return create_from_name(self._inference_state, self._parent_value, self.string_name)
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ from jedi import debug
|
||||
from jedi import parser_utils
|
||||
|
||||
|
||||
class AbstractContext(object):
|
||||
class AbstractContext:
|
||||
# Must be defined: inference_state and tree_node and parent_context as an attribute/property
|
||||
|
||||
def __init__(self, inference_state):
|
||||
@@ -216,7 +216,7 @@ class ValueContext(AbstractContext):
|
||||
return '%s(%s)' % (self.__class__.__name__, self._value)
|
||||
|
||||
|
||||
class TreeContextMixin(object):
|
||||
class TreeContextMixin:
|
||||
def infer_node(self, node):
|
||||
from jedi.inference.syntax_tree import infer_node
|
||||
return infer_node(self, node)
|
||||
|
||||
@@ -50,7 +50,7 @@ def _get_numpy_doc_string_cls():
|
||||
global _numpy_doc_string_cache
|
||||
if isinstance(_numpy_doc_string_cache, (ImportError, SyntaxError)):
|
||||
raise _numpy_doc_string_cache
|
||||
from numpydoc.docscrape import NumpyDocString
|
||||
from numpydoc.docscrape import NumpyDocString # type: ignore[import]
|
||||
_numpy_doc_string_cache = NumpyDocString
|
||||
return _numpy_doc_string_cache
|
||||
|
||||
@@ -113,7 +113,7 @@ def _expand_typestr(type_str):
|
||||
elif type_str.startswith('{'):
|
||||
node = parse(type_str, version='3.7').children[0]
|
||||
if node.type == 'atom':
|
||||
for leaf in node.children[1].children:
|
||||
for leaf in getattr(node.children[1], "children", []):
|
||||
if leaf.type == 'number':
|
||||
if '.' in leaf.value:
|
||||
yield 'float'
|
||||
|
||||
@@ -3,9 +3,11 @@ Filters are objects that you can use to filter names in different scopes. They
|
||||
are needed for name resolution.
|
||||
"""
|
||||
from abc import abstractmethod
|
||||
from typing import List, MutableMapping, Type
|
||||
import weakref
|
||||
|
||||
from parso.tree import search_ancestor
|
||||
from parso.python.tree import Name, UsedNamesMapping
|
||||
|
||||
from jedi.inference import flow_analysis
|
||||
from jedi.inference.base_value import ValueSet, ValueWrapper, \
|
||||
@@ -13,12 +15,13 @@ from jedi.inference.base_value import ValueSet, ValueWrapper, \
|
||||
from jedi.parser_utils import get_cached_parent_scope
|
||||
from jedi.inference.utils import to_list
|
||||
from jedi.inference.names import TreeNameDefinition, ParamName, \
|
||||
AnonymousParamName, AbstractNameDefinition
|
||||
AnonymousParamName, AbstractNameDefinition, NameWrapper
|
||||
|
||||
_definition_name_cache: MutableMapping[UsedNamesMapping, List[Name]]
|
||||
_definition_name_cache = weakref.WeakKeyDictionary()
|
||||
|
||||
|
||||
class AbstractFilter(object):
|
||||
class AbstractFilter:
|
||||
_until_position = None
|
||||
|
||||
def _filter(self, names):
|
||||
@@ -35,8 +38,8 @@ class AbstractFilter(object):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class FilterWrapper(object):
|
||||
name_wrapper_class = None
|
||||
class FilterWrapper:
|
||||
name_wrapper_class: Type[NameWrapper]
|
||||
|
||||
def __init__(self, wrapped_filter):
|
||||
self._wrapped_filter = wrapped_filter
|
||||
@@ -229,7 +232,7 @@ class DictFilter(AbstractFilter):
|
||||
return '<%s: for {%s}>' % (self.__class__.__name__, keys)
|
||||
|
||||
|
||||
class MergedFilter(object):
|
||||
class MergedFilter:
|
||||
def __init__(self, *filters):
|
||||
self._filters = filters
|
||||
|
||||
@@ -320,7 +323,7 @@ class _OverwriteMeta(type):
|
||||
cls.overwritten_methods = base_dct
|
||||
|
||||
|
||||
class _AttributeOverwriteMixin(object):
|
||||
class _AttributeOverwriteMixin:
|
||||
def get_filters(self, *args, **kwargs):
|
||||
yield SpecialMethodFilter(self, self.overwritten_methods, self._wrapped_value)
|
||||
yield from self._wrapped_value.get_filters(*args, **kwargs)
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
from typing import Dict, Optional
|
||||
|
||||
from jedi.parser_utils import get_flow_branch_keyword, is_scope, get_parent_scope
|
||||
from jedi.inference.recursion import execution_allowed
|
||||
from jedi.inference.helpers import is_big_annoying_library
|
||||
|
||||
|
||||
class Status(object):
|
||||
lookup_table = {}
|
||||
class Status:
|
||||
lookup_table: Dict[Optional[bool], 'Status'] = {}
|
||||
|
||||
def __init__(self, value, name):
|
||||
def __init__(self, value: Optional[bool], name: str) -> None:
|
||||
self._value = value
|
||||
self._name = name
|
||||
Status.lookup_table[value] = self
|
||||
|
||||
@@ -53,8 +53,10 @@ def _infer_annotation_string(context, string, index=None):
|
||||
value_set = context.infer_node(node)
|
||||
if index is not None:
|
||||
value_set = value_set.filter(
|
||||
lambda value: value.array_type == 'tuple' # noqa
|
||||
and len(list(value.py__iter__())) >= index
|
||||
lambda value: (
|
||||
value.array_type == 'tuple'
|
||||
and len(list(value.py__iter__())) >= index
|
||||
)
|
||||
).py__simple_getitem__(index)
|
||||
return value_set
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ class _BoundTypeVarName(AbstractNameDefinition):
|
||||
return '<%s %s -> %s>' % (self.__class__.__name__, self.py__name__(), self._value_set)
|
||||
|
||||
|
||||
class _TypeVarFilter(object):
|
||||
class _TypeVarFilter:
|
||||
"""
|
||||
A filter for all given variables in a class.
|
||||
|
||||
@@ -246,7 +246,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
|
||||
return type_var_dict
|
||||
|
||||
|
||||
class _LazyGenericBaseClass(object):
|
||||
class _LazyGenericBaseClass:
|
||||
def __init__(self, class_value, lazy_base_class, generics_manager):
|
||||
self._class_value = class_value
|
||||
self._lazy_base_class = lazy_base_class
|
||||
|
||||
@@ -23,7 +23,7 @@ def _resolve_forward_references(context, value_set):
|
||||
yield value
|
||||
|
||||
|
||||
class _AbstractGenericManager(object):
|
||||
class _AbstractGenericManager:
|
||||
def get_index_and_execute(self, index):
|
||||
try:
|
||||
return self[index].execute_annotation()
|
||||
|
||||
@@ -2,6 +2,7 @@ import os
|
||||
import re
|
||||
from functools import wraps
|
||||
from collections import namedtuple
|
||||
from typing import Dict, Mapping, Tuple
|
||||
from pathlib import Path
|
||||
|
||||
from jedi import settings
|
||||
@@ -74,7 +75,7 @@ def _get_typeshed_directories(version_info):
|
||||
yield PathInfo(str(base_path.joinpath(check_version)), is_third_party)
|
||||
|
||||
|
||||
_version_cache = {}
|
||||
_version_cache: Dict[Tuple[int, int], Mapping[str, PathInfo]] = {}
|
||||
|
||||
|
||||
def _cache_stub_file_map(version_info):
|
||||
@@ -278,8 +279,8 @@ def _try_to_load_stub_from_file(inference_state, python_value_set, file_io, impo
|
||||
return None
|
||||
else:
|
||||
return create_stub_module(
|
||||
inference_state, python_value_set, stub_module_node, file_io,
|
||||
import_names
|
||||
inference_state, inference_state.latest_grammar, python_value_set,
|
||||
stub_module_node, file_io, import_names
|
||||
)
|
||||
|
||||
|
||||
@@ -293,7 +294,8 @@ def parse_stub_module(inference_state, file_io):
|
||||
)
|
||||
|
||||
|
||||
def create_stub_module(inference_state, python_value_set, stub_module_node, file_io, import_names):
|
||||
def create_stub_module(inference_state, grammar, python_value_set,
|
||||
stub_module_node, file_io, import_names):
|
||||
if import_names == ('typing',):
|
||||
module_cls = TypingModuleWrapper
|
||||
else:
|
||||
@@ -305,7 +307,7 @@ def create_stub_module(inference_state, python_value_set, stub_module_node, file
|
||||
string_names=import_names,
|
||||
# The code was loaded with latest_grammar, so use
|
||||
# that.
|
||||
code_lines=get_cached_code_lines(inference_state.latest_grammar, file_io.path),
|
||||
code_lines=get_cached_code_lines(grammar, file_io.path),
|
||||
is_package=file_name == '__init__.pyi',
|
||||
)
|
||||
return stub_module_value
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from jedi.inference.gradual.typeshed import TYPESHED_PATH, create_stub_module
|
||||
|
||||
|
||||
def load_proper_stub_module(inference_state, file_io, import_names, module_node):
|
||||
def load_proper_stub_module(inference_state, grammar, file_io, import_names, module_node):
|
||||
"""
|
||||
This function is given a random .pyi file and should return the proper
|
||||
module.
|
||||
@@ -28,7 +27,8 @@ def load_proper_stub_module(inference_state, file_io, import_names, module_node)
|
||||
actual_value_set = inference_state.import_module(import_names, prefer_stubs=False)
|
||||
|
||||
stub = create_stub_module(
|
||||
inference_state, actual_value_set, module_node, file_io, import_names
|
||||
inference_state, grammar, actual_value_set,
|
||||
module_node, file_io, import_names
|
||||
)
|
||||
inference_state.stub_module_cache[import_names] = stub
|
||||
return stub
|
||||
|
||||
@@ -32,7 +32,7 @@ from jedi.inference.compiled.subprocess.functions import ImplicitNSInfo
|
||||
from jedi.plugins import plugin_manager
|
||||
|
||||
|
||||
class ModuleCache(object):
|
||||
class ModuleCache:
|
||||
def __init__(self):
|
||||
self._name_cache = {}
|
||||
|
||||
@@ -150,7 +150,7 @@ def _level_to_base_import_path(project_path, directory, level):
|
||||
return None, directory
|
||||
|
||||
|
||||
class Importer(object):
|
||||
class Importer:
|
||||
def __init__(self, inference_state, import_path, module_context, level=0):
|
||||
"""
|
||||
An implementation similar to ``__import__``. Use `follow`
|
||||
@@ -498,8 +498,8 @@ def load_module_from_path(inference_state, file_io, import_names=None, is_packag
|
||||
values = NO_VALUES
|
||||
|
||||
return create_stub_module(
|
||||
inference_state, values, parse_stub_module(inference_state, file_io),
|
||||
file_io, import_names
|
||||
inference_state, inference_state.latest_grammar, values,
|
||||
parse_stub_module(inference_state, file_io), file_io, import_names
|
||||
)
|
||||
else:
|
||||
module = _load_python_module(
|
||||
|
||||
@@ -2,7 +2,7 @@ from jedi.inference.base_value import ValueSet, NO_VALUES
|
||||
from jedi.common import monkeypatch
|
||||
|
||||
|
||||
class AbstractLazyValue(object):
|
||||
class AbstractLazyValue:
|
||||
def __init__(self, data, min=1, max=1):
|
||||
self.data = data
|
||||
self.min = min
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
from abc import abstractmethod
|
||||
from inspect import Parameter
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from parso.tree import search_ancestor
|
||||
|
||||
from jedi.parser_utils import find_statement_documentation, clean_scope_docstring
|
||||
from jedi.inference.utils import unite
|
||||
from jedi.inference.base_value import ValueSet, NO_VALUES
|
||||
from jedi.inference.cache import inference_state_method_cache
|
||||
from jedi.inference import docstrings
|
||||
from jedi.cache import memoize_method
|
||||
from jedi.inference.helpers import deep_ast_copy, infer_call_of_leaf
|
||||
@@ -23,9 +25,9 @@ def _merge_name_docs(names):
|
||||
return doc
|
||||
|
||||
|
||||
class AbstractNameDefinition(object):
|
||||
start_pos = None
|
||||
string_name = None
|
||||
class AbstractNameDefinition:
|
||||
start_pos: Optional[Tuple[int, int]] = None
|
||||
string_name: str
|
||||
parent_context = None
|
||||
tree_name = None
|
||||
is_value_name = True
|
||||
@@ -223,7 +225,7 @@ class AbstractTreeName(AbstractNameDefinition):
|
||||
return self.tree_name.start_pos
|
||||
|
||||
|
||||
class ValueNameMixin(object):
|
||||
class ValueNameMixin:
|
||||
def infer(self):
|
||||
return ValueSet([self._value])
|
||||
|
||||
@@ -330,6 +332,12 @@ class TreeNameDefinition(AbstractTreeName):
|
||||
node = node.parent
|
||||
return indexes
|
||||
|
||||
@property
|
||||
def inference_state(self):
|
||||
# Used by the cache function below
|
||||
return self.parent_context.inference_state
|
||||
|
||||
@inference_state_method_cache(default='')
|
||||
def py__doc__(self):
|
||||
api_type = self.api_type
|
||||
if api_type in ('function', 'class'):
|
||||
@@ -346,7 +354,7 @@ class TreeNameDefinition(AbstractTreeName):
|
||||
return ''
|
||||
|
||||
|
||||
class _ParamMixin(object):
|
||||
class _ParamMixin:
|
||||
def maybe_positional_argument(self, include_star=True):
|
||||
options = [Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD]
|
||||
if include_star:
|
||||
@@ -604,7 +612,7 @@ class SubModuleName(ImportName):
|
||||
_level = 1
|
||||
|
||||
|
||||
class NameWrapper(object):
|
||||
class NameWrapper:
|
||||
def __init__(self, wrapped_name):
|
||||
self._wrapped_name = wrapped_name
|
||||
|
||||
@@ -615,7 +623,7 @@ class NameWrapper(object):
|
||||
return '%s(%s)' % (self.__class__.__name__, self._wrapped_name)
|
||||
|
||||
|
||||
class StubNameMixin(object):
|
||||
class StubNameMixin:
|
||||
def py__doc__(self):
|
||||
from jedi.inference.gradual.conversion import convert_names
|
||||
# Stubs are not complicated and we can just follow simple statements
|
||||
|
||||
@@ -50,7 +50,7 @@ A function may not be executed more than this number of times recursively.
|
||||
"""
|
||||
|
||||
|
||||
class RecursionDetector(object):
|
||||
class RecursionDetector:
|
||||
def __init__(self):
|
||||
self.pushed_nodes = []
|
||||
|
||||
@@ -92,7 +92,7 @@ def execution_recursion_decorator(default=NO_VALUES):
|
||||
return decorator
|
||||
|
||||
|
||||
class ExecutionRecursionDetector(object):
|
||||
class ExecutionRecursionDetector:
|
||||
"""
|
||||
Catches recursions of executions.
|
||||
"""
|
||||
|
||||
@@ -5,7 +5,8 @@ from parso import python_bytes_to_unicode
|
||||
|
||||
from jedi.debug import dbg
|
||||
from jedi.file_io import KnownContentFileIO
|
||||
from jedi.inference.imports import SubModuleName, load_module_from_path
|
||||
from jedi.inference.names import SubModuleName
|
||||
from jedi.inference.imports import load_module_from_path
|
||||
from jedi.inference.filters import ParserTreeFilter
|
||||
from jedi.inference.gradual.conversion import convert_names
|
||||
|
||||
@@ -203,11 +204,11 @@ def recurse_find_python_folders_and_files(folder_io, except_paths=()):
|
||||
# Delete folders that we don't want to iterate over.
|
||||
for file_io in file_ios:
|
||||
path = file_io.path
|
||||
if path.endswith('.py') or path.endswith('.pyi'):
|
||||
if path.suffix in ('.py', '.pyi'):
|
||||
if path not in except_paths:
|
||||
yield None, file_io
|
||||
|
||||
if path.endswith('.gitignore'):
|
||||
if path.name == '.gitignore':
|
||||
ignored_paths, ignored_names = \
|
||||
gitignored_lines(root_folder_io, file_io)
|
||||
except_paths |= ignored_paths
|
||||
|
||||
@@ -5,7 +5,7 @@ from jedi import debug
|
||||
from jedi import parser_utils
|
||||
|
||||
|
||||
class _SignatureMixin(object):
|
||||
class _SignatureMixin:
|
||||
def to_string(self):
|
||||
def param_strings():
|
||||
is_positional = False
|
||||
|
||||
@@ -287,10 +287,11 @@ def infer_atom(context, atom):
|
||||
state = context.inference_state
|
||||
if atom.type == 'name':
|
||||
# This is the first global lookup.
|
||||
stmt = tree.search_ancestor(
|
||||
atom, 'expr_stmt', 'lambdef'
|
||||
) or atom
|
||||
if stmt.type == 'lambdef':
|
||||
stmt = tree.search_ancestor(atom, 'expr_stmt', 'lambdef', 'if_stmt') or atom
|
||||
if stmt.type == 'if_stmt':
|
||||
if not any(n.start_pos <= atom.start_pos < n.end_pos for n in stmt.get_test_nodes()):
|
||||
stmt = atom
|
||||
elif stmt.type == 'lambdef':
|
||||
stmt = atom
|
||||
position = stmt.start_pos
|
||||
if _is_annotation_name(atom):
|
||||
@@ -753,6 +754,8 @@ def tree_name_to_values(inference_state, context, tree_name):
|
||||
types = NO_VALUES
|
||||
elif typ == 'del_stmt':
|
||||
types = NO_VALUES
|
||||
elif typ == 'namedexpr_test':
|
||||
types = infer_node(context, node)
|
||||
else:
|
||||
raise ValueError("Should not happen. type: %s" % typ)
|
||||
return types
|
||||
|
||||
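A sketch of the improved walrus-operator support handled by the new ``namedexpr_test`` branch (the snippet is illustrative)::

    import jedi

    source = "if (count := len('abc')) > 2:\n    count"
    definition, = jedi.Script(code=source).infer(line=2, column=4)
    print(definition.name)  # expected: 'int'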
@@ -14,8 +14,8 @@ from jedi import debug
|
||||
_BUILDOUT_PATH_INSERTION_LIMIT = 10
|
||||
|
||||
|
||||
def _abs_path(module_context, path: str):
|
||||
path = Path(path)
|
||||
def _abs_path(module_context, str_path: str):
|
||||
path = Path(str_path)
|
||||
if path.is_absolute():
|
||||
return path
|
||||
|
||||
@@ -164,15 +164,18 @@ def _get_paths_from_buildout_script(inference_state, buildout_script_path):
|
||||
inference_state, module_node,
|
||||
file_io=file_io,
|
||||
string_names=None,
|
||||
code_lines=get_cached_code_lines(inference_state.grammar, str(buildout_script_path)),
|
||||
code_lines=get_cached_code_lines(inference_state.grammar, buildout_script_path),
|
||||
).as_context()
|
||||
yield from check_sys_path_modifications(module_context)
|
||||
|
||||
|
||||
def _get_parent_dir_with_file(path: Path, filename):
|
||||
for parent in path.parents:
|
||||
if parent.joinpath(filename).is_file():
|
||||
return parent
|
||||
try:
|
||||
if parent.joinpath(filename).is_file():
|
||||
return parent
|
||||
except OSError:
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
|
||||
@@ -70,7 +70,7 @@ def reraise_uncaught(func):
|
||||
return wrapper
|
||||
|
||||
|
||||
class PushBackIterator(object):
|
||||
class PushBackIterator:
|
||||
def __init__(self, iterator):
|
||||
self.pushes = []
|
||||
self.iterator = iterator
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
# Re-export symbols for wider use. We configure mypy and flake8 to be aware that
|
||||
# this file does this.
|
||||
|
||||
from jedi.inference.value.module import ModuleValue
|
||||
from jedi.inference.value.klass import ClassValue
|
||||
from jedi.inference.value.function import FunctionValue, \
|
||||
|
||||
@@ -53,7 +53,7 @@ class FunctionAndClassBase(TreeValue):
|
||||
return None
|
||||
|
||||
|
||||
class FunctionMixin(object):
|
||||
class FunctionMixin:
|
||||
api_type = 'function'
|
||||
|
||||
def get_filters(self, origin_scope=None):
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from abc import abstractproperty
|
||||
|
||||
from parso.python.tree import search_ancestor
|
||||
from parso.tree import search_ancestor
|
||||
|
||||
from jedi import debug
|
||||
from jedi import settings
|
||||
@@ -401,21 +401,10 @@ class AnonymousInstance(_BaseTreeInstance):
|
||||
_arguments = None
|
||||
|
||||
|
||||
class CompiledInstanceName(compiled.CompiledName):
|
||||
def __init__(self, inference_state, instance, klass, name):
|
||||
parent_value = klass.parent_context.get_value()
|
||||
assert parent_value is not None, "How? Please reproduce and report"
|
||||
super().__init__(
|
||||
inference_state,
|
||||
parent_value,
|
||||
name.string_name
|
||||
)
|
||||
self._instance = instance
|
||||
self._class_member_name = name
|
||||
|
||||
class CompiledInstanceName(NameWrapper):
|
||||
@iterator_to_value_set
|
||||
def infer(self):
|
||||
for result_value in self._class_member_name.infer():
|
||||
for result_value in self._wrapped_name.infer():
|
||||
if result_value.api_type == 'function':
|
||||
yield CompiledBoundMethod(result_value)
|
||||
else:
|
||||
@@ -434,11 +423,7 @@ class CompiledInstanceClassFilter(AbstractFilter):
|
||||
return self._convert(self._class_filter.values())
|
||||
|
||||
def _convert(self, names):
|
||||
klass = self._class_filter.compiled_value
|
||||
return [
|
||||
CompiledInstanceName(self._instance.inference_state, self._instance, klass, n)
|
||||
for n in names
|
||||
]
|
||||
return [CompiledInstanceName(n) for n in names]
|
||||
|
||||
|
||||
class BoundMethod(FunctionMixin, ValueWrapper):
|
||||
@@ -516,6 +501,18 @@ class SelfName(TreeNameDefinition):
|
||||
def get_defining_qualified_value(self):
|
||||
return self._instance
|
||||
|
||||
def infer(self):
|
||||
stmt = search_ancestor(self.tree_name, 'expr_stmt')
|
||||
if stmt is not None:
|
||||
if stmt.children[1].type == "annassign":
|
||||
from jedi.inference.gradual.annotation import infer_annotation
|
||||
values = infer_annotation(
|
||||
self.parent_context, stmt.children[1].children[1]
|
||||
).execute_annotation()
|
||||
if values:
|
||||
return values
|
||||
return super().infer()
|
||||
|
||||
|
||||
class LazyInstanceClassName(NameWrapper):
|
||||
def __init__(self, instance, class_member_name):
|
||||
|
||||
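With the new ``SelfName.infer`` branch above, an annotated ``self`` assignment is resolved from its annotation even when the assigned value cannot be inferred; a sketch::

    import jedi

    source = '''\
    class Widget:
        def __init__(self, raw):
            self.count: int = raw

    Widget(None).count
    '''
    definitions = jedi.Script(code=source).infer(line=5, column=15)
    print([d.name for d in definitions])  # expected to include 'int'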
@@ -19,7 +19,7 @@ from jedi.inference.context import CompForContext
|
||||
from jedi.inference.value.dynamic_arrays import check_array_additions
|
||||
|
||||
|
||||
class IterableMixin(object):
|
||||
class IterableMixin:
|
||||
def py__next__(self, contextualized_node=None):
|
||||
return self.py__iter__(contextualized_node)
|
||||
|
||||
@@ -127,7 +127,7 @@ def comprehension_from_atom(inference_state, value, atom):
|
||||
)
|
||||
|
||||
|
||||
class ComprehensionMixin(object):
|
||||
class ComprehensionMixin:
|
||||
@inference_state_method_cache()
|
||||
def _get_comp_for_context(self, parent_context, comp_for):
|
||||
return CompForContext(parent_context, comp_for)
|
||||
@@ -175,7 +175,7 @@ class ComprehensionMixin(object):
|
||||
return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node)
|
||||
|
||||
|
||||
class _DictMixin(object):
|
||||
class _DictMixin:
|
||||
def _get_generics(self):
|
||||
return tuple(c_set.py__class__() for c_set in self.get_mapping_item_values())
|
||||
|
||||
@@ -247,7 +247,7 @@ class GeneratorComprehension(_BaseComprehension, GeneratorBase):
|
||||
pass
|
||||
|
||||
|
||||
class _DictKeyMixin(object):
|
||||
class _DictKeyMixin:
|
||||
# TODO merge with _DictMixin?
|
||||
def get_mapping_item_values(self):
|
||||
return self._dict_keys(), self._dict_values()
|
||||
|
||||
@@ -142,7 +142,7 @@ class ClassFilter(ParserTreeFilter):
|
||||
return [name for name in names if self._access_possible(name)]
|
||||
|
||||
|
||||
class ClassMixin(object):
|
||||
class ClassMixin:
|
||||
def is_class(self):
|
||||
return True
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from jedi.inference.cache import inference_state_method_cache
|
||||
from jedi.inference.names import AbstractNameDefinition, ModuleName
|
||||
@@ -33,7 +34,7 @@ class _ModuleAttributeName(AbstractNameDefinition):
|
||||
return compiled.get_string_value_set(self.parent_context.inference_state)
|
||||
|
||||
|
||||
class SubModuleDictMixin(object):
|
||||
class SubModuleDictMixin:
|
||||
@inference_state_method_cache()
|
||||
def sub_modules_dict(self):
|
||||
"""
|
||||
@@ -79,7 +80,7 @@ class ModuleMixin(SubModuleDictMixin):
|
||||
def is_stub(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
@property # type: ignore[misc]
|
||||
@inference_state_method_cache()
|
||||
def name(self):
|
||||
return self._module_name_class(self, self.string_names[-1])
|
||||
@@ -145,7 +146,7 @@ class ModuleValue(ModuleMixin, TreeValue):
|
||||
)
|
||||
self.file_io = file_io
|
||||
if file_io is None:
|
||||
self._path = None
|
||||
self._path: Optional[Path] = None
|
||||
else:
|
||||
self._path = Path(file_io.path)
|
||||
self.string_names = string_names # Optional[Tuple[str, ...]]
|
||||
@@ -165,7 +166,7 @@ class ModuleValue(ModuleMixin, TreeValue):
|
||||
return None
|
||||
return '.'.join(self.string_names)
|
||||
|
||||
def py__file__(self) -> Path:
|
||||
def py__file__(self) -> Optional[Path]:
|
||||
"""
|
||||
In contrast to Python's __file__, this can be None.
|
||||
"""
|
||||
|
||||
@@ -38,7 +38,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
|
||||
def get_qualified_names(self):
|
||||
return ()
|
||||
|
||||
@property
|
||||
@property # type: ignore[misc]
|
||||
@inference_state_method_cache()
|
||||
def name(self):
|
||||
string_name = self.py__package__()[-1]
|
||||
|
||||
@@ -90,7 +90,7 @@ def get_flow_branch_keyword(flow_node, node):
|
||||
first_leaf = child.get_first_leaf()
|
||||
if first_leaf in _FLOW_KEYWORDS:
|
||||
keyword = first_leaf
|
||||
return 0
|
||||
return None
|
||||
|
||||
|
||||
def clean_scope_docstring(scope_node):
|
||||
@@ -239,7 +239,7 @@ def get_parent_scope(node, include_flows=False):
|
||||
return None # It's a module already.
|
||||
|
||||
while True:
|
||||
if is_scope(scope) or include_flows and isinstance(scope, tree.Flow):
|
||||
if is_scope(scope):
|
||||
if scope.type in ('classdef', 'funcdef', 'lambdef'):
|
||||
index = scope.children.index(':')
|
||||
if scope.children[index].start_pos >= node.start_pos:
|
||||
@@ -251,6 +251,14 @@ def get_parent_scope(node, include_flows=False):
|
||||
scope = scope.parent
|
||||
continue
|
||||
return scope
|
||||
elif include_flows and isinstance(scope, tree.Flow):
|
||||
# The cursor might be on `if foo`, so the parent scope will not be
|
||||
# the if, but the parent of the if.
|
||||
if not (scope.type == 'if_stmt'
|
||||
and any(n.start_pos <= node.start_pos < n.end_pos
|
||||
for n in scope.get_test_nodes())):
|
||||
return scope
|
||||
|
||||
scope = scope.parent
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from functools import wraps
|
||||
|
||||
|
||||
class _PluginManager(object):
|
||||
class _PluginManager:
|
||||
def __init__(self):
|
||||
self._registered_plugins = []
|
||||
self._cached_base_callbacks = {}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from pathlib import Path
|
||||
|
||||
from parso.python.tree import search_ancestor
|
||||
from parso.tree import search_ancestor
|
||||
from jedi.inference.cache import inference_state_method_cache
|
||||
from jedi.inference.imports import load_module_from_path
|
||||
from jedi.inference.filters import ParserTreeFilter
|
||||
|
||||
@@ -257,7 +257,7 @@ class ReversedObject(AttributeOverwrite):
|
||||
super().__init__(reversed_obj)
|
||||
self._iter_list = iter_list
|
||||
|
||||
def py__iter__(self, contextualized_node):
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
return self._iter_list
|
||||
|
||||
@publish_method('__next__')
|
||||
@@ -819,7 +819,8 @@ def get_metaclass_filters(func):
|
||||
and metaclass.get_root_context().py__name__() == 'enum':
|
||||
filter_ = ParserTreeFilter(parent_context=cls.as_context())
|
||||
return [DictFilter({
|
||||
name.string_name: EnumInstance(cls, name).name for name in filter_.values()
|
||||
name.string_name: EnumInstance(cls, name).name
|
||||
for name in filter_.values()
|
||||
})]
|
||||
return func(cls, metaclasses, is_instance)
|
||||
return wrapper
|
||||
@@ -837,6 +838,14 @@ class EnumInstance(LazyValueWrapper):
|
||||
return ValueName(self, self._name.tree_name)
|
||||
|
||||
def _get_wrapped_value(self):
|
||||
n = self._name.string_name
|
||||
if n.startswith('__') and n.endswith('__') or self._name.api_type == 'function':
|
||||
inferred = self._name.infer()
|
||||
if inferred:
|
||||
return next(iter(inferred))
|
||||
o, = self.inference_state.builtins_module.py__getattribute__('object')
|
||||
return o
|
||||
|
||||
value, = self._cls.execute_with_values()
|
||||
return value
|
||||
|
||||
|
||||
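The metaclass filter above drives completion on enum members; a hedged sketch of the effect::

    import jedi

    source = '''\
    import enum

    class Color(enum.Enum):
        RED = 1

    Color.RED.
    '''
    completions = [c.name for c in jedi.Script(code=source).complete(line=6, column=10)]
    print('name' in completions)   # expected True: enum members expose .name
    print('value' in completions)  # expected True: ... and .value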
@@ -69,8 +69,11 @@ Adds an opening bracket after a function for completions.
# ----------------

if platform.system().lower() == 'windows':
    _cache_directory = os.path.join(os.getenv('LOCALAPPDATA') or
                                    os.path.expanduser('~'), 'Jedi', 'Jedi')
    _cache_directory = os.path.join(
        os.getenv('LOCALAPPDATA') or os.path.expanduser('~'),
        'Jedi',
        'Jedi',
    )
elif platform.system().lower() == 'darwin':
    _cache_directory = os.path.join('~', 'Library', 'Caches', 'Jedi')
else:
@@ -98,7 +101,7 @@ parse the parts again that have changed, while reusing the rest of the syntax
tree.
"""

_cropped_file_size = 10e6  # 1 Megabyte
_cropped_file_size = int(10e6)  # 1 Megabyte
"""
Jedi gets extremely slow if the file size exceed a few thousand lines.
To avoid getting stuck completely Jedi crops the file at some point.

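The values in this module are plain attributes, so callers can override them before Jedi is used. A small sketch, assuming the public `jedi.settings` module (the `_cache_directory` defaults above only seed the public `cache_directory` setting):

import jedi

# Sketch only: redirect Jedi's on-disk cache before any Script is created.
jedi.settings.cache_directory = '/tmp/my-jedi-cache'  # hypothetical path
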
@@ -2,7 +2,7 @@
Utilities for end-users.
"""

import __main__
import __main__  # type: ignore[import]
from collections import namedtuple
import logging
import traceback
@@ -65,7 +65,7 @@ def setup_readline(namespace_module=__main__, fuzzy=False):
            level=logging.DEBUG
        )

    class JediRL(object):
    class JediRL:
        def complete(self, text, state):
            """
            This complete stuff is pretty weird, a generator would make

35 setup.cfg
@@ -13,7 +13,42 @@ ignore =
    E721,
    # Line break before binary operator
    W503,
    # Single letter loop variables are often fine
    E741,
per-file-ignores =
    # Ignore apparently unused imports in files where we're (implicitly)
    # re-exporting them.
    jedi/__init__.py:F401
    jedi/inference/compiled/__init__.py:F401
    jedi/inference/value/__init__.py:F401
exclude = jedi/third_party/* .tox/*

[pycodestyle]
max-line-length = 100


[mypy]
# Ensure generics are explicit about what they are (e.g: `List[str]` rather than
# just `List`)
disallow_any_generics = True

disallow_subclassing_any = True

# Avoid creating future gotchas emerging from bad typing
warn_redundant_casts = True
warn_unused_ignores = True
warn_return_any = True
warn_unused_configs = True

warn_unreachable = True

# Require values to be explicitly re-exported; this makes things easier for
# Flake8 too and avoids accidentally importing thing from the "wrong" place
# (which helps avoid circular imports)
implicit_reexport = False

strict_equality = True

[mypy-jedi,jedi.inference.compiled,jedi.inference.value,parso]
# Various __init__.py files which contain re-exports we want to implicitly make.
implicit_reexport = True

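As a rough illustration of what the new [mypy] section enforces, `disallow_any_generics = True` rejects bare generic annotations. This hypothetical file (not part of the repository) shows the kind of code it flags:

from typing import List

def first(xs: List) -> int:          # flagged: bare `List` has no type parameters
    return xs[0]

def first_ok(xs: List[int]) -> int:  # explicit parameter satisfies the check
    return xs[0]
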
5 setup.py
@@ -32,7 +32,7 @@ setup(name='jedi',
      long_description=readme,
      packages=find_packages(exclude=['test', 'test.*']),
      python_requires='>=3.6',
      install_requires=['parso>=0.7.0,<0.8.0'],
      install_requires=['parso>=0.8.0,<0.9.0'],
      extras_require={
          'testing': [
              'pytest<6.0.0',
@@ -43,7 +43,8 @@ setup(name='jedi',
              'Django<3.1',  # For now pin this.
          ],
          'qa': [
              'flake8==3.7.9',
              'flake8==3.8.3',
              'mypy==0.782',
          ],
      },
      package_data={'jedi': ['*.pyi', 'third_party/typeshed/LICENSE',

2 sith.py
@@ -44,7 +44,7 @@ Options:
  --pudb   Launch pudb when error is raised.
"""

from docopt import docopt
from docopt import docopt  # type: ignore[import]

import json
import os

@@ -1,3 +1,6 @@
# For assignment expressions / named expressions / walrus operators / whatever
# they are called.

# python >= 3.8
b = (a:=1, a)

@@ -11,3 +14,39 @@ b = ('':=1,)

#? int()
b[0]

def test_assignments():
    match = ''
    #? str()
    match
    #? 8 int()
    if match := 1:
        #? int()
        match
    #? int()
    match

def test_assignments2():
    class Foo:
        match = ''
    #? str()
    Foo.match
    #? 13 int()
    if Foo.match := 1:
        #? str()
        Foo.match
    #? str()
    Foo.match

    #?
    y
    #? 16 str()
    if y := Foo.match:
        #? str()
        y
    #? str()
    y

    #? 8 str()
    if z := Foo.match:
        pass

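In these completion-test files a `#?` comment states what Jedi is expected to infer for the following line, with an optional leading number giving the column to test at. The cases above all come down to the scoping rule of assignment expressions, illustrated by this plain-Python sketch (not part of the test file):

# python >= 3.8: the name bound by `:=` lives in the enclosing scope,
# so it is still visible after the `if` block.
if (n := len("abc")) > 2:
    print(n)  # 3
print(n)      # still bound here: 3
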
@@ -179,3 +179,22 @@ def argskwargs(*args: int, **kwargs: float):
    next(iter(kwargs.keys()))
    #? float()
    kwargs['']


class NotCalledClass:
    def __init__(self, x):
        self.x: int = x
        self.y: int = ''
        #? int()
        self.x
        #? int()
        self.y
        #? int()
        self.y
        self.z: int
        self.z = ''
        #? str() int()
        self.z
        self.w: float
        #? float()
        self.w

@@ -0,0 +1 @@
from .cq import selectors
1 test/examples/import-recursion/cadquery_simple/cq.py (Normal file)
@@ -0,0 +1 @@
from . import selectors
3 test/examples/import-recursion/cq_example.py (Normal file)
@@ -0,0 +1,3 @@
import cadquery_simple as cq

cq.

@@ -15,7 +15,7 @@ from test.helpers import test_dir, get_example_dir


def test_preload_modules():
    def check_loaded(*modules):
    def check_loaded(*module_names):
        for grammar_cache in cache.parser_cache.values():
            if None in grammar_cache:
                break
@@ -25,9 +25,9 @@ def test_preload_modules():
            if path is not None and str(path).startswith(str(typeshed.TYPESHED_PATH))
        )
        # +1 for None module (currently used)
        assert len(grammar_cache) - typeshed_cache_count == len(modules) + 1
        for i in modules:
            assert [i in k for k in grammar_cache.keys() if k is not None]
        assert len(grammar_cache) - typeshed_cache_count == len(module_names) + 1
        for i in module_names:
            assert [i in str(k) for k in grammar_cache.keys() if k is not None]

    old_cache = cache.parser_cache.copy()
    cache.parser_cache.clear()

@@ -370,3 +370,35 @@ def test_multi_goto(Script):
    y, = script.goto(line=4)
    assert x.line == 1
    assert y.line == 2


@pytest.mark.parametrize(
    'code, column, expected', [
        ('str() ', 3, 'str'),
        ('str() ', 4, 'str'),
        ('str() ', 5, 'str'),
        ('str() ', 6, None),
        ('str( ) ', 6, None),
        (' 1', 1, None),
        ('str(1) ', 3, 'str'),
        ('str(1) ', 4, 'int'),
        ('str(1) ', 5, 'int'),
        ('str(1) ', 6, 'str'),
        ('str(1) ', 7, None),
        ('str( 1) ', 4, 'str'),
        ('str( 1) ', 5, 'int'),
        ('str(+1) ', 4, 'str'),
        ('str(+1) ', 5, 'int'),
        ('str(1, 1.) ', 3, 'str'),
        ('str(1, 1.) ', 4, 'int'),
        ('str(1, 1.) ', 5, 'int'),
        ('str(1, 1.) ', 6, None),
        ('str(1, 1.) ', 7, 'float'),
    ]
)
def test_infer_after_parentheses(Script, code, column, expected):
    completions = Script(code).infer(column=column)
    if expected is None:
        assert completions == []
    else:
        assert [c.name for c in completions] == [expected]

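A standalone sketch of one row from the table above, using `jedi.Script` directly rather than the test fixture; with the cursor at column 4 of `'str(1) '` the inference lands on the `1`, not on the call:

import jedi

definitions = jedi.Script('str(1) ').infer(column=4)  # line defaults to the last line
print([d.name for d in definitions])  # expected: ['int'], per the table above
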
@@ -121,11 +121,8 @@ def test_multiple_signatures(Script):


def test_get_signatures_whitespace(Script):
    s = dedent("""\
    abs(
    def x():
        pass
    """)  # noqa
    # note: trailing space after 'abs'
    s = 'abs( \ndef x():\n    pass\n'
    assert_signature(Script, s, 'abs', 0, line=1, column=5)

@@ -513,10 +513,14 @@ def test_added_equals_to_params(Script):

    assert run('foo(bar').name_with_symbols == 'bar='
    assert run('foo(bar').complete == '='
    assert run('foo(bar').get_completion_prefix_length() == 3
    assert run('foo(bar, baz').complete == '='
    assert run('foo(bar, baz').get_completion_prefix_length() == 3
    assert run(' bar').name_with_symbols == 'bar'
    assert run(' bar').complete == ''
    assert run(' bar').get_completion_prefix_length() == 3
    x = run('foo(bar=isins').name_with_symbols
    assert run('foo(bar=isins').get_completion_prefix_length() == 5
    assert x == 'isinstance'

@@ -621,7 +621,8 @@ def bar():

    # typing is available via globals.
    ({'return': 'typing.Union[str, int]'}, ['int', 'str'], ''),
    ({'return': 'typing.Union["str", int]'}, ['int'], ''),
    ({'return': 'typing.Union["str", int]'},
     ['int', 'str'] if sys.version_info >= (3, 9) else ['int'], ''),
    ({'return': 'typing.Union["str", 1]'}, [], ''),
    ({'return': 'typing.Optional[str]'}, ['NoneType', 'str'], ''),
    ({'return': 'typing.Optional[str, int]'}, [], ''),  # Takes only one arg

@@ -6,6 +6,7 @@ import pytest
from ..helpers import get_example_dir, set_cwd, root_dir, test_dir
from jedi import Interpreter
from jedi.api import Project, get_default_project
from jedi.api.project import _is_potential_project, _CONTAINS_POTENTIAL_PROJECT


def test_django_default_project(Script):
@@ -17,7 +18,12 @@ def test_django_default_project(Script):
    )
    c, = script.complete()
    assert c.name == "SomeModel"
    assert script._inference_state.project._django is True

    project = script._inference_state.project
    assert project._django is True
    assert project.sys_path is None
    assert project.smart_sys_path is True
    assert project.load_unsafe_extensions is False


def test_django_default_project_of_file(Script):

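The assertions above rely on `Project` attributes now being readable. A minimal sketch outside the test suite, assuming a project rooted in the current directory; the defaults shown match the assertions for a default project:

from jedi.api import Project

project = Project('.')  # hypothetical project root
print(project.sys_path)                # None unless a sys_path was passed in
print(project.smart_sys_path)          # True by default
print(project.load_unsafe_extensions)  # False by default
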
@@ -155,3 +161,21 @@ def test_complete_search(Script, string, completions, all_scopes):
    project = Project(test_dir)
    defs = project.complete_search(string, all_scopes=all_scopes)
    assert [d.complete for d in defs] == completions


@pytest.mark.parametrize(
    'path,expected', [
        (Path(__file__).parents[2], True),  # The path of the project
        (Path(__file__).parents[1], False),  # The path of the tests, not a project
        (Path.home(), None)
    ]
)
def test_is_potential_project(path, expected):

    if expected is None:
        try:
            expected = _CONTAINS_POTENTIAL_PROJECT in os.listdir(path)
        except OSError:
            expected = False

    assert _is_potential_project(path) == expected

@@ -206,6 +206,24 @@ def test_numpydoc_parameters_set_of_values():
    assert 'capitalize' in names
    assert 'numerator' in names

@pytest.mark.skipif(numpydoc_unavailable,
                    reason='numpydoc module is unavailable')
def test_numpydoc_parameters_set_single_value():
    """
    This is found in numpy masked-array I'm not too sure what this means but should not crash
    """
    s = dedent('''
    def foobar(x, y):
        """
        Parameters
        ----------
        x : {var}, optional
        """
        x.''')
    names = [c.name for c in jedi.Script(s).complete()]
    # just don't crash
    assert names == []


@pytest.mark.skipif(numpydoc_unavailable,
                    reason='numpydoc module is unavailable')

@@ -1,4 +1,5 @@
import os
from parso.cache import parser_cache

from test.helpers import root_dir
from jedi.api.project import Project
@@ -64,6 +65,17 @@ def test_goto_import(Script):
    assert not d.is_stub()


def test_stub_get_line_code(Script):
    code = 'from abc import ABC; ABC'
    script = Script(code)
    d, = script.goto(only_stubs=True)
    assert d.get_line_code() == 'class ABC(metaclass=ABCMeta): ...\n'
    del parser_cache[script._inference_state.latest_grammar._hashed][d.module_path]
    d, = Script(path=d.module_path).goto(d.line, d.column, only_stubs=True)
    assert d.is_stub()
    assert d.get_line_code() == 'class ABC(metaclass=ABCMeta): ...\n'


def test_os_stat_result(Script):
    d, = Script('import os; os.stat_result').goto()
    assert d.is_stub()

@@ -29,13 +29,13 @@ def test_find_module_basic():

def test_find_module_package():
    file_io, is_package = _find_module('json')
    assert file_io.path.endswith(os.path.join('json', '__init__.py'))
    assert file_io.path.parts[-2:] == ('json', '__init__.py')
    assert is_package is True


def test_find_module_not_package():
    file_io, is_package = _find_module('io')
    assert file_io.path.endswith('io.py')
    assert file_io.path.name == 'io.py'
    assert is_package is False


@@ -55,8 +55,8 @@ def test_find_module_package_zipped(Script, inference_state, environment):
        full_name='pkg'
    )
    assert file_io is not None
    assert file_io.path.endswith(os.path.join('pkg.zip', 'pkg', '__init__.py'))
    assert file_io._zip_path.endswith('pkg.zip')
    assert file_io.path.parts[-3:] == ('pkg.zip', 'pkg', '__init__.py')
    assert file_io._zip_path.name == 'pkg.zip'
    assert is_package is True


@@ -108,7 +108,7 @@ def test_find_module_not_package_zipped(Script, inference_state, environment):
        string='not_pkg',
        full_name='not_pkg'
    )
    assert file_io.path.endswith(os.path.join('not_pkg.zip', 'not_pkg.py'))
    assert file_io.path.parts[-2:] == ('not_pkg.zip', 'not_pkg.py')
    assert is_package is False


@@ -468,3 +468,9 @@ def test_relative_import_star(Script):
    script = Script(source, path='export.py')

    assert script.complete(3, len("furl.c"))


def test_import_recursion(Script):
    path = get_example_dir('import-recursion', "cq_example.py")
    for c in Script(path=path).complete(3, 3):
        c.docstring()

@@ -340,3 +340,20 @@ def test_overload(Script, code):
    x1, x2 = Script(code, path=os.path.join(dir_, 'foo.py')).get_signatures()
    assert x1.to_string() == 'with_overload(x: int, y: int) -> float'
    assert x2.to_string() == 'with_overload(x: str, y: list) -> float'


def test_enum(Script):
    script = Script('''\
from enum import Enum

class Planet(Enum):
    MERCURY = (3.303e+23, 2.4397e6)
    VENUS = (4.869e+24, 6.0518e6)

    def __init__(self, mass, radius):
        self.mass = mass  # in kilograms
        self.radius = radius  # in meters

Planet.MERCURY''')
    completion, = script.complete()
    assert not completion.get_signatures()