
Merge branch 'master' into refactor

Dave Halter
2020-03-13 23:53:09 +01:00
29 changed files with 331 additions and 54 deletions

View File

@@ -258,6 +258,7 @@ class Value(HelperValueMixin, BaseValue):
def _as_context(self):
raise NotImplementedError('Not all values need to be converted to contexts: %s', self)
@property
def name(self):
raise NotImplementedError

View File

@@ -484,6 +484,11 @@ class DirectObjectAccess(object):
def needs_type_completions(self):
return inspect.isclass(self._obj) and self._obj != type
def _annotation_to_str(self, annotation):
if isinstance(annotation, type):
return str(annotation.__name__)
return str(annotation)
def get_signature_params(self):
return [
SignatureParam(
@@ -493,7 +498,7 @@ class DirectObjectAccess(object):
default_string=repr(p.default),
has_annotation=p.annotation is not p.empty,
annotation=self._create_access_path(p.annotation),
annotation_string=str(p.annotation),
annotation_string=self._annotation_to_str(p.annotation),
kind_name=str(p.kind)
) for p in self._get_signature().parameters.values()
]
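
The new `_annotation_to_str` helper exists because `str()` on a class object includes the `<class '...'>` wrapper, while `__name__` gives the bare name a signature should display. A minimal standalone sketch of the difference (the `greet` function is my own illustration, not part of the commit):

```
import inspect

def greet(name: str, times: int = 1) -> str:
    return name * times

for p in inspect.signature(greet).parameters.values():
    print(str(p.annotation), '->', p.annotation.__name__)
    # "<class 'str'>" -> "str"
    # "<class 'int'>" -> "int"
```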

View File

@@ -6,6 +6,7 @@ information returned to enable Jedi to make decisions.
import types
from jedi import debug
from jedi._compatibility import py_version
_sentinel = object()
@@ -54,7 +55,14 @@ def _shadowed_dict_newstyle(klass):
def _static_getmro_newstyle(klass):
return type.__dict__['__mro__'].__get__(klass)
mro = type.__dict__['__mro__'].__get__(klass)
if not isinstance(mro, (tuple, list)):
# There are unfortunately no tests for this; I was not able to
# reproduce it in pure Python. However, this should still solve the
# issue raised in GH #1517.
debug.warning('mro of %s returned %s, should be a tuple' % (klass, mro))
return ()
return mro
if py_version >= 30:
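
For context on why the MRO is read through the raw `type.__dict__['__mro__']` slot rather than plain attribute access: a metaclass can shadow `__mro__`, and the slot descriptor bypasses that. The non-tuple case guarded against above could not be reproduced in pure Python (per the comment), so this sketch only shows the shadowing half; `Meta` and `Weird` are invented names:

```
class Meta(type):
    @property
    def __mro__(cls):
        return 'shadowed'          # hides the real MRO from normal attribute access

class Weird(metaclass=Meta):
    pass

print(Weird.__mro__)                            # 'shadowed'
print(type.__dict__['__mro__'].__get__(Weird))  # the real tuple: (Weird, object)
```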

View File

@@ -297,7 +297,7 @@ class Listener(object):
try:
inference_state = self._inference_states[inference_state_id]
except KeyError:
from jedi.api.environment import InterpreterEnvironment
from jedi import InterpreterEnvironment
inference_state = InferenceState(
# The project is not actually needed. Nothing should need to
# access it.

View File

@@ -130,6 +130,9 @@ def import_module_decorator(func):
def try_to_load_stub_cached(inference_state, import_names, *args, **kwargs):
if import_names is None:
return None
try:
return inference_state.stub_module_cache[import_names]
except KeyError:

View File

@@ -5,8 +5,11 @@ values.
This file deals with all the typing.py cases.
"""
import itertools
from jedi._compatibility import unicode
from jedi import debug
from jedi.inference.compiled import builtin_from_name
from jedi.inference.compiled import builtin_from_name, create_simple_object
from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \
LazyValueWrapper
from jedi.inference.lazy_value import LazyKnownValues
@@ -81,7 +84,8 @@ class TypingModuleName(NameWrapper):
elif name == 'TypedDict':
# TODO doesn't even exist in typeshed/typing.py, yet. But will be
# added soon.
pass
yield TypedDictBase.create_cached(
inference_state, self.parent_context, self.tree_name)
elif name in ('no_type_check', 'no_type_check_decorator'):
# This is not necessary, as long as we are not doing type checking.
for c in self._wrapped_name.infer(): # Fuck my life Python 2
@@ -339,3 +343,47 @@ class CastFunction(BaseTypingValue):
@repack_with_argument_clinic('type, object, /')
def py__call__(self, type_value_set, object_value_set):
return type_value_set.execute_annotation()
class TypedDictBase(BaseTypingValue):
"""
This class has no responsibilities and is just here to make sure that typed
dicts can be identified.
"""
class TypedDict(LazyValueWrapper):
"""Represents the instance version of ``TypedDictClass``."""
def __init__(self, definition_class):
self.inference_state = definition_class.inference_state
self.parent_context = definition_class.parent_context
self.tree_node = definition_class.tree_node
self._definition_class = definition_class
@property
def name(self):
return ValueName(self, self.tree_node.name)
def py__simple_getitem__(self, index):
if isinstance(index, unicode):
return ValueSet.from_sets(
name.infer()
for filter in self._definition_class.get_filters(is_instance=True)
for name in filter.get(index)
)
return NO_VALUES
def get_key_values(self):
filtered_values = itertools.chain.from_iterable((
f.values()
for f in self._definition_class.get_filters(is_instance=True)
))
return ValueSet({
create_simple_object(self.inference_state, v.string_name)
for v in filtered_values
})
def _get_wrapped_value(self):
d, = self.inference_state.builtins_module.py__getattribute__('dict')
result, = d.execute_with_values()
return result
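
For orientation, the kind of user code this targets (my own example; `TypedDict` lives in `typing` on Python 3.8+ and in `typing_extensions` before that): keys are declared as annotated class attributes, and the new `py__simple_getitem__` and `get_key_values` look those annotations up through the definition class's filters.

```
from typing import TypedDict

class Movie(TypedDict):
    title: str
    year: int

m: Movie = {'title': 'Alien', 'year': 1979}
# With the wrapper above, subscripting with a literal key can be inferred:
#   m['title']  -> str   (py__simple_getitem__ resolves the annotation)
#   m['year']   -> int
# and get_key_values() yields the string objects 'title' and 'year',
# e.g. for completing keys inside m[...].
```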

View File

@@ -51,6 +51,25 @@ class ExecutedParamName(ParamName):
def get_executed_param_names_and_issues(function_value, arguments):
"""
Return a tuple of:
- a list of `ExecutedParamName`s corresponding to the arguments of the
function execution `function_value`, containing the inferred value of
those arguments (whether explicit or default)
- a list of the issues encountered while building that list
For example, given:
```
def foo(a, b, c=None, d='d'): ...
foo(42, c='c')
```
Then for the execution of `foo`, this will return a tuple containing:
- a list with entries for each parameter a, b, c & d; the entries for a,
c, & d will have their values (42, 'c' and 'd' respectively) included.
- a list with a single entry about the lack of a value for `b`
"""
def too_many_args(argument):
m = _error_argument_count(funcdef, len(unpacked_va))
# Just report an error for the first param that is not needed (like
@@ -207,6 +226,23 @@ def get_executed_param_names_and_issues(function_value, arguments):
def get_executed_param_names(function_value, arguments):
"""
Return a list of `ExecutedParamName`s corresponding to the arguments of the
function execution `function_value`, containing the inferred value of those
arguments (whether explicit or default). Any issues building this list (for
example required arguments which are missing in the invocation) are ignored.
For example, given:
```
def foo(a, b, c=None, d='d'): ...
foo(42, c='c')
```
Then for the execution of `foo`, this will return a list containing entries
for each parameter a, b, c & d; the entries for a, c, & d will have their
values (42, 'c' and 'd' respectively) included.
"""
return get_executed_param_names_and_issues(function_value, arguments)[0]

View File

@@ -356,6 +356,12 @@ def infer_atom(context, atom):
def infer_expr_stmt(context, stmt, seek_name=None):
with recursion.execution_allowed(context.inference_state, stmt) as allowed:
if allowed:
if seek_name is not None:
pep0484_values = \
annotation.find_type_from_comment_hint_assign(context, stmt, seek_name)
if pep0484_values:
return pep0484_values
return _infer_expr_stmt(context, stmt, seek_name)
return NO_VALUES
@@ -632,23 +638,6 @@ def _infer_comparison_part(inference_state, context, left, operator, right):
return result
def _remove_statements(context, stmt, name):
"""
This is the part where statements are being stripped.
Due to lazy type inference, statements like a = func; b = a; b() have to be
inferred.
TODO merge with infer_expr_stmt?
"""
pep0484_values = \
annotation.find_type_from_comment_hint_assign(context, stmt, name)
if pep0484_values:
return pep0484_values
return infer_expr_stmt(context, stmt, seek_name=name)
@plugin_manager.decorate()
def tree_name_to_values(inference_state, context, tree_name):
value_set = NO_VALUES
@@ -713,7 +702,7 @@ def tree_name_to_values(inference_state, context, tree_name):
n = TreeNameDefinition(context, tree_name)
types = check_tuple_assignments(n, for_types)
elif typ == 'expr_stmt':
types = _remove_statements(context, node, tree_name)
types = infer_expr_stmt(context, node, tree_name)
elif typ == 'with_stmt':
value_managers = context.infer_node(node.get_test_node_from_name(tree_name))
enter_methods = value_managers.py__getattribute__(u'__enter__')
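
What `find_type_from_comment_hint_assign` is looking for, for readers unfamiliar with PEP 484 type comments: an assignment whose type is declared in a trailing comment rather than with annotation syntax. A small illustration (names are mine):

```
from typing import List

def fetch():            # hypothetical helper with no useful inferred type
    return []

xs = fetch()  # type: List[int]
# Per the hunk above, infer_expr_stmt now checks this comment hint (when a
# seek_name is given) before regular inference; previously that lookup only
# happened inside the removed _remove_statements().
```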

View File

@@ -547,10 +547,10 @@ class InstanceClassFilter(AbstractFilter):
self._class_filter = class_filter
def get(self, name):
return self._convert(self._class_filter.get(name, from_instance=True))
return self._convert(self._class_filter.get(name))
def values(self):
return self._convert(self._class_filter.values(from_instance=True))
return self._convert(self._class_filter.values())
def _convert(self, names):
return [
@@ -586,7 +586,7 @@ class SelfAttributeFilter(ClassFilter):
if trailer.type == 'trailer' \
and len(trailer.parent.children) == 2 \
and trailer.children[0] == '.':
if name.is_definition() and self._access_possible(name, from_instance=True):
if name.is_definition() and self._access_possible(name):
# TODO filter non-self assignments instead of this bad
# filter.
if self._is_in_right_scope(trailer.parent.children[0], name):
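
The trailer check above matches names that are defined through `self.<name> = ...`: in the parse tree, the attribute name sits in a `trailer` node whose first child is the `.` operator and whose parent (the `self.<name>` expression) has exactly two children. A reading example (mine):

```
class Widget:
    def __init__(self):
        self.size = 3        # `size` is a definition inside a `.` trailer -> kept
        print(self.size)     # plain usage: name.is_definition() is False -> skipped
```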

View File

@@ -38,11 +38,11 @@ py__doc__() Returns the docstring for a value.
"""
from jedi import debug
from jedi._compatibility import use_metaclass
from jedi.parser_utils import get_cached_parent_scope
from jedi.parser_utils import get_cached_parent_scope, expr_is_dotted
from jedi.inference.cache import inference_state_method_cache, CachedMetaClass, \
inference_state_method_generator_cache
from jedi.inference import compiled
from jedi.inference.lazy_value import LazyKnownValues
from jedi.inference.lazy_value import LazyKnownValues, LazyTreeValue
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.names import TreeNameDefinition, ValueName
from jedi.inference.arguments import unpack_arglist, ValuesArguments
@@ -104,27 +104,31 @@ class ClassFilter(ParserTreeFilter):
node = get_cached_parent_scope(self._used_names, node)
return False
def _access_possible(self, name, from_instance=False):
def _access_possible(self, name):
# Filter for ClassVar variables
# TODO this is not properly done yet. It just checks for the string
# ClassVar in the annotation, which can be quite imprecise. If we
# wanted to do this correctly, we would have to infer the ClassVar.
if not from_instance:
if not self._is_instance:
expr_stmt = name.get_definition()
if expr_stmt is not None and expr_stmt.type == 'expr_stmt':
annassign = expr_stmt.children[1]
if annassign.type == 'annassign':
# TODO this is not proper matching
if 'ClassVar' not in annassign.children[1].get_code():
# If there is an =, the variable is obviously also
# defined on the class.
if 'ClassVar' not in annassign.children[1].get_code() \
and '=' not in annassign.children:
return False
# Filter for name mangling of private variables like __foo
return not name.value.startswith('__') or name.value.endswith('__') \
or self._equals_origin_scope()
def _filter(self, names, from_instance=False):
def _filter(self, names):
names = super(ClassFilter, self)._filter(names)
return [name for name in names if self._access_possible(name, from_instance)]
return [name for name in names if self._access_possible(name)]
class ClassMixin(object):
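
The effect of the extra `'=' not in annassign.children` condition, as I read it: an annotation-only statement in a class body does not create an attribute on the class itself, but one with a default value does, so only the former should be hidden for class-level access. A made-up illustration:

```
from typing import ClassVar

class Config:
    host: str                      # annotation only: no attribute on Config itself
    port: int = 8080               # has '=': also defined on the class
    debug: ClassVar[bool] = False  # explicit ClassVar

print(hasattr(Config, 'host'))    # False: annotation only
print(hasattr(Config, 'port'))    # True:  the '=' makes it a real class attribute
print(hasattr(Config, 'debug'))   # True:  ClassVar with a value
```

Before this change, `port` was filtered out of class-level access just like `host`; with the `'='` check it stays visible.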
@@ -133,6 +137,10 @@ class ClassMixin(object):
def py__call__(self, arguments=None):
from jedi.inference.value import TreeInstance
from jedi.inference.gradual.typing import TypedDict
if self.is_typeddict():
return ValueSet([TypedDict(self)])
return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])
def py__class__(self):
@@ -226,6 +234,36 @@ class ClassMixin(object):
return 'Type[%s]' % self.py__name__()
return self.py__name__()
@inference_state_method_cache(default=False)
def is_typeddict(self):
# TODO Do a proper mro resolution. Currently we are just listing
# classes. However, it's a complicated algorithm.
from jedi.inference.gradual.typing import TypedDictBase
for lazy_cls in self.py__bases__():
if not isinstance(lazy_cls, LazyTreeValue):
return False
tree_node = lazy_cls.data
# Only resolve simple classes; stuff like Iterable[str] is more
# expensive to resolve, and if generics are involved, we know it's
# not a TypedDict.
if not expr_is_dotted(tree_node):
return False
for cls in lazy_cls.infer():
if isinstance(cls, TypedDictBase):
return True
try:
method = cls.is_typeddict
except AttributeError:
# We're only dealing with simple classes, so just returning
# here should be fine. This only happens with e.g. compiled
# classes.
return False
else:
if method():
return True
return False
class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)):
api_type = u'class'
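
How the base-class walk in `is_typeddict` plays out on typical code, as I read it (example mine; Python 3.8+ for `typing.TypedDict`): a plain dotted base that resolves to the typing module's `TypedDict` (wrapped as `TypedDictBase` above) marks the class, the check recurses through intermediate tree classes, and any subscripted base short-circuits to False via `expr_is_dotted`.

```
from typing import TypedDict, Generic, TypeVar

T = TypeVar('T')

class Movie(TypedDict):     # base infers to TypedDictBase -> is_typeddict() is True
    title: str

class ScoredMovie(Movie):   # recurses through Movie.is_typeddict() -> True
    score: float

class Box(Generic[T]):      # `Generic[T]` is not a plain dotted name
    pass                    # -> expr_is_dotted() is False -> not a TypedDict
```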