forked from VimPlug/jedi
Delete a lot of names_dict-related stuff, along with other things that were no longer used.
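This commit continues jedi's move from the names_dicts protocol to filter objects: instead of each context yielding raw dicts that map name strings to definitions, contexts now yield filter objects from get_filters(). The toy example below is not part of the diff; it only illustrates the shape of that change, with invented class names (ToyScope, DictBackedFilter) and a get()/values() filter interface assumed to resemble jedi's.

# Toy, self-contained illustration of the old vs. new lookup shape.
# All names here (ToyScope, DictBackedFilter) are invented for the example.

class DictBackedFilter(object):
    """Wraps a plain {string: [definition, ...]} mapping behind a filter API."""
    def __init__(self, names_dict):
        self._names_dict = names_dict

    def get(self, string_name):
        return self._names_dict.get(string_name, [])

    def values(self):
        return [n for names in self._names_dict.values() for n in names]


class ToyScope(object):
    def __init__(self, own_names, parent=None):
        self._own_names = own_names
        self._parent = parent

    def names_dicts(self):
        # Old style: yield the raw dicts themselves.
        yield self._own_names
        if self._parent is not None:
            for d in self._parent.names_dicts():
                yield d

    def get_filters(self):
        # New style: yield filter objects that own the lookup logic.
        yield DictBackedFilter(self._own_names)
        if self._parent is not None:
            for f in self._parent.get_filters():
                yield f


builtins_scope = ToyScope({'len': ['<builtin len>']})
module_scope = ToyScope({'foo': ['<def foo>']}, parent=builtins_scope)

# Old consumers indexed the dicts; new consumers ask each filter.
old = [d.get('len', []) for d in module_scope.names_dicts()]
new = [f.get('len') for f in module_scope.get_filters()]
assert old == new == [[], ['<builtin len>']]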
@@ -1,10 +1,7 @@
 """
 TODO Some parts of this module are still not well documented.
 """
-import copy
 
-from jedi.cache import underscore_memoization
-from jedi.evaluate import helpers
 from jedi.evaluate.representation import ModuleContext
 from jedi.evaluate import compiled
 from jedi.evaluate.compiled import mixed
@@ -180,26 +180,10 @@ class CompiledObject(Context):
         # automatically by doing `import numpy`.
         return type
 
-    @property
-    def names_dict(self):
-        # For compatibility with `representation.Class`.
-        return self.names_dicts(False)[0]
-
-    def names_dicts(self, search_global, is_instance=False):
-        return self._names_dict_ensure_one_dict(is_instance)
-
     def get_filters(self, search_global=False, is_instance=False,
                     until_position=None, origin_scope=None):
         yield self._ensure_one_filter(is_instance)
 
-    @memoize_method
-    def _names_dict_ensure_one_dict(self, is_instance):
-        """
-        search_global shouldn't change the fact that there's one dict, this way
-        there's only one `object`.
-        """
-        return [LazyNamesDict(self.evaluator, self, is_instance)]
-
     @memoize_method
     def _ensure_one_filter(self, is_instance):
         """
@@ -329,9 +313,6 @@ class EmptyCompiledName(AbstractNameDefinition):
 
 
 class CompiledObjectFilter(AbstractFilter):
-    """
-    A names_dict instance for compiled objects, resembles the parser.tree.
-    """
     name_class = CompiledName
 
     def __init__(self, evaluator, compiled_object, is_instance=False):
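The CompiledObject hunk above is cut off right after the docstring of _ensure_one_filter, so its body is not visible in this diff. Given that CompiledObjectFilter's __init__ now takes (evaluator, compiled_object, is_instance), a plausible reading is a memoized one-filter constructor mirroring the deleted one-dict helper; treat this sketch as an assumption, not the committed code:

    @memoize_method
    def _ensure_one_filter(self, is_instance):
        """
        search_global shouldn't change the fact that there's only one filter,
        this way there's only one `object`.
        """
        # Assumed body: mirror the old one-dict helper, but return a filter.
        return CompiledObjectFilter(self.evaluator, self, is_instance)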
@@ -144,11 +144,6 @@ class GeneratorMixin(object):
         # TODO add TypeError if params are given.
         return unite(lazy_context.infer() for lazy_context in self.py__iter__())
 
-    @memoize_default()
-    def names_dicts(self, search_global=False): # is always False
-        gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
-        yield self._get_names_dict(gen_obj.names_dict)
-
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
         yield SpecialMethodFilter(self, self.builtin_methods, gen_obj)
@@ -228,13 +223,6 @@ class Comprehension(AbstractSequence):
         [x + 1 for x in foo]
         """
         return self._get_comprehension().children[index]
-        #TODO delete
-        comp_for = self._get_comp_for()
-        # For nested comprehensions we need to search the last one.
-        node = self._get_comprehension().children[index]
-        last_comp = list(comp_for.get_comp_fors())[-1]
-        #TODO raise NotImplementedError('should not need to copy...')
-        return helpers.deep_ast_copy(node, parent=last_comp)
 
     @memoize_default()
     def _get_comp_for_context(self, parent_context, comp_for):
@@ -284,15 +272,6 @@ class Comprehension(AbstractSequence):
 
 
 class ArrayMixin(object):
-    @memoize_default()
-    def names_dicts(self, search_global=False): # Always False.
-        # `array.type` is a string with the type, e.g. 'list'.
-        scope = compiled.builtin_from_name(self.evaluator, self.array_type)
-        # builtins only have one class -> [0]
-        scopes = self.evaluator.execute_evaluated(scope, self)
-        names_dicts = list(scopes)[0].names_dicts(search_global)
-        yield self._get_names_dict(names_dicts[1])
-
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         # `array.type` is a string with the type, e.g. 'list'.
         compiled_obj = compiled.builtin_from_name(self.evaluator, self.array_type)
@@ -300,8 +279,6 @@ class ArrayMixin(object):
         for typ in compiled_obj.execute_evaluated(self):
             for filter in typ.get_filters():
                 yield filter
-        # TODO this should be used.
-        #yield DictFilter(self._get_names_dict(names_dicts[1]))
 
     def py__bool__(self):
         return None # We don't know the length, because of appends.
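Taken together, the two ArrayMixin hunks mean a sequence no longer builds its own names dict; it executes the compiled builtin for its array_type and re-yields that instance's filters. A hedged consumer-side sketch of what that buys (find_sequence_method is not a real jedi function, and the filter get() call is an assumption):

def find_sequence_method(sequence_context, string_name):
    # e.g. string_name = 'append' on a list-like sequence: the names come from
    # the filters of the executed `list`/`set` builtin, per the hunk above.
    for f in sequence_context.get_filters(search_global=False):
        names = f.get(string_name)
        if names:
            return names
    return []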
@@ -745,7 +722,7 @@ def _check_array_additions(context, sequence):
     >>> a = [""]
     >>> a.append(1)
     """
-    from jedi.evaluate import representation as er, param
+    from jedi.evaluate import param
 
     debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
     module_context = context.get_root_context()
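Side note: the `representation as er` alias dropped here appears to have had no remaining users in this function; the only references to `er.InstanceElement` and `er.FunctionExecution` were in the commented-out blocks deleted in the next two hunks.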
@@ -766,27 +743,11 @@ def _check_array_additions(context, sequence):
                 result |= set(py__iter__(context.evaluator, lazy_context.infer()))
         return result
 
-    '''
-    def get_execution_parent(element):
-        """ Used to get an Instance/FunctionExecution parent """
-        if isinstance(element, Array):
-            node = element.atom
-        else:
-            # Is an Instance with an
-            # Arguments([AlreadyEvaluated([_ArrayInstance])]) inside
-            # Yeah... I know... It's complicated ;-)
-            node = list(element.var_args.argument_node[0])[0].var_args.trailer
-        if isinstance(node, er.InstanceElement) or node is None:
-            return node
-        return node.get_parent_until(er.FunctionExecution)
-    '''
-
     temp_param_add, settings.dynamic_params_for_other_modules = \
         settings.dynamic_params_for_other_modules, False
 
     is_list = sequence.name.string_name == 'list'
     search_names = (['append', 'extend', 'insert'] if is_list else ['add', 'update'])
-    #comp_arr_parent = None
 
     added_types = set()
     for add_name in search_names:
@@ -799,21 +760,6 @@ def _check_array_additions(context, sequence):
             context_node = context.get_node()
             if not (context_node.start_pos < name.start_pos < context_node.end_pos):
                 continue
-            '''
-            # Check if the original scope is an execution. If it is, one
-            # can search for the same statement, that is in the module
-            # dict. Executions are somewhat special in jedi, since they
-            # literally copy the contents of a function.
-            if isinstance(comp_arr_parent, er.FunctionExecution):
-                if comp_arr_parent.start_pos < name.start_pos < comp_arr_parent.end_pos:
-                    name = comp_arr_parent.name_for_position(name.start_pos)
-                else:
-                    # Don't check definitions that are not defined in the
-                    # same function. This is not "proper" anyway. It also
-                    # improves Jedi's speed for array lookups, since we
-                    # don't have to check the whole source tree anymore.
-                    continue
-            '''
             trailer = name.parent
             power = trailer.parent
             trailer_pos = power.children.index(trailer)
@@ -872,7 +818,7 @@ class _ArrayInstance(object):
 
     In contrast to Array, ListComprehension and all other iterable types, this
     is something that is only used inside `evaluate/compiled/fake/builtins.py`
-    and therefore doesn't need `names_dicts`, `py__bool__` and so on, because
+    and therefore doesn't need filters, `py__bool__` and so on, because
     we don't use these operations in `builtins.py`.
     """
     def __init__(self, instance):
@@ -169,16 +169,6 @@ class ClassContext(use_metaclass(CachedMetaClass, context.TreeContext)):
         anon = AnonymousInstance(self.evaluator, self.parent_context, self)
         return [AnonymousInstanceParamName(anon, param.name) for param in self.funcdef.params]
 
-    def names_dicts(self, search_global, is_instance=False):
-        if search_global:
-            yield self.names_dict
-        else:
-            for scope in self.py__mro__():
-                if isinstance(scope, compiled.CompiledObject):
-                    yield scope.names_dicts(False, is_instance)[0]
-                else:
-                    yield scope.names_dict
-
     def get_filters(self, search_global, until_position=None, origin_scope=None, is_instance=False):
         if search_global:
             yield ParserTreeFilter(self.evaluator, self, self.classdef, until_position, origin_scope=origin_scope)
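The ClassContext hunk only shows the search_global branch of the new get_filters; the non-global branch is cut off. Presumably it walks the MRO just as the deleted names_dicts loop did, yielding each class's filters instead of its names_dict. A hedged sketch of that branch (not part of the diff):

        else:
            for scope in self.py__mro__():
                # Each class in the MRO contributes its own filters, much as it
                # previously contributed a names_dict (compiled objects included).
                for f in scope.get_filters(search_global=False, is_instance=is_instance):
                    yield f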
@@ -238,14 +228,6 @@ class FunctionContext(use_metaclass(CachedMetaClass, context.TreeContext)):
     def get_node(self):
        return self.funcdef
 
-    def names_dicts(self, search_global):
-        if search_global:
-            yield self.names_dict
-        else:
-            scope = self.py__class__()
-            for names_dict in scope.names_dicts(False):
-                yield names_dict
-
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         if search_global:
             yield ParserTreeFilter(self.evaluator, self, self.base, until_position, origin_scope=origin_scope)
@@ -445,16 +427,6 @@ class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext)):
     def get_node(self):
         return self.module_node
 
-    def names_dicts(self, search_global):
-        yield self.base.names_dict
-        yield self._module_attributes_dict()
-
-        for star_module in self.star_imports():
-            yield star_module.names_dict
-
-        yield dict((str(n), [GlobalName(n)]) for n in self.base.global_names)
-        yield self._sub_modules_dict()
-
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         yield ParserTreeFilter(
             self.evaluator,
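The ModuleContext hunk is truncated after the first ParserTreeFilter argument, so it is not visible here how the other sources that names_dicts used to yield (module attributes, star imports, global names, submodules) are now exposed. One plausible shape, reusing the DictFilter wrapper referenced in the ArrayMixin hunk above, is sketched below; the ParserTreeFilter argument list is modeled on the class/function hunks and every yield after it is an assumption:

    def get_filters(self, search_global, until_position=None, origin_scope=None):
        yield ParserTreeFilter(self.evaluator, self, self.module_node,
                               until_position, origin_scope=origin_scope)
        # Assumed follow-ups, mirroring what names_dicts used to yield
        # (global names would presumably get a filter of their own as well):
        yield DictFilter(self._module_attributes_dict())
        yield DictFilter(self._sub_modules_dict())
        for star_module in self.star_imports():
            for f in star_module.get_filters(search_global):
                yield f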