forked from VimPlug/jedi
Some code cleanups
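Most of the hunks below replace a bare next(iterator) inside a generator with next(iterator, None) plus an explicit assert. A minimal sketch of why (illustrative only, not part of the commit): under PEP 479 (Python 3.7+), a StopIteration that escapes a generator body is re-raised as RuntimeError, so a bare next() on an exhausted iterator crashes instead of quietly ending iteration; the default-plus-assert form fails with a clear AssertionError when a required item is missing, and is a harmless no-op where the result is discarded.

def fragile(filters):
    # PEP 479: a StopIteration escaping a generator body becomes RuntimeError,
    # so this blows up if `filters` is already exhausted.
    yield next(filters)

def explicit(filters):
    first = next(filters, None)   # never raises StopIteration
    assert first is not None      # fail loudly if the expected filter is missing
    yield first

print(list(explicit(iter(['a']))))    # ['a']
try:
    list(fragile(iter([])))
except RuntimeError as exc:
    print(type(exc).__name__, exc)    # RuntimeError: generator raised StopIteration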
@@ -361,8 +361,6 @@ def _is_django_path(directory):
     except (FileNotFoundError, IsADirectoryError, PermissionError):
         return False

-    return False
-

 def get_default_project(path=None):
     """
@@ -314,7 +314,7 @@ class ModuleContext(TreeContextMixin, ValueContext):
     def get_filters(self, until_position=None, origin_scope=None):
         filters = self._value.get_filters(origin_scope)
         # Skip the first filter and replace it.
-        next(filters)
+        next(filters, None)
         yield MergedFilter(
             ParserTreeFilter(
                 parent_context=self,
@@ -494,5 +494,7 @@ def get_global_filters(context, until_position, origin_scope):

         context = context.parent_context

+    b = next(base_context.inference_state.builtins_module.get_filters(), None)
+    assert b is not None
     # Add builtins to the global scope.
-    yield next(base_context.inference_state.builtins_module.get_filters())
+    yield b
@@ -48,7 +48,6 @@ def _avoid_recursions(func):
                 finally:
                     inf.dynamic_params_depth -= 1
             return NO_VALUES
-        return
     return wrapper

@@ -125,11 +125,12 @@ def infer_param(function_value, param, ignore_stars=False):
             ValueSet([builtin_from_name(inference_state, 'str')]),
             values
         )
+        if not values:
+            return NO_VALUES
         return ValueSet([GenericClass(
             dct,
             TupleGenericManager(generics),
-        ) for c in values])
-        pass
+        )])
     return values

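Note on the hunk above: the loop variable `c` in `) for c in values])` was never used; its only effect was to make the comprehension produce nothing when `values` was empty and to repeat an equivalent entry once per element otherwise. The rewrite makes the empty case an explicit early return and builds a single-element ValueSet. A toy sketch of the equivalence (made-up names, not jedi's API):

def old_style(values):
    return ['generic' for c in values]   # `c` unused; [] when values is empty

def new_style(values):
    if not values:
        return []
    return ['generic']

assert old_style([]) == new_style([]) == []
assert new_style([1, 2]) == ['generic']        # one entry regardless of len(values)
assert old_style([1, 2]) == ['generic'] * 2    # old form repeated the same entry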
@@ -63,7 +63,9 @@ class StubModuleContext(ModuleContext):
 class TypingModuleWrapper(StubModuleValue):
     def get_filters(self, *args, **kwargs):
         filters = super(TypingModuleWrapper, self).get_filters(*args, **kwargs)
-        yield TypingModuleFilterWrapper(next(filters))
+        f = next(filters, None)
+        assert f is not None
+        yield TypingModuleFilterWrapper(f)
         for f in filters:
             yield f

@@ -109,8 +109,8 @@ def _find_global_variables(names, search_name):
             for global_name in method().get(search_name):
                 yield global_name
                 c = module_context.create_context(global_name.tree_name)
-                for name in _add_names_in_same_context(c, global_name.string_name):
-                    yield name
+                for n in _add_names_in_same_context(c, global_name.string_name):
+                    yield n


 def find_references(module_context, tree_name):
@@ -8,7 +8,7 @@ from jedi.inference.base_value import ValueWrapper

 class Decoratee(ValueWrapper):
     def __init__(self, wrapped_value, original_value):
-        self._wrapped_value = wrapped_value
+        super(Decoratee, self).__init__(wrapped_value)
         self._original_value = original_value

     def py__doc__(self):
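Note on the hunk above: ValueWrapper presumably stores the wrapped value itself in its __init__, so Decoratee now delegates to it instead of assigning `_wrapped_value` directly. A simplified sketch of that delegation pattern (the classes here are stand-ins, not jedi's):

class Wrapper:
    def __init__(self, wrapped_value):
        self._wrapped_value = wrapped_value

class Decorated(Wrapper):
    def __init__(self, wrapped_value, original_value):
        # Let the base class own `_wrapped_value` instead of duplicating
        # the assignment here.
        super(Decorated, self).__init__(wrapped_value)
        self._original_value = original_value

d = Decorated('wrapped', 'original')
assert d._wrapped_value == 'wrapped'
assert d._original_value == 'original'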
@@ -241,7 +241,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
     def py__getitem__(self, index_value_set, contextualized_node):
         names = self.get_function_slot_names(u'__getitem__')
         if not names:
-            return super(AbstractInstanceValue, self).py__getitem__(
+            return super(_BaseTreeInstance, self).py__getitem__(
                 index_value_set,
                 contextualized_node,
             )
@@ -252,7 +252,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
     def py__iter__(self, contextualized_node=None):
         iter_slot_names = self.get_function_slot_names(u'__iter__')
         if not iter_slot_names:
-            return super(AbstractInstanceValue, self).py__iter__(contextualized_node)
+            return super(_BaseTreeInstance, self).py__iter__(contextualized_node)

         def iterate():
             for generator in self.execute_function_slots(iter_slot_names):
@@ -278,7 +278,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
         names = self.get_function_slot_names(u'__call__')
         if not names:
             # Means the Instance is not callable.
-            return super(AbstractInstanceValue, self).py__call__(arguments)
+            return super(_BaseTreeInstance, self).py__call__(arguments)

         return ValueSet.from_sets(name.infer().execute(arguments) for name in names)

@@ -317,8 +317,7 @@ class TreeInstance(_BaseTreeInstance):
             if settings.dynamic_array_additions:
                 arguments = get_dynamic_array_instance(self, arguments)

-        super(_BaseTreeInstance, self).__init__(inference_state, parent_context,
-                                                class_value)
+        super(TreeInstance, self).__init__(inference_state, parent_context, class_value)
         self._arguments = arguments
         self.tree_node = class_value.tree_node

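Note on the super() hunks above: in the explicit two-argument form, the first argument tells Python where in the MRO to start looking, namely just after that class. Passing the class that defines the method is the conventional spelling; passing one of its bases starts the search further up and can skip an override along the way. Assuming the intermediate jedi classes do not override these slots, the change is behaviour-preserving. A minimal illustration with made-up classes:

class Base:
    def hello(self):
        return 'Base'

class Middle(Base):
    def hello(self):
        return 'Middle'

class Leaf(Middle):
    def via_defining_class(self):
        return super(Leaf, self).hello()    # search starts after Leaf  -> Middle.hello
    def via_base_class(self):
        return super(Middle, self).hello()  # search starts after Middle -> Base.hello

leaf = Leaf()
assert leaf.via_defining_class() == 'Middle'
assert leaf.via_base_class() == 'Base'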
@@ -214,9 +214,11 @@ class ClassMixin(object):
                 for instance in type_.py__call__(args):
                     instance_filters = instance.get_filters()
                     # Filter out self filters
-                    next(instance_filters)
-                    next(instance_filters)
-                    yield next(instance_filters)
+                    next(instance_filters, None)
+                    next(instance_filters, None)
+                    x = next(instance_filters, None)
+                    assert x is not None
+                    yield x

     def get_signatures(self):
         # Since calling staticmethod without a function is illegal, the Jedi
@@ -99,7 +99,9 @@ class ModuleMixin(SubModuleDictMixin):

     def iter_star_filters(self):
         for star_module in self.star_imports():
-            yield next(star_module.get_filters())
+            f = next(star_module.get_filters(), None)
+            assert f is not None
+            yield f

     # I'm not sure if the star import cache is really that effective anymore
     # with all the other really fast import caches. Recheck. Also we would need
@@ -158,7 +158,6 @@ def argument_clinic(string, want_value=False, want_context=False,
             callback = kwargs.pop('callback')
             assert not kwargs  # Python 2...
             debug.dbg('builtin start %s' % value, color='MAGENTA')
-            result = NO_VALUES
             if want_context:
                 kwargs['context'] = arguments.context
             if want_value:
@@ -541,7 +540,7 @@ class MergedPartialArguments(AbstractArguments):
         unpacked = self._partial_arguments.unpack(funcdef)
         # Ignore this one, it's the function. It was checked before that it's
         # there.
-        next(unpacked)
+        next(unpacked, None)
         if self._instance is not None:
             yield None, LazyKnownValue(self._instance)
         for key_lazy_value in unpacked: