Start implementing the bulk of the context/value separation

Dave Halter
2019-08-16 16:12:12 +02:00
parent d19233a338
commit 165639c1dd
23 changed files with 322 additions and 251 deletions
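
For orientation: the hunks below apply one mechanical rename throughout. Value-based plumbing such as _get_module(), create_value() and get_root_value() becomes _get_module_context(), create_context() and get_root_context(), and call sites receive a context object instead of a raw value. A minimal sketch of that calling convention, using made-up stand-in classes rather than Jedi's own:

# Stand-in classes; only the method names are taken from the hunks below.
class ToyInferenceState:
    def create_context(self, base_context, node):
        # Call sites previously used create_value(base_value, node).
        return '<context of %r within %r>' % (node, base_context)


class ToyModuleContext:
    def __repr__(self):
        return 'ToyModuleContext()'


state = ToyInferenceState()
module_context = ToyModuleContext()   # previously a raw module value was passed here
print(state.create_context(module_context, 'leaf_node'))
# prints: <context of 'leaf_node' within ToyModuleContext()>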


@@ -192,6 +192,9 @@ class Script(object):
self._inference_state.module_cache.add(names, ValueSet([module])) self._inference_state.module_cache.add(names, ValueSet([module]))
return module return module
def _get_module_context(self):
return self._get_module().as_context()
def __repr__(self): def __repr__(self):
return '<%s: %s %r>' % ( return '<%s: %s %r>' % (
self.__class__.__name__, self.__class__.__name__,
@@ -209,7 +212,7 @@ class Script(object):
""" """
with debug.increase_indent_cm('completions'): with debug.increase_indent_cm('completions'):
completion = Completion( completion = Completion(
self._inference_state, self._get_module(), self._code_lines, self._inference_state, self._get_module_context(), self._code_lines,
self._pos, self.call_signatures self._pos, self.call_signatures
) )
return completion.completions() return completion.completions()
@@ -239,9 +242,9 @@ class Script(object):
if leaf is None: if leaf is None:
return [] return []
value = self._inference_state.create_value(self._get_module(), leaf) context = self._inference_state.create_context(self._get_module_context(), leaf)
values = helpers.infer_goto_definition(self._inference_state, value, leaf) values = helpers.infer_goto_definition(self._inference_state, context, leaf)
values = convert_values( values = convert_values(
values, values,
only_stubs=only_stubs, only_stubs=only_stubs,
@@ -299,8 +302,8 @@ class Script(object):
# Without a name we really just want to jump to the result e.g. # Without a name we really just want to jump to the result e.g.
# executed by `foo()`, if we the cursor is after `)`. # executed by `foo()`, if we the cursor is after `)`.
return self.goto_definitions(only_stubs=only_stubs, prefer_stubs=prefer_stubs) return self.goto_definitions(only_stubs=only_stubs, prefer_stubs=prefer_stubs)
value = self._inference_state.create_value(self._get_module(), tree_name) context = self._inference_state.create_context(self._get_module_context(), tree_name)
names = list(self._inference_state.goto(value, tree_name)) names = list(self._inference_state.goto(context, tree_name))
if follow_imports: if follow_imports:
names = filter_follow_imports(names, lambda name: name.is_import()) names = filter_follow_imports(names, lambda name: name.is_import())
@@ -340,7 +343,7 @@ class Script(object):
# Must be syntax # Must be syntax
return [] return []
names = usages.usages(self._get_module(), tree_name) names = usages.usages(self._get_module_context(), tree_name)
definitions = [classes.Definition(self._inference_state, n) for n in names] definitions = [classes.Definition(self._inference_state, n) for n in names]
if not include_builtins: if not include_builtins:
@@ -368,8 +371,8 @@ class Script(object):
if call_details is None: if call_details is None:
return [] return []
value = self._inference_state.create_value( value = self._inference_state.create_context(
self._get_module(), self._get_module_context(),
call_details.bracket_leaf call_details.bracket_leaf
) )
definitions = helpers.cache_call_signatures( definitions = helpers.cache_call_signatures(
@@ -389,10 +392,10 @@ class Script(object):
def _analysis(self): def _analysis(self):
self._inference_state.is_analysis = True self._inference_state.is_analysis = True
self._inference_state.analysis_modules = [self._module_node] self._inference_state.analysis_modules = [self._module_node]
module = self._get_module() module = self._get_module_context()
try: try:
for node in get_executable_nodes(self._module_node): for node in get_executable_nodes(self._module_node):
value = module.create_value(node) value = module.create_context(node)
if node.type in ('funcdef', 'classdef'): if node.type in ('funcdef', 'classdef'):
# Resolve the decorators. # Resolve the decorators.
tree_name_to_values(self._inference_state, value, node.children[1]) tree_name_to_values(self._inference_state, value, node.children[1])
@@ -505,13 +508,13 @@ def names(source=None, path=None, encoding='utf-8', all_scopes=False,
else: else:
cls = TreeNameDefinition cls = TreeNameDefinition
return cls( return cls(
module_value.create_value(name), module_context.create_context(name),
name name
) )
# Set line/column to a random position, because they don't matter. # Set line/column to a random position, because they don't matter.
script = Script(source, line=1, column=0, path=path, encoding=encoding, environment=environment) script = Script(source, line=1, column=0, path=path, encoding=encoding, environment=environment)
module_value = script._get_module() module_context = script._get_module_context()
defs = [ defs = [
classes.Definition( classes.Definition(
script._inference_state, script._inference_state,


@@ -52,11 +52,11 @@ def filter_names(inference_state, completion_names, stack, like_name):
yield new yield new
def get_user_scope(module_value, position): def get_user_context(module_context, position):
""" """
Returns the scope in which the user resides. This includes flows. Returns the scope in which the user resides. This includes flows.
""" """
user_stmt = get_statement_of_position(module_value.tree_node, position) user_stmt = get_statement_of_position(module_context.tree_node, position)
if user_stmt is None: if user_stmt is None:
def scan(scope): def scan(scope):
for s in scope.children: for s in scope.children:
@@ -68,12 +68,12 @@ def get_user_scope(module_value, position):
return scan(s) return scan(s)
return None return None
scanned_node = scan(module_value.tree_node) scanned_node = scan(module_context.tree_node)
if scanned_node: if scanned_node:
return module_value.create_value(scanned_node, node_is_value=True) return module_context.create_context(scanned_node, node_is_value=True)
return module_value return module_context
else: else:
return module_value.create_value(user_stmt) return module_context.create_context(user_stmt)
def get_flow_scope_node(module_node, position): def get_flow_scope_node(module_node, position):
@@ -85,10 +85,11 @@ def get_flow_scope_node(module_node, position):
class Completion: class Completion:
def __init__(self, inference_state, module, code_lines, position, call_signatures_callback): def __init__(self, inference_state, module_context, code_lines, position,
call_signatures_callback):
self._inference_state = inference_state self._inference_state = inference_state
self._module_value = module self._module_context = module_context
self._module_node = module.tree_node self._module_node = module_context.tree_node
self._code_lines = code_lines self._code_lines = code_lines
# The first step of completions is to get the name # The first step of completions is to get the name
@@ -104,7 +105,7 @@ class Completion:
string, start_leaf = _extract_string_while_in_string(leaf, self._position) string, start_leaf = _extract_string_while_in_string(leaf, self._position)
if string is not None: if string is not None:
completions = list(file_name_completions( completions = list(file_name_completions(
self._inference_state, self._module_value, start_leaf, string, self._inference_state, self._module_context, start_leaf, string,
self._like_name, self._call_signatures_callback, self._like_name, self._call_signatures_callback,
self._code_lines, self._original_position self._code_lines, self._original_position
)) ))
@@ -237,7 +238,7 @@ class Completion:
yield keywords.KeywordName(self._inference_state, k) yield keywords.KeywordName(self._inference_state, k)
def _global_completions(self): def _global_completions(self):
value = get_user_scope(self._module_value, self._position) value = get_user_context(self._module_context, self._position)
debug.dbg('global completion scope: %s', value) debug.dbg('global completion scope: %s', value)
flow_scope_node = get_flow_scope_node(self._module_node, self._position) flow_scope_node = get_flow_scope_node(self._module_node, self._position)
filters = get_global_filters( filters = get_global_filters(
@@ -252,9 +253,9 @@ class Completion:
return completion_names return completion_names
def _trailer_completions(self, previous_leaf): def _trailer_completions(self, previous_leaf):
user_value = get_user_scope(self._module_value, self._position) user_value = get_user_context(self._module_context, self._position)
inferred_value = self._inference_state.create_value( inferred_value = self._inference_state.create_context(
self._module_value, previous_leaf self._module_context, previous_leaf
) )
values = infer_call_of_leaf(inferred_value, previous_leaf) values = infer_call_of_leaf(inferred_value, previous_leaf)
completion_names = [] completion_names = []
@@ -276,7 +277,7 @@ class Completion:
def _get_importer_names(self, names, level=0, only_modules=True): def _get_importer_names(self, names, level=0, only_modules=True):
names = [n.value for n in names] names = [n.value for n in names]
i = imports.Importer(self._inference_state, names, self._module_value, level) i = imports.Importer(self._inference_state, names, self._module_context, level)
return i.completion_names(self._inference_state, only_modules=only_modules) return i.completion_names(self._inference_state, only_modules=only_modules)
def _get_class_value_completions(self, is_function=True): def _get_class_value_completions(self, is_function=True):
@@ -287,7 +288,7 @@ class Completion:
cls = tree.search_ancestor(leaf, 'classdef') cls = tree.search_ancestor(leaf, 'classdef')
if isinstance(cls, (tree.Class, tree.Function)): if isinstance(cls, (tree.Class, tree.Function)):
# Complete the methods that are defined in the super classes. # Complete the methods that are defined in the super classes.
random_value = self._module_value.create_value( random_value = self._module_context.create_context(
cls, cls,
node_is_value=True node_is_value=True
) )


@@ -7,12 +7,12 @@ from jedi.inference.helpers import get_str_or_none
from jedi.parser_utils import get_string_quote from jedi.parser_utils import get_string_quote
def file_name_completions(inference_state, module_value, start_leaf, string, def file_name_completions(inference_state, module_context, start_leaf, string,
like_name, call_signatures_callback, code_lines, position): like_name, call_signatures_callback, code_lines, position):
# First we want to find out what can actually be changed as a name. # First we want to find out what can actually be changed as a name.
like_name_length = len(os.path.basename(string) + like_name) like_name_length = len(os.path.basename(string) + like_name)
addition = _get_string_additions(module_value, start_leaf) addition = _get_string_additions(module_context, start_leaf)
if addition is None: if addition is None:
return return
string = addition + string string = addition + string
@@ -25,7 +25,7 @@ def file_name_completions(inference_state, module_value, start_leaf, string,
sigs = call_signatures_callback() sigs = call_signatures_callback()
is_in_os_path_join = sigs and all(s.full_name == 'os.path.join' for s in sigs) is_in_os_path_join = sigs and all(s.full_name == 'os.path.join' for s in sigs)
if is_in_os_path_join: if is_in_os_path_join:
to_be_added = _add_os_path_join(module_value, start_leaf, sigs[0].bracket_start) to_be_added = _add_os_path_join(module_context, start_leaf, sigs[0].bracket_start)
if to_be_added is None: if to_be_added is None:
is_in_os_path_join = False is_in_os_path_join = False
else: else:
@@ -60,7 +60,7 @@ def file_name_completions(inference_state, module_value, start_leaf, string,
) )
def _get_string_additions(module_value, start_leaf): def _get_string_additions(module_context, start_leaf):
def iterate_nodes(): def iterate_nodes():
node = addition.parent node = addition.parent
was_addition = True was_addition = True
@@ -77,7 +77,7 @@ def _get_string_additions(module_value, start_leaf):
addition = start_leaf.get_previous_leaf() addition = start_leaf.get_previous_leaf()
if addition != '+': if addition != '+':
return '' return ''
value = module_value.create_value(start_leaf) value = module_context.create_context(start_leaf)
return _add_strings(value, reversed(list(iterate_nodes()))) return _add_strings(value, reversed(list(iterate_nodes())))
@@ -104,14 +104,14 @@ class FileName(AbstractArbitraryName):
is_value_name = False is_value_name = False
def _add_os_path_join(module_value, start_leaf, bracket_start): def _add_os_path_join(module_context, start_leaf, bracket_start):
def check(maybe_bracket, nodes): def check(maybe_bracket, nodes):
if maybe_bracket.start_pos != bracket_start: if maybe_bracket.start_pos != bracket_start:
return None return None
if not nodes: if not nodes:
return '' return ''
value = module_value.create_value(nodes[0]) value = module_context.create_context(nodes[0])
return _add_strings(value, nodes, add_slash=True) or '' return _add_strings(value, nodes, add_slash=True) or ''
if start_leaf.type == 'error_leaf': if start_leaf.type == 'error_leaf':


@@ -111,11 +111,11 @@ class InferenceState(object):
self.reset_recursion_limitations() self.reset_recursion_limitations()
self.allow_different_encoding = True self.allow_different_encoding = True
def import_module(self, import_names, parent_module_value=None, def import_module(self, import_names, parent_module_context=None,
sys_path=None, prefer_stubs=True): sys_path=None, prefer_stubs=True):
if sys_path is None: if sys_path is None:
sys_path = self.get_sys_path() sys_path = self.get_sys_path()
return imports.import_module(self, import_names, parent_module_value, return imports.import_module(self, import_names, parent_module_context,
sys_path, prefer_stubs=prefer_stubs) sys_path, prefer_stubs=prefer_stubs)
@staticmethod @staticmethod
@@ -150,9 +150,9 @@ class InferenceState(object):
"""Convenience function""" """Convenience function"""
return self.project._get_sys_path(self, environment=self.environment, **kwargs) return self.project._get_sys_path(self, environment=self.environment, **kwargs)
def infer_element(self, value, element): def infer_element(self, context, element):
if isinstance(value, CompForValue): if isinstance(context, CompForValue):
return infer_node(value, element) return infer_node(context, element)
if_stmt = element if_stmt = element
while if_stmt is not None: while if_stmt is not None:
@@ -162,7 +162,7 @@ class InferenceState(object):
if parser_utils.is_scope(if_stmt): if parser_utils.is_scope(if_stmt):
if_stmt = None if_stmt = None
break break
predefined_if_name_dict = value.predefined_names.get(if_stmt) predefined_if_name_dict = context.predefined_names.get(if_stmt)
# TODO there's a lot of issues with this one. We actually should do # TODO there's a lot of issues with this one. We actually should do
# this in a different way. Caching should only be active in certain # this in a different way. Caching should only be active in certain
# cases and this all sucks. # cases and this all sucks.
@@ -182,7 +182,7 @@ class InferenceState(object):
str_element_names = [e.value for e in element_names] str_element_names = [e.value for e in element_names]
if any(i.value in str_element_names for i in if_names): if any(i.value in str_element_names for i in if_names):
for if_name in if_names: for if_name in if_names:
definitions = self.goto_definitions(value, if_name) definitions = self.goto_definitions(context, if_name)
# Every name that has multiple different definitions # Every name that has multiple different definitions
# causes the complexity to rise. The complexity should # causes the complexity to rise. The complexity should
# never fall below 1. # never fall below 1.
@@ -210,65 +210,65 @@ class InferenceState(object):
if len(name_dicts) > 1: if len(name_dicts) > 1:
result = NO_VALUES result = NO_VALUES
for name_dict in name_dicts: for name_dict in name_dicts:
with helpers.predefine_names(value, if_stmt, name_dict): with helpers.predefine_names(context, if_stmt, name_dict):
result |= infer_node(value, element) result |= infer_node(context, element)
return result return result
else: else:
return self._infer_element_if_inferred(value, element) return self._infer_element_if_inferred(context, element)
else: else:
if predefined_if_name_dict: if predefined_if_name_dict:
return infer_node(value, element) return infer_node(context, element)
else: else:
return self._infer_element_if_inferred(value, element) return self._infer_element_if_inferred(context, element)
def _infer_element_if_inferred(self, value, element): def _infer_element_if_inferred(self, context, element):
""" """
TODO This function is temporary: Merge with infer_element. TODO This function is temporary: Merge with infer_element.
""" """
parent = element parent = element
while parent is not None: while parent is not None:
parent = parent.parent parent = parent.parent
predefined_if_name_dict = value.predefined_names.get(parent) predefined_if_name_dict = context.predefined_names.get(parent)
if predefined_if_name_dict is not None: if predefined_if_name_dict is not None:
return infer_node(value, element) return infer_node(context, element)
return self._infer_element_cached(value, element) return self._infer_element_cached(context, element)
@inference_state_function_cache(default=NO_VALUES) @inference_state_function_cache(default=NO_VALUES)
def _infer_element_cached(self, value, element): def _infer_element_cached(self, context, element):
return infer_node(value, element) return infer_node(context, element)
def goto_definitions(self, value, name): def goto_definitions(self, context, name):
def_ = name.get_definition(import_name_always=True) def_ = name.get_definition(import_name_always=True)
if def_ is not None: if def_ is not None:
type_ = def_.type type_ = def_.type
is_classdef = type_ == 'classdef' is_classdef = type_ == 'classdef'
if is_classdef or type_ == 'funcdef': if is_classdef or type_ == 'funcdef':
if is_classdef: if is_classdef:
c = ClassValue(self, value, name.parent) c = ClassValue(self, context, name.parent)
else: else:
c = FunctionValue.from_value(value, name.parent) c = FunctionValue.from_value(context, name.parent)
return ValueSet([c]) return ValueSet([c])
if type_ == 'expr_stmt': if type_ == 'expr_stmt':
is_simple_name = name.parent.type not in ('power', 'trailer') is_simple_name = name.parent.type not in ('power', 'trailer')
if is_simple_name: if is_simple_name:
return infer_expr_stmt(value, def_, name) return infer_expr_stmt(context, def_, name)
if type_ == 'for_stmt': if type_ == 'for_stmt':
container_types = value.infer_node(def_.children[3]) container_types = context.infer_node(def_.children[3])
cn = ValueualizedNode(value, def_.children[3]) cn = ValueualizedNode(context, def_.children[3])
for_types = iterate_values(container_types, cn) for_types = iterate_values(container_types, cn)
c_node = ValueualizedName(value, name) c_node = ValueualizedName(context, name)
return check_tuple_assignments(c_node, for_types) return check_tuple_assignments(c_node, for_types)
if type_ in ('import_from', 'import_name'): if type_ in ('import_from', 'import_name'):
return imports.infer_import(value, name) return imports.infer_import(context, name)
else: else:
result = self._follow_error_node_imports_if_possible(value, name) result = self._follow_error_node_imports_if_possible(context, name)
if result is not None: if result is not None:
return result return result
return helpers.infer_call_of_leaf(value, name) return helpers.infer_call_of_leaf(context, name)
def _follow_error_node_imports_if_possible(self, value, name): def _follow_error_node_imports_if_possible(self, context, name):
error_node = tree.search_ancestor(name, 'error_node') error_node = tree.search_ancestor(name, 'error_node')
if error_node is not None: if error_node is not None:
# Get the first command start of a started simple_stmt. The error # Get the first command start of a started simple_stmt. The error
@@ -292,10 +292,10 @@ class InferenceState(object):
is_import_from=is_import_from, is_import_from=is_import_from,
until_node=name, until_node=name,
) )
return imports.Importer(self, names, value.get_root_value(), level).follow() return imports.Importer(self, names, context.get_root_context(), level).follow()
return None return None
def goto(self, value, name): def goto(self, context, name):
definition = name.get_definition(import_name_always=True) definition = name.get_definition(import_name_always=True)
if definition is not None: if definition is not None:
type_ = definition.type type_ = definition.type
@@ -304,16 +304,16 @@ class InferenceState(object):
# a name it's something you can "goto" again. # a name it's something you can "goto" again.
is_simple_name = name.parent.type not in ('power', 'trailer') is_simple_name = name.parent.type not in ('power', 'trailer')
if is_simple_name: if is_simple_name:
return [TreeNameDefinition(value, name)] return [TreeNameDefinition(context, name)]
elif type_ == 'param': elif type_ == 'param':
return [ParamName(value, name)] return [ParamName(context, name)]
elif type_ in ('import_from', 'import_name'): elif type_ in ('import_from', 'import_name'):
module_names = imports.infer_import(value, name, is_goto=True) module_names = imports.infer_import(context, name, is_goto=True)
return module_names return module_names
else: else:
return [TreeNameDefinition(value, name)] return [TreeNameDefinition(context, name)]
else: else:
values = self._follow_error_node_imports_if_possible(value, name) values = self._follow_error_node_imports_if_possible(context, name)
if values is not None: if values is not None:
return [value.name for value in values] return [value.name for value in values]
@@ -326,15 +326,15 @@ class InferenceState(object):
trailer = trailer.parent trailer = trailer.parent
if trailer.type != 'classdef': if trailer.type != 'classdef':
if trailer.type == 'decorator': if trailer.type == 'decorator':
value_set = value.infer_node(trailer.children[1]) value_set = context.infer_node(trailer.children[1])
else: else:
i = trailer.parent.children.index(trailer) i = trailer.parent.children.index(trailer)
to_infer = trailer.parent.children[:i] to_infer = trailer.parent.children[:i]
if to_infer[0] == 'await': if to_infer[0] == 'await':
to_infer.pop(0) to_infer.pop(0)
value_set = value.infer_node(to_infer[0]) value_set = context.infer_node(to_infer[0])
for trailer in to_infer[1:]: for trailer in to_infer[1:]:
value_set = infer_trailer(value, value_set, trailer) value_set = infer_trailer(context, value_set, trailer)
param_names = [] param_names = []
for value in value_set: for value in value_set:
for signature in value.get_signatures(): for signature in value.get_signatures():
@@ -347,28 +347,28 @@ class InferenceState(object):
if index > 0: if index > 0:
new_dotted = helpers.deep_ast_copy(par) new_dotted = helpers.deep_ast_copy(par)
new_dotted.children[index - 1:] = [] new_dotted.children[index - 1:] = []
values = value.infer_node(new_dotted) values = context.infer_node(new_dotted)
return unite( return unite(
value.py__getattribute__(name, name_value=value, is_goto=True) value.py__getattribute__(name, name_value=value, is_goto=True)
for value in values for value in values
) )
if node_type == 'trailer' and par.children[0] == '.': if node_type == 'trailer' and par.children[0] == '.':
values = helpers.infer_call_of_leaf(value, name, cut_own_trailer=True) values = helpers.infer_call_of_leaf(context, name, cut_own_trailer=True)
return values.py__getattribute__(name, name_value=value, is_goto=True) return values.py__getattribute__(name, name_context=context, is_goto=True)
else: else:
stmt = tree.search_ancestor( stmt = tree.search_ancestor(
name, 'expr_stmt', 'lambdef' name, 'expr_stmt', 'lambdef'
) or name ) or name
if stmt.type == 'lambdef': if stmt.type == 'lambdef':
stmt = name stmt = name
return value.py__getattribute__( return context.py__getattribute__(
name, name,
position=stmt.start_pos, position=stmt.start_pos,
search_global=True, is_goto=True search_global=True, is_goto=True
) )
def create_value(self, base_value, node, node_is_value=False, node_is_object=False): def create_context(self, base_context, node, node_is_value=False, node_is_object=False):
def parent_scope(node): def parent_scope(node):
while True: while True:
node = node.parent node = node.parent
@@ -386,14 +386,14 @@ class InferenceState(object):
def from_scope_node(scope_node, is_nested=True, node_is_object=False): def from_scope_node(scope_node, is_nested=True, node_is_object=False):
if scope_node == base_node: if scope_node == base_node:
return base_value return base_context
is_funcdef = scope_node.type in ('funcdef', 'lambdef') is_funcdef = scope_node.type in ('funcdef', 'lambdef')
parent_scope = parser_utils.get_parent_scope(scope_node) parent_scope = parser_utils.get_parent_scope(scope_node)
parent_context = from_scope_node(parent_scope) parent_context = from_scope_node(parent_scope)
if is_funcdef: if is_funcdef:
func = FunctionValue.from_value(parent_context, scope_node) func = FunctionValue.from_context(parent_context, scope_node)
if parent_context.is_class(): if parent_context.is_class():
instance = AnonymousInstance( instance = AnonymousInstance(
self, parent_context.parent_context, parent_context) self, parent_context.parent_context, parent_context)
@@ -413,7 +413,7 @@ class InferenceState(object):
return CompForValue.from_comp_for(parent_context, scope_node) return CompForValue.from_comp_for(parent_context, scope_node)
raise Exception("There's a scope that was not managed.") raise Exception("There's a scope that was not managed.")
base_node = base_value.tree_node base_node = base_context.tree_node
if node_is_value and parser_utils.is_scope(node): if node_is_value and parser_utils.is_scope(node):
scope_node = node scope_node = node


@@ -77,17 +77,17 @@ class Warning(Error):
pass pass
def add(node_value, error_name, node, message=None, typ=Error, payload=None): def add(node_context, error_name, node, message=None, typ=Error, payload=None):
exception = CODES[error_name][1] exception = CODES[error_name][1]
if _check_for_exception_catch(node_value, node, exception, payload): if _check_for_exception_catch(node_context, node, exception, payload):
return return
# TODO this path is probably not right # TODO this path is probably not right
module_value = node_value.get_root_value() module_context = node_context.get_root_context()
module_path = module_value.py__file__() module_path = module_context.py__file__()
issue_instance = typ(error_name, module_path, node.start_pos, message) issue_instance = typ(error_name, module_path, node.start_pos, message)
debug.warning(str(issue_instance), format=False) debug.warning(str(issue_instance), format=False)
node_value.inference_state.analysis.append(issue_instance) node_context.inference_state.analysis.append(issue_instance)
return issue_instance return issue_instance
@@ -95,7 +95,7 @@ def _check_for_setattr(instance):
""" """
Check if there's any setattr method inside an instance. If so, return True. Check if there's any setattr method inside an instance. If so, return True.
""" """
module = instance.get_root_value() module = instance.get_root_context()
node = module.tree_node node = module.tree_node
if node is None: if node is None:
# If it's a compiled module or doesn't have a tree_node # If it's a compiled module or doesn't have a tree_node
@@ -112,7 +112,7 @@ def _check_for_setattr(instance):
for n in stmt_names) for n in stmt_names)
def add_attribute_error(name_value, lookup_value, name): def add_attribute_error(name_context, lookup_value, name):
message = ('AttributeError: %s has no attribute %s.' % (lookup_value, name)) message = ('AttributeError: %s has no attribute %s.' % (lookup_value, name))
from jedi.inference.value.instance import CompiledInstanceName from jedi.inference.value.instance import CompiledInstanceName
# Check for __getattr__/__getattribute__ existance and issue a warning # Check for __getattr__/__getattribute__ existance and issue a warning
@@ -132,10 +132,10 @@ def add_attribute_error(name_value, lookup_value, name):
typ = Warning typ = Warning
payload = lookup_value, name payload = lookup_value, name
add(name_value, 'attribute-error', name, message, typ, payload) add(name_context, 'attribute-error', name, message, typ, payload)
def _check_for_exception_catch(node_value, jedi_name, exception, payload=None): def _check_for_exception_catch(node_context, jedi_name, exception, payload=None):
""" """
Checks if a jedi object (e.g. `Statement`) sits inside a try/catch and Checks if a jedi object (e.g. `Statement`) sits inside a try/catch and
doesn't count as an error (if equal to `exception`). doesn't count as an error (if equal to `exception`).
@@ -167,7 +167,7 @@ def _check_for_exception_catch(node_value, jedi_name, exception, payload=None):
if node is None: if node is None:
return True # An exception block that catches everything. return True # An exception block that catches everything.
else: else:
except_classes = node_value.infer_node(node) except_classes = node_context.infer_node(node)
for cls in except_classes: for cls in except_classes:
from jedi.inference.value import iterable from jedi.inference.value import iterable
if isinstance(cls, iterable.Sequence) and \ if isinstance(cls, iterable.Sequence) and \
@@ -192,18 +192,19 @@ def _check_for_exception_catch(node_value, jedi_name, exception, payload=None):
arglist = trailer.children[1] arglist = trailer.children[1]
assert arglist.type == 'arglist' assert arglist.type == 'arglist'
from jedi.inference.arguments import TreeArguments from jedi.inference.arguments import TreeArguments
args = list(TreeArguments(node_value.inference_state, node_value, arglist).unpack()) args = TreeArguments(node_context.inference_state, node_context, arglist)
unpacked_args = list(args.unpack())
# Arguments should be very simple # Arguments should be very simple
assert len(args) == 2 assert len(unpacked_args) == 2
# Check name # Check name
key, lazy_value = args[1] key, lazy_value = unpacked_args[1]
names = list(lazy_value.infer()) names = list(lazy_value.infer())
assert len(names) == 1 and is_string(names[0]) assert len(names) == 1 and is_string(names[0])
assert force_unicode(names[0].get_safe_value()) == payload[1].value assert force_unicode(names[0].get_safe_value()) == payload[1].value
# Check objects # Check objects
key, lazy_value = args[0] key, lazy_value = unpacked_args[0]
objects = lazy_value.infer() objects = lazy_value.infer()
return payload[0] in objects return payload[0] in objects
except AssertionError: except AssertionError:


@@ -50,7 +50,7 @@ def repack_with_argument_clinic(string, keep_arguments_param=False, keep_callbac
clinic_args = list(_parse_argument_clinic(string)) clinic_args = list(_parse_argument_clinic(string))
def decorator(func): def decorator(func):
def wrapper(value, *args, **kwargs): def wrapper(context, *args, **kwargs):
if keep_arguments_param: if keep_arguments_param:
arguments = kwargs['arguments'] arguments = kwargs['arguments']
else: else:
@@ -59,14 +59,14 @@ def repack_with_argument_clinic(string, keep_arguments_param=False, keep_callbac
kwargs.pop('callback', None) kwargs.pop('callback', None)
try: try:
args += tuple(_iterate_argument_clinic( args += tuple(_iterate_argument_clinic(
value.inference_state, context.inference_state,
arguments, arguments,
clinic_args clinic_args
)) ))
except ParamIssue: except ParamIssue:
return NO_VALUES return NO_VALUES
else: else:
return func(value, *args, **kwargs) return func(context, *args, **kwargs)
return wrapper return wrapper
return decorator return decorator
@@ -152,7 +152,7 @@ class _AbstractArgumentsMixin(object):
class AbstractArguments(_AbstractArgumentsMixin): class AbstractArguments(_AbstractArgumentsMixin):
value = None context = None
argument_node = None argument_node = None
trailer = None trailer = None
@@ -198,12 +198,12 @@ def unpack_arglist(arglist):
class TreeArguments(AbstractArguments): class TreeArguments(AbstractArguments):
def __init__(self, inference_state, value, argument_node, trailer=None): def __init__(self, inference_state, context, argument_node, trailer=None):
""" """
:param argument_node: May be an argument_node or a list of nodes. :param argument_node: May be an argument_node or a list of nodes.
""" """
self.argument_node = argument_node self.argument_node = argument_node
self.value = value self.context = context
self._inference_state = inference_state self._inference_state = inference_state
self.trailer = trailer # Can be None, e.g. in a class definition. self.trailer = trailer # Can be None, e.g. in a class definition.
@@ -216,8 +216,8 @@ class TreeArguments(AbstractArguments):
named_args = [] named_args = []
for star_count, el in unpack_arglist(self.argument_node): for star_count, el in unpack_arglist(self.argument_node):
if star_count == 1: if star_count == 1:
arrays = self.value.infer_node(el) arrays = self.context.infer_node(el)
iterators = [_iterate_star_args(self.value, a, el, funcdef) iterators = [_iterate_star_args(self.context, a, el, funcdef)
for a in arrays] for a in arrays]
for values in list(zip_longest(*iterators)): for values in list(zip_longest(*iterators)):
# TODO zip_longest yields None, that means this would raise # TODO zip_longest yields None, that means this would raise
@@ -226,15 +226,15 @@ class TreeArguments(AbstractArguments):
[v for v in values if v is not None] [v for v in values if v is not None]
) )
elif star_count == 2: elif star_count == 2:
arrays = self.value.infer_node(el) arrays = self.context.infer_node(el)
for dct in arrays: for dct in arrays:
for key, values in _star_star_dict(self.value, dct, el, funcdef): for key, values in _star_star_dict(self.context, dct, el, funcdef):
yield key, values yield key, values
else: else:
if el.type == 'argument': if el.type == 'argument':
c = el.children c = el.children
if len(c) == 3: # Keyword argument. if len(c) == 3: # Keyword argument.
named_args.append((c[0].value, LazyTreeValue(self.value, c[2]),)) named_args.append((c[0].value, LazyTreeValue(self.context, c[2]),))
else: # Generator comprehension. else: # Generator comprehension.
# Include the brackets with the parent. # Include the brackets with the parent.
sync_comp_for = el.children[1] sync_comp_for = el.children[1]
@@ -242,13 +242,13 @@ class TreeArguments(AbstractArguments):
sync_comp_for = sync_comp_for.children[1] sync_comp_for = sync_comp_for.children[1]
comp = iterable.GeneratorComprehension( comp = iterable.GeneratorComprehension(
self._inference_state, self._inference_state,
defining_value=self.value, defining_context=self.context,
sync_comp_for_node=sync_comp_for, sync_comp_for_node=sync_comp_for,
entry_node=el.children[0], entry_node=el.children[0],
) )
yield None, LazyKnownValue(comp) yield None, LazyKnownValue(comp)
else: else:
yield None, LazyTreeValue(self.value, el) yield None, LazyTreeValue(self.context, el)
# Reordering arguments is necessary, because star args sometimes appear # Reordering arguments is necessary, because star args sometimes appear
# after named argument, but in the actual order it's prepended. # after named argument, but in the actual order it's prepended.
@@ -269,7 +269,7 @@ class TreeArguments(AbstractArguments):
if not star_count or not isinstance(name, tree.Name): if not star_count or not isinstance(name, tree.Name):
continue continue
yield TreeNameDefinition(self.value, name) yield TreeNameDefinition(self.context, name)
def __repr__(self): def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.argument_node) return '<%s: %s>' % (self.__class__.__name__, self.argument_node)
@@ -302,9 +302,9 @@ class TreeArguments(AbstractArguments):
break break
if arguments.argument_node is not None: if arguments.argument_node is not None:
return [ValueualizedNode(arguments.value, arguments.argument_node)] return [ValueualizedNode(arguments.context, arguments.argument_node)]
if arguments.trailer is not None: if arguments.trailer is not None:
return [ValueualizedNode(arguments.value, arguments.trailer)] return [ValueualizedNode(arguments.context, arguments.trailer)]
return [] return []
@@ -325,8 +325,8 @@ class TreeArgumentsWrapper(_AbstractArgumentsMixin):
self._wrapped_arguments = arguments self._wrapped_arguments = arguments
@property @property
def value(self): def context(self):
return self._wrapped_arguments.value return self._wrapped_arguments.context
@property @property
def argument_node(self): def argument_node(self):
@@ -346,13 +346,13 @@ class TreeArgumentsWrapper(_AbstractArgumentsMixin):
return '<%s: %s>' % (self.__class__.__name__, self._wrapped_arguments) return '<%s: %s>' % (self.__class__.__name__, self._wrapped_arguments)
def _iterate_star_args(value, array, input_node, funcdef=None): def _iterate_star_args(context, array, input_node, funcdef=None):
if not array.py__getattribute__('__iter__'): if not array.py__getattribute__('__iter__'):
if funcdef is not None: if funcdef is not None:
# TODO this funcdef should not be needed. # TODO this funcdef should not be needed.
m = "TypeError: %s() argument after * must be a sequence, not %s" \ m = "TypeError: %s() argument after * must be a sequence, not %s" \
% (funcdef.name.value, array) % (funcdef.name.value, array)
analysis.add(value, 'type-error-star', input_node, message=m) analysis.add(context, 'type-error-star', input_node, message=m)
try: try:
iter_ = array.py__iter__ iter_ = array.py__iter__
except AttributeError: except AttributeError:
@@ -362,7 +362,7 @@ def _iterate_star_args(value, array, input_node, funcdef=None):
yield lazy_value yield lazy_value
def _star_star_dict(value, array, input_node, funcdef): def _star_star_dict(context, array, input_node, funcdef):
from jedi.inference.value.instance import CompiledInstance from jedi.inference.value.instance import CompiledInstance
if isinstance(array, CompiledInstance) and array.name.string_name == 'dict': if isinstance(array, CompiledInstance) and array.name.string_name == 'dict':
# For now ignore this case. In the future add proper iterators and just # For now ignore this case. In the future add proper iterators and just
@@ -374,5 +374,5 @@ def _star_star_dict(value, array, input_node, funcdef):
if funcdef is not None: if funcdef is not None:
m = "TypeError: %s argument after ** must be a mapping, not %s" \ m = "TypeError: %s argument after ** must be a mapping, not %s" \
% (funcdef.name.value, array) % (funcdef.name.value, array)
analysis.add(value, 'type-error-star-star', input_node, message=m) analysis.add(context, 'type-error-star-star', input_node, message=m)
return {} return {}


@@ -23,7 +23,7 @@ _sentinel = object()
class HelperValueMixin(object): class HelperValueMixin(object):
def get_root_value(self): def get_root_context(self):
value = self value = self
while True: while True:
if value.parent_context is None: if value.parent_context is None:
@@ -83,9 +83,6 @@ class HelperValueMixin(object):
def infer_node(self, node): def infer_node(self, node):
return self.inference_state.infer_element(self, node) return self.inference_state.infer_element(self, node)
def create_value(self, node, node_is_value=False, node_is_object=False):
return self.inference_state.create_value(self, node, node_is_value, node_is_object)
def iterate(self, valueualized_node=None, is_async=False): def iterate(self, valueualized_node=None, is_async=False):
debug.dbg('iterate %s', self) debug.dbg('iterate %s', self)
if is_async: if is_async:
@@ -281,18 +278,18 @@ class TreeValue(Value):
class ValueualizedNode(object): class ValueualizedNode(object):
def __init__(self, value, node): def __init__(self, context, node):
self.value = value self.context = context
self.node = node self.node = node
def get_root_value(self): def get_root_context(self):
return self.value.get_root_value() return self.context.get_root_context()
def infer(self): def infer(self):
return self.value.infer_node(self.node) return self.context.infer_node(self.node)
def __repr__(self): def __repr__(self):
return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.value) return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.context)
class ValueualizedName(ValueualizedNode): class ValueualizedName(ValueualizedNode):


@@ -254,27 +254,27 @@ def _create(inference_state, access_handle, parent_context, *args):
if parent_context is None: if parent_context is None:
# TODO this __name__ is probably wrong. # TODO this __name__ is probably wrong.
name = compiled_object.get_root_value().py__name__() name = compiled_object.get_root_context().py__name__()
string_names = tuple(name.split('.')) string_names = tuple(name.split('.'))
module_value = ModuleValue( module_context = ModuleValue(
inference_state, module_node, inference_state, module_node,
file_io=file_io, file_io=file_io,
string_names=string_names, string_names=string_names,
code_lines=code_lines, code_lines=code_lines,
is_package=hasattr(compiled_object, 'py__path__'), is_package=hasattr(compiled_object, 'py__path__'),
) ).as_context()
if name is not None: if name is not None:
inference_state.module_cache.add(string_names, ValueSet([module_value])) inference_state.module_cache.add(string_names, ValueSet([module_context]))
else: else:
if parent_context.tree_node.get_root_node() != module_node: if parent_context.tree_node.get_root_node() != module_node:
# This happens e.g. when __module__ is wrong, or when using # This happens e.g. when __module__ is wrong, or when using
# TypeVar('foo'), where Jedi uses 'foo' as the name and # TypeVar('foo'), where Jedi uses 'foo' as the name and
# Python's TypeVar('foo').__module__ will be typing. # Python's TypeVar('foo').__module__ will be typing.
return ValueSet({compiled_object}) return ValueSet({compiled_object})
module_value = parent_context.get_root_value() module_context = parent_context.get_root_context()
tree_values = ValueSet({ tree_values = ValueSet({
module_value.create_value( module_context.create_context(
tree_node, tree_node,
node_is_value=True, node_is_value=True,
node_is_object=True node_is_object=True

jedi/inference/context.py (new file, 58 lines)

@@ -0,0 +1,58 @@
from abc import abstractmethod
from jedi.inference.filters import ParserTreeFilter
class AbstractContext(object):
"""
Should be defined, otherwise the API returns empty types.
"""
predefined_names = {}
def __init__(self, value):
self.inference_state = value.inference_state
self._value = value
@abstractmethod
def get_filters(self, until_position=None, origin_scope=None):
raise NotImplementedError
def get_root_context(self):
return self._value.get_root_context()
def create_context(self, node, node_is_value=False, node_is_object=False):
return self.inference_state.create_context(self, node, node_is_value, node_is_object)
@property
def py__getattribute__(self):
return self._value.py__getattribute__
@property
def tree_name(self):
return self._value.tree_node
def infer_node(self, node):
raise NotImplementedError
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self._value)
class FunctionContext(AbstractContext):
@abstractmethod
def get_filters(self, until_position=None, origin_scope=None):
yield ParserTreeFilter(
self.inference_state,
context=self,
until_position=until_position,
origin_scope=origin_scope
)
class ModuleContext(AbstractContext):
def py__file__(self):
return self._value.py__file__()
@property
def py__package__(self):
return self._value.py__package__
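
A rough, self-contained illustration of the wrapper pattern this new file introduces: the context keeps name-resolution state (predefined_names) and forwards value-level behaviour (py__getattribute__, the tree node) to the object it wraps. The classes below are made-up stand-ins for illustration, not Jedi code:

# Illustrative stand-ins that echo the delegation style of AbstractContext:
# the context owns lookup state and forwards the rest to its wrapped value.
class FakeValue:
    def __init__(self, name):
        self.name = name

    def py__getattribute__(self, attr_name):
        return '%s.%s' % (self.name, attr_name)

    def as_context(self):
        return FakeContext(self)


class FakeContext:
    predefined_names = {}                 # name-resolution state lives on the context

    def __init__(self, value):
        self._value = value

    @property
    def py__getattribute__(self):         # forwarded to the wrapped value
        return self._value.py__getattribute__

    def get_root_context(self):           # a module-level context is its own root
        return self

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._value.name)


ctx = FakeValue('mymodule').as_context()
print(ctx.py__getattribute__('path'))     # mymodule.path
print(ctx.get_root_context())             # FakeContext(mymodule)

This matches the direction of the hunks above, which move predefined_names access and create_context() from values onto contexts while values keep the inference behaviour.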


@@ -89,10 +89,10 @@ def search_params(inference_state, execution_value, funcdef):
debug.dbg('Dynamic param search in %s.', string_name, color='MAGENTA') debug.dbg('Dynamic param search in %s.', string_name, color='MAGENTA')
try: try:
module_value = execution_value.get_root_value() module_context = execution_value.get_root_value()
function_executions = _search_function_executions( function_executions = _search_function_executions(
inference_state, inference_state,
module_value, module_context,
funcdef, funcdef,
string_name=string_name, string_name=string_name,
) )
@@ -115,7 +115,7 @@ def search_params(inference_state, execution_value, funcdef):
@inference_state_function_cache(default=None) @inference_state_function_cache(default=None)
@to_list @to_list
def _search_function_executions(inference_state, module_value, funcdef, string_name): def _search_function_executions(inference_state, module_context, funcdef, string_name):
""" """
Returns a list of param names. Returns a list of param names.
""" """
@@ -129,8 +129,8 @@ def _search_function_executions(inference_state, module_value, funcdef, string_n
found_executions = False found_executions = False
i = 0 i = 0
for for_mod_value in imports.get_modules_containing_name( for for_mod_value in imports.get_modules_containing_name(
inference_state, [module_value], string_name): inference_state, [module_context], string_name):
if not isinstance(module_value, ModuleValue): if not isinstance(module_context, ModuleValue):
return return
for name, trailer in _get_possible_nodes(for_mod_value, string_name): for name, trailer in _get_possible_nodes(for_mod_value, string_name):
i += 1 i += 1
@@ -141,7 +141,8 @@ def _search_function_executions(inference_state, module_value, funcdef, string_n
if i * inference_state.dynamic_params_depth > MAX_PARAM_SEARCHES: if i * inference_state.dynamic_params_depth > MAX_PARAM_SEARCHES:
return return
random_value = inference_state.create_value(for_mod_value, name) raise NotImplementedError
random_value = inference_state.create_context(for_mod_value, name)
for function_execution in _check_name_for_execution( for function_execution in _check_name_for_execution(
inference_state, random_value, compare_node, name, trailer): inference_state, random_value, compare_node, name, trailer):
found_executions = True found_executions = True
@@ -219,7 +220,8 @@ def _check_name_for_execution(inference_state, value, compare_node, name, traile
execution_value = next(create_func_excs()) execution_value = next(create_func_excs())
for name, trailer in _get_possible_nodes(module_value, params[0].string_name): for name, trailer in _get_possible_nodes(module_value, params[0].string_name):
if value_node.start_pos < name.start_pos < value_node.end_pos: if value_node.start_pos < name.start_pos < value_node.end_pos:
random_value = inference_state.create_value(execution_value, name) raise NotImplementedError
random_value = inference_state.create_context(execution_value, name)
iterator = _check_name_for_execution( iterator = _check_name_for_execution(
inference_state, inference_state,
random_value, random_value,


@@ -159,7 +159,7 @@ class FunctionExecutionFilter(ParserTreeFilter):
if param: if param:
yield self.param_name(self.value, name) yield self.param_name(self.value, name)
else: else:
yield TreeNameDefinition(self.value, name) yield TreeNameDefinition(self.context, name)
class GlobalNameFilter(AbstractUsedNamesFilter): class GlobalNameFilter(AbstractUsedNamesFilter):
@@ -362,7 +362,7 @@ def get_global_filters(inference_state, value, until_position, origin_scope):
>>> scope = next(module_node.iter_funcdefs()) >>> scope = next(module_node.iter_funcdefs())
>>> scope >>> scope
<Function: func@3-5> <Function: func@3-5>
>>> value = script._get_module().create_value(scope) >>> value = script._get_module_context().create_context(scope)
>>> filters = list(get_global_filters(value.inference_state, value, (4, 0), None)) >>> filters = list(get_global_filters(value.inference_state, value, (4, 0), None))
First we get the names from the function scope. First we get the names from the function scope.


@@ -90,7 +90,7 @@ def _load_stub_module(module):
module.inference_state, module.inference_state,
import_names=module.string_names, import_names=module.string_names,
python_value_set=ValueSet([module]), python_value_set=ValueSet([module]),
parent_module_value=None, parent_module_context=None,
sys_path=module.inference_state.get_sys_path(), sys_path=module.inference_state.get_sys_path(),
) )


@@ -89,27 +89,27 @@ def _cache_stub_file_map(version_info):
def import_module_decorator(func): def import_module_decorator(func):
@wraps(func) @wraps(func)
def wrapper(inference_state, import_names, parent_module_value, sys_path, prefer_stubs): def wrapper(inference_state, import_names, parent_module_context, sys_path, prefer_stubs):
try: try:
python_value_set = inference_state.module_cache.get(import_names) python_value_set = inference_state.module_cache.get(import_names)
except KeyError: except KeyError:
if parent_module_value is not None and parent_module_value.is_stub(): if parent_module_context is not None and parent_module_context.is_stub():
parent_module_values = parent_module_value.non_stub_value_set parent_module_contexts = parent_module_context.non_stub_value_set
else: else:
parent_module_values = [parent_module_value] parent_module_contexts = [parent_module_context]
if import_names == ('os', 'path'): if import_names == ('os', 'path'):
# This is a huge exception, we follow a nested import # This is a huge exception, we follow a nested import
# ``os.path``, because it's a very important one in Python # ``os.path``, because it's a very important one in Python
# that is being achieved by messing with ``sys.modules`` in # that is being achieved by messing with ``sys.modules`` in
# ``os``. # ``os``.
python_parent = next(iter(parent_module_values)) python_parent = next(iter(parent_module_contexts))
if python_parent is None: if python_parent is None:
python_parent, = inference_state.import_module(('os',), prefer_stubs=False) python_parent, = inference_state.import_module(('os',), prefer_stubs=False)
python_value_set = python_parent.py__getattribute__('path') python_value_set = python_parent.py__getattribute__('path')
else: else:
python_value_set = ValueSet.from_sets( python_value_set = ValueSet.from_sets(
func(inference_state, import_names, p, sys_path,) func(inference_state, import_names, p, sys_path,)
for p in parent_module_values for p in parent_module_contexts
) )
inference_state.module_cache.add(import_names, python_value_set) inference_state.module_cache.add(import_names, python_value_set)
@@ -117,7 +117,7 @@ def import_module_decorator(func):
return python_value_set return python_value_set
stub = _try_to_load_stub_cached(inference_state, import_names, python_value_set, stub = _try_to_load_stub_cached(inference_state, import_names, python_value_set,
parent_module_value, sys_path) parent_module_context, sys_path)
if stub is not None: if stub is not None:
return ValueSet([stub]) return ValueSet([stub])
return python_value_set return python_value_set
@@ -140,18 +140,18 @@ def _try_to_load_stub_cached(inference_state, import_names, *args, **kwargs):
def _try_to_load_stub(inference_state, import_names, python_value_set, def _try_to_load_stub(inference_state, import_names, python_value_set,
parent_module_value, sys_path): parent_module_context, sys_path):
""" """
Trying to load a stub for a set of import_names. Trying to load a stub for a set of import_names.
This is modelled to work like "PEP 561 -- Distributing and Packaging Type This is modelled to work like "PEP 561 -- Distributing and Packaging Type
Information", see https://www.python.org/dev/peps/pep-0561. Information", see https://www.python.org/dev/peps/pep-0561.
""" """
if parent_module_value is None and len(import_names) > 1: if parent_module_context is None and len(import_names) > 1:
try: try:
parent_module_value = _try_to_load_stub_cached( parent_module_context = _try_to_load_stub_cached(
inference_state, import_names[:-1], NO_VALUES, inference_state, import_names[:-1], NO_VALUES,
parent_module_value=None, sys_path=sys_path) parent_module_context=None, sys_path=sys_path)
except KeyError: except KeyError:
pass pass
@@ -195,15 +195,15 @@ def _try_to_load_stub(inference_state, import_names, python_value_set,
return m return m
# 3. Try to load typeshed # 3. Try to load typeshed
m = _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names) m = _load_from_typeshed(inference_state, python_value_set, parent_module_context, import_names)
if m is not None: if m is not None:
return m return m
# 4. Try to load pyi file somewhere if python_value_set was not defined. # 4. Try to load pyi file somewhere if python_value_set was not defined.
if not python_value_set: if not python_value_set:
if parent_module_value is not None: if parent_module_context is not None:
try: try:
method = parent_module_value.py__path__ method = parent_module_context.py__path__
except AttributeError: except AttributeError:
check_path = [] check_path = []
else: else:
@@ -229,18 +229,18 @@ def _try_to_load_stub(inference_state, import_names, python_value_set,
return None return None
def _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names): def _load_from_typeshed(inference_state, python_value_set, parent_module_context, import_names):
import_name = import_names[-1] import_name = import_names[-1]
map_ = None map_ = None
if len(import_names) == 1: if len(import_names) == 1:
map_ = _cache_stub_file_map(inference_state.grammar.version_info) map_ = _cache_stub_file_map(inference_state.grammar.version_info)
import_name = _IMPORT_MAP.get(import_name, import_name) import_name = _IMPORT_MAP.get(import_name, import_name)
elif isinstance(parent_module_value, StubModuleValue): elif isinstance(parent_module_context, StubModuleValue):
if not parent_module_value.is_package: if not parent_module_context.is_package:
# Only if it's a package (= a folder) something can be # Only if it's a package (= a folder) something can be
# imported. # imported.
return None return None
path = parent_module_value.py__path__() path = parent_module_context.py__path__()
map_ = _merge_create_stub_map(path) map_ = _merge_create_stub_map(path)
if map_ is not None: if map_ is not None:


@@ -57,12 +57,12 @@ class ModuleCache(object):
# This memoization is needed, because otherwise we will infinitely loop on # This memoization is needed, because otherwise we will infinitely loop on
# certain imports. # certain imports.
@inference_state_method_cache(default=NO_VALUES) @inference_state_method_cache(default=NO_VALUES)
def infer_import(value, tree_name, is_goto=False): def infer_import(context, tree_name, is_goto=False):
module_value = value.get_root_value() module_context = context.get_root_context()
import_node = search_ancestor(tree_name, 'import_name', 'import_from') import_node = search_ancestor(tree_name, 'import_name', 'import_from')
import_path = import_node.get_path_for_name(tree_name) import_path = import_node.get_path_for_name(tree_name)
from_import_name = None from_import_name = None
inference_state = value.inference_state inference_state = context.inference_state
try: try:
from_names = import_node.get_from_names() from_names = import_node.get_from_names()
except AttributeError: except AttributeError:
@@ -76,7 +76,7 @@ def infer_import(value, tree_name, is_goto=False):
import_path = from_names import_path = from_names
importer = Importer(inference_state, tuple(import_path), importer = Importer(inference_state, tuple(import_path),
module_value, import_node.level) module_context, import_node.level)
types = importer.follow() types = importer.follow()
@@ -90,7 +90,7 @@ def infer_import(value, tree_name, is_goto=False):
types = unite( types = unite(
t.py__getattribute__( t.py__getattribute__(
from_import_name, from_import_name,
name_value=value, name_context=context,
is_goto=is_goto, is_goto=is_goto,
analysis_errors=False analysis_errors=False
) )
@@ -102,7 +102,7 @@ def infer_import(value, tree_name, is_goto=False):
if not types: if not types:
path = import_path + [from_import_name] path = import_path + [from_import_name]
importer = Importer(inference_state, tuple(path), importer = Importer(inference_state, tuple(path),
module_value, import_node.level) module_context, import_node.level)
types = importer.follow() types = importer.follow()
# goto only accepts `Name` # goto only accepts `Name`
if is_goto: if is_goto:
@@ -183,7 +183,7 @@ def _level_to_base_import_path(project_path, directory, level):
class Importer(object): class Importer(object):
def __init__(self, inference_state, import_path, module_value, level=0): def __init__(self, inference_state, import_path, module_context, level=0):
""" """
An implementation similar to ``__import__``. Use `follow` An implementation similar to ``__import__``. Use `follow`
to actually follow the imports. to actually follow the imports.
@@ -196,15 +196,15 @@ class Importer(object):
:param import_path: List of namespaces (strings or Names). :param import_path: List of namespaces (strings or Names).
""" """
debug.speed('import %s %s' % (import_path, module_value)) debug.speed('import %s %s' % (import_path, module_context))
self._inference_state = inference_state self._inference_state = inference_state
self.level = level self.level = level
self.module_value = module_value self.module_context = module_context
self._fixed_sys_path = None self._fixed_sys_path = None
self._infer_possible = True self._infer_possible = True
if level: if level:
base = module_value.py__package__() base = module_context.py__package__()
# We need to care for two cases. The first one is a valid # We need to care for two cases. The first one is a valid
# Python import. This import has a properly defined module name # Python import. This import has a properly defined module name
# chain like `foo.bar.baz` and an import in baz is made for # chain like `foo.bar.baz` and an import in baz is made for
@@ -221,7 +221,7 @@ class Importer(object):
base = base[:-level + 1] base = base[:-level + 1]
import_path = base + tuple(import_path) import_path = base + tuple(import_path)
else: else:
path = module_value.py__file__() path = module_context.py__file__()
import_path = list(import_path) import_path = list(import_path)
if path is None: if path is None:
# If no path is defined, our best guess is that the current # If no path is defined, our best guess is that the current
@@ -245,7 +245,7 @@ class Importer(object):
if base_import_path is None: if base_import_path is None:
if import_path: if import_path:
_add_error( _add_error(
module_value, import_path[0], module_context, import_path[0],
message='Attempted relative import beyond top-level package.' message='Attempted relative import beyond top-level package.'
) )
else: else:
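
A rough sketch (plain Python, not jedi's API) of the level handling above, assuming `py__package__()` returns the package as a tuple of name parts: level 1 means the current package, and each additional level strips one part before the explicit names are appended, which is what `base[:-level + 1]` does.

    def resolve_relative(package_parts, import_parts, level):
        # e.g. inside foo.bar.baz, the package is ('foo', 'bar'):
        #   level 1 -> ('foo', 'bar') + import_parts
        #   level 2 -> ('foo',)       + import_parts
        if level > 1:
            package_parts = package_parts[:-level + 1]
        return tuple(package_parts) + tuple(import_parts)

    print(resolve_relative(('foo', 'bar'), ('mod',), 2))  # ('foo', 'mod')
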
@@ -266,11 +266,11 @@ class Importer(object):
sys_path_mod = ( sys_path_mod = (
self._inference_state.get_sys_path() self._inference_state.get_sys_path()
+ sys_path.check_sys_path_modifications(self.module_value) + sys_path.check_sys_path_modifications(self.module_context)
) )
if self._inference_state.environment.version_info.major == 2: if self._inference_state.environment.version_info.major == 2:
file_path = self.module_value.py__file__() file_path = self.module_context.py__file__()
if file_path is not None: if file_path is not None:
# Python 2 uses an old, strange way of resolving relative imports. # Python 2 uses an old, strange way of resolving relative imports.
sys_path_mod.append(force_unicode(os.path.dirname(file_path))) sys_path_mod.append(force_unicode(os.path.dirname(file_path)))
@@ -292,13 +292,13 @@ class Importer(object):
value_set = ValueSet.from_sets([ value_set = ValueSet.from_sets([
self._inference_state.import_module( self._inference_state.import_module(
import_names[:i+1], import_names[:i+1],
parent_module_value, parent_module_context,
sys_path sys_path
) for parent_module_value in value_set ) for parent_module_context in value_set
]) ])
if not value_set: if not value_set:
message = 'No module named ' + '.'.join(import_names) message = 'No module named ' + '.'.join(import_names)
_add_error(self.module_value, name, message) _add_error(self.module_context, name, message)
return NO_VALUES return NO_VALUES
return value_set return value_set
@@ -310,7 +310,7 @@ class Importer(object):
names = [] names = []
# add builtin module names # add builtin module names
if search_path is None and in_module is None: if search_path is None and in_module is None:
names += [ImportName(self.module_value, name) names += [ImportName(self.module_context, name)
for name in self._inference_state.compiled_subprocess.get_builtin_module_names()] for name in self._inference_state.compiled_subprocess.get_builtin_module_names()]
if search_path is None: if search_path is None:
@@ -318,7 +318,7 @@ class Importer(object):
for name in iter_module_names(self._inference_state, search_path): for name in iter_module_names(self._inference_state, search_path):
if in_module is None: if in_module is None:
n = ImportName(self.module_value, name) n = ImportName(self.module_context, name)
else: else:
n = SubModuleName(in_module, name) n = SubModuleName(in_module, name)
names.append(n) names.append(n)
@@ -341,7 +341,7 @@ class Importer(object):
modname = mod.string_name modname = mod.string_name
if modname.startswith('flask_'): if modname.startswith('flask_'):
extname = modname[len('flask_'):] extname = modname[len('flask_'):]
names.append(ImportName(self.module_value, extname)) names.append(ImportName(self.module_context, extname))
# Now the old style: ``flaskext.foo`` # Now the old style: ``flaskext.foo``
for dir in self._sys_path_with_modifications(): for dir in self._sys_path_with_modifications():
flaskext = os.path.join(dir, 'flaskext') flaskext = os.path.join(dir, 'flaskext')
@@ -374,7 +374,7 @@ class Importer(object):
@plugin_manager.decorate() @plugin_manager.decorate()
@import_module_decorator @import_module_decorator
def import_module(inference_state, import_names, parent_module_value, sys_path): def import_module(inference_state, import_names, parent_module_context, sys_path):
""" """
This method is very similar to importlib's `_gcd_import`. This method is very similar to importlib's `_gcd_import`.
""" """
@@ -385,7 +385,7 @@ def import_module(inference_state, import_names, parent_module_value, sys_path):
return ValueSet([module]) return ValueSet([module])
module_name = '.'.join(import_names) module_name = '.'.join(import_names)
if parent_module_value is None: if parent_module_context is None:
# Override the sys.path. It only works well that way. # Override the sys.path. It only works well that way.
# Injecting the path directly into `find_module` did not work. # Injecting the path directly into `find_module` did not work.
file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info( file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info(
@@ -398,7 +398,7 @@ def import_module(inference_state, import_names, parent_module_value, sys_path):
return NO_VALUES return NO_VALUES
else: else:
try: try:
method = parent_module_value.py__path__ method = parent_module_context.py__path__
except AttributeError: except AttributeError:
# The module is not a package. # The module is not a package.
return NO_VALUES return NO_VALUES
@@ -438,7 +438,7 @@ def import_module(inference_state, import_names, parent_module_value, sys_path):
is_package=is_pkg, is_package=is_pkg,
) )
if parent_module_value is None: if parent_module_context is None:
debug.dbg('global search_module %s: %s', import_names[-1], module) debug.dbg('global search_module %s: %s', import_names[-1], module)
else: else:
debug.dbg('search_module %s in paths %s: %s', module_name, paths, module) debug.dbg('search_module %s in paths %s: %s', module_name, paths, module)
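
The parent-first resolution that `import_module` mirrors can be seen with plain importlib (standard library, not jedi's internals): a dotted name is resolved one segment at a time, and submodules are looked up on the parent package's `__path__`.

    import importlib.util

    top = importlib.util.find_spec('json')          # searched on sys.path
    sub = importlib.util.find_spec('json.decoder')  # searched on json.__path__
    print(top.origin)
    print(sub.origin)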

View File

@@ -14,7 +14,7 @@ class AbstractLazyValue(object):
class LazyKnownValue(AbstractLazyValue): class LazyKnownValue(AbstractLazyValue):
"""data is a value.""" """data is a Value."""
def infer(self): def infer(self):
return ValueSet([self.data]) return ValueSet([self.data])
@@ -34,16 +34,16 @@ class LazyUnknownValue(AbstractLazyValue):
class LazyTreeValue(AbstractLazyValue): class LazyTreeValue(AbstractLazyValue):
def __init__(self, value, node): def __init__(self, context, node):
super(LazyTreeValue, self).__init__(node) super(LazyTreeValue, self).__init__(node)
self.value = value self.context = context
# We need to save the predefined names. It's an unfortunate side effect # We need to save the predefined names. It's an unfortunate side effect
# that needs to be tracked, otherwise results will be wrong. # that needs to be tracked, otherwise results will be wrong.
self._predefined_names = dict(value.predefined_names) self._predefined_names = dict(context.predefined_names)
def infer(self): def infer(self):
with monkeypatch(self.value, 'predefined_names', self._predefined_names): with monkeypatch(self.context, 'predefined_names', self._predefined_names):
return self.value.infer_node(self.data) return self.context.infer_node(self.data)
def get_merged_lazy_value(lazy_values): def get_merged_lazy_value(lazy_values):
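
Why `LazyTreeValue` copies `predefined_names` at construction time: deferred inference must snapshot mutable state, or later mutations leak into the lazy result. A minimal sketch in plain Python (not jedi's classes):

    class Lazy(object):
        def __init__(self, env, key):
            self._env_snapshot = dict(env)  # copy now, not at infer() time
            self._key = key

        def infer(self):
            return self._env_snapshot.get(self._key)

    env = {'x': 1}
    lazy = Lazy(env, 'x')
    env['x'] = 2          # a later mutation...
    print(lazy.infer())   # ...does not affect the snapshot: prints 1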

View File

@@ -32,7 +32,7 @@ class AbstractNameDefinition(object):
if qualified_names is None or not include_module_names: if qualified_names is None or not include_module_names:
return qualified_names return qualified_names
module_names = self.get_root_value().string_names module_names = self.get_root_context().string_names
if module_names is None: if module_names is None:
return None return None
return module_names + qualified_names return module_names + qualified_names
@@ -41,8 +41,8 @@ class AbstractNameDefinition(object):
# By default, a name has no qualified names. # By default, a name has no qualified names.
return None return None
def get_root_value(self): def get_root_context(self):
return self.parent_context.get_root_value() return self.parent_context.get_root_context()
def __repr__(self): def __repr__(self):
if self.start_pos is None: if self.start_pos is None:
@@ -87,7 +87,7 @@ class AbstractTreeName(AbstractNameDefinition):
# In case of level == 1, it always works, because it's like a submodule # In case of level == 1, it always works, because it's like a submodule
# lookup. # lookup.
if import_node is not None and not (import_node.level == 1 if import_node is not None and not (import_node.level == 1
and self.get_root_value().is_package): and self.get_root_context().is_package):
# TODO improve the situation for when level is present. # TODO improve the situation for when level is present.
if include_module_names and not import_node.level: if include_module_names and not import_node.level:
return tuple(n.value for n in import_node.get_path_for_name(self.tree_name)) return tuple(n.value for n in import_node.get_path_for_name(self.tree_name))
@@ -103,7 +103,9 @@ class AbstractTreeName(AbstractNameDefinition):
return parent_names + (self.tree_name.value,) return parent_names + (self.tree_name.value,)
def goto(self, **kwargs): def goto(self, **kwargs):
return self.parent_context.inference_state.goto(self.parent_context, self.tree_name, **kwargs) return self.parent_context.inference_state.goto(
self.parent_context, self.tree_name, **kwargs
)
def is_import(self): def is_import(self):
imp = search_ancestor(self.tree_name, 'import_from', 'import_name') imp = search_ancestor(self.tree_name, 'import_from', 'import_name')
@@ -125,10 +127,10 @@ class ValueNameMixin(object):
def _get_qualified_names(self): def _get_qualified_names(self):
return self._value.get_qualified_names() return self._value.get_qualified_names()
def get_root_value(self): def get_root_context(self):
if self.parent_context is None: # A module if self.parent_context is None: # A module
return self._value return self._value.as_context()
return super(ValueNameMixin, self).get_root_value() return super(ValueNameMixin, self).get_root_context()
@property @property
def api_type(self): def api_type(self):
@@ -156,8 +158,11 @@ class TreeNameDefinition(AbstractTreeName):
def infer(self): def infer(self):
# Refactor this, should probably be here. # Refactor this, should probably be here.
from jedi.inference.syntax_tree import tree_name_to_values from jedi.inference.syntax_tree import tree_name_to_values
parent = self.parent_context return tree_name_to_values(
return tree_name_to_values(parent.inference_state, parent, self.tree_name) self.parent_context.inference_state,
self.parent_context,
self.tree_name
)
@property @property
def api_type(self): def api_type(self):
@@ -318,14 +323,14 @@ class ImportName(AbstractNameDefinition):
_level = 0 _level = 0
def __init__(self, parent_context, string_name): def __init__(self, parent_context, string_name):
self._from_module_value = parent_context self._from_module_context = parent_context
self.string_name = string_name self.string_name = string_name
def get_qualified_names(self, include_module_names=False): def get_qualified_names(self, include_module_names=False):
if include_module_names: if include_module_names:
if self._level: if self._level:
assert self._level == 1, "Everything else is not supported for now" assert self._level == 1, "Everything else is not supported for now"
module_names = self._from_module_value.string_names module_names = self._from_module_context.string_names
if module_names is None: if module_names is None:
return module_names return module_names
return module_names + (self.string_name,) return module_names + (self.string_name,)
@@ -334,7 +339,7 @@ class ImportName(AbstractNameDefinition):
@property @property
def parent_context(self): def parent_context(self):
m = self._from_module_value m = self._from_module_context
import_values = self.infer() import_values = self.infer()
if not import_values: if not import_values:
return m return m
@@ -345,7 +350,7 @@ class ImportName(AbstractNameDefinition):
@memoize_method @memoize_method
def infer(self): def infer(self):
from jedi.inference.imports import Importer from jedi.inference.imports import Importer
m = self._from_module_value m = self._from_module_context
return Importer(m.inference_state, [self.string_name], m, level=self._level).follow() return Importer(m.inference_state, [self.string_name], m, level=self._level).follow()
def goto(self): def goto(self):

View File

@@ -35,13 +35,14 @@ def _iter_nodes_for_param(param_name):
# anyway # anyway
trailer = search_ancestor(argument, 'trailer') trailer = search_ancestor(argument, 'trailer')
if trailer is not None: # Make sure we're in a function if trailer is not None: # Make sure we're in a function
value = execution_value.create_value(trailer) raise NotImplementedError
if _goes_to_param_name(param_name, value, name): context = execution_value.create_context(trailer)
values = _to_callables(value, trailer) if _goes_to_param_name(param_name, context, name):
values = _to_callables(context, trailer)
args = TreeArguments.create_cached( args = TreeArguments.create_cached(
execution_value.inference_state, execution_value.inference_state,
value=value, context=context,
argument_node=trailer.children[1], argument_node=trailer.children[1],
trailer=trailer, trailer=trailer,
) )
@@ -51,11 +52,11 @@ def _iter_nodes_for_param(param_name):
assert False assert False
def _goes_to_param_name(param_name, value, potential_name): def _goes_to_param_name(param_name, context, potential_name):
if potential_name.type != 'name': if potential_name.type != 'name':
return False return False
from jedi.inference.names import TreeNameDefinition from jedi.inference.names import TreeNameDefinition
found = TreeNameDefinition(value, potential_name).goto() found = TreeNameDefinition(context, potential_name).goto()
return any(param_name.parent_context == p.parent_context return any(param_name.parent_context == p.parent_context
and param_name.start_pos == p.start_pos and param_name.start_pos == p.start_pos
for p in found) for p in found)

View File

@@ -563,7 +563,7 @@ def tree_name_to_values(inference_state, context, tree_name):
expr_stmt = name.parent expr_stmt = name.parent
if expr_stmt.type == "expr_stmt" and expr_stmt.children[1].type == "annassign": if expr_stmt.type == "expr_stmt" and expr_stmt.children[1].type == "annassign":
correct_scope = parser_utils.get_parent_scope(name) == value.tree_node correct_scope = parser_utils.get_parent_scope(name) == context.tree_node
if correct_scope: if correct_scope:
value_set |= annotation.infer_annotation( value_set |= annotation.infer_annotation(
context, expr_stmt.children[1].children[1] context, expr_stmt.children[1].children[1]
@@ -576,7 +576,8 @@ def tree_name_to_values(inference_state, context, tree_name):
if node is None: if node is None:
node = tree_name.parent node = tree_name.parent
if node.type == 'global_stmt': if node.type == 'global_stmt':
value = inference_state.create_context(value, tree_name) c = context.create_context(tree_name)
raise NotImplementedError
finder = NameFinder(inference_state, value, value, tree_name.value) finder = NameFinder(inference_state, value, value, tree_name.value)
filters = finder.get_global_filters() filters = finder.get_global_filters()
# For global_stmt lookups, we only need the first possible scope, # For global_stmt lookups, we only need the first possible scope,
@@ -584,8 +585,8 @@ def tree_name_to_values(inference_state, context, tree_name):
filters = [next(filters)] filters = [next(filters)]
return finder.find(filters, attribute_lookup=False) return finder.find(filters, attribute_lookup=False)
elif node.type not in ('import_from', 'import_name'): elif node.type not in ('import_from', 'import_name'):
value = inference_state.create_value(value, tree_name) c = inference_state.create_context(context, tree_name)
return infer_atom(value, tree_name) return infer_atom(c, tree_name)
typ = node.type typ = node.type
if typ == 'for_stmt': if typ == 'for_stmt':

View File

@@ -11,11 +11,11 @@ from jedi import settings
from jedi import debug from jedi import debug
def _abs_path(module_value, path): def _abs_path(module_context, path):
if os.path.isabs(path): if os.path.isabs(path):
return path return path
module_path = module_value.py__file__() module_path = module_context.py__file__()
if module_path is None: if module_path is None:
# In this case we have no idea where we actually are in the file # In this case we have no idea where we actually are in the file
# system. # system.
@@ -26,7 +26,7 @@ def _abs_path(module_value, path):
return os.path.abspath(os.path.join(base_dir, path)) return os.path.abspath(os.path.join(base_dir, path))
def _paths_from_assignment(module_value, expr_stmt): def _paths_from_assignment(module_context, expr_stmt):
""" """
Extracts the assigned strings from an assignment that looks as follows:: Extracts the assigned strings from an assignment that looks as follows::
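
The docstring's example is cut off by the diff context above; a plausible shape for such an assignment, given purely as a hypothetical illustration (not necessarily the original docstring's example):

    import sys

    # A direct assignment to sys.path whose right-hand side contains string
    # literals; the helper extracts the strings and makes them absolute.
    sys.path[0:0] = ['module/path', 'another/module/path']
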
@@ -60,16 +60,16 @@ def _paths_from_assignment(module_value, expr_stmt):
except AssertionError: except AssertionError:
continue continue
cn = ValueualizedNode(module_value.create_value(expr_stmt), expr_stmt) cn = ValueualizedNode(module_context.create_context(expr_stmt), expr_stmt)
for lazy_value in cn.infer().iterate(cn): for lazy_value in cn.infer().iterate(cn):
for value in lazy_value.infer(): for value in lazy_value.infer():
if is_string(value): if is_string(value):
abs_path = _abs_path(module_value, value.get_safe_value()) abs_path = _abs_path(module_context, value.get_safe_value())
if abs_path is not None: if abs_path is not None:
yield abs_path yield abs_path
def _paths_from_list_modifications(module_value, trailer1, trailer2): def _paths_from_list_modifications(module_context, trailer1, trailer2):
""" extract the path from either "sys.path.append" or "sys.path.insert" """ """ extract the path from either "sys.path.append" or "sys.path.insert" """
# Guarantee that both are trailers, the first one a name and the second one # Guarantee that both are trailers, the first one a name and the second one
# a function execution with at least one param. # a function execution with at least one param.
@@ -85,15 +85,15 @@ def _paths_from_list_modifications(module_value, trailer1, trailer2):
if name == 'insert' and len(arg.children) in (3, 4): # Possible trailing comma. if name == 'insert' and len(arg.children) in (3, 4): # Possible trailing comma.
arg = arg.children[2] arg = arg.children[2]
for value in module_value.create_value(arg).infer_node(arg): for value in module_context.create_context(arg).infer_node(arg):
if is_string(value): if is_string(value):
abs_path = _abs_path(module_value, value.get_safe_value()) abs_path = _abs_path(module_context, value.get_safe_value())
if abs_path is not None: if abs_path is not None:
yield abs_path yield abs_path
@inference_state_method_cache(default=[]) @inference_state_method_cache(default=[])
def check_sys_path_modifications(module_value): def check_sys_path_modifications(module_context):
""" """
Detect sys.path modifications within module. Detect sys.path modifications within module.
""" """
@@ -108,12 +108,12 @@ def check_sys_path_modifications(module_value):
if n.type == 'name' and n.value == 'path': if n.type == 'name' and n.value == 'path':
yield name, power yield name, power
if module_value.tree_node is None: if module_context.tree_node is None:
return [] return []
added = [] added = []
try: try:
possible_names = module_value.tree_node.get_used_names()['path'] possible_names = module_context.tree_node.get_used_names()['path']
except KeyError: except KeyError:
pass pass
else: else:
@@ -122,11 +122,11 @@ def check_sys_path_modifications(module_value):
if len(power.children) >= 4: if len(power.children) >= 4:
added.extend( added.extend(
_paths_from_list_modifications( _paths_from_list_modifications(
module_value, *power.children[2:4] module_context, *power.children[2:4]
) )
) )
elif expr_stmt is not None and expr_stmt.type == 'expr_stmt': elif expr_stmt is not None and expr_stmt.type == 'expr_stmt':
added.extend(_paths_from_assignment(module_value, expr_stmt)) added.extend(_paths_from_assignment(module_context, expr_stmt))
return added return added
@@ -157,7 +157,7 @@ def _get_paths_from_buildout_script(inference_state, buildout_script_path):
inference_state, module_node, file_io, inference_state, module_node, file_io,
string_names=None, string_names=None,
code_lines=get_cached_code_lines(inference_state.grammar, buildout_script_path), code_lines=get_cached_code_lines(inference_state.grammar, buildout_script_path),
) ).as_context()
for path in check_sys_path_modifications(module): for path in check_sys_path_modifications(module):
yield path yield path

View File

@@ -26,22 +26,23 @@ def _dictionarize(names):
) )
def _find_names(module_value, tree_name): def _find_names(module_context, tree_name):
value = module_value.create_value(tree_name) context = module_context.create_context(tree_name)
name = TreeNameDefinition(value, tree_name) name = TreeNameDefinition(context, tree_name)
found_names = set(name.goto()) found_names = set(name.goto())
found_names.add(name) found_names.add(name)
return _dictionarize(_resolve_names(found_names)) return _dictionarize(_resolve_names(found_names))
def usages(module_value, tree_name): def usages(module_context, tree_name):
search_name = tree_name.value search_name = tree_name.value
found_names = _find_names(module_value, tree_name) found_names = _find_names(module_context, tree_name)
modules = set(d.get_root_value() for d in found_names.values()) modules = set(d.get_root_value() for d in found_names.values())
modules = set(m for m in modules if m.is_module() and not m.is_compiled()) modules = set(m for m in modules if m.is_module() and not m.is_compiled())
non_matching_usage_maps = {} non_matching_usage_maps = {}
for m in imports.get_modules_containing_name(module_value.inference_state, modules, search_name): inf = module_context.inference_state
for m in imports.get_modules_containing_name(inf, modules, search_name):
for name_leaf in m.tree_node.get_used_names().get(search_name, []): for name_leaf in m.tree_node.get_used_names().get(search_name, []):
new = _find_names(m, name_leaf) new = _find_names(m, name_leaf)
if any(tree_name in found_names for tree_name in new): if any(tree_name in found_names for tree_name in new):
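
How this surfaces through the public API of that era (hedged: assuming the pre-0.16 `Script(source, line, column)` signature and the `usages()` method that this commit's `Script.usages` backs):

    import jedi

    source = 'def f():\n    pass\n\nf()\n'
    # Point at the definition of `f` (line 1, zero-based column 4).
    for definition in jedi.Script(source, 1, 4).usages():
        print(definition.line, definition.column, definition.description)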

View File

@@ -669,8 +669,8 @@ def _check_array_additions(value, sequence):
from jedi.inference import arguments from jedi.inference import arguments
debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA') debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
module_value = value.get_root_value() module_context = value.get_root_context()
if not settings.dynamic_array_additions or isinstance(module_value, compiled.CompiledObject): if not settings.dynamic_array_additions or isinstance(module_context, compiled.CompiledObject):
debug.dbg('Dynamic array search aborted.', color='MAGENTA') debug.dbg('Dynamic array search aborted.', color='MAGENTA')
return NO_VALUES return NO_VALUES
@@ -696,7 +696,7 @@ def _check_array_additions(value, sequence):
added_types = set() added_types = set()
for add_name in search_names: for add_name in search_names:
try: try:
possible_names = module_value.tree_node.get_used_names()[add_name] possible_names = module_context.tree_node.get_used_names()[add_name]
except KeyError: except KeyError:
continue continue
else: else:
@@ -717,19 +717,20 @@ def _check_array_additions(value, sequence):
or execution_trailer.children[1] == ')': or execution_trailer.children[1] == ')':
continue continue
random_value = value.create_value(name) raise NotImplementedError
random_context = value.create_context(name)
with recursion.execution_allowed(value.inference_state, power) as allowed: with recursion.execution_allowed(value.inference_state, power) as allowed:
if allowed: if allowed:
found = infer_call_of_leaf( found = infer_call_of_leaf(
random_value, random_context,
name, name,
cut_own_trailer=True cut_own_trailer=True
) )
if sequence in found: if sequence in found:
# The arrays match. Now add the results # The arrays match. Now add the results
added_types |= find_additions( added_types |= find_additions(
random_value, random_context,
execution_trailer.children[1], execution_trailer.children[1],
add_name add_name
) )
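
The behaviour the dynamic array search implements, shown as ordinary Python rather than the internal API: with `settings.dynamic_array_additions` enabled, the `append` below contributes `str` to the inferred element type of `lst`.

    lst = []
    lst.append('text')

    for item in lst:
        # jedi infers `item` as str here only because the append above
        # was found by the dynamic array search.
        print(item.upper())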

View File

@@ -190,7 +190,8 @@ class ModuleValue(ModuleMixin, TreeValue):
api_type = u'module' api_type = u'module'
parent_context = None parent_context = None
def __init__(self, inference_state, module_node, file_io, string_names, code_lines, is_package=False): def __init__(self, inference_state, module_node, file_io, string_names,
code_lines, is_package=False):
super(ModuleValue, self).__init__( super(ModuleValue, self).__init__(
inference_state, inference_state,
parent_context=None, parent_context=None,

View File

@@ -123,7 +123,7 @@ import jedi
from jedi import debug from jedi import debug
from jedi._compatibility import unicode, is_py3 from jedi._compatibility import unicode, is_py3
from jedi.api.classes import Definition from jedi.api.classes import Definition
from jedi.api.completion import get_user_scope from jedi.api.completion import get_user_context
from jedi import parser_utils from jedi import parser_utils
from jedi.api.environment import get_default_environment, get_system_environment from jedi.api.environment import get_default_environment, get_system_environment
from jedi.inference.gradual.conversion import convert_values from jedi.inference.gradual.conversion import convert_values
@@ -225,14 +225,13 @@ class IntegrationTestCase(object):
parser = grammar36.parse(string, start_symbol='eval_input', error_recovery=False) parser = grammar36.parse(string, start_symbol='eval_input', error_recovery=False)
parser_utils.move(parser.get_root_node(), self.line_nr) parser_utils.move(parser.get_root_node(), self.line_nr)
element = parser.get_root_node() element = parser.get_root_node()
module_value = script._get_module() module_context = script._get_module_context()
# The value shouldn't matter for the test results. user_context = get_user_context(module_context, (self.line_nr, 0))
user_value = get_user_scope(module_value, (self.line_nr, 0)) if user_context.api_type == 'function':
if user_value.api_type == 'function': user_context = user_context.get_function_execution()
user_value = user_value.get_function_execution() element.parent = user_context.tree_node
element.parent = user_value.tree_node
results = convert_values( results = convert_values(
inference_state.infer_element(user_value, element), inference_state.infer_element(user_context, element),
) )
if not results: if not results:
raise Exception('Could not resolve %s on line %s' raise Exception('Could not resolve %s on line %s'