forked from VimPlug/jedi

infer_state -> inference_state

Dave Halter
2019-08-16 11:44:30 +02:00
parent fffb39227e
commit 03920502c4
60 changed files with 727 additions and 727 deletions
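The rename below is purely mechanical: the internal name `infer_state` becomes `inference_state` across attributes, parameters, and the cache decorators in jedi.inference.cache, with no behavior change. As a rough sketch of what that means for code that reaches into these private internals (illustration only, not part of the diff; the attribute is not public API):

    import jedi

    script = jedi.Script("import json; json.l")
    # before this commit: script._infer_state
    state = script._inference_state  # the InferenceState object driving type inference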

View File

@@ -111,11 +111,11 @@ class Script(object):
         # TODO deprecate and remove sys_path from the Script API.
         if sys_path is not None:
             project._sys_path = sys_path
-        self._infer_state = InferenceState(
+        self._inference_state = InferenceState(
             project, environment=environment, script_path=self.path
         )
         debug.speed('init')
-        self._module_node, source = self._infer_state.parse_and_get_code(
+        self._module_node, source = self._inference_state.parse_and_get_code(
             code=source,
             path=self.path,
             encoding=encoding,
@@ -156,7 +156,7 @@ class Script(object):
         is_package = False
         if self.path is not None:
             import_names, is_p = transform_path_to_dotted(
-                self._infer_state.get_sys_path(add_parent_paths=False),
+                self._inference_state.get_sys_path(add_parent_paths=False),
                 self.path
             )
             if import_names is not None:
@@ -170,7 +170,7 @@ class Script(object):
         if self.path is not None and self.path.endswith('.pyi'):
             # We are in a stub file. Try to load the stub properly.
             stub_module = load_proper_stub_module(
-                self._infer_state,
+                self._inference_state,
                 file_io,
                 names,
                 self._module_node
@@ -182,21 +182,21 @@ class Script(object):
             names = ('__main__',)
         module = ModuleValue(
-            self._infer_state, self._module_node, file_io,
+            self._inference_state, self._module_node, file_io,
             string_names=names,
             code_lines=self._code_lines,
             is_package=is_package,
         )
         if names[0] not in ('builtins', '__builtin__', 'typing'):
             # These modules are essential for Jedi, so don't overwrite them.
-            self._infer_state.module_cache.add(names, ValueSet([module]))
+            self._inference_state.module_cache.add(names, ValueSet([module]))
         return module

     def __repr__(self):
         return '<%s: %s %r>' % (
             self.__class__.__name__,
             repr(self._orig_path),
-            self._infer_state.environment,
+            self._inference_state.environment,
         )

     def completions(self):
@@ -209,7 +209,7 @@ class Script(object):
         """
         with debug.increase_indent_cm('completions'):
             completion = Completion(
-                self._infer_state, self._get_module(), self._code_lines,
+                self._inference_state, self._get_module(), self._code_lines,
                 self._pos, self.call_signatures
             )
             return completion.completions()
@@ -239,16 +239,16 @@ class Script(object):
         if leaf is None:
             return []
-        value = self._infer_state.create_value(self._get_module(), leaf)
+        value = self._inference_state.create_value(self._get_module(), leaf)
-        values = helpers.infer_goto_definition(self._infer_state, value, leaf)
+        values = helpers.infer_goto_definition(self._inference_state, value, leaf)
         values = convert_values(
             values,
             only_stubs=only_stubs,
             prefer_stubs=prefer_stubs,
         )
-        defs = [classes.Definition(self._infer_state, c.name) for c in values]
+        defs = [classes.Definition(self._inference_state, c.name) for c in values]
         # The additional set here allows the definitions to become unique in an
         # API sense. In the internals we want to separate more things than in
         # the API.
@@ -299,8 +299,8 @@ class Script(object):
             # Without a name we really just want to jump to the result e.g.
             # executed by `foo()`, if we the cursor is after `)`.
             return self.goto_definitions(only_stubs=only_stubs, prefer_stubs=prefer_stubs)
-        value = self._infer_state.create_value(self._get_module(), tree_name)
+        value = self._inference_state.create_value(self._get_module(), tree_name)
-        names = list(self._infer_state.goto(value, tree_name))
+        names = list(self._inference_state.goto(value, tree_name))
         if follow_imports:
             names = filter_follow_imports(names, lambda name: name.is_import())
@@ -310,7 +310,7 @@ class Script(object):
             prefer_stubs=prefer_stubs,
         )
-        defs = [classes.Definition(self._infer_state, d) for d in set(names)]
+        defs = [classes.Definition(self._inference_state, d) for d in set(names)]
         return helpers.sorted_definitions(defs)

     def usages(self, additional_module_paths=(), **kwargs):
@@ -342,7 +342,7 @@ class Script(object):
             names = usages.usages(self._get_module(), tree_name)
-            definitions = [classes.Definition(self._infer_state, n) for n in names]
+            definitions = [classes.Definition(self._inference_state, n) for n in names]
             if not include_builtins:
                 definitions = [d for d in definitions if not d.in_builtin_module()]
         return helpers.sorted_definitions(definitions)
@@ -368,12 +368,12 @@ class Script(object):
         if call_details is None:
             return []
-        value = self._infer_state.create_value(
+        value = self._inference_state.create_value(
             self._get_module(),
             call_details.bracket_leaf
         )
         definitions = helpers.cache_call_signatures(
-            self._infer_state,
+            self._inference_state,
             value,
             call_details.bracket_leaf,
             self._code_lines,
@@ -383,19 +383,19 @@ class Script(object):
         # TODO here we use stubs instead of the actual values. We should use
         # the signatures from stubs, but the actual values, probably?!
-        return [classes.CallSignature(self._infer_state, signature, call_details)
+        return [classes.CallSignature(self._inference_state, signature, call_details)
                 for signature in definitions.get_signatures()]

     def _analysis(self):
-        self._infer_state.is_analysis = True
+        self._inference_state.is_analysis = True
-        self._infer_state.analysis_modules = [self._module_node]
+        self._inference_state.analysis_modules = [self._module_node]
         module = self._get_module()
         try:
             for node in get_executable_nodes(self._module_node):
                 value = module.create_value(node)
                 if node.type in ('funcdef', 'classdef'):
                     # Resolve the decorators.
-                    tree_name_to_values(self._infer_state, value, node.children[1])
+                    tree_name_to_values(self._inference_state, value, node.children[1])
                 elif isinstance(node, tree.Import):
                     import_names = set(node.get_defined_names())
                     if node.is_nested():
@@ -409,16 +409,16 @@ class Script(object):
                         unpack_tuple_to_dict(value, types, testlist)
                 else:
                     if node.type == 'name':
-                        defs = self._infer_state.goto_definitions(value, node)
+                        defs = self._inference_state.goto_definitions(value, node)
                     else:
                         defs = infer_call_of_leaf(value, node)
                     try_iter_content(defs)
-                self._infer_state.reset_recursion_limitations()
+                self._inference_state.reset_recursion_limitations()
-            ana = [a for a in self._infer_state.analysis if self.path == a.path]
+            ana = [a for a in self._inference_state.analysis if self.path == a.path]
             return sorted(set(ana), key=lambda x: x.line)
         finally:
-            self._infer_state.is_analysis = False
+            self._inference_state.is_analysis = False


 class Interpreter(Script):
@@ -467,11 +467,11 @@ class Interpreter(Script):
         super(Interpreter, self).__init__(source, environment=environment,
                                           _project=Project(os.getcwd()), **kwds)
         self.namespaces = namespaces
-        self._infer_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default
+        self._inference_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default

     def _get_module(self):
         return interpreter.MixedModuleValue(
-            self._infer_state,
+            self._inference_state,
             self._module_node,
             self.namespaces,
             file_io=KnownContentFileIO(self.path, self._code),
@@ -514,7 +514,7 @@ def names(source=None, path=None, encoding='utf-8', all_scopes=False,
     module_value = script._get_module()
     defs = [
         classes.Definition(
-            script._infer_state,
+            script._inference_state,
             create_name(name)
         ) for name in get_module_names(script._module_node, all_scopes)
     ]

View File

@@ -25,7 +25,7 @@ def _sort_names_by_start_pos(names):
     return sorted(names, key=lambda s: s.start_pos or (0, 0))


-def defined_names(infer_state, value):
+def defined_names(inference_state, value):
     """
     List sub-definitions (e.g., methods in class).
@@ -34,11 +34,11 @@ def defined_names(infer_state, value):
     """
     filter = next(value.get_filters(search_global=True))
     names = [name for name in filter.values()]
-    return [Definition(infer_state, n) for n in _sort_names_by_start_pos(names)]
+    return [Definition(inference_state, n) for n in _sort_names_by_start_pos(names)]


 def _values_to_definitions(values):
-    return [Definition(c.infer_state, c.name) for c in values]
+    return [Definition(c.inference_state, c.name) for c in values]


 class BaseDefinition(object):
@@ -62,8 +62,8 @@ class BaseDefinition(object):
         'argparse._ActionsContainer': 'argparse.ArgumentParser',
     }.items())

-    def __init__(self, infer_state, name):
+    def __init__(self, inference_state, name):
-        self._infer_state = infer_state
+        self._inference_state = inference_state
         self._name = name
         """
         An instance of :class:`parso.python.tree.Name` subclass.
@@ -306,7 +306,7 @@ class BaseDefinition(object):
             only_stubs=only_stubs,
             prefer_stubs=prefer_stubs,
         )
-        return [self if n == self._name else Definition(self._infer_state, n)
+        return [self if n == self._name else Definition(self._inference_state, n)
                 for n in names]

     def infer(self, **kwargs):  # Python 2...
@@ -329,7 +329,7 @@ class BaseDefinition(object):
             prefer_stubs=prefer_stubs,
         )
         resulting_names = [c.name for c in values]
-        return [self if n == self._name else Definition(self._infer_state, n)
+        return [self if n == self._name else Definition(self._inference_state, n)
                 for n in resulting_names]

     @property
@@ -346,7 +346,7 @@ class BaseDefinition(object):
         for value in self._name.infer():
             for signature in value.get_signatures():
                 return [
-                    Definition(self._infer_state, n)
+                    Definition(self._inference_state, n)
                     for n in signature.get_param_names(resolve_stars=True)
                 ]
@@ -366,7 +366,7 @@ class BaseDefinition(object):
         if isinstance(value, FunctionExecutionValue):
             value = value.function_value
-        return Definition(self._infer_state, value.name)
+        return Definition(self._inference_state, value.name)

     def __repr__(self):
         return "<%s %sname=%r, description=%r>" % (
@@ -396,7 +396,7 @@ class BaseDefinition(object):
         return ''.join(lines[start_index:index + after + 1])

     def get_signatures(self):
-        return [Signature(self._infer_state, s) for s in self._name.infer().get_signatures()]
+        return [Signature(self._inference_state, s) for s in self._name.infer().get_signatures()]

     def execute(self):
         return _values_to_definitions(self._name.infer().execute_with_values())
@@ -407,8 +407,8 @@ class Completion(BaseDefinition):
     `Completion` objects are returned from :meth:`api.Script.completions`. They
     provide additional information about a completion.
     """
-    def __init__(self, infer_state, name, stack, like_name_length):
+    def __init__(self, inference_state, name, stack, like_name_length):
-        super(Completion, self).__init__(infer_state, name)
+        super(Completion, self).__init__(inference_state, name)
         self._like_name_length = like_name_length
         self._stack = stack
@@ -512,8 +512,8 @@ class Definition(BaseDefinition):
     *Definition* objects are returned from :meth:`api.Script.goto_assignments`
     or :meth:`api.Script.goto_definitions`.
     """
-    def __init__(self, infer_state, definition):
+    def __init__(self, inference_state, definition):
-        super(Definition, self).__init__(infer_state, definition)
+        super(Definition, self).__init__(inference_state, definition)

     @property
     def description(self):
@@ -588,7 +588,7 @@ class Definition(BaseDefinition):
         """
         defs = self._name.infer()
         return sorted(
-            unite(defined_names(self._infer_state, d) for d in defs),
+            unite(defined_names(self._inference_state, d) for d in defs),
             key=lambda s: s._name.start_pos or (0, 0)
         )
@@ -606,13 +606,13 @@ class Definition(BaseDefinition):
         return self._name.start_pos == other._name.start_pos \
             and self.module_path == other.module_path \
             and self.name == other.name \
-            and self._infer_state == other._infer_state
+            and self._inference_state == other._inference_state

     def __ne__(self, other):
         return not self.__eq__(other)

     def __hash__(self):
-        return hash((self._name.start_pos, self.module_path, self.name, self._infer_state))
+        return hash((self._name.start_pos, self.module_path, self.name, self._inference_state))


 class Signature(Definition):
@@ -621,8 +621,8 @@ class Signature(Definition):
     It knows what functions you are currently in. e.g. `isinstance(` would
     return the `isinstance` function. without `(` it would return nothing.
     """
-    def __init__(self, infer_state, signature):
+    def __init__(self, inference_state, signature):
-        super(Signature, self).__init__(infer_state, signature.name)
+        super(Signature, self).__init__(inference_state, signature.name)
         self._signature = signature

     @property
@@ -630,7 +630,7 @@ class Signature(Definition):
         """
         :return list of ParamDefinition:
         """
-        return [ParamDefinition(self._infer_state, n)
+        return [ParamDefinition(self._inference_state, n)
                 for n in self._signature.get_param_names(resolve_stars=True)]

     def to_string(self):
@@ -644,8 +644,8 @@ class CallSignature(Signature):
     return the `isinstance` function with its params. Without `(` it would
     return nothing.
     """
-    def __init__(self, infer_state, signature, call_details):
+    def __init__(self, inference_state, signature, call_details):
-        super(CallSignature, self).__init__(infer_state, signature)
+        super(CallSignature, self).__init__(inference_state, signature)
         self._call_details = call_details
         self._signature = signature

View File

@@ -28,7 +28,7 @@ def get_call_signature_param_names(call_signatures):
             yield p._name


-def filter_names(infer_state, completion_names, stack, like_name):
+def filter_names(inference_state, completion_names, stack, like_name):
     comp_dct = {}
     if settings.case_insensitive_completion:
         like_name = like_name.lower()
@@ -39,7 +39,7 @@ def filter_names(infer_state, completion_names, stack, like_name):
         if string.startswith(like_name):
             new = classes.Completion(
-                infer_state,
+                inference_state,
                 name,
                 stack,
                 len(like_name)
@@ -85,8 +85,8 @@ def get_flow_scope_node(module_node, position):
 class Completion:
-    def __init__(self, infer_state, module, code_lines, position, call_signatures_callback):
+    def __init__(self, inference_state, module, code_lines, position, call_signatures_callback):
-        self._infer_state = infer_state
+        self._inference_state = inference_state
         self._module_value = module
         self._module_node = module.tree_node
         self._code_lines = code_lines
@@ -104,7 +104,7 @@ class Completion:
         string, start_leaf = _extract_string_while_in_string(leaf, self._position)
         if string is not None:
             completions = list(file_name_completions(
-                self._infer_state, self._module_value, start_leaf, string,
+                self._inference_state, self._module_value, start_leaf, string,
                 self._like_name, self._call_signatures_callback,
                 self._code_lines, self._original_position
            ))
@@ -113,7 +113,7 @@ class Completion:
         completion_names = self._get_value_completions(leaf)
-        completions = filter_names(self._infer_state, completion_names,
+        completions = filter_names(self._inference_state, completion_names,
                                    self.stack, self._like_name)

         return sorted(completions, key=lambda x: (x.name.startswith('__'),
@@ -135,7 +135,7 @@ class Completion:
        - In params (also lambda): no completion before =
         """
-        grammar = self._infer_state.grammar
+        grammar = self._inference_state.grammar
         self.stack = stack = None

         try:
@@ -234,14 +234,14 @@ class Completion:
     def _get_keyword_completion_names(self, allowed_transitions):
         for k in allowed_transitions:
             if isinstance(k, str) and k.isalpha():
-                yield keywords.KeywordName(self._infer_state, k)
+                yield keywords.KeywordName(self._inference_state, k)

     def _global_completions(self):
         value = get_user_scope(self._module_value, self._position)
         debug.dbg('global completion scope: %s', value)
         flow_scope_node = get_flow_scope_node(self._module_node, self._position)
         filters = get_global_filters(
-            self._infer_state,
+            self._inference_state,
             value,
             self._position,
             origin_scope=flow_scope_node
@@ -253,7 +253,7 @@ class Completion:
     def _trailer_completions(self, previous_leaf):
         user_value = get_user_scope(self._module_value, self._position)
-        inferred_value = self._infer_state.create_value(
+        inferred_value = self._inference_state.create_value(
             self._module_value, previous_leaf
         )
         values = infer_call_of_leaf(inferred_value, previous_leaf)
@@ -276,8 +276,8 @@ class Completion:
     def _get_importer_names(self, names, level=0, only_modules=True):
         names = [n.value for n in names]
-        i = imports.Importer(self._infer_state, names, self._module_value, level)
+        i = imports.Importer(self._inference_state, names, self._module_value, level)
-        return i.completion_names(self._infer_state, only_modules=only_modules)
+        return i.completion_names(self._inference_state, only_modules=only_modules)

     def _get_class_value_completions(self, is_function=True):
         """

View File

@@ -109,8 +109,8 @@ class Environment(_BaseEnvironment):
         version = '.'.join(str(i) for i in self.version_info)
         return '<%s: %s in %s>' % (self.__class__.__name__, version, self.path)

-    def get_infer_state_subprocess(self, infer_state):
+    def get_inference_state_subprocess(self, inference_state):
-        return InferenceStateSubprocess(infer_state, self._get_subprocess())
+        return InferenceStateSubprocess(inference_state, self._get_subprocess())

     @memoize_method
     def get_sys_path(self):
@@ -140,8 +140,8 @@ class SameEnvironment(_SameEnvironmentMixin, Environment):


 class InterpreterEnvironment(_SameEnvironmentMixin, _BaseEnvironment):
-    def get_infer_state_subprocess(self, infer_state):
+    def get_inference_state_subprocess(self, inference_state):
-        return InferenceStateSameProcess(infer_state)
+        return InferenceStateSameProcess(inference_state)

     def get_sys_path(self):
         return sys.path

View File

@@ -7,7 +7,7 @@ from jedi.inference.helpers import get_str_or_none
 from jedi.parser_utils import get_string_quote


-def file_name_completions(infer_state, module_value, start_leaf, string,
+def file_name_completions(inference_state, module_value, start_leaf, string,
                           like_name, call_signatures_callback, code_lines, position):
     # First we want to find out what can actually be changed as a name.
     like_name_length = len(os.path.basename(string) + like_name)
@@ -30,7 +30,7 @@ def file_name_completions(infer_state, module_value, start_leaf, string,
             is_in_os_path_join = False
         else:
             string = to_be_added + string
-    base_path = os.path.join(infer_state.project._path, string)
+    base_path = os.path.join(inference_state.project._path, string)
     try:
         listed = os.listdir(base_path)
     except FileNotFoundError:
@@ -53,8 +53,8 @@ def file_name_completions(infer_state, module_value, start_leaf, string,
                 name += os.path.sep

             yield classes.Completion(
-                infer_state,
+                inference_state,
-                FileName(infer_state, name[len(must_start_with) - like_name_length:]),
+                FileName(inference_state, name[len(must_start_with) - like_name_length:]),
                 stack=None,
                 like_name_length=like_name_length
             )

View File

@@ -136,11 +136,11 @@ def get_stack_at_position(grammar, code_lines, leaf, pos):
     )


-def infer_goto_definition(infer_state, value, leaf):
+def infer_goto_definition(inference_state, value, leaf):
     if leaf.type == 'name':
         # In case of a name we can just use goto_definition which does all the
         # magic itself.
-        return infer_state.goto_definitions(value, leaf)
+        return inference_state.goto_definitions(value, leaf)

     parent = leaf.parent
     definitions = NO_VALUES
@@ -154,7 +154,7 @@ def infer_goto_definition(infer_state, value, leaf):
         # e.g. `"foo"` or `1.0`
         return infer_atom(value, leaf)
     elif leaf.type in ('fstring_string', 'fstring_start', 'fstring_end'):
-        return get_string_value_set(infer_state)
+        return get_string_value_set(inference_state)
     return definitions
@@ -376,7 +376,7 @@ def get_call_signature_details(module, position):


 @call_signature_time_cache("call_signatures_validity")
-def cache_call_signatures(infer_state, value, bracket_leaf, code_lines, user_pos):
+def cache_call_signatures(inference_state, value, bracket_leaf, code_lines, user_pos):
     """This function calculates the cache key."""
     line_index = user_pos[0] - 1
@@ -391,7 +391,7 @@ def cache_call_signatures(infer_state, value, bracket_leaf, code_lines, user_pos
     else:
         yield (module_path, before_bracket, bracket_leaf.start_pos)
     yield infer_goto_definition(
-        infer_state,
+        inference_state,
         value,
         bracket_leaf.get_previous_leaf(),
     )

View File

@@ -9,9 +9,9 @@ from jedi.inference.compiled.access import create_access_path
 from jedi.inference.base_value import ValueWrapper


-def _create(infer_state, obj):
+def _create(inference_state, obj):
     return compiled.create_from_access_path(
-        infer_state, create_access_path(infer_state, obj)
+        inference_state, create_access_path(inference_state, obj)
     )
@@ -23,9 +23,9 @@ class NamespaceObject(object):
 class MixedModuleValue(ValueWrapper):
     type = 'mixed_module'

-    def __init__(self, infer_state, tree_module, namespaces, file_io, code_lines):
+    def __init__(self, inference_state, tree_module, namespaces, file_io, code_lines):
         module_value = ModuleValue(
-            infer_state, tree_module,
+            inference_state, tree_module,
             file_io=file_io,
             string_names=('__main__',),
             code_lines=code_lines
@@ -38,7 +38,7 @@ class MixedModuleValue(ValueWrapper):
             yield filter

         for namespace_obj in self._namespace_objects:
-            compiled_object = _create(self.infer_state, namespace_obj)
+            compiled_object = _create(self.inference_state, namespace_obj)
             mixed_object = mixed.MixedObject(
                 compiled_object=compiled_object,
                 tree_value=self._wrapped_value

View File

@@ -15,24 +15,24 @@ except ImportError:
     pydoc_topics = None


-def get_operator(infer_state, string, pos):
+def get_operator(inference_state, string, pos):
-    return Keyword(infer_state, string, pos)
+    return Keyword(inference_state, string, pos)


 class KeywordName(AbstractArbitraryName):
     api_type = u'keyword'

     def infer(self):
-        return [Keyword(self.infer_state, self.string_name, (0, 0))]
+        return [Keyword(self.inference_state, self.string_name, (0, 0))]


 class Keyword(object):
     api_type = u'keyword'

-    def __init__(self, infer_state, name, pos):
+    def __init__(self, inference_state, name, pos):
-        self.name = KeywordName(infer_state, name)
+        self.name = KeywordName(inference_state, name)
         self.start_pos = pos
-        self.parent = infer_state.builtins_module
+        self.parent = inference_state.builtins_module

     @property
     def names(self):

View File

@@ -7,7 +7,7 @@ from jedi.api.environment import SameEnvironment, \
 from jedi.api.exceptions import WrongVersion
 from jedi._compatibility import force_unicode
 from jedi.inference.sys_path import discover_buildout_paths
-from jedi.inference.cache import infer_state_as_method_param_cache
+from jedi.inference.cache import inference_state_as_method_param_cache
 from jedi.common.utils import traverse_parents

 _CONFIG_FOLDER = '.jedi'
@@ -77,8 +77,8 @@ class Project(object):
         py2_comp(path, **kwargs)

-    @infer_state_as_method_param_cache()
+    @inference_state_as_method_param_cache()
-    def _get_base_sys_path(self, infer_state, environment=None):
+    def _get_base_sys_path(self, inference_state, environment=None):
         if self._sys_path is not None:
             return self._sys_path
@@ -93,8 +93,8 @@ class Project(object):
             pass
         return sys_path

-    @infer_state_as_method_param_cache()
+    @inference_state_as_method_param_cache()
-    def _get_sys_path(self, infer_state, environment=None, add_parent_paths=True):
+    def _get_sys_path(self, inference_state, environment=None, add_parent_paths=True):
         """
         Keep this method private for all users of jedi. However internally this
         one is used like a public method.
@@ -102,15 +102,15 @@ class Project(object):
         suffixed = []
         prefixed = []

-        sys_path = list(self._get_base_sys_path(infer_state, environment))
+        sys_path = list(self._get_base_sys_path(inference_state, environment))
         if self._smart_sys_path:
             prefixed.append(self._path)

-            if infer_state.script_path is not None:
+            if inference_state.script_path is not None:
-                suffixed += discover_buildout_paths(infer_state, infer_state.script_path)
+                suffixed += discover_buildout_paths(inference_state, inference_state.script_path)

                 if add_parent_paths:
-                    traversed = list(traverse_parents(infer_state.script_path))
+                    traversed = list(traverse_parents(inference_state.script_path))

                     # AFAIK some libraries have imports like `foo.foo.bar`, which
                     # leads to the conclusion to by default prefer longer paths

View File

@@ -1,6 +1,6 @@
 class BaseValue(object):
-    def __init__(self, infer_state, parent_context=None):
+    def __init__(self, inference_state, parent_context=None):
-        self.infer_state = infer_state
+        self.inference_state = inference_state
         self.parent_context = parent_context

     def get_root_value(self):

View File

@@ -15,7 +15,7 @@ Type inference of Python code in |jedi| is based on three assumptions:
 The actual algorithm is based on a principle I call lazy type inference.  That
 said, the typical entry point for static analysis is calling
 ``infer_expr_stmt``. There's separate logic for autocompletion in the API, the
-infer_state is all about inferring an expression.
+inference_state is all about inferring an expression.

 TODO this paragraph is not what jedi does anymore, it's similar, but not the
 same.
@@ -72,7 +72,7 @@ from jedi import parser_utils
 from jedi.inference.utils import unite
 from jedi.inference import imports
 from jedi.inference import recursion
-from jedi.inference.cache import infer_state_function_cache
+from jedi.inference.cache import inference_state_function_cache
 from jedi.inference import helpers
 from jedi.inference.names import TreeNameDefinition, ParamName
 from jedi.inference.base_value import ValueualizedName, ValueualizedNode, \
@@ -91,7 +91,7 @@ class InferenceState(object):
             environment = project.get_environment()
         self.environment = environment
         self.script_path = script_path
-        self.compiled_subprocess = environment.get_infer_state_subprocess(self)
+        self.compiled_subprocess = environment.get_inference_state_subprocess(self)
         self.grammar = environment.get_grammar()

         self.latest_grammar = parso.load_grammar(version='3.7')
@@ -128,7 +128,7 @@ class InferenceState(object):
         return value_set

     @property
-    @infer_state_function_cache()
+    @inference_state_function_cache()
     def builtins_module(self):
         module_name = u'builtins'
         if self.environment.version_info.major == 2:
@@ -137,7 +137,7 @@ class InferenceState(object):
         return builtins_module

     @property
-    @infer_state_function_cache()
+    @inference_state_function_cache()
     def typing_module(self):
         typing_module, = self.import_module((u'typing',))
         return typing_module
@@ -233,7 +233,7 @@ class InferenceState(object):
                 return infer_node(value, element)
         return self._infer_element_cached(value, element)

-    @infer_state_function_cache(default=NO_VALUES)
+    @inference_state_function_cache(default=NO_VALUES)
     def _infer_element_cached(self, value, element):
         return infer_node(value, element)

View File

@@ -87,7 +87,7 @@ def add(node_value, error_name, node, message=None, typ=Error, payload=None):
     module_path = module_value.py__file__()
     issue_instance = typ(error_name, module_path, node.start_pos, message)
     debug.warning(str(issue_instance), format=False)
-    node_value.infer_state.analysis.append(issue_instance)
+    node_value.inference_state.analysis.append(issue_instance)
     return issue_instance
@@ -149,7 +149,7 @@ def _check_for_exception_catch(node_value, jedi_name, exception, payload=None):
             for python_cls in exception.mro():
                 if cls.py__name__() == python_cls.__name__ \
-                        and cls.parent_context == cls.infer_state.builtins_module:
+                        and cls.parent_context == cls.inference_state.builtins_module:
                     return True
         return False
@@ -192,7 +192,7 @@ def _check_for_exception_catch(node_value, jedi_name, exception, payload=None):
             arglist = trailer.children[1]
             assert arglist.type == 'arglist'
             from jedi.inference.arguments import TreeArguments
-            args = list(TreeArguments(node_value.infer_state, node_value, arglist).unpack())
+            args = list(TreeArguments(node_value.inference_state, node_value, arglist).unpack())
             # Arguments should be very simple
             assert len(args) == 2

View File

@@ -11,7 +11,7 @@ from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \
 from jedi.inference.names import ParamName, TreeNameDefinition
 from jedi.inference.base_value import NO_VALUES, ValueSet, ValueualizedNode
 from jedi.inference.value import iterable
-from jedi.inference.cache import infer_state_as_method_param_cache
+from jedi.inference.cache import inference_state_as_method_param_cache
 from jedi.inference.param import get_executed_params_and_issues, ExecutedParam
@@ -59,7 +59,7 @@ def repack_with_argument_clinic(string, keep_arguments_param=False, keep_callbac
                 kwargs.pop('callback', None)
             try:
                 args += tuple(_iterate_argument_clinic(
-                    value.infer_state,
+                    value.inference_state,
                     arguments,
                     clinic_args
                 ))
@@ -72,7 +72,7 @@ def repack_with_argument_clinic(string, keep_arguments_param=False, keep_callbac
     return decorator


-def _iterate_argument_clinic(infer_state, arguments, parameters):
+def _iterate_argument_clinic(inference_state, arguments, parameters):
     """Uses a list with argument clinic information (see PEP 436)."""
     iterator = PushBackIterator(arguments.unpack())
     for i, (name, optional, allow_kwargs, stars) in enumerate(parameters):
@@ -84,7 +84,7 @@ def _iterate_argument_clinic(infer_state, arguments, parameters):
                     break
                 lazy_values.append(argument)
-            yield ValueSet([iterable.FakeSequence(infer_state, u'tuple', lazy_values)])
+            yield ValueSet([iterable.FakeSequence(inference_state, u'tuple', lazy_values)])
             lazy_values
             continue
         elif stars == 2:
@@ -161,7 +161,7 @@ class AnonymousArguments(AbstractArguments):
     def get_executed_params_and_issues(self, execution_value):
         from jedi.inference.dynamic import search_params
         return search_params(
-            execution_value.infer_state,
+            execution_value.inference_state,
             execution_value,
             execution_value.tree_node
         ), []
@@ -198,17 +198,17 @@ def unpack_arglist(arglist):


 class TreeArguments(AbstractArguments):
-    def __init__(self, infer_state, value, argument_node, trailer=None):
+    def __init__(self, inference_state, value, argument_node, trailer=None):
         """
         :param argument_node: May be an argument_node or a list of nodes.
         """
         self.argument_node = argument_node
         self.value = value
-        self._infer_state = infer_state
+        self._inference_state = inference_state
         self.trailer = trailer  # Can be None, e.g. in a class definition.

     @classmethod
-    @infer_state_as_method_param_cache()
+    @inference_state_as_method_param_cache()
     def create_cached(cls, *args, **kwargs):
         return cls(*args, **kwargs)
@@ -241,7 +241,7 @@ class TreeArguments(AbstractArguments):
                 if sync_comp_for.type == 'comp_for':
                     sync_comp_for = sync_comp_for.children[1]
                 comp = iterable.GeneratorComprehension(
-                    self._infer_state,
+                    self._inference_state,
                     defining_value=self.value,
                     sync_comp_for_node=sync_comp_for,
                     entry_node=el.children[0],

View File

@@ -16,7 +16,7 @@ from jedi.parser_utils import clean_scope_docstring
 from jedi.common import BaseValueSet, BaseValue
 from jedi.inference.helpers import SimpleGetItemNotFound
 from jedi.inference.utils import safe_property
-from jedi.inference.cache import infer_state_as_method_param_cache
+from jedi.inference.cache import inference_state_as_method_param_cache
 from jedi.cache import memoize_method

 _sentinel = object()
@@ -31,17 +31,17 @@ class HelperValueMixin(object):
             value = value.parent_context

     @classmethod
-    @infer_state_as_method_param_cache()
+    @inference_state_as_method_param_cache()
     def create_cached(cls, *args, **kwargs):
         return cls(*args, **kwargs)

     def execute(self, arguments):
-        return self.infer_state.execute(self, arguments=arguments)
+        return self.inference_state.execute(self, arguments=arguments)

     def execute_with_values(self, *value_list):
         from jedi.inference.arguments import ValuesArguments
         arguments = ValuesArguments([ValueSet([value]) for value in value_list])
-        return self.infer_state.execute(self, arguments)
+        return self.inference_state.execute(self, arguments)

     def execute_annotation(self):
         return self.execute_with_values()
@@ -64,7 +64,7 @@ class HelperValueMixin(object):
         if name_value is None:
             name_value = self
         from jedi.inference import finder
-        f = finder.NameFinder(self.infer_state, self, name_value, name_or_str,
+        f = finder.NameFinder(self.inference_state, self, name_value, name_or_str,
                               position, analysis_errors=analysis_errors)
         if search_global:
             filters = f.get_global_filters()
@@ -81,10 +81,10 @@ class HelperValueMixin(object):
             return await_value_set.execute_with_values()

     def infer_node(self, node):
-        return self.infer_state.infer_element(self, node)
+        return self.inference_state.infer_element(self, node)

     def create_value(self, node, node_is_value=False, node_is_object=False):
-        return self.infer_state.create_value(self, node, node_is_value, node_is_object)
+        return self.inference_state.create_value(self, node, node_is_value, node_is_object)

     def iterate(self, valueualized_node=None, is_async=False):
         debug.dbg('iterate %s', self)
@@ -239,8 +239,8 @@ class _ValueWrapperBase(HelperValueMixin):
         return CompiledValueName(self, wrapped_name.string_name)

     @classmethod
-    @infer_state_as_method_param_cache()
+    @inference_state_as_method_param_cache()
-    def create_cached(cls, infer_state, *args, **kwargs):
+    def create_cached(cls, inference_state, *args, **kwargs):
         return cls(*args, **kwargs)

     def __getattr__(self, name):
@@ -271,8 +271,8 @@ class ValueWrapper(_ValueWrapperBase):


 class TreeValue(Value):
-    def __init__(self, infer_state, parent_context, tree_node):
+    def __init__(self, inference_state, parent_context, tree_node):
-        super(TreeValue, self).__init__(infer_state, parent_context)
+        super(TreeValue, self).__init__(inference_state, parent_context)
         self.predefined_names = {}
         self.tree_node = tree_node
@@ -398,7 +398,7 @@ class ValueSet(BaseValueSet):
         )

     def execute(self, arguments):
-        return ValueSet.from_sets(c.infer_state.execute(c, arguments) for c in self._set)
+        return ValueSet.from_sets(c.inference_state.execute(c, arguments) for c in self._set)

     def execute_with_values(self, *args, **kwargs):
         return ValueSet.from_sets(c.execute_with_values(*args, **kwargs) for c in self._set)

View File

@@ -10,7 +10,7 @@ _NO_DEFAULT = object()
 _RECURSION_SENTINEL = object()


-def _memoize_default(default=_NO_DEFAULT, infer_state_is_first_arg=False, second_arg_is_infer_state=False):
+def _memoize_default(default=_NO_DEFAULT, inference_state_is_first_arg=False, second_arg_is_inference_state=False):
     """ This is a typical memoization decorator, BUT there is one difference:
     To prevent recursion it sets defaults.
@@ -21,12 +21,12 @@ def _memoize_default(default=_NO_DEFAULT, infer_state_is_first_arg=False, second
     def func(function):
         def wrapper(obj, *args, **kwargs):
             # TODO These checks are kind of ugly and slow.
-            if infer_state_is_first_arg:
+            if inference_state_is_first_arg:
                 cache = obj.memoize_cache
-            elif second_arg_is_infer_state:
+            elif second_arg_is_inference_state:
                 cache = args[0].memoize_cache  # needed for meta classes
             else:
-                cache = obj.infer_state.memoize_cache
+                cache = obj.inference_state.memoize_cache

             try:
                 memo = cache[function]
@@ -47,23 +47,23 @@ def _memoize_default(default=_NO_DEFAULT, infer_state_is_first_arg=False, second
     return func


-def infer_state_function_cache(default=_NO_DEFAULT):
+def inference_state_function_cache(default=_NO_DEFAULT):
     def decorator(func):
-        return _memoize_default(default=default, infer_state_is_first_arg=True)(func)
+        return _memoize_default(default=default, inference_state_is_first_arg=True)(func)

     return decorator


-def infer_state_method_cache(default=_NO_DEFAULT):
+def inference_state_method_cache(default=_NO_DEFAULT):
     def decorator(func):
         return _memoize_default(default=default)(func)

     return decorator


-def infer_state_as_method_param_cache():
+def inference_state_as_method_param_cache():
     def decorator(call):
-        return _memoize_default(second_arg_is_infer_state=True)(call)
+        return _memoize_default(second_arg_is_inference_state=True)(call)

     return decorator
@@ -74,19 +74,19 @@ class CachedMetaClass(type):
     class initializations. Either you do it this way or with decorators, but
     with decorators you lose class access (isinstance, etc).
     """
-    @infer_state_as_method_param_cache()
+    @inference_state_as_method_param_cache()
     def __call__(self, *args, **kwargs):
         return super(CachedMetaClass, self).__call__(*args, **kwargs)


-def infer_state_method_generator_cache():
+def inference_state_method_generator_cache():
     """
     This is a special memoizer. It memoizes generators and also checks for
     recursion errors and returns no further iterator elemends in that case.
     """
     def func(function):
         def wrapper(obj, *args, **kwargs):
-            cache = obj.infer_state.memoize_cache
+            cache = obj.inference_state.memoize_cache
             try:
                 memo = cache[function]
             except KeyError:
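
The three renamed decorators differ only in where the memoize cache lives: on the first argument itself (inference_state_function_cache), on obj.inference_state (inference_state_method_cache), or on the second argument (inference_state_as_method_param_cache). A rough usage sketch mirroring the call sites elsewhere in this commit; the class below is hypothetical, only the decorator name comes from the diff:

    from jedi.inference.cache import inference_state_method_cache

    class ExampleValue(object):  # hypothetical, for illustration only
        def __init__(self, inference_state):
            # the decorator below stores results in inference_state.memoize_cache
            self.inference_state = inference_state

        @inference_state_method_cache()
        def expensive_lookup(self, name):
            # computed once per (instance, arguments), then served from the cache
            return name.upper()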

View File

@@ -4,8 +4,8 @@ from jedi.inference.compiled.value import CompiledObject, CompiledName, \
 from jedi.inference.base_value import ValueWrapper, LazyValueWrapper


-def builtin_from_name(infer_state, string):
+def builtin_from_name(inference_state, string):
-    typing_builtins_module = infer_state.builtins_module
+    typing_builtins_module = inference_state.builtins_module
     if string in ('None', 'True', 'False'):
         builtins, = typing_builtins_module.non_stub_value_set
         filter_ = next(builtins.get_filters())
@@ -18,7 +18,7 @@ def builtin_from_name(infer_state, string):

 class CompiledValue(LazyValueWrapper):
     def __init__(self, compiled_obj):
-        self.infer_state = compiled_obj.infer_state
+        self.inference_state = compiled_obj.inference_state
         self._compiled_obj = compiled_obj

     def __getattribute__(self, name):
@@ -29,36 +29,36 @@ class CompiledValue(LazyValueWrapper):

     def _get_wrapped_value(self):
         instance, = builtin_from_name(
-            self.infer_state, self._compiled_obj.name.string_name).execute_with_values()
+            self.inference_state, self._compiled_obj.name.string_name).execute_with_values()
         return instance

     def __repr__(self):
         return '<%s: %s>' % (self.__class__.__name__, self._compiled_obj)


-def create_simple_object(infer_state, obj):
+def create_simple_object(inference_state, obj):
     """
     Only allows creations of objects that are easily picklable across Python
     versions.
     """
     assert type(obj) in (int, float, str, bytes, unicode, slice, complex, bool), obj
     compiled_obj = create_from_access_path(
-        infer_state,
+        inference_state,
-        infer_state.compiled_subprocess.create_simple_object(obj)
+        inference_state.compiled_subprocess.create_simple_object(obj)
     )
     return CompiledValue(compiled_obj)


-def get_string_value_set(infer_state):
+def get_string_value_set(inference_state):
-    return builtin_from_name(infer_state, u'str').execute_with_values()
+    return builtin_from_name(inference_state, u'str').execute_with_values()


-def load_module(infer_state, dotted_name, **kwargs):
+def load_module(inference_state, dotted_name, **kwargs):
     # Temporary, some tensorflow builtins cannot be loaded, so it's tried again
     # and again and it's really slow.
     if dotted_name.startswith('tensorflow.'):
         return None
-    access_path = infer_state.compiled_subprocess.load_module(dotted_name=dotted_name, **kwargs)
+    access_path = inference_state.compiled_subprocess.load_module(dotted_name=dotted_name, **kwargs)
     if access_path is None:
         return None
-    return create_from_access_path(infer_state, access_path)
+    return create_from_access_path(inference_state, access_path)

View File

@@ -109,8 +109,8 @@ def compiled_objects_cache(attribute_name):
         Caching the id has the advantage that an object doesn't need to be
         hashable.
         """
-        def wrapper(infer_state, obj, parent_context=None):
+        def wrapper(inference_state, obj, parent_context=None):
-            cache = getattr(infer_state, attribute_name)
+            cache = getattr(inference_state, attribute_name)
             # Do a very cheap form of caching here.
             key = id(obj)
             try:
@@ -119,9 +119,9 @@ def compiled_objects_cache(attribute_name):
             except KeyError:
                 # TODO wuaaaarrghhhhhhhh
                 if attribute_name == 'mixed_cache':
-                    result = func(infer_state, obj, parent_context)
+                    result = func(inference_state, obj, parent_context)
                 else:
-                    result = func(infer_state, obj)
+                    result = func(inference_state, obj)
                 # Need to cache all of them, otherwise the id could be overwritten.
                 cache[key] = result, obj, parent_context
             return result
@@ -130,11 +130,11 @@ def compiled_objects_cache(attribute_name):
     return decorator


-def create_access(infer_state, obj):
+def create_access(inference_state, obj):
-    return infer_state.compiled_subprocess.get_or_create_access_handle(obj)
+    return inference_state.compiled_subprocess.get_or_create_access_handle(obj)


-def load_module(infer_state, dotted_name, sys_path):
+def load_module(inference_state, dotted_name, sys_path):
     temp, sys.path = sys.path, sys_path
     try:
         __import__(dotted_name)
@@ -154,7 +154,7 @@ def load_module(infer_state, dotted_name, sys_path):
     # Just access the cache after import, because of #59 as well as the very
     # complicated import structure of Python.
     module = sys.modules[dotted_name]
-    return create_access_path(infer_state, module)
+    return create_access_path(inference_state, module)


 class AccessPath(object):
@@ -171,8 +171,8 @@ class AccessPath(object):
         self.accesses = value


-def create_access_path(infer_state, obj):
+def create_access_path(inference_state, obj):
-    access = create_access(infer_state, obj)
+    access = create_access(inference_state, obj)
     return AccessPath(access.get_access_path_tuples())
@@ -193,18 +193,18 @@ def get_api_type(obj):


 class DirectObjectAccess(object):
-    def __init__(self, infer_state, obj):
+    def __init__(self, inference_state, obj):
-        self._infer_state = infer_state
+        self._inference_state = inference_state
         self._obj = obj

     def __repr__(self):
         return '%s(%s)' % (self.__class__.__name__, self.get_repr())

     def _create_access(self, obj):
-        return create_access(self._infer_state, obj)
+        return create_access(self._inference_state, obj)

     def _create_access_path(self, obj):
-        return create_access_path(self._infer_state, obj)
+        return create_access_path(self._inference_state, obj)

     def py__bool__(self):
         return bool(self._obj)
@@ -376,7 +376,7 @@ class DirectObjectAccess(object):
         return get_api_type(self._obj)

     def get_access_path_tuples(self):
-        accesses = [create_access(self._infer_state, o) for o in self._get_objects_path()]
+        accesses = [create_access(self._inference_state, o) for o in self._get_objects_path()]
         return [(access.py__name__(), access) for access in accesses]

     def _get_objects_path(self):

View File

@@ -15,7 +15,7 @@ from jedi.file_io import FileIO
from jedi.inference.base_value import ValueSet, ValueWrapper from jedi.inference.base_value import ValueSet, ValueWrapper
from jedi.inference.helpers import SimpleGetItemNotFound from jedi.inference.helpers import SimpleGetItemNotFound
from jedi.inference.value import ModuleValue from jedi.inference.value import ModuleValue
from jedi.inference.cache import infer_state_function_cache from jedi.inference.cache import inference_state_function_cache
from jedi.inference.compiled.getattr_static import getattr_static from jedi.inference.compiled.getattr_static import getattr_static
from jedi.inference.compiled.access import compiled_objects_cache, \ from jedi.inference.compiled.access import compiled_objects_cache, \
ALLOWED_GETITEM_TYPES, get_api_type ALLOWED_GETITEM_TYPES, get_api_type
@@ -48,7 +48,7 @@ class MixedObject(ValueWrapper):
self.access_handle = compiled_object.access_handle self.access_handle = compiled_object.access_handle
def get_filters(self, *args, **kwargs): def get_filters(self, *args, **kwargs):
yield MixedObjectFilter(self.infer_state, self) yield MixedObjectFilter(self.inference_state, self)
def get_signatures(self): def get_signatures(self):
# Prefer `inspect.signature` over somehow analyzing Python code. It # Prefer `inspect.signature` over somehow analyzing Python code. It
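
The comment above prefers inspect.signature over analysing source code; a short illustration of what that call returns for a plain function (the example function itself is made up):

import inspect


def example(a, b=3, *args, **kwargs):
    return a


sig = inspect.signature(example)
assert str(sig) == '(a, b=3, *args, **kwargs)'
assert list(sig.parameters) == ['a', 'b', 'args', 'kwargs']
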
@@ -105,9 +105,9 @@ class MixedName(compiled.CompiledName):
values = [None] values = [None]
for access in access_paths: for access in access_paths:
values = ValueSet.from_sets( values = ValueSet.from_sets(
_create(self._infer_state, access, parent_context=c) _create(self._inference_state, access, parent_context=c)
if c is None or isinstance(c, MixedObject) if c is None or isinstance(c, MixedObject)
else ValueSet({create_cached_compiled_object(c.infer_state, access, c)}) else ValueSet({create_cached_compiled_object(c.inference_state, access, c)})
for c in values for c in values
) )
return values return values
@@ -121,9 +121,9 @@ class MixedObjectFilter(compiled.CompiledObjectFilter):
name_class = MixedName name_class = MixedName
@infer_state_function_cache() @inference_state_function_cache()
def _load_module(infer_state, path): def _load_module(inference_state, path):
module_node = infer_state.parse( module_node = inference_state.parse(
path=path, path=path,
cache=True, cache=True,
diff_cache=settings.fast_parser, diff_cache=settings.fast_parser,
@@ -131,7 +131,7 @@ def _load_module(infer_state, path):
).get_root_node() ).get_root_node()
# python_module = inspect.getmodule(python_object) # python_module = inspect.getmodule(python_object)
# TODO we should actually make something like this possible. # TODO we should actually make something like this possible.
#infer_state.modules[python_module.__name__] = module_node #inference_state.modules[python_module.__name__] = module_node
return module_node return module_node
@@ -155,7 +155,7 @@ def _get_object_to_check(python_object):
raise TypeError # Prevents computation of `repr` within inspect. raise TypeError # Prevents computation of `repr` within inspect.
def _find_syntax_node_name(infer_state, python_object): def _find_syntax_node_name(inference_state, python_object):
original_object = python_object original_object = python_object
try: try:
python_object = _get_object_to_check(python_object) python_object = _get_object_to_check(python_object)
@@ -168,13 +168,13 @@ def _find_syntax_node_name(infer_state, python_object):
return None return None
file_io = FileIO(path) file_io = FileIO(path)
module_node = _load_module(infer_state, path) module_node = _load_module(inference_state, path)
if inspect.ismodule(python_object): if inspect.ismodule(python_object):
# We don't need to check names for modules, because there's not really # We don't need to check names for modules, because there's not really
# a way to write a module in a module in Python (and also __name__ can # a way to write a module in a module in Python (and also __name__ can
# be something like ``email.utils``). # be something like ``email.utils``).
code_lines = get_cached_code_lines(infer_state.grammar, path) code_lines = get_cached_code_lines(inference_state.grammar, path)
return module_node, module_node, file_io, code_lines return module_node, module_node, file_io, code_lines
try: try:
@@ -214,7 +214,7 @@ def _find_syntax_node_name(infer_state, python_object):
if line_names: if line_names:
names = line_names names = line_names
code_lines = get_cached_code_lines(infer_state.grammar, path) code_lines = get_cached_code_lines(inference_state.grammar, path)
# It's really hard to actually get the right definition, here as a last # It's really hard to actually get the right definition, here as a last
# resort we just return the last one. This chance might lead to odd # resort we just return the last one. This chance might lead to odd
# completions at some points but will lead to mostly correct type # completions at some points but will lead to mostly correct type
@@ -230,9 +230,9 @@ def _find_syntax_node_name(infer_state, python_object):
@compiled_objects_cache('mixed_cache') @compiled_objects_cache('mixed_cache')
def _create(infer_state, access_handle, parent_context, *args): def _create(inference_state, access_handle, parent_context, *args):
compiled_object = create_cached_compiled_object( compiled_object = create_cached_compiled_object(
infer_state, inference_state,
access_handle, access_handle,
parent_context=parent_context and parent_context.compiled_object parent_context=parent_context and parent_context.compiled_object
) )
@@ -240,7 +240,7 @@ def _create(infer_state, access_handle, parent_context, *args):
# TODO accessing this is bad, but it probably doesn't matter that much, # TODO accessing this is bad, but it probably doesn't matter that much,
# because we're working with interpreters only here. # because we're working with interpreters only here.
python_object = access_handle.access._obj python_object = access_handle.access._obj
result = _find_syntax_node_name(infer_state, python_object) result = _find_syntax_node_name(inference_state, python_object)
if result is None: if result is None:
# TODO Care about generics from stuff like `[1]` and don't return like this. # TODO Care about generics from stuff like `[1]` and don't return like this.
if type(python_object) in (dict, list, tuple): if type(python_object) in (dict, list, tuple):
@@ -257,14 +257,14 @@ def _create(infer_state, access_handle, parent_context, *args):
name = compiled_object.get_root_value().py__name__() name = compiled_object.get_root_value().py__name__()
string_names = tuple(name.split('.')) string_names = tuple(name.split('.'))
module_value = ModuleValue( module_value = ModuleValue(
infer_state, module_node, inference_state, module_node,
file_io=file_io, file_io=file_io,
string_names=string_names, string_names=string_names,
code_lines=code_lines, code_lines=code_lines,
is_package=hasattr(compiled_object, 'py__path__'), is_package=hasattr(compiled_object, 'py__path__'),
) )
if name is not None: if name is not None:
infer_state.module_cache.add(string_names, ValueSet([module_value])) inference_state.module_cache.add(string_names, ValueSet([module_value]))
else: else:
if parent_context.tree_node.get_root_node() != module_node: if parent_context.tree_node.get_root_node() != module_node:
# This happens e.g. when __module__ is wrong, or when using # This happens e.g. when __module__ is wrong, or when using

View File

@@ -71,9 +71,9 @@ def _cleanup_process(process, thread):
class _InferenceStateProcess(object): class _InferenceStateProcess(object):
def __init__(self, infer_state): def __init__(self, inference_state):
self._infer_state_weakref = weakref.ref(infer_state) self._inference_state_weakref = weakref.ref(inference_state)
self._infer_state_id = id(infer_state) self._inference_state_id = id(inference_state)
self._handles = {} self._handles = {}
def get_or_create_access_handle(self, obj): def get_or_create_access_handle(self, obj):
@@ -81,7 +81,7 @@ class _InferenceStateProcess(object):
try: try:
return self.get_access_handle(id_) return self.get_access_handle(id_)
except KeyError: except KeyError:
access = DirectObjectAccess(self._infer_state_weakref(), obj) access = DirectObjectAccess(self._inference_state_weakref(), obj)
handle = AccessHandle(self, access, id_) handle = AccessHandle(self, access, id_)
self.set_access_handle(handle) self.set_access_handle(handle)
return handle return handle
@@ -100,12 +100,12 @@ class InferenceStateSameProcess(_InferenceStateProcess):
This is necessary for the Interpreter process. This is necessary for the Interpreter process.
""" """
def __getattr__(self, name): def __getattr__(self, name):
return partial(_get_function(name), self._infer_state_weakref()) return partial(_get_function(name), self._inference_state_weakref())
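
InferenceStateSameProcess forwards unknown attribute lookups to module-level functions with the state bound as their first argument; a minimal sketch of that delegation pattern with functools.partial (SameProcess, get_sys_path and the state dict are illustrative stand-ins):

from functools import partial


def get_sys_path(state):
    return state['sys_path']


_functions = {'get_sys_path': get_sys_path}


class SameProcess(object):
    def __init__(self, state):
        self._state = state

    def __getattr__(self, name):
        # Look the helper up by name and bind the state as its first argument.
        return partial(_functions[name], self._state)


proc = SameProcess({'sys_path': ['/usr/lib/python3']})
assert proc.get_sys_path() == ['/usr/lib/python3']
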
class InferenceStateSubprocess(_InferenceStateProcess): class InferenceStateSubprocess(_InferenceStateProcess):
def __init__(self, infer_state, compiled_subprocess): def __init__(self, inference_state, compiled_subprocess):
super(InferenceStateSubprocess, self).__init__(infer_state) super(InferenceStateSubprocess, self).__init__(inference_state)
self._used = False self._used = False
self._compiled_subprocess = compiled_subprocess self._compiled_subprocess = compiled_subprocess
@@ -116,7 +116,7 @@ class InferenceStateSubprocess(_InferenceStateProcess):
self._used = True self._used = True
result = self._compiled_subprocess.run( result = self._compiled_subprocess.run(
self._infer_state_weakref(), self._inference_state_weakref(),
func, func,
args=args, args=args,
kwargs=kwargs, kwargs=kwargs,
@@ -148,7 +148,7 @@ class InferenceStateSubprocess(_InferenceStateProcess):
def __del__(self): def __del__(self):
if self._used and not self._compiled_subprocess.is_crashed: if self._used and not self._compiled_subprocess.is_crashed:
self._compiled_subprocess.delete_infer_state(self._infer_state_id) self._compiled_subprocess.delete_inference_state(self._inference_state_id)
class CompiledSubprocess(object): class CompiledSubprocess(object):
@@ -158,7 +158,7 @@ class CompiledSubprocess(object):
def __init__(self, executable): def __init__(self, executable):
self._executable = executable self._executable = executable
self._infer_state_deletion_queue = queue.deque() self._inference_state_deletion_queue = queue.deque()
self._cleanup_callable = lambda: None self._cleanup_callable = lambda: None
def __repr__(self): def __repr__(self):
@@ -205,18 +205,18 @@ class CompiledSubprocess(object):
t) t)
return process return process
def run(self, infer_state, function, args=(), kwargs={}): def run(self, inference_state, function, args=(), kwargs={}):
# Delete old infer_states. # Delete old inference_states.
while True: while True:
try: try:
infer_state_id = self._infer_state_deletion_queue.pop() inference_state_id = self._inference_state_deletion_queue.pop()
except IndexError: except IndexError:
break break
else: else:
self._send(infer_state_id, None) self._send(inference_state_id, None)
assert callable(function) assert callable(function)
return self._send(id(infer_state), function, args, kwargs) return self._send(id(inference_state), function, args, kwargs)
def get_sys_path(self): def get_sys_path(self):
return self._send(None, functions.get_sys_path, (), {}) return self._send(None, functions.get_sys_path, (), {})
@@ -225,7 +225,7 @@ class CompiledSubprocess(object):
self.is_crashed = True self.is_crashed = True
self._cleanup_callable() self._cleanup_callable()
def _send(self, infer_state_id, function, args=(), kwargs={}): def _send(self, inference_state_id, function, args=(), kwargs={}):
if self.is_crashed: if self.is_crashed:
raise InternalError("The subprocess %s has crashed." % self._executable) raise InternalError("The subprocess %s has crashed." % self._executable)
@@ -233,7 +233,7 @@ class CompiledSubprocess(object):
# Python 2 compatibility # Python 2 compatibility
kwargs = {force_unicode(key): value for key, value in kwargs.items()} kwargs = {force_unicode(key): value for key, value in kwargs.items()}
data = infer_state_id, function, args, kwargs data = inference_state_id, function, args, kwargs
try: try:
pickle_dump(data, self._get_process().stdin, self._pickle_protocol) pickle_dump(data, self._get_process().stdin, self._pickle_protocol)
except (socket.error, IOError) as e: except (socket.error, IOError) as e:
@@ -272,59 +272,59 @@ class CompiledSubprocess(object):
raise result raise result
return result return result
def delete_infer_state(self, infer_state_id): def delete_inference_state(self, inference_state_id):
""" """
Currently we are not deleting infer_states instantly. They only get Currently we are not deleting inference_states instantly. They only get
deleted once the subprocess is used again. It would probably be a better deleted once the subprocess is used again. It would probably be a better
solution to move all of this into a thread. However, the memory usage solution to move all of this into a thread. However, the memory usage
of a single infer_state shouldn't be that high. of a single inference_state shouldn't be that high.
""" """
# With an argument - the infer_state gets deleted. # With an argument - the inference_state gets deleted.
self._infer_state_deletion_queue.append(infer_state_id) self._inference_state_deletion_queue.append(inference_state_id)
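
As the docstring says, deletion only happens the next time the subprocess is used; a compact sketch of such a deferred-deletion queue (LazyDeleter is an illustrative name, not part of jedi):

from collections import deque


class LazyDeleter(object):
    def __init__(self):
        self._deletion_queue = deque()
        self._alive = {}

    def register(self, key, value):
        self._alive[key] = value

    def delete_later(self, key):
        # Nothing is removed yet; the key is only queued.
        self._deletion_queue.append(key)

    def run(self, key):
        # Flush pending deletions before doing any real work.
        while self._deletion_queue:
            self._alive.pop(self._deletion_queue.pop(), None)
        return self._alive[key]


deleter = LazyDeleter()
deleter.register(1, 'state-1')
deleter.register(2, 'state-2')
deleter.delete_later(1)
assert deleter.run(2) == 'state-2'   # state 1 is only dropped by this call
assert 1 not in deleter._alive
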
class Listener(object): class Listener(object):
def __init__(self, pickle_protocol): def __init__(self, pickle_protocol):
self._infer_states = {} self._inference_states = {}
# TODO refactor so we don't need to process anymore just handle # TODO refactor so we don't need to process anymore just handle
# controlling. # controlling.
self._process = _InferenceStateProcess(Listener) self._process = _InferenceStateProcess(Listener)
self._pickle_protocol = pickle_protocol self._pickle_protocol = pickle_protocol
def _get_infer_state(self, function, infer_state_id): def _get_inference_state(self, function, inference_state_id):
from jedi.inference import InferenceState from jedi.inference import InferenceState
try: try:
infer_state = self._infer_states[infer_state_id] inference_state = self._inference_states[inference_state_id]
except KeyError: except KeyError:
from jedi.api.environment import InterpreterEnvironment from jedi.api.environment import InterpreterEnvironment
infer_state = InferenceState( inference_state = InferenceState(
# The project is not actually needed. Nothing should need to # The project is not actually needed. Nothing should need to
# access it. # access it.
project=None, project=None,
environment=InterpreterEnvironment() environment=InterpreterEnvironment()
) )
self._infer_states[infer_state_id] = infer_state self._inference_states[inference_state_id] = inference_state
return infer_state return inference_state
def _run(self, infer_state_id, function, args, kwargs): def _run(self, inference_state_id, function, args, kwargs):
if infer_state_id is None: if inference_state_id is None:
return function(*args, **kwargs) return function(*args, **kwargs)
elif function is None: elif function is None:
del self._infer_states[infer_state_id] del self._inference_states[inference_state_id]
else: else:
infer_state = self._get_infer_state(function, infer_state_id) inference_state = self._get_inference_state(function, inference_state_id)
# Exchange all handles # Exchange all handles
args = list(args) args = list(args)
for i, arg in enumerate(args): for i, arg in enumerate(args):
if isinstance(arg, AccessHandle): if isinstance(arg, AccessHandle):
args[i] = infer_state.compiled_subprocess.get_access_handle(arg.id) args[i] = inference_state.compiled_subprocess.get_access_handle(arg.id)
for key, value in kwargs.items(): for key, value in kwargs.items():
if isinstance(value, AccessHandle): if isinstance(value, AccessHandle):
kwargs[key] = infer_state.compiled_subprocess.get_access_handle(value.id) kwargs[key] = inference_state.compiled_subprocess.get_access_handle(value.id)
return function(infer_state, *args, **kwargs) return function(inference_state, *args, **kwargs)
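
_run interprets the (inference_state_id, function, args, kwargs) tuples: a None id means a stateless helper call, a None function means "drop that state", and anything else is routed to the cached state; a condensed sketch of that dispatch, leaving out the AccessHandle exchange and the pickle transport:

class MiniListener(object):
    def __init__(self, create_state):
        self._states = {}
        self._create_state = create_state

    def run(self, state_id, function, args=(), kwargs=None):
        kwargs = kwargs or {}
        if state_id is None:
            # No state involved: a plain helper call.
            return function(*args, **kwargs)
        if function is None:
            # A bare id is a deletion request for that state.
            del self._states[state_id]
            return None
        state = self._states.setdefault(state_id, self._create_state())
        return function(state, *args, **kwargs)


listener = MiniListener(create_state=dict)
assert listener.run(None, len, (['a', 'b'],)) == 2
listener.run(42, dict.setdefault, ('answer', 42))
assert listener.run(42, dict.get, ('answer',)) == 42
listener.run(42, None)
assert 42 not in listener._states
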
def listen(self): def listen(self):
stdout = sys.stdout stdout = sys.stdout

View File

@@ -12,20 +12,20 @@ def get_sys_path():
return list(map(cast_path, sys.path)) return list(map(cast_path, sys.path))
def load_module(infer_state, **kwargs): def load_module(inference_state, **kwargs):
return access.load_module(infer_state, **kwargs) return access.load_module(inference_state, **kwargs)
def get_compiled_method_return(infer_state, id, attribute, *args, **kwargs): def get_compiled_method_return(inference_state, id, attribute, *args, **kwargs):
handle = infer_state.compiled_subprocess.get_access_handle(id) handle = inference_state.compiled_subprocess.get_access_handle(id)
return getattr(handle.access, attribute)(*args, **kwargs) return getattr(handle.access, attribute)(*args, **kwargs)
def create_simple_object(infer_state, obj): def create_simple_object(inference_state, obj):
return access.create_access_path(infer_state, obj) return access.create_access_path(inference_state, obj)
def get_module_info(infer_state, sys_path=None, full_name=None, **kwargs): def get_module_info(inference_state, sys_path=None, full_name=None, **kwargs):
""" """
Returns Tuple[Union[NamespaceInfo, FileIO, None], Optional[bool]] Returns Tuple[Union[NamespaceInfo, FileIO, None], Optional[bool]]
""" """
@@ -40,25 +40,25 @@ def get_module_info(infer_state, sys_path=None, full_name=None, **kwargs):
sys.path = temp sys.path = temp
def list_module_names(infer_state, search_path): def list_module_names(inference_state, search_path):
return [ return [
force_unicode(name) force_unicode(name)
for module_loader, name, is_pkg in iter_modules(search_path) for module_loader, name, is_pkg in iter_modules(search_path)
] ]
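
list_module_names walks a search path with pkgutil.iter_modules; a standalone sketch of that lookup, assuming only the standard library:

from pkgutil import iter_modules


def list_module_names(search_path=None):
    # search_path=None lists the top-level modules reachable on sys.path.
    return [name for _loader, name, _is_pkg in iter_modules(search_path)]


names = list_module_names()
assert isinstance(names, list) and len(names) > 0
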
def get_builtin_module_names(infer_state): def get_builtin_module_names(inference_state):
return list(map(force_unicode, sys.builtin_module_names)) return list(map(force_unicode, sys.builtin_module_names))
def _test_raise_error(infer_state, exception_type): def _test_raise_error(inference_state, exception_type):
""" """
Raise an error to simulate certain problems for unit tests. Raise an error to simulate certain problems for unit tests.
""" """
raise exception_type raise exception_type
def _test_print(infer_state, stderr=None, stdout=None): def _test_print(inference_state, stderr=None, stdout=None):
""" """
Force some prints in the subprocesses. This exists for unit tests. Force some prints in the subprocesses. This exists for unit tests.
""" """
@@ -82,5 +82,5 @@ def _get_init_path(directory_path):
return None return None
def safe_literal_eval(infer_state, value): def safe_literal_eval(inference_state, value):
return parser_utils.safe_literal_eval(value) return parser_utils.safe_literal_eval(value)

View File

@@ -14,7 +14,7 @@ from jedi.inference.names import AbstractNameDefinition, ValueNameMixin, \
from jedi.inference.base_value import Value, ValueSet, NO_VALUES from jedi.inference.base_value import Value, ValueSet, NO_VALUES
from jedi.inference.lazy_value import LazyKnownValue from jedi.inference.lazy_value import LazyKnownValue
from jedi.inference.compiled.access import _sentinel from jedi.inference.compiled.access import _sentinel
from jedi.inference.cache import infer_state_function_cache from jedi.inference.cache import inference_state_function_cache
from jedi.inference.helpers import reraise_getitem_errors from jedi.inference.helpers import reraise_getitem_errors
from jedi.inference.signature import BuiltinSignature from jedi.inference.signature import BuiltinSignature
@@ -41,15 +41,15 @@ class CheckAttribute(object):
class CompiledObject(Value): class CompiledObject(Value):
def __init__(self, infer_state, access_handle, parent_context=None): def __init__(self, inference_state, access_handle, parent_context=None):
super(CompiledObject, self).__init__(infer_state, parent_context) super(CompiledObject, self).__init__(inference_state, parent_context)
self.access_handle = access_handle self.access_handle = access_handle
def py__call__(self, arguments): def py__call__(self, arguments):
return_annotation = self.access_handle.get_return_annotation() return_annotation = self.access_handle.get_return_annotation()
if return_annotation is not None: if return_annotation is not None:
# TODO the return annotation may also be a string. # TODO the return annotation may also be a string.
return create_from_access_path(self.infer_state, return_annotation).execute_annotation() return create_from_access_path(self.inference_state, return_annotation).execute_annotation()
try: try:
self.access_handle.getattr_paths(u'__call__') self.access_handle.getattr_paths(u'__call__')
@@ -59,26 +59,26 @@ class CompiledObject(Value):
if self.access_handle.is_class(): if self.access_handle.is_class():
from jedi.inference.value import CompiledInstance from jedi.inference.value import CompiledInstance
return ValueSet([ return ValueSet([
CompiledInstance(self.infer_state, self.parent_context, self, arguments) CompiledInstance(self.inference_state, self.parent_context, self, arguments)
]) ])
else: else:
return ValueSet(self._execute_function(arguments)) return ValueSet(self._execute_function(arguments))
@CheckAttribute() @CheckAttribute()
def py__class__(self): def py__class__(self):
return create_from_access_path(self.infer_state, self.access_handle.py__class__()) return create_from_access_path(self.inference_state, self.access_handle.py__class__())
@CheckAttribute() @CheckAttribute()
def py__mro__(self): def py__mro__(self):
return (self,) + tuple( return (self,) + tuple(
create_from_access_path(self.infer_state, access) create_from_access_path(self.inference_state, access)
for access in self.access_handle.py__mro__accesses() for access in self.access_handle.py__mro__accesses()
) )
@CheckAttribute() @CheckAttribute()
def py__bases__(self): def py__bases__(self):
return tuple( return tuple(
create_from_access_path(self.infer_state, access) create_from_access_path(self.inference_state, access)
for access in self.access_handle.py__bases__() for access in self.access_handle.py__bases__()
) )
@@ -178,7 +178,7 @@ class CompiledObject(Value):
search_global shouldn't change the fact that there's one dict, this way search_global shouldn't change the fact that there's one dict, this way
there's only one `object`. there's only one `object`.
""" """
return CompiledObjectFilter(self.infer_state, self, is_instance) return CompiledObjectFilter(self.inference_state, self, is_instance)
@CheckAttribute(u'__getitem__') @CheckAttribute(u'__getitem__')
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
@@ -187,7 +187,7 @@ class CompiledObject(Value):
if access is None: if access is None:
return NO_VALUES return NO_VALUES
return ValueSet([create_from_access_path(self.infer_state, access)]) return ValueSet([create_from_access_path(self.inference_state, access)])
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
all_access_paths = self.access_handle.py__getitem__all_values() all_access_paths = self.access_handle.py__getitem__all_values()
@@ -196,7 +196,7 @@ class CompiledObject(Value):
# object. # object.
return super(CompiledObject, self).py__getitem__(index_value_set, valueualized_node) return super(CompiledObject, self).py__getitem__(index_value_set, valueualized_node)
return ValueSet( return ValueSet(
create_from_access_path(self.infer_state, access) create_from_access_path(self.inference_state, access)
for access in all_access_paths for access in all_access_paths
) )
@@ -215,7 +215,7 @@ class CompiledObject(Value):
return return
for access in access_path_list: for access in access_path_list:
yield LazyKnownValue(create_from_access_path(self.infer_state, access)) yield LazyKnownValue(create_from_access_path(self.inference_state, access))
def py__name__(self): def py__name__(self):
return self.access_handle.py__name__() return self.access_handle.py__name__()
@@ -237,12 +237,12 @@ class CompiledObject(Value):
try: try:
# TODO wtf is this? this is exactly the same as the thing # TODO wtf is this? this is exactly the same as the thing
# below. It uses getattr as well. # below. It uses getattr as well.
self.infer_state.builtins_module.access_handle.getattr_paths(name) self.inference_state.builtins_module.access_handle.getattr_paths(name)
except AttributeError: except AttributeError:
continue continue
else: else:
bltn_obj = builtin_from_name(self.infer_state, name) bltn_obj = builtin_from_name(self.inference_state, name)
for result in self.infer_state.execute(bltn_obj, params): for result in self.inference_state.execute(bltn_obj, params):
yield result yield result
for type_ in docstrings.infer_return_types(self): for type_ in docstrings.infer_return_types(self):
yield type_ yield type_
@@ -257,20 +257,20 @@ class CompiledObject(Value):
def execute_operation(self, other, operator): def execute_operation(self, other, operator):
return create_from_access_path( return create_from_access_path(
self.infer_state, self.inference_state,
self.access_handle.execute_operation(other.access_handle, operator) self.access_handle.execute_operation(other.access_handle, operator)
) )
def negate(self): def negate(self):
return create_from_access_path(self.infer_state, self.access_handle.negate()) return create_from_access_path(self.inference_state, self.access_handle.negate())
def get_metaclasses(self): def get_metaclasses(self):
return NO_VALUES return NO_VALUES
class CompiledName(AbstractNameDefinition): class CompiledName(AbstractNameDefinition):
def __init__(self, infer_state, parent_context, name): def __init__(self, inference_state, parent_context, name):
self._infer_state = infer_state self._inference_state = inference_state
self.parent_context = parent_context self.parent_context = parent_context
self.string_name = name self.string_name = name
@@ -296,7 +296,7 @@ class CompiledName(AbstractNameDefinition):
@underscore_memoization @underscore_memoization
def infer(self): def infer(self):
return ValueSet([_create_from_name( return ValueSet([_create_from_name(
self._infer_state, self.parent_context, self.string_name self._inference_state, self.parent_context, self.string_name
)]) )])
@@ -322,12 +322,12 @@ class SignatureParamName(ParamNameInterface, AbstractNameDefinition):
def infer(self): def infer(self):
p = self._signature_param p = self._signature_param
infer_state = self.parent_context.infer_state inference_state = self.parent_context.inference_state
values = NO_VALUES values = NO_VALUES
if p.has_default: if p.has_default:
values = ValueSet([create_from_access_path(infer_state, p.default)]) values = ValueSet([create_from_access_path(inference_state, p.default)])
if p.has_annotation: if p.has_annotation:
annotation = create_from_access_path(infer_state, p.annotation) annotation = create_from_access_path(inference_state, p.annotation)
values |= annotation.execute_with_values() values |= annotation.execute_with_values()
return values return values
@@ -364,8 +364,8 @@ class EmptyCompiledName(AbstractNameDefinition):
completions, just give Jedi the option to return this object. It infers to completions, just give Jedi the option to return this object. It infers to
nothing. nothing.
""" """
def __init__(self, infer_state, name): def __init__(self, inference_state, name):
self.parent_context = infer_state.builtins_module self.parent_context = inference_state.builtins_module
self.string_name = name self.string_name = name
def infer(self): def infer(self):
@@ -375,8 +375,8 @@ class EmptyCompiledName(AbstractNameDefinition):
class CompiledObjectFilter(AbstractFilter): class CompiledObjectFilter(AbstractFilter):
name_class = CompiledName name_class = CompiledName
def __init__(self, infer_state, compiled_object, is_instance=False): def __init__(self, inference_state, compiled_object, is_instance=False):
self._infer_state = infer_state self._inference_state = inference_state
self.compiled_object = compiled_object self.compiled_object = compiled_object
self.is_instance = is_instance self.is_instance = is_instance
@@ -399,7 +399,7 @@ class CompiledObjectFilter(AbstractFilter):
# Always use unicode objects in Python 2 from here. # Always use unicode objects in Python 2 from here.
name = force_unicode(name) name = force_unicode(name)
if (is_descriptor and not self._infer_state.allow_descriptor_getattr) or not has_attribute: if (is_descriptor and not self._inference_state.allow_descriptor_getattr) or not has_attribute:
return [self._get_cached_name(name, is_empty=True)] return [self._get_cached_name(name, is_empty=True)]
if self.is_instance and name not in dir_callback(): if self.is_instance and name not in dir_callback():
@@ -409,7 +409,7 @@ class CompiledObjectFilter(AbstractFilter):
@memoize_method @memoize_method
def _get_cached_name(self, name, is_empty=False): def _get_cached_name(self, name, is_empty=False):
if is_empty: if is_empty:
return EmptyCompiledName(self._infer_state, name) return EmptyCompiledName(self._inference_state, name)
else: else:
return self._create_name(name) return self._create_name(name)
@@ -426,12 +426,12 @@ class CompiledObjectFilter(AbstractFilter):
# ``dir`` doesn't include the type names. # ``dir`` doesn't include the type names.
if not self.is_instance and needs_type_completions: if not self.is_instance and needs_type_completions:
for filter in builtin_from_name(self._infer_state, u'type').get_filters(): for filter in builtin_from_name(self._inference_state, u'type').get_filters():
names += filter.values() names += filter.values()
return names return names
def _create_name(self, name): def _create_name(self, name):
return self.name_class(self._infer_state, self.compiled_object, name) return self.name_class(self._inference_state, self.compiled_object, name)
def __repr__(self): def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.compiled_object) return "<%s: %s>" % (self.__class__.__name__, self.compiled_object)
@@ -507,7 +507,7 @@ def _parse_function_doc(doc):
return param_str, ret return param_str, ret
def _create_from_name(infer_state, compiled_object, name): def _create_from_name(inference_state, compiled_object, name):
access_paths = compiled_object.access_handle.getattr_paths(name, default=None) access_paths = compiled_object.access_handle.getattr_paths(name, default=None)
parent_context = compiled_object parent_context = compiled_object
if parent_context.is_class(): if parent_context.is_class():
@@ -516,26 +516,26 @@ def _create_from_name(infer_state, compiled_object, name):
value = None value = None
for access_path in access_paths: for access_path in access_paths:
value = create_cached_compiled_object( value = create_cached_compiled_object(
infer_state, access_path, parent_context=value inference_state, access_path, parent_context=value
) )
return value return value
def _normalize_create_args(func): def _normalize_create_args(func):
"""The cache doesn't care about keyword vs. normal args.""" """The cache doesn't care about keyword vs. normal args."""
def wrapper(infer_state, obj, parent_context=None): def wrapper(inference_state, obj, parent_context=None):
return func(infer_state, obj, parent_context) return func(inference_state, obj, parent_context)
return wrapper return wrapper
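
The docstring above says the cache doesn't care about keyword vs. normal args; a tiny sketch of why folding both call styles into one positional shape matters for an argument-keyed cache (all names are illustrative):

def normalize_create_args(func):
    # Fold the keyword and the positional call style into one positional
    # shape, so a cache keyed on the argument tuple sees the same call.
    def wrapper(state, obj, parent_context=None):
        return func(state, obj, parent_context)
    return wrapper


seen = []


def _record(state, obj, parent_context):
    key = (state, obj, parent_context)
    if key not in seen:
        seen.append(key)
    return key


create = normalize_create_args(_record)
create('state', 'obj')
create('state', 'obj', parent_context=None)
assert seen == [('state', 'obj', None)]   # both call styles map to one entry
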
def create_from_access_path(infer_state, access_path): def create_from_access_path(inference_state, access_path):
parent_context = None parent_context = None
for name, access in access_path.accesses: for name, access in access_path.accesses:
parent_context = create_cached_compiled_object(infer_state, access, parent_context) parent_context = create_cached_compiled_object(inference_state, access, parent_context)
return parent_context return parent_context
@_normalize_create_args @_normalize_create_args
@infer_state_function_cache() @inference_state_function_cache()
def create_cached_compiled_object(infer_state, access_handle, parent_context): def create_cached_compiled_object(inference_state, access_handle, parent_context):
return CompiledObject(infer_state, access_handle, parent_context) return CompiledObject(inference_state, access_handle, parent_context)

View File

@@ -24,7 +24,7 @@ from parso import parse, ParserSyntaxError
from jedi._compatibility import u from jedi._compatibility import u
from jedi import debug from jedi import debug
from jedi.inference.utils import indent_block from jedi.inference.utils import indent_block
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import iterator_to_value_set, ValueSet, \ from jedi.inference.base_value import iterator_to_value_set, ValueSet, \
NO_VALUES NO_VALUES
from jedi.inference.lazy_value import LazyKnownValues from jedi.inference.lazy_value import LazyKnownValues
@@ -205,7 +205,7 @@ def _infer_for_statement_string(module_value, string):
# will be impossible to use `...` (Ellipsis) as a token. Docstring types # will be impossible to use `...` (Ellipsis) as a token. Docstring types
# don't need to conform with the current grammar. # don't need to conform with the current grammar.
debug.dbg('Parse docstring code %s', string, color='BLUE') debug.dbg('Parse docstring code %s', string, color='BLUE')
grammar = module_value.infer_state.latest_grammar grammar = module_value.inference_state.latest_grammar
try: try:
module = grammar.parse(code.format(indent_block(string)), error_recovery=False) module = grammar.parse(code.format(indent_block(string)), error_recovery=False)
except ParserSyntaxError: except ParserSyntaxError:
@@ -223,7 +223,7 @@ def _infer_for_statement_string(module_value, string):
from jedi.inference.value import FunctionValue from jedi.inference.value import FunctionValue
function_value = FunctionValue( function_value = FunctionValue(
module_value.infer_state, module_value.inference_state,
module_value, module_value,
funcdef funcdef
) )
@@ -243,12 +243,12 @@ def _execute_types_in_stmt(module_value, stmt):
""" """
definitions = module_value.infer_node(stmt) definitions = module_value.infer_node(stmt)
return ValueSet.from_sets( return ValueSet.from_sets(
_execute_array_values(module_value.infer_state, d) _execute_array_values(module_value.inference_state, d)
for d in definitions for d in definitions
) )
def _execute_array_values(infer_state, array): def _execute_array_values(inference_state, array):
""" """
Tuples indicate that there's not just one return value, but the listed Tuples indicate that there's not just one return value, but the listed
ones. `(str, int)` means that it returns a tuple with both types. ones. `(str, int)` means that it returns a tuple with both types.
@@ -258,16 +258,16 @@ def _execute_array_values(infer_state, array):
values = [] values = []
for lazy_value in array.py__iter__(): for lazy_value in array.py__iter__():
objects = ValueSet.from_sets( objects = ValueSet.from_sets(
_execute_array_values(infer_state, typ) _execute_array_values(inference_state, typ)
for typ in lazy_value.infer() for typ in lazy_value.infer()
) )
values.append(LazyKnownValues(objects)) values.append(LazyKnownValues(objects))
return {FakeSequence(infer_state, array.array_type, values)} return {FakeSequence(inference_state, array.array_type, values)}
else: else:
return array.execute_annotation() return array.execute_annotation()
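
Per the docstring above, a tuple such as (str, int) in a docstring stands for the listed member types; a loose sketch of the recursive flattening idea only, since the real function builds lazy sequence values instead of a flat set:

def flatten_docstring_types(annotation):
    # Illustration: a tuple or list of types stands for the listed members.
    if isinstance(annotation, (tuple, list)):
        result = set()
        for item in annotation:
            result |= flatten_docstring_types(item)
        return result
    return {annotation}


assert flatten_docstring_types((str, int)) == {str, int}
assert flatten_docstring_types([str, (int, float)]) == {str, int, float}
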
@infer_state_method_cache() @inference_state_method_cache()
def infer_param(execution_value, param): def infer_param(execution_value, param):
from jedi.inference.value.instance import InstanceArguments from jedi.inference.value.instance import InstanceArguments
from jedi.inference.value import FunctionExecutionValue from jedi.inference.value import FunctionExecutionValue
@@ -294,7 +294,7 @@ def infer_param(execution_value, param):
return types return types
@infer_state_method_cache() @inference_state_method_cache()
@iterator_to_value_set @iterator_to_value_set
def infer_return_types(function_value): def infer_return_types(function_value):
def search_return_in_docstr(code): def search_return_in_docstr(code):

View File

@@ -19,7 +19,7 @@ It works as follows:
from jedi import settings from jedi import settings
from jedi import debug from jedi import debug
from jedi.inference.cache import infer_state_function_cache from jedi.inference.cache import inference_state_function_cache
from jedi.inference import imports from jedi.inference import imports
from jedi.inference.arguments import TreeArguments from jedi.inference.arguments import TreeArguments
from jedi.inference.param import create_default_params from jedi.inference.param import create_default_params
@@ -39,12 +39,12 @@ class DynamicExecutedParams(object):
Simulates being a parameter while actually just being multiple params. Simulates being a parameter while actually just being multiple params.
""" """
def __init__(self, infer_state, executed_params): def __init__(self, inference_state, executed_params):
self.infer_state = infer_state self.inference_state = inference_state
self._executed_params = executed_params self._executed_params = executed_params
def infer(self): def infer(self):
with recursion.execution_allowed(self.infer_state, self) as allowed: with recursion.execution_allowed(self.inference_state, self) as allowed:
# We need to catch recursions that may occur, because an # We need to catch recursions that may occur, because an
# anonymous function can create an anonymous parameter that is # anonymous function can create an anonymous parameter that is
# more or less self referencing. # more or less self referencing.
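
execution_allowed guards against the self-referencing inference described in the comment above; a minimal re-entrancy guard in the same spirit (execution_allowed here is a simplified stand-in, not jedi's implementation):

from contextlib import contextmanager

_running = set()


@contextmanager
def execution_allowed(key):
    # Yield False when the same key is already being processed further up
    # the stack; that cuts off self-referencing inference.
    if key in _running:
        yield False
        return
    _running.add(key)
    try:
        yield True
    finally:
        _running.discard(key)


def infer(node):
    with execution_allowed(node) as allowed:
        if not allowed:
            return 'recursion stopped'
        return infer(node)   # deliberately self-referencing


assert infer('param') == 'recursion stopped'
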
@@ -54,7 +54,7 @@ class DynamicExecutedParams(object):
@debug.increase_indent @debug.increase_indent
def search_params(infer_state, execution_value, funcdef): def search_params(inference_state, execution_value, funcdef):
""" """
A dynamic search for param values. If you try to complete a type: A dynamic search for param values. If you try to complete a type:
@@ -70,7 +70,7 @@ def search_params(infer_state, execution_value, funcdef):
if not settings.dynamic_params: if not settings.dynamic_params:
return create_default_params(execution_value, funcdef) return create_default_params(execution_value, funcdef)
infer_state.dynamic_params_depth += 1 inference_state.dynamic_params_depth += 1
try: try:
path = execution_value.get_root_value().py__file__() path = execution_value.get_root_value().py__file__()
if path is not None and is_stdlib_path(path): if path is not None and is_stdlib_path(path):
@@ -91,7 +91,7 @@ def search_params(infer_state, execution_value, funcdef):
try: try:
module_value = execution_value.get_root_value() module_value = execution_value.get_root_value()
function_executions = _search_function_executions( function_executions = _search_function_executions(
infer_state, inference_state,
module_value, module_value,
funcdef, funcdef,
string_name=string_name, string_name=string_name,
@@ -101,7 +101,7 @@ def search_params(infer_state, execution_value, funcdef):
function_execution.get_executed_params_and_issues()[0] function_execution.get_executed_params_and_issues()[0]
for function_execution in function_executions for function_execution in function_executions
)) ))
params = [DynamicExecutedParams(infer_state, executed_params) params = [DynamicExecutedParams(inference_state, executed_params)
for executed_params in zipped_params] for executed_params in zipped_params]
# Infers the ExecutedParams to types. # Infers the ExecutedParams to types.
else: else:
@@ -110,12 +110,12 @@ def search_params(infer_state, execution_value, funcdef):
debug.dbg('Dynamic param result finished', color='MAGENTA') debug.dbg('Dynamic param result finished', color='MAGENTA')
return params return params
finally: finally:
infer_state.dynamic_params_depth -= 1 inference_state.dynamic_params_depth -= 1
@infer_state_function_cache(default=None) @inference_state_function_cache(default=None)
@to_list @to_list
def _search_function_executions(infer_state, module_value, funcdef, string_name): def _search_function_executions(inference_state, module_value, funcdef, string_name):
""" """
Returns a list of param names. Returns a list of param names.
""" """
@@ -129,7 +129,7 @@ def _search_function_executions(infer_state, module_value, funcdef, string_name)
found_executions = False found_executions = False
i = 0 i = 0
for for_mod_value in imports.get_modules_containing_name( for for_mod_value in imports.get_modules_containing_name(
infer_state, [module_value], string_name): inference_state, [module_value], string_name):
if not isinstance(module_value, ModuleValue): if not isinstance(module_value, ModuleValue):
return return
for name, trailer in _get_possible_nodes(for_mod_value, string_name): for name, trailer in _get_possible_nodes(for_mod_value, string_name):
@@ -138,12 +138,12 @@ def _search_function_executions(infer_state, module_value, funcdef, string_name)
# This is a simple way to stop Jedi's dynamic param recursion # This is a simple way to stop Jedi's dynamic param recursion
# from going wild: The deeper Jedi's in the recursion, the less # from going wild: The deeper Jedi's in the recursion, the less
# code should be inferred. # code should be inferred.
if i * infer_state.dynamic_params_depth > MAX_PARAM_SEARCHES: if i * inference_state.dynamic_params_depth > MAX_PARAM_SEARCHES:
return return
random_value = infer_state.create_value(for_mod_value, name) random_value = inference_state.create_value(for_mod_value, name)
for function_execution in _check_name_for_execution( for function_execution in _check_name_for_execution(
infer_state, random_value, compare_node, name, trailer): inference_state, random_value, compare_node, name, trailer):
found_executions = True found_executions = True
yield function_execution yield function_execution
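
The i * dynamic_params_depth check above shrinks the search budget as recursion gets deeper; a tiny arithmetic illustration (the limit value below is hypothetical, not necessarily jedi's MAX_PARAM_SEARCHES):

MAX_PARAM_SEARCHES = 20   # hypothetical limit for illustration only


def candidates_examined(depth):
    # Count how many call sites would pass the `i * depth > limit` cut-off.
    return sum(1 for i in range(1000) if i * depth <= MAX_PARAM_SEARCHES)


assert candidates_examined(1) > candidates_examined(4) > candidates_examined(10)
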
@@ -178,17 +178,17 @@ def _get_possible_nodes(module_value, func_string_name):
yield name, trailer yield name, trailer
def _check_name_for_execution(infer_state, value, compare_node, name, trailer): def _check_name_for_execution(inference_state, value, compare_node, name, trailer):
from jedi.inference.value.function import FunctionExecutionValue from jedi.inference.value.function import FunctionExecutionValue
def create_func_excs(): def create_func_excs():
arglist = trailer.children[1] arglist = trailer.children[1]
if arglist == ')': if arglist == ')':
arglist = None arglist = None
args = TreeArguments(infer_state, value, arglist, trailer) args = TreeArguments(inference_state, value, arglist, trailer)
if value_node.type == 'classdef': if value_node.type == 'classdef':
created_instance = instance.TreeInstance( created_instance = instance.TreeInstance(
infer_state, inference_state,
v.parent_context, v.parent_context,
v, v,
args args
@@ -198,7 +198,7 @@ def _check_name_for_execution(infer_state, value, compare_node, name, trailer):
else: else:
yield v.get_function_execution(args) yield v.get_function_execution(args)
for v in infer_state.goto_definitions(value, name): for v in inference_state.goto_definitions(value, name):
value_node = v.tree_node value_node = v.tree_node
if compare_node == value_node: if compare_node == value_node:
for func_execution in create_func_excs(): for func_execution in create_func_excs():
@@ -219,9 +219,9 @@ def _check_name_for_execution(infer_state, value, compare_node, name, trailer):
execution_value = next(create_func_excs()) execution_value = next(create_func_excs())
for name, trailer in _get_possible_nodes(module_value, params[0].string_name): for name, trailer in _get_possible_nodes(module_value, params[0].string_name):
if value_node.start_pos < name.start_pos < value_node.end_pos: if value_node.start_pos < name.start_pos < value_node.end_pos:
random_value = infer_state.create_value(execution_value, name) random_value = inference_state.create_value(execution_value, name)
iterator = _check_name_for_execution( iterator = _check_name_for_execution(
infer_state, inference_state,
random_value, random_value,
compare_node, compare_node,
name, name,

View File

@@ -235,7 +235,7 @@ class _BuiltinMappedMethod(Value):
def __init__(self, builtin_value, method, builtin_func): def __init__(self, builtin_value, method, builtin_func):
super(_BuiltinMappedMethod, self).__init__( super(_BuiltinMappedMethod, self).__init__(
builtin_value.infer_state, builtin_value.inference_state,
parent_context=builtin_value parent_context=builtin_value
) )
self._method = method self._method = method
@@ -260,7 +260,7 @@ class SpecialMethodFilter(DictFilter):
def __init__(self, parent_context, string_name, value, builtin_value): def __init__(self, parent_context, string_name, value, builtin_value):
callable_, python_version = value callable_, python_version = value
if python_version is not None and \ if python_version is not None and \
python_version != parent_context.infer_state.environment.version_info.major: python_version != parent_context.inference_state.environment.version_info.major:
raise KeyError raise KeyError
self.parent_context = parent_context self.parent_context = parent_context
@@ -327,8 +327,8 @@ class _AttributeOverwriteMixin(object):
class LazyAttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, class LazyAttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin,
LazyValueWrapper)): LazyValueWrapper)):
def __init__(self, infer_state): def __init__(self, inference_state):
self.infer_state = infer_state self.inference_state = inference_state
class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin,
@@ -344,7 +344,7 @@ def publish_method(method_name, python_version_match=None):
return decorator return decorator
def get_global_filters(infer_state, value, until_position, origin_scope): def get_global_filters(inference_state, value, until_position, origin_scope):
""" """
Returns all filters in order of priority for name resolution. Returns all filters in order of priority for name resolution.
@@ -363,7 +363,7 @@ def get_global_filters(infer_state, value, until_position, origin_scope):
>>> scope >>> scope
<Function: func@3-5> <Function: func@3-5>
>>> value = script._get_module().create_value(scope) >>> value = script._get_module().create_value(scope)
>>> filters = list(get_global_filters(value.infer_state, value, (4, 0), None)) >>> filters = list(get_global_filters(value.inference_state, value, (4, 0), None))
First we get the names from the function scope. First we get the names from the function scope.
@@ -407,4 +407,4 @@ def get_global_filters(infer_state, value, until_position, origin_scope):
value = value.parent_context value = value.parent_context
# Add builtins to the global scope. # Add builtins to the global scope.
yield next(infer_state.builtins_module.get_filters()) yield next(inference_state.builtins_module.get_filters())

View File

@@ -33,9 +33,9 @@ from jedi.inference.gradual.conversion import convert_values
class NameFinder(object): class NameFinder(object):
def __init__(self, infer_state, value, name_value, name_or_str, def __init__(self, inference_state, value, name_value, name_or_str,
position=None, analysis_errors=True): position=None, analysis_errors=True):
self._infer_state = infer_state self._inference_state = inference_state
# Make sure that it's not just a syntax tree node. # Make sure that it's not just a syntax tree node.
self._value = value self._value = value
self._name_value = name_value self._name_value = name_value
@@ -113,7 +113,7 @@ class NameFinder(object):
if lambdef is None or position < lambdef.children[-2].start_pos: if lambdef is None or position < lambdef.children[-2].start_pos:
position = ancestor.start_pos position = ancestor.start_pos
return get_global_filters(self._infer_state, self._value, position, origin_scope) return get_global_filters(self._inference_state, self._value, position, origin_scope)
def get_value_filters(self): def get_value_filters(self):
origin_scope = self._get_origin_scope() origin_scope = self._get_origin_scope()
@@ -171,7 +171,7 @@ class NameFinder(object):
def _check_getattr(self, inst): def _check_getattr(self, inst):
"""Checks for both __getattr__ and __getattribute__ methods""" """Checks for both __getattr__ and __getattribute__ methods"""
# str is important, because it shouldn't be `Name`! # str is important, because it shouldn't be `Name`!
name = compiled.create_simple_object(self._infer_state, self._string_name) name = compiled.create_simple_object(self._inference_state, self._string_name)
# This is a little bit special. `__getattribute__` is in Python # This is a little bit special. `__getattribute__` is in Python
# executed before `__getattr__`. But: I know no use case, where # executed before `__getattr__`. But: I know no use case, where
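
As the comment notes, Python calls __getattribute__ for every lookup and falls back to __getattr__ only when that raises AttributeError; a small demonstration:

class Demo(object):
    existing = 'found normally'

    def __getattribute__(self, name):
        # Called for every attribute access.
        return object.__getattribute__(self, name)

    def __getattr__(self, name):
        # Called only after __getattribute__ raised AttributeError.
        return 'fallback for %s' % name


d = Demo()
assert d.existing == 'found normally'
assert d.missing == 'fallback for missing'
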
@@ -263,7 +263,7 @@ def _check_isinstance_type(value, element, search_name):
# arglist stuff # arglist stuff
arglist = trailer.children[1] arglist = trailer.children[1]
args = TreeArguments(value.infer_state, value, arglist, trailer) args = TreeArguments(value.inference_state, value, arglist, trailer)
param_list = list(args.unpack()) param_list = list(args.unpack())
# Disallow keyword arguments # Disallow keyword arguments
assert len(param_list) == 2 assert len(param_list) == 2
@@ -273,7 +273,7 @@ def _check_isinstance_type(value, element, search_name):
is_instance_call = helpers.call_of_leaf(lazy_value_object.data) is_instance_call = helpers.call_of_leaf(lazy_value_object.data)
# Do a simple get_code comparison. They should just have the same code, # Do a simple get_code comparison. They should just have the same code,
# and everything will be all right. # and everything will be all right.
normalize = value.infer_state.grammar._normalize normalize = value.inference_state.grammar._normalize
assert normalize(is_instance_call) == normalize(call) assert normalize(is_instance_call) == normalize(call)
except AssertionError: except AssertionError:
return None return None

View File

@@ -106,7 +106,7 @@ def _break_check(value, value_scope, flow_scope, node):
def _check_if(value, node): def _check_if(value, node):
with execution_allowed(value.infer_state, node) as allowed: with execution_allowed(value.inference_state, node) as allowed:
if not allowed: if not allowed:
return UNSURE return UNSURE

View File

@@ -10,7 +10,7 @@ import re
from parso import ParserSyntaxError, parse from parso import ParserSyntaxError, parse
from jedi._compatibility import force_unicode from jedi._compatibility import force_unicode
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \ from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \
AbstractAnnotatedClass AbstractAnnotatedClass
@@ -60,7 +60,7 @@ def _infer_annotation_string(value, string, index=None):
def _get_forward_reference_node(value, string): def _get_forward_reference_node(value, string):
try: try:
new_node = value.infer_state.grammar.parse( new_node = value.inference_state.grammar.parse(
force_unicode(string), force_unicode(string),
start_symbol='eval_input', start_symbol='eval_input',
error_recovery=False error_recovery=False
@@ -106,21 +106,21 @@ def _split_comment_param_declaration(decl_text):
return params return params
@infer_state_method_cache() @inference_state_method_cache()
def infer_param(execution_value, param): def infer_param(execution_value, param):
values = _infer_param(execution_value, param) values = _infer_param(execution_value, param)
infer_state = execution_value.infer_state inference_state = execution_value.inference_state
if param.star_count == 1: if param.star_count == 1:
tuple_ = builtin_from_name(infer_state, 'tuple') tuple_ = builtin_from_name(inference_state, 'tuple')
return ValueSet([GenericClass( return ValueSet([GenericClass(
tuple_, tuple_,
generics=(values,), generics=(values,),
) for c in values]) ) for c in values])
elif param.star_count == 2: elif param.star_count == 2:
dct = builtin_from_name(infer_state, 'dict') dct = builtin_from_name(inference_state, 'dict')
return ValueSet([GenericClass( return ValueSet([GenericClass(
dct, dct,
generics=(ValueSet([builtin_from_name(infer_state, 'str')]), values), generics=(ValueSet([builtin_from_name(inference_state, 'str')]), values),
) for c in values]) ) for c in values])
pass pass
return values return values
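
The wrapping above mirrors how an annotation on *args or **kwargs describes the item type while the parameter itself arrives as a tuple or dict; a small typing-level illustration (f is a made-up function):

from typing import get_type_hints


def f(*args: int, **kwargs: float):
    return args, kwargs


hints = get_type_hints(f)
# The declared hints name the *item* types; the received parameters are the
# tuple / dict built from them, which is what the wrapping above models.
assert hints == {'args': int, 'kwargs': float}
a, kw = f(1, 2, x=3.0)
assert isinstance(a, tuple) and isinstance(kw, dict)
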
@@ -190,7 +190,7 @@ def py__annotations__(funcdef):
return dct return dct
@infer_state_method_cache() @inference_state_method_cache()
def infer_return_types(function_execution_value): def infer_return_types(function_execution_value):
""" """
Infers the type of a function's return value, Infers the type of a function's return value,

View File

@@ -87,11 +87,11 @@ def _load_stub_module(module):
return module return module
from jedi.inference.gradual.typeshed import _try_to_load_stub_cached from jedi.inference.gradual.typeshed import _try_to_load_stub_cached
return _try_to_load_stub_cached( return _try_to_load_stub_cached(
module.infer_state, module.inference_state,
import_names=module.string_names, import_names=module.string_names,
python_value_set=ValueSet([module]), python_value_set=ValueSet([module]),
parent_module_value=None, parent_module_value=None,
sys_path=module.infer_state.get_sys_path(), sys_path=module.inference_state.get_sys_path(),
) )

View File

@@ -89,9 +89,9 @@ def _cache_stub_file_map(version_info):
def import_module_decorator(func): def import_module_decorator(func):
@wraps(func) @wraps(func)
def wrapper(infer_state, import_names, parent_module_value, sys_path, prefer_stubs): def wrapper(inference_state, import_names, parent_module_value, sys_path, prefer_stubs):
try: try:
python_value_set = infer_state.module_cache.get(import_names) python_value_set = inference_state.module_cache.get(import_names)
except KeyError: except KeyError:
if parent_module_value is not None and parent_module_value.is_stub(): if parent_module_value is not None and parent_module_value.is_stub():
parent_module_values = parent_module_value.non_stub_value_set parent_module_values = parent_module_value.non_stub_value_set
@@ -104,19 +104,19 @@ def import_module_decorator(func):
# ``os``. # ``os``.
python_parent = next(iter(parent_module_values)) python_parent = next(iter(parent_module_values))
if python_parent is None: if python_parent is None:
python_parent, = infer_state.import_module(('os',), prefer_stubs=False) python_parent, = inference_state.import_module(('os',), prefer_stubs=False)
python_value_set = python_parent.py__getattribute__('path') python_value_set = python_parent.py__getattribute__('path')
else: else:
python_value_set = ValueSet.from_sets( python_value_set = ValueSet.from_sets(
func(infer_state, import_names, p, sys_path,) func(inference_state, import_names, p, sys_path,)
for p in parent_module_values for p in parent_module_values
) )
infer_state.module_cache.add(import_names, python_value_set) inference_state.module_cache.add(import_names, python_value_set)
if not prefer_stubs: if not prefer_stubs:
return python_value_set return python_value_set
stub = _try_to_load_stub_cached(infer_state, import_names, python_value_set, stub = _try_to_load_stub_cached(inference_state, import_names, python_value_set,
parent_module_value, sys_path) parent_module_value, sys_path)
if stub is not None: if stub is not None:
return ValueSet([stub]) return ValueSet([stub])
@@ -125,21 +125,21 @@ def import_module_decorator(func):
return wrapper return wrapper
def _try_to_load_stub_cached(infer_state, import_names, *args, **kwargs): def _try_to_load_stub_cached(inference_state, import_names, *args, **kwargs):
try: try:
return infer_state.stub_module_cache[import_names] return inference_state.stub_module_cache[import_names]
except KeyError: except KeyError:
pass pass
# TODO is this needed? where are the exceptions coming from that make this # TODO is this needed? where are the exceptions coming from that make this
# necessary? Just remove this line. # necessary? Just remove this line.
infer_state.stub_module_cache[import_names] = None inference_state.stub_module_cache[import_names] = None
infer_state.stub_module_cache[import_names] = result = \ inference_state.stub_module_cache[import_names] = result = \
_try_to_load_stub(infer_state, import_names, *args, **kwargs) _try_to_load_stub(inference_state, import_names, *args, **kwargs)
return result return result
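The renamed `_try_to_load_stub_cached` above memoizes per import path and writes a None sentinel into the cache before computing, so that a recursive lookup of the same import during stub loading gets None back instead of recursing forever. A stripped-down sketch of that pattern with hypothetical names (not the Jedi API):

    _cache = {}

    def load_cached(key):
        try:
            return _cache[key]      # hit; the cached value may legitimately be None
        except KeyError:
            pass
        _cache[key] = None          # sentinel written first: breaks recursive re-entry
        _cache[key] = result = _expensive_load(key)
        return result

    def _expensive_load(key):
        # Placeholder for the real work, which may indirectly call load_cached(key).
        return object()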
def _try_to_load_stub(infer_state, import_names, python_value_set, def _try_to_load_stub(inference_state, import_names, python_value_set,
parent_module_value, sys_path): parent_module_value, sys_path):
""" """
Trying to load a stub for a set of import_names. Trying to load a stub for a set of import_names.
@@ -150,7 +150,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set,
if parent_module_value is None and len(import_names) > 1: if parent_module_value is None and len(import_names) > 1:
try: try:
parent_module_value = _try_to_load_stub_cached( parent_module_value = _try_to_load_stub_cached(
infer_state, import_names[:-1], NO_VALUES, inference_state, import_names[:-1], NO_VALUES,
parent_module_value=None, sys_path=sys_path) parent_module_value=None, sys_path=sys_path)
except KeyError: except KeyError:
pass pass
@@ -161,7 +161,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set,
for p in sys_path: for p in sys_path:
init = os.path.join(p, *import_names) + '-stubs' + os.path.sep + '__init__.pyi' init = os.path.join(p, *import_names) + '-stubs' + os.path.sep + '__init__.pyi'
m = _try_to_load_stub_from_file( m = _try_to_load_stub_from_file(
infer_state, inference_state,
python_value_set, python_value_set,
file_io=FileIO(init), file_io=FileIO(init),
import_names=import_names, import_names=import_names,
@@ -185,7 +185,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set,
for file_path in file_paths: for file_path in file_paths:
m = _try_to_load_stub_from_file( m = _try_to_load_stub_from_file(
infer_state, inference_state,
python_value_set, python_value_set,
# The file path should end with .pyi # The file path should end with .pyi
file_io=FileIO(file_path), file_io=FileIO(file_path),
@@ -195,7 +195,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set,
return m return m
# 3. Try to load typeshed # 3. Try to load typeshed
m = _load_from_typeshed(infer_state, python_value_set, parent_module_value, import_names) m = _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names)
if m is not None: if m is not None:
return m return m
@@ -216,7 +216,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set,
for p in check_path: for p in check_path:
m = _try_to_load_stub_from_file( m = _try_to_load_stub_from_file(
infer_state, inference_state,
python_value_set, python_value_set,
file_io=FileIO(os.path.join(p, *names_for_path) + '.pyi'), file_io=FileIO(os.path.join(p, *names_for_path) + '.pyi'),
import_names=import_names, import_names=import_names,
@@ -229,11 +229,11 @@ def _try_to_load_stub(infer_state, import_names, python_value_set,
return None return None
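Taken together, the hunks of `_try_to_load_stub` above try three stub sources in order: a PEP 561 `*-stubs` package on sys.path, a `.pyi` file next to the runtime module, and finally the bundled typeshed. A rough sketch of that resolution order, with hypothetical helper names:

    import os

    def find_stub(import_names, sys_path, module_paths):
        # 1. PEP 561 stub-only packages: <package>-stubs/__init__.pyi on sys.path.
        for p in sys_path:
            init = os.path.join(p, *import_names) + '-stubs' + os.sep + '__init__.pyi'
            if os.path.isfile(init):
                return init
        # 2. A .pyi file shipped next to the runtime module itself.
        for path in module_paths:
            pyi = os.path.splitext(path)[0] + '.pyi'
            if os.path.isfile(pyi):
                return pyi
        # 3. Otherwise fall back to the bundled typeshed copy (not shown here).
        return None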
def _load_from_typeshed(infer_state, python_value_set, parent_module_value, import_names): def _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names):
import_name = import_names[-1] import_name = import_names[-1]
map_ = None map_ = None
if len(import_names) == 1: if len(import_names) == 1:
map_ = _cache_stub_file_map(infer_state.grammar.version_info) map_ = _cache_stub_file_map(inference_state.grammar.version_info)
import_name = _IMPORT_MAP.get(import_name, import_name) import_name = _IMPORT_MAP.get(import_name, import_name)
elif isinstance(parent_module_value, StubModuleValue): elif isinstance(parent_module_value, StubModuleValue):
if not parent_module_value.is_package: if not parent_module_value.is_package:
@@ -247,16 +247,16 @@ def _load_from_typeshed(infer_state, python_value_set, parent_module_value, impo
path = map_.get(import_name) path = map_.get(import_name)
if path is not None: if path is not None:
return _try_to_load_stub_from_file( return _try_to_load_stub_from_file(
infer_state, inference_state,
python_value_set, python_value_set,
file_io=FileIO(path), file_io=FileIO(path),
import_names=import_names, import_names=import_names,
) )
def _try_to_load_stub_from_file(infer_state, python_value_set, file_io, import_names): def _try_to_load_stub_from_file(inference_state, python_value_set, file_io, import_names):
try: try:
stub_module_node = infer_state.parse( stub_module_node = inference_state.parse(
file_io=file_io, file_io=file_io,
cache=True, cache=True,
use_latest_grammar=True use_latest_grammar=True
@@ -266,24 +266,24 @@ def _try_to_load_stub_from_file(infer_state, python_value_set, file_io, import_n
return None return None
else: else:
return create_stub_module( return create_stub_module(
infer_state, python_value_set, stub_module_node, file_io, inference_state, python_value_set, stub_module_node, file_io,
import_names import_names
) )
def create_stub_module(infer_state, python_value_set, stub_module_node, file_io, import_names): def create_stub_module(inference_state, python_value_set, stub_module_node, file_io, import_names):
if import_names == ('typing',): if import_names == ('typing',):
module_cls = TypingModuleWrapper module_cls = TypingModuleWrapper
else: else:
module_cls = StubModuleValue module_cls = StubModuleValue
file_name = os.path.basename(file_io.path) file_name = os.path.basename(file_io.path)
stub_module_value = module_cls( stub_module_value = module_cls(
python_value_set, infer_state, stub_module_node, python_value_set, inference_state, stub_module_node,
file_io=file_io, file_io=file_io,
string_names=import_names, string_names=import_names,
# The code was loaded with latest_grammar, so use # The code was loaded with latest_grammar, so use
# that. # that.
code_lines=get_cached_code_lines(infer_state.latest_grammar, file_io.path), code_lines=get_cached_code_lines(inference_state.latest_grammar, file_io.path),
is_package=file_name == '__init__.pyi', is_package=file_name == '__init__.pyi',
) )
return stub_module_value return stub_module_value

View File

@@ -7,7 +7,7 @@ This file deals with all the typing.py cases.
""" """
from jedi._compatibility import unicode, force_unicode from jedi._compatibility import unicode, force_unicode
from jedi import debug from jedi import debug
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.compiled import builtin_from_name from jedi.inference.compiled import builtin_from_name
from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \ from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \
iterator_to_value_set, ValueWrapper, LazyValueWrapper iterator_to_value_set, ValueWrapper, LazyValueWrapper
@@ -45,8 +45,8 @@ class TypingName(AbstractTreeName):
class _BaseTypingValue(Value): class _BaseTypingValue(Value):
def __init__(self, infer_state, parent_context, tree_name): def __init__(self, inference_state, parent_context, tree_name):
super(_BaseTypingValue, self).__init__(infer_state, parent_context) super(_BaseTypingValue, self).__init__(inference_state, parent_context)
self._tree_name = tree_name self._tree_name = tree_name
@property @property
@@ -71,7 +71,7 @@ class _BaseTypingValue(Value):
# TODO this is obviously not correct, but at least gives us a class if # TODO this is obviously not correct, but at least gives us a class if
# we have none. Some of these objects don't really have a base class in # we have none. Some of these objects don't really have a base class in
# typeshed. # typeshed.
return builtin_from_name(self.infer_state, u'object') return builtin_from_name(self.inference_state, u'object')
@property @property
def name(self): def name(self):
@@ -87,39 +87,39 @@ class TypingModuleName(NameWrapper):
def _remap(self): def _remap(self):
name = self.string_name name = self.string_name
infer_state = self.parent_context.infer_state inference_state = self.parent_context.inference_state
try: try:
actual = _TYPE_ALIAS_TYPES[name] actual = _TYPE_ALIAS_TYPES[name]
except KeyError: except KeyError:
pass pass
else: else:
yield TypeAlias.create_cached(infer_state, self.parent_context, self.tree_name, actual) yield TypeAlias.create_cached(inference_state, self.parent_context, self.tree_name, actual)
return return
if name in _PROXY_CLASS_TYPES: if name in _PROXY_CLASS_TYPES:
yield TypingClassValue.create_cached(infer_state, self.parent_context, self.tree_name) yield TypingClassValue.create_cached(inference_state, self.parent_context, self.tree_name)
elif name in _PROXY_TYPES: elif name in _PROXY_TYPES:
yield TypingValue.create_cached(infer_state, self.parent_context, self.tree_name) yield TypingValue.create_cached(inference_state, self.parent_context, self.tree_name)
elif name == 'runtime': elif name == 'runtime':
# We don't want anything here, not sure what this function is # We don't want anything here, not sure what this function is
# supposed to do, since it just appears in the stubs and shouldn't # supposed to do, since it just appears in the stubs and shouldn't
# have any effects there (because it's never executed). # have any effects there (because it's never executed).
return return
elif name == 'TypeVar': elif name == 'TypeVar':
yield TypeVarClass.create_cached(infer_state, self.parent_context, self.tree_name) yield TypeVarClass.create_cached(inference_state, self.parent_context, self.tree_name)
elif name == 'Any': elif name == 'Any':
yield Any.create_cached(infer_state, self.parent_context, self.tree_name) yield Any.create_cached(inference_state, self.parent_context, self.tree_name)
elif name == 'TYPE_CHECKING': elif name == 'TYPE_CHECKING':
# This is needed for e.g. imports that are only available for type # This is needed for e.g. imports that are only available for type
# checking or are in cycles. The user can then check this variable. # checking or are in cycles. The user can then check this variable.
yield builtin_from_name(infer_state, u'True') yield builtin_from_name(inference_state, u'True')
elif name == 'overload': elif name == 'overload':
yield OverloadFunction.create_cached(infer_state, self.parent_context, self.tree_name) yield OverloadFunction.create_cached(inference_state, self.parent_context, self.tree_name)
elif name == 'NewType': elif name == 'NewType':
yield NewTypeFunction.create_cached(infer_state, self.parent_context, self.tree_name) yield NewTypeFunction.create_cached(inference_state, self.parent_context, self.tree_name)
elif name == 'cast': elif name == 'cast':
# TODO implement cast # TODO implement cast
yield CastFunction.create_cached(infer_state, self.parent_context, self.tree_name) yield CastFunction.create_cached(inference_state, self.parent_context, self.tree_name)
elif name == 'TypedDict': elif name == 'TypedDict':
# TODO doesn't even exist in typeshed/typing.py, yet. But will be # TODO doesn't even exist in typeshed/typing.py, yet. But will be
# added soon. # added soon.
@@ -139,8 +139,8 @@ class TypingModuleFilterWrapper(FilterWrapper):
class _WithIndexBase(_BaseTypingValue): class _WithIndexBase(_BaseTypingValue):
def __init__(self, infer_state, parent_context, name, index_value, value_of_index): def __init__(self, inference_state, parent_context, name, index_value, value_of_index):
super(_WithIndexBase, self).__init__(infer_state, parent_context, name) super(_WithIndexBase, self).__init__(inference_state, parent_context, name)
self._index_value = index_value self._index_value = index_value
self._value_of_index = value_of_index self._value_of_index = value_of_index
@@ -164,7 +164,7 @@ class TypingValueWithIndex(_WithIndexBase):
# Optional is basically just saying it's either None or the actual # Optional is basically just saying it's either None or the actual
# type. # type.
return self.gather_annotation_classes().execute_annotation() \ return self.gather_annotation_classes().execute_annotation() \
| ValueSet([builtin_from_name(self.infer_state, u'None')]) | ValueSet([builtin_from_name(self.inference_state, u'None')])
elif string_name == 'Type': elif string_name == 'Type':
# The type is actually already given in the index_value # The type is actually already given in the index_value
return ValueSet([self._index_value]) return ValueSet([self._index_value])
@@ -174,7 +174,7 @@ class TypingValueWithIndex(_WithIndexBase):
cls = globals()[string_name] cls = globals()[string_name]
return ValueSet([cls( return ValueSet([cls(
self.infer_state, self.inference_state,
self.parent_context, self.parent_context,
self._tree_name, self._tree_name,
self._index_value, self._index_value,
@@ -194,7 +194,7 @@ class TypingValue(_BaseTypingValue):
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
return ValueSet( return ValueSet(
self.index_class.create_cached( self.index_class.create_cached(
self.infer_state, self.inference_state,
self.parent_context, self.parent_context,
self._tree_name, self._tree_name,
index_value, index_value,
@@ -206,7 +206,7 @@ class TypingValue(_BaseTypingValue):
class _TypingClassMixin(object): class _TypingClassMixin(object):
def py__bases__(self): def py__bases__(self):
return [LazyKnownValues( return [LazyKnownValues(
self.infer_state.builtins_module.py__getattribute__('object') self.inference_state.builtins_module.py__getattribute__('object')
)] )]
def get_metaclasses(self): def get_metaclasses(self):
@@ -246,7 +246,7 @@ def _iter_over_arguments(maybe_tuple_value, defining_value):
class TypeAlias(LazyValueWrapper): class TypeAlias(LazyValueWrapper):
def __init__(self, parent_context, origin_tree_name, actual): def __init__(self, parent_context, origin_tree_name, actual):
self.infer_state = parent_context.infer_state self.inference_state = parent_context.inference_state
self.parent_context = parent_context self.parent_context = parent_context
self._origin_tree_name = origin_tree_name self._origin_tree_name = origin_tree_name
self._actual = actual # e.g. builtins.list self._actual = actual # e.g. builtins.list
@@ -263,13 +263,13 @@ class TypeAlias(LazyValueWrapper):
def _get_wrapped_value(self): def _get_wrapped_value(self):
module_name, class_name = self._actual.split('.') module_name, class_name = self._actual.split('.')
if self.infer_state.environment.version_info.major == 2 and module_name == 'builtins': if self.inference_state.environment.version_info.major == 2 and module_name == 'builtins':
module_name = '__builtin__' module_name = '__builtin__'
# TODO use infer_state.import_module? # TODO use inference_state.import_module?
from jedi.inference.imports import Importer from jedi.inference.imports import Importer
module, = Importer( module, = Importer(
self.infer_state, [module_name], self.infer_state.builtins_module self.inference_state, [module_name], self.inference_state.builtins_module
).follow() ).follow()
classes = module.py__getattribute__(class_name) classes = module.py__getattribute__(class_name)
# There should only be one, because it's code that we control. # There should only be one, because it's code that we control.
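`TypeAlias._get_wrapped_value` above resolves a dotted alias target such as 'builtins.list' by importing the module part and looking up the class attribute, with a '__builtin__' fallback when the environment is Python 2. The same lookup expressed in plain Python (illustration only):

    import importlib

    def resolve_alias(actual, is_python2=False):
        # actual is e.g. 'builtins.list'
        module_name, class_name = actual.split('.')
        if is_python2 and module_name == 'builtins':
            module_name = '__builtin__'
        return getattr(importlib.import_module(module_name), class_name)

    assert resolve_alias('builtins.list') is list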
@@ -358,7 +358,7 @@ class TypeVarClass(_BaseTypingValue):
return NO_VALUES return NO_VALUES
return ValueSet([TypeVar.create_cached( return ValueSet([TypeVar.create_cached(
self.infer_state, self.inference_state,
self.parent_context, self.parent_context,
self._tree_name, self._tree_name,
var_name, var_name,
@@ -382,7 +382,7 @@ class TypeVarClass(_BaseTypingValue):
return None return None
else: else:
safe_value = method(default=None) safe_value = method(default=None)
if self.infer_state.environment.version_info.major == 2: if self.inference_state.environment.version_info.major == 2:
if isinstance(safe_value, bytes): if isinstance(safe_value, bytes):
return force_unicode(safe_value) return force_unicode(safe_value)
if isinstance(safe_value, (str, unicode)): if isinstance(safe_value, (str, unicode)):
@@ -391,8 +391,8 @@ class TypeVarClass(_BaseTypingValue):
class TypeVar(_BaseTypingValue): class TypeVar(_BaseTypingValue):
def __init__(self, infer_state, parent_context, tree_name, var_name, unpacked_args): def __init__(self, inference_state, parent_context, tree_name, var_name, unpacked_args):
super(TypeVar, self).__init__(infer_state, parent_context, tree_name) super(TypeVar, self).__init__(inference_state, parent_context, tree_name)
self._var_name = var_name self._var_name = var_name
self._constraints_lazy_values = [] self._constraints_lazy_values = []
@@ -469,7 +469,7 @@ class NewTypeFunction(_BaseTypingValue):
return NO_VALUES return NO_VALUES
return ValueSet( return ValueSet(
NewType( NewType(
self.infer_state, self.inference_state,
valueualized_node.value, valueualized_node.value,
valueualized_node.node, valueualized_node.node,
second_arg.infer(), second_arg.infer(),
@@ -477,8 +477,8 @@ class NewTypeFunction(_BaseTypingValue):
class NewType(Value): class NewType(Value):
def __init__(self, infer_state, parent_context, tree_node, type_value_set): def __init__(self, inference_state, parent_context, tree_node, type_value_set):
super(NewType, self).__init__(infer_state, parent_context) super(NewType, self).__init__(inference_state, parent_context)
self._type_value_set = type_value_set self._type_value_set = type_value_set
self.tree_node = tree_node self.tree_node = tree_node
@@ -643,7 +643,7 @@ class LazyGenericClass(AbstractAnnotatedClass):
self._index_value = index_value self._index_value = index_value
self._value_of_index = value_of_index self._value_of_index = value_of_index
@infer_state_method_cache() @inference_state_method_cache()
def get_generics(self): def get_generics(self):
return list(_iter_over_arguments(self._index_value, self._value_of_index)) return list(_iter_over_arguments(self._index_value, self._value_of_index))
@@ -668,7 +668,7 @@ class LazyAnnotatedBaseClass(object):
if isinstance(base, AbstractAnnotatedClass): if isinstance(base, AbstractAnnotatedClass):
# Here we have to recalculate the given types. # Here we have to recalculate the given types.
yield GenericClass.create_cached( yield GenericClass.create_cached(
base.infer_state, base.inference_state,
base._wrapped_value, base._wrapped_value,
tuple(self._remap_type_vars(base)), tuple(self._remap_type_vars(base)),
) )
@@ -703,5 +703,5 @@ class InstanceWrapper(ValueWrapper):
except IndexError: except IndexError:
pass pass
elif cls.py__name__() == 'Iterator': elif cls.py__name__() == 'Iterator':
return ValueSet([builtin_from_name(self.infer_state, u'None')]) return ValueSet([builtin_from_name(self.inference_state, u'None')])
return self._wrapped_value.py__stop_iteration_returns() return self._wrapped_value.py__stop_iteration_returns()

View File

@@ -3,7 +3,7 @@ import os
from jedi.inference.gradual.typeshed import TYPESHED_PATH, create_stub_module from jedi.inference.gradual.typeshed import TYPESHED_PATH, create_stub_module
def load_proper_stub_module(infer_state, file_io, import_names, module_node): def load_proper_stub_module(inference_state, file_io, import_names, module_node):
""" """
This function is given a random .pyi file and should return the proper This function is given a random .pyi file and should return the proper
module. module.
@@ -20,13 +20,13 @@ def load_proper_stub_module(infer_state, file_io, import_names, module_node):
import_names = import_names[:-1] import_names = import_names[:-1]
if import_names is not None: if import_names is not None:
actual_value_set = infer_state.import_module(import_names, prefer_stubs=False) actual_value_set = inference_state.import_module(import_names, prefer_stubs=False)
if not actual_value_set: if not actual_value_set:
return None return None
stub = create_stub_module( stub = create_stub_module(
infer_state, actual_value_set, module_node, file_io, import_names inference_state, actual_value_set, module_node, file_io, import_names
) )
infer_state.stub_module_cache[import_names] = stub inference_state.stub_module_cache[import_names] = stub
return stub return stub
return None return None

View File

@@ -66,7 +66,7 @@ def infer_call_of_leaf(value, leaf, cut_own_trailer=False):
trailer = leaf.parent trailer = leaf.parent
if trailer.type == 'fstring': if trailer.type == 'fstring':
from jedi.inference import compiled from jedi.inference import compiled
return compiled.get_string_value_set(value.infer_state) return compiled.get_string_value_set(value.inference_state)
# The leaf may not be the last or first child, because there exist three # The leaf may not be the last or first child, because there exist three
# different trailers: `( x )`, `[ x ]` and `.x`. In the first two examples # different trailers: `( x )`, `[ x ]` and `.x`. In the first two examples
@@ -195,7 +195,7 @@ def predefine_names(value, flow_scope, dct):
def is_string(value): def is_string(value):
if value.infer_state.environment.version_info.major == 2: if value.inference_state.environment.version_info.major == 2:
str_classes = (unicode, bytes) str_classes = (unicode, bytes)
else: else:
str_classes = (unicode,) str_classes = (unicode,)
@@ -265,5 +265,5 @@ def parse_dotted_names(nodes, is_import_from, until_node=None):
return level, names return level, names
def values_from_qualified_names(infer_state, *names): def values_from_qualified_names(inference_state, *names):
return infer_state.import_module(names[:-1]).py__getattribute__(names[-1]) return inference_state.import_module(names[:-1]).py__getattribute__(names[-1])

View File

@@ -28,7 +28,7 @@ from jedi.inference import helpers
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference import analysis from jedi.inference import analysis
from jedi.inference.utils import unite from jedi.inference.utils import unite
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.names import ImportName, SubModuleName from jedi.inference.names import ImportName, SubModuleName
from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.typeshed import import_module_decorator from jedi.inference.gradual.typeshed import import_module_decorator
@@ -56,13 +56,13 @@ class ModuleCache(object):
# This memoization is needed, because otherwise we will infinitely loop on # This memoization is needed, because otherwise we will infinitely loop on
# certain imports. # certain imports.
@infer_state_method_cache(default=NO_VALUES) @inference_state_method_cache(default=NO_VALUES)
def infer_import(value, tree_name, is_goto=False): def infer_import(value, tree_name, is_goto=False):
module_value = value.get_root_value() module_value = value.get_root_value()
import_node = search_ancestor(tree_name, 'import_name', 'import_from') import_node = search_ancestor(tree_name, 'import_name', 'import_from')
import_path = import_node.get_path_for_name(tree_name) import_path = import_node.get_path_for_name(tree_name)
from_import_name = None from_import_name = None
infer_state = value.infer_state inference_state = value.inference_state
try: try:
from_names = import_node.get_from_names() from_names = import_node.get_from_names()
except AttributeError: except AttributeError:
@@ -75,7 +75,7 @@ def infer_import(value, tree_name, is_goto=False):
from_import_name = import_path[-1] from_import_name = import_path[-1]
import_path = from_names import_path = from_names
importer = Importer(infer_state, tuple(import_path), importer = Importer(inference_state, tuple(import_path),
module_value, import_node.level) module_value, import_node.level)
types = importer.follow() types = importer.follow()
@@ -101,7 +101,7 @@ def infer_import(value, tree_name, is_goto=False):
if not types: if not types:
path = import_path + [from_import_name] path = import_path + [from_import_name]
importer = Importer(infer_state, tuple(path), importer = Importer(inference_state, tuple(path),
module_value, import_node.level) module_value, import_node.level)
types = importer.follow() types = importer.follow()
# goto only accepts `Name` # goto only accepts `Name`
@@ -183,7 +183,7 @@ def _level_to_base_import_path(project_path, directory, level):
class Importer(object): class Importer(object):
def __init__(self, infer_state, import_path, module_value, level=0): def __init__(self, inference_state, import_path, module_value, level=0):
""" """
An implementation similar to ``__import__``. Use `follow` An implementation similar to ``__import__``. Use `follow`
to actually follow the imports. to actually follow the imports.
@@ -197,7 +197,7 @@ class Importer(object):
:param import_path: List of namespaces (strings or Names). :param import_path: List of namespaces (strings or Names).
""" """
debug.speed('import %s %s' % (import_path, module_value)) debug.speed('import %s %s' % (import_path, module_value))
self._infer_state = infer_state self._inference_state = inference_state
self.level = level self.level = level
self.module_value = module_value self.module_value = module_value
@@ -233,7 +233,7 @@ class Importer(object):
directory = os.path.dirname(path) directory = os.path.dirname(path)
base_import_path, base_directory = _level_to_base_import_path( base_import_path, base_directory = _level_to_base_import_path(
self._infer_state.project._path, directory, level, self._inference_state.project._path, directory, level,
) )
if base_directory is None: if base_directory is None:
# Everything is lost, the relative import does point # Everything is lost, the relative import does point
@@ -265,11 +265,11 @@ class Importer(object):
return self._fixed_sys_path return self._fixed_sys_path
sys_path_mod = ( sys_path_mod = (
self._infer_state.get_sys_path() self._inference_state.get_sys_path()
+ sys_path.check_sys_path_modifications(self.module_value) + sys_path.check_sys_path_modifications(self.module_value)
) )
if self._infer_state.environment.version_info.major == 2: if self._inference_state.environment.version_info.major == 2:
file_path = self.module_value.py__file__() file_path = self.module_value.py__file__()
if file_path is not None: if file_path is not None:
# Python2 uses an old strange way of importing relative imports. # Python2 uses an old strange way of importing relative imports.
@@ -290,7 +290,7 @@ class Importer(object):
value_set = [None] value_set = [None]
for i, name in enumerate(self.import_path): for i, name in enumerate(self.import_path):
value_set = ValueSet.from_sets([ value_set = ValueSet.from_sets([
self._infer_state.import_module( self._inference_state.import_module(
import_names[:i+1], import_names[:i+1],
parent_module_value, parent_module_value,
sys_path sys_path
@@ -311,12 +311,12 @@ class Importer(object):
# add builtin module names # add builtin module names
if search_path is None and in_module is None: if search_path is None and in_module is None:
names += [ImportName(self.module_value, name) names += [ImportName(self.module_value, name)
for name in self._infer_state.compiled_subprocess.get_builtin_module_names()] for name in self._inference_state.compiled_subprocess.get_builtin_module_names()]
if search_path is None: if search_path is None:
search_path = self._sys_path_with_modifications() search_path = self._sys_path_with_modifications()
for name in iter_module_names(self._infer_state, search_path): for name in iter_module_names(self._inference_state, search_path):
if in_module is None: if in_module is None:
n = ImportName(self.module_value, name) n = ImportName(self.module_value, name)
else: else:
@@ -324,7 +324,7 @@ class Importer(object):
names.append(n) names.append(n)
return names return names
def completion_names(self, infer_state, only_modules=False): def completion_names(self, inference_state, only_modules=False):
""" """
        :param only_modules: Indicates whether it's possible to import a :param only_modules: Indicates whether it's possible to import a
definition that is not defined in a module. definition that is not defined in a module.
@@ -374,12 +374,12 @@ class Importer(object):
@plugin_manager.decorate() @plugin_manager.decorate()
@import_module_decorator @import_module_decorator
def import_module(infer_state, import_names, parent_module_value, sys_path): def import_module(inference_state, import_names, parent_module_value, sys_path):
""" """
This method is very similar to importlib's `_gcd_import`. This method is very similar to importlib's `_gcd_import`.
""" """
if import_names[0] in settings.auto_import_modules: if import_names[0] in settings.auto_import_modules:
module = _load_builtin_module(infer_state, import_names, sys_path) module = _load_builtin_module(inference_state, import_names, sys_path)
if module is None: if module is None:
return NO_VALUES return NO_VALUES
return ValueSet([module]) return ValueSet([module])
@@ -388,7 +388,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
if parent_module_value is None: if parent_module_value is None:
# Override the sys.path. It works only good that way. # Override the sys.path. It works only good that way.
# Injecting the path directly into `find_module` did not work. # Injecting the path directly into `find_module` did not work.
file_io_or_ns, is_pkg = infer_state.compiled_subprocess.get_module_info( file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info(
string=import_names[-1], string=import_names[-1],
full_name=module_name, full_name=module_name,
sys_path=sys_path, sys_path=sys_path,
@@ -409,7 +409,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
# not important to be correct. # not important to be correct.
if not isinstance(path, list): if not isinstance(path, list):
path = [path] path = [path]
file_io_or_ns, is_pkg = infer_state.compiled_subprocess.get_module_info( file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info(
string=import_names[-1], string=import_names[-1],
path=path, path=path,
full_name=module_name, full_name=module_name,
@@ -423,17 +423,17 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
if isinstance(file_io_or_ns, ImplicitNSInfo): if isinstance(file_io_or_ns, ImplicitNSInfo):
from jedi.inference.value.namespace import ImplicitNamespaceValue from jedi.inference.value.namespace import ImplicitNamespaceValue
module = ImplicitNamespaceValue( module = ImplicitNamespaceValue(
infer_state, inference_state,
fullname=file_io_or_ns.name, fullname=file_io_or_ns.name,
paths=file_io_or_ns.paths, paths=file_io_or_ns.paths,
) )
elif file_io_or_ns is None: elif file_io_or_ns is None:
module = _load_builtin_module(infer_state, import_names, sys_path) module = _load_builtin_module(inference_state, import_names, sys_path)
if module is None: if module is None:
return NO_VALUES return NO_VALUES
else: else:
module = _load_python_module( module = _load_python_module(
infer_state, file_io_or_ns, sys_path, inference_state, file_io_or_ns, sys_path,
import_names=import_names, import_names=import_names,
is_package=is_pkg, is_package=is_pkg,
) )
@@ -445,14 +445,14 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
return ValueSet([module]) return ValueSet([module])
def _load_python_module(infer_state, file_io, sys_path=None, def _load_python_module(inference_state, file_io, sys_path=None,
import_names=None, is_package=False): import_names=None, is_package=False):
try: try:
return infer_state.module_cache.get_from_path(file_io.path) return inference_state.module_cache.get_from_path(file_io.path)
except KeyError: except KeyError:
pass pass
module_node = infer_state.parse( module_node = inference_state.parse(
file_io=file_io, file_io=file_io,
cache=True, cache=True,
diff_cache=settings.fast_parser, diff_cache=settings.fast_parser,
@@ -461,21 +461,21 @@ def _load_python_module(infer_state, file_io, sys_path=None,
from jedi.inference.value import ModuleValue from jedi.inference.value import ModuleValue
return ModuleValue( return ModuleValue(
infer_state, module_node, inference_state, module_node,
file_io=file_io, file_io=file_io,
string_names=import_names, string_names=import_names,
code_lines=get_cached_code_lines(infer_state.grammar, file_io.path), code_lines=get_cached_code_lines(inference_state.grammar, file_io.path),
is_package=is_package, is_package=is_package,
) )
def _load_builtin_module(infer_state, import_names=None, sys_path=None): def _load_builtin_module(inference_state, import_names=None, sys_path=None):
if sys_path is None: if sys_path is None:
sys_path = infer_state.get_sys_path() sys_path = inference_state.get_sys_path()
dotted_name = '.'.join(import_names) dotted_name = '.'.join(import_names)
assert dotted_name is not None assert dotted_name is not None
module = compiled.load_module(infer_state, dotted_name=dotted_name, sys_path=sys_path) module = compiled.load_module(inference_state, dotted_name=dotted_name, sys_path=sys_path)
if module is None: if module is None:
# The file might raise an ImportError e.g. and therefore not be # The file might raise an ImportError e.g. and therefore not be
# importable. # importable.
@@ -483,13 +483,13 @@ def _load_builtin_module(infer_state, import_names=None, sys_path=None):
return module return module
def _load_module_from_path(infer_state, file_io, base_names): def _load_module_from_path(inference_state, file_io, base_names):
""" """
This should pretty much only be used for get_modules_containing_name. It's This should pretty much only be used for get_modules_containing_name. It's
here to ensure that a random path is still properly loaded into the Jedi here to ensure that a random path is still properly loaded into the Jedi
module structure. module structure.
""" """
e_sys_path = infer_state.get_sys_path() e_sys_path = inference_state.get_sys_path()
path = file_io.path path = file_io.path
if base_names: if base_names:
module_name = os.path.basename(path) module_name = os.path.basename(path)
@@ -503,16 +503,16 @@ def _load_module_from_path(infer_state, file_io, base_names):
import_names, is_package = sys_path.transform_path_to_dotted(e_sys_path, path) import_names, is_package = sys_path.transform_path_to_dotted(e_sys_path, path)
module = _load_python_module( module = _load_python_module(
infer_state, file_io, inference_state, file_io,
sys_path=e_sys_path, sys_path=e_sys_path,
import_names=import_names, import_names=import_names,
is_package=is_package, is_package=is_package,
) )
infer_state.module_cache.add(import_names, ValueSet([module])) inference_state.module_cache.add(import_names, ValueSet([module]))
return module return module
def get_modules_containing_name(infer_state, modules, name): def get_modules_containing_name(inference_state, modules, name):
""" """
Search a name in the directories of modules. Search a name in the directories of modules.
""" """
@@ -530,7 +530,7 @@ def get_modules_containing_name(infer_state, modules, name):
if name not in code: if name not in code:
return None return None
new_file_io = KnownContentFileIO(file_io.path, code) new_file_io = KnownContentFileIO(file_io.path, code)
m = _load_module_from_path(infer_state, new_file_io, base_names) m = _load_module_from_path(inference_state, new_file_io, base_names)
if isinstance(m, compiled.CompiledObject): if isinstance(m, compiled.CompiledObject):
return None return None
return m return m
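For context on the Importer changes above: the loop in the `@@ -290,7` hunk resolves a dotted import one segment at a time, feeding every module found for the current prefix back in as a possible parent for the next segment. Schematically, with a hypothetical `import_one` standing in for the cached `import_module` call:

    def follow_dotted(import_names, import_one):
        # Resolve ('a', 'b', 'c') as import_one(('a',), None),
        # then import_one(('a', 'b'), parent) for every parent found, and so on.
        parents = [None]
        for i in range(len(import_names)):
            parents = [
                module
                for parent in parents
                for module in import_one(import_names[:i + 1], parent)
            ]
            if not parents:
                return []
        return parents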

View File

@@ -66,10 +66,10 @@ class AbstractArbitraryName(AbstractNameDefinition):
""" """
is_value_name = False is_value_name = False
def __init__(self, infer_state, string): def __init__(self, inference_state, string):
self.infer_state = infer_state self.inference_state = inference_state
self.string_name = string self.string_name = string
self.parent_context = infer_state.builtins_module self.parent_context = inference_state.builtins_module
def infer(self): def infer(self):
return NO_VALUES return NO_VALUES
@@ -103,7 +103,7 @@ class AbstractTreeName(AbstractNameDefinition):
return parent_names + (self.tree_name.value,) return parent_names + (self.tree_name.value,)
def goto(self, **kwargs): def goto(self, **kwargs):
return self.parent_context.infer_state.goto(self.parent_context, self.tree_name, **kwargs) return self.parent_context.inference_state.goto(self.parent_context, self.tree_name, **kwargs)
def is_import(self): def is_import(self):
imp = search_ancestor(self.tree_name, 'import_from', 'import_name') imp = search_ancestor(self.tree_name, 'import_from', 'import_name')
@@ -157,7 +157,7 @@ class TreeNameDefinition(AbstractTreeName):
# Refactor this, should probably be here. # Refactor this, should probably be here.
from jedi.inference.syntax_tree import tree_name_to_values from jedi.inference.syntax_tree import tree_name_to_values
parent = self.parent_context parent = self.parent_context
return tree_name_to_values(parent.infer_state, parent, self.tree_name) return tree_name_to_values(parent.inference_state, parent, self.tree_name)
@property @property
def api_type(self): def api_type(self):
@@ -346,7 +346,7 @@ class ImportName(AbstractNameDefinition):
def infer(self): def infer(self):
from jedi.inference.imports import Importer from jedi.inference.imports import Importer
m = self._from_module_value m = self._from_module_value
return Importer(m.infer_state, [self.string_name], m, level=self._level).follow() return Importer(m.inference_state, [self.string_name], m, level=self._level).follow()
def goto(self): def goto(self):
return [m.name for m in self.infer()] return [m.name for m in self.infer()]

View File

@@ -145,13 +145,13 @@ def get_executed_params_and_issues(execution_value, arguments):
var_arg_iterator.push_back((key, argument)) var_arg_iterator.push_back((key, argument))
break break
lazy_value_list.append(argument) lazy_value_list.append(argument)
seq = iterable.FakeSequence(execution_value.infer_state, u'tuple', lazy_value_list) seq = iterable.FakeSequence(execution_value.inference_state, u'tuple', lazy_value_list)
result_arg = LazyKnownValue(seq) result_arg = LazyKnownValue(seq)
elif param.star_count == 2: elif param.star_count == 2:
if argument is not None: if argument is not None:
too_many_args(argument) too_many_args(argument)
# **kwargs param # **kwargs param
dct = iterable.FakeDict(execution_value.infer_state, dict(non_matching_keys)) dct = iterable.FakeDict(execution_value.inference_state, dict(non_matching_keys))
result_arg = LazyKnownValue(dct) result_arg = LazyKnownValue(dct)
non_matching_keys = {} non_matching_keys = {}
else: else:
@@ -235,11 +235,11 @@ def _error_argument_count(funcdef, actual_count):
def _create_default_param(execution_value, param): def _create_default_param(execution_value, param):
if param.star_count == 1: if param.star_count == 1:
result_arg = LazyKnownValue( result_arg = LazyKnownValue(
iterable.FakeSequence(execution_value.infer_state, u'tuple', []) iterable.FakeSequence(execution_value.inference_state, u'tuple', [])
) )
elif param.star_count == 2: elif param.star_count == 2:
result_arg = LazyKnownValue( result_arg = LazyKnownValue(
iterable.FakeDict(execution_value.infer_state, {}) iterable.FakeDict(execution_value.inference_state, {})
) )
elif param.default is None: elif param.default is None:
result_arg = LazyUnknownValue() result_arg = LazyUnknownValue()

View File

@@ -1,6 +1,6 @@
from jedi.inference.cache import infer_state_function_cache from jedi.inference.cache import inference_state_function_cache
@infer_state_function_cache() @inference_state_function_cache()
def get_yield_exprs(infer_state, funcdef): def get_yield_exprs(inference_state, funcdef):
return list(funcdef.iter_yield_exprs()) return list(funcdef.iter_yield_exprs())
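The `@inference_state_function_cache()` decorator renamed above memoizes a module-level function per InferenceState and per argument tuple. A generic sketch of that kind of per-state memo cache; this is not Jedi's implementation, and it assumes the state object accepts extra attributes and that the arguments are hashable:

    import functools

    def state_function_cache():
        def decorator(func):
            @functools.wraps(func)
            def wrapper(state, *args):
                # One memo dict per state object, keyed by function and arguments.
                memo = state.__dict__.setdefault('_memo', {}).setdefault(func, {})
                try:
                    return memo[args]
                except KeyError:
                    memo[args] = result = func(state, *args)
                    return result
            return wrapper
        return decorator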

View File

@@ -56,12 +56,12 @@ class RecursionDetector(object):
@contextmanager @contextmanager
def execution_allowed(infer_state, node): def execution_allowed(inference_state, node):
""" """
A decorator to detect recursions in statements. In a recursion a statement A decorator to detect recursions in statements. In a recursion a statement
at the same place, in the same module may not be executed two times. at the same place, in the same module may not be executed two times.
""" """
pushed_nodes = infer_state.recursion_detector.pushed_nodes pushed_nodes = inference_state.recursion_detector.pushed_nodes
if node in pushed_nodes: if node in pushed_nodes:
debug.warning('catched stmt recursion: %s @%s', node, debug.warning('catched stmt recursion: %s @%s', node,
@@ -78,7 +78,7 @@ def execution_allowed(infer_state, node):
def execution_recursion_decorator(default=NO_VALUES): def execution_recursion_decorator(default=NO_VALUES):
def decorator(func): def decorator(func):
def wrapper(self, **kwargs): def wrapper(self, **kwargs):
detector = self.infer_state.execution_recursion_detector detector = self.inference_state.execution_recursion_detector
limit_reached = detector.push_execution(self) limit_reached = detector.push_execution(self)
try: try:
if limit_reached: if limit_reached:
@@ -96,8 +96,8 @@ class ExecutionRecursionDetector(object):
""" """
Catches recursions of executions. Catches recursions of executions.
""" """
def __init__(self, infer_state): def __init__(self, inference_state):
self._infer_state = infer_state self._inference_state = inference_state
self._recursion_level = 0 self._recursion_level = 0
self._parent_execution_funcs = [] self._parent_execution_funcs = []
@@ -117,7 +117,7 @@ class ExecutionRecursionDetector(object):
module = execution.get_root_value() module = execution.get_root_value()
if module == self._infer_state.builtins_module: if module == self._inference_state.builtins_module:
# We have control over builtins so we know they are not recursing # We have control over builtins so we know they are not recursing
# like crazy. Therefore we just let them execute always, because # like crazy. Therefore we just let them execute always, because
# they usually just help a lot with getting good results. # they usually just help a lot with getting good results.
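The `execution_allowed` context manager above guards statement inference against re-entering the same node: the node is recorded on entry and the managed block only gets True if that node was not already on the stack. A minimal standalone sketch of that guard (module-level state instead of Jedi's per-state detector):

    from contextlib import contextmanager

    _pushed_nodes = []

    @contextmanager
    def execution_allowed(node):
        # Refuse to re-enter a node that is already being inferred.
        if node in _pushed_nodes:
            yield False
            return
        _pushed_nodes.append(node)
        try:
            yield True
        finally:
            _pushed_nodes.pop()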

View File

@@ -40,7 +40,7 @@ def _iter_nodes_for_param(param_name):
values = _to_callables(value, trailer) values = _to_callables(value, trailer)
args = TreeArguments.create_cached( args = TreeArguments.create_cached(
execution_value.infer_state, execution_value.inference_state,
value=value, value=value,
argument_node=trailer.children[1], argument_node=trailer.children[1],
trailer=trailer, trailer=trailer,

View File

@@ -23,7 +23,7 @@ from jedi.inference.value import TreeInstance
from jedi.inference.finder import NameFinder from jedi.inference.finder import NameFinder
from jedi.inference.helpers import is_string, is_literal, is_number from jedi.inference.helpers import is_string, is_literal, is_number
from jedi.inference.compiled.access import COMPARISON_OPERATORS from jedi.inference.compiled.access import COMPARISON_OPERATORS
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.gradual.stub_value import VersionInfo from jedi.inference.gradual.stub_value import VersionInfo
from jedi.inference.gradual import annotation from jedi.inference.gradual import annotation
from jedi.inference.value.decorator import Decoratee from jedi.inference.value.decorator import Decoratee
@@ -41,14 +41,14 @@ def _limit_value_infers(func):
""" """
def wrapper(value, *args, **kwargs): def wrapper(value, *args, **kwargs):
n = value.tree_node n = value.tree_node
infer_state = value.infer_state inference_state = value.inference_state
try: try:
infer_state.inferred_element_counts[n] += 1 inference_state.inferred_element_counts[n] += 1
if infer_state.inferred_element_counts[n] > 300: if inference_state.inferred_element_counts[n] > 300:
debug.warning('In value %s there were too many inferences.', n) debug.warning('In value %s there were too many inferences.', n)
return NO_VALUES return NO_VALUES
except KeyError: except KeyError:
infer_state.inferred_element_counts[n] = 1 inference_state.inferred_element_counts[n] = 1
return func(value, *args, **kwargs) return func(value, *args, **kwargs)
return wrapper return wrapper
@@ -70,7 +70,7 @@ def _py__stop_iteration_returns(generators):
@_limit_value_infers @_limit_value_infers
def infer_node(value, element): def infer_node(value, element):
debug.dbg('infer_node %s@%s in %s', element, element.start_pos, value) debug.dbg('infer_node %s@%s in %s', element, element.start_pos, value)
infer_state = value.infer_state inference_state = value.inference_state
typ = element.type typ = element.type
if typ in ('name', 'number', 'string', 'atom', 'strings', 'keyword', 'fstring'): if typ in ('name', 'number', 'string', 'atom', 'strings', 'keyword', 'fstring'):
return infer_atom(value, element) return infer_atom(value, element)
@@ -91,7 +91,7 @@ def infer_node(value, element):
if trailer == '**': # has a power operation. if trailer == '**': # has a power operation.
right = value.infer_node(children[i + 1]) right = value.infer_node(children[i + 1])
value_set = _infer_comparison( value_set = _infer_comparison(
infer_state, inference_state,
value, value,
value_set, value_set,
trailer, trailer,
@@ -105,7 +105,7 @@ def infer_node(value, element):
return value_set return value_set
elif typ in ('testlist_star_expr', 'testlist',): elif typ in ('testlist_star_expr', 'testlist',):
# The implicit tuple in statements. # The implicit tuple in statements.
return ValueSet([iterable.SequenceLiteralValue(infer_state, value, element)]) return ValueSet([iterable.SequenceLiteralValue(inference_state, value, element)])
elif typ in ('not_test', 'factor'): elif typ in ('not_test', 'factor'):
value_set = value.infer_node(element.children[-1]) value_set = value.infer_node(element.children[-1])
for operator in element.children[:-1]: for operator in element.children[:-1]:
@@ -122,7 +122,7 @@ def infer_node(value, element):
if element.value not in ('.', '...'): if element.value not in ('.', '...'):
origin = element.parent origin = element.parent
raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin)) raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin))
return ValueSet([compiled.builtin_from_name(infer_state, u'Ellipsis')]) return ValueSet([compiled.builtin_from_name(inference_state, u'Ellipsis')])
elif typ == 'dotted_name': elif typ == 'dotted_name':
value_set = infer_atom(value, element.children[0]) value_set = infer_atom(value, element.children[0])
for next_name in element.children[2::2]: for next_name in element.children[2::2]:
@@ -158,7 +158,7 @@ def infer_trailer(value, atom_values, trailer):
if trailer_op == '[': if trailer_op == '[':
trailer_op, node, _ = trailer.children trailer_op, node, _ = trailer.children
return atom_values.get_item( return atom_values.get_item(
infer_subscript_list(value.infer_state, value, node), infer_subscript_list(value.inference_state, value, node),
ValueualizedNode(value, trailer) ValueualizedNode(value, trailer)
) )
else: else:
@@ -170,7 +170,7 @@ def infer_trailer(value, atom_values, trailer):
) )
else: else:
assert trailer_op == '(', 'trailer_op is actually %s' % trailer_op assert trailer_op == '(', 'trailer_op is actually %s' % trailer_op
args = arguments.TreeArguments(value.infer_state, value, node, trailer) args = arguments.TreeArguments(value.inference_state, value, node, trailer)
return atom_values.execute(args) return atom_values.execute(args)
@@ -183,7 +183,7 @@ def infer_atom(value, atom):
if atom.type == 'name': if atom.type == 'name':
if atom.value in ('True', 'False', 'None'): if atom.value in ('True', 'False', 'None'):
# Python 2... # Python 2...
return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)]) return ValueSet([compiled.builtin_from_name(value.inference_state, atom.value)])
# This is the first global lookup. # This is the first global lookup.
stmt = tree.search_ancestor( stmt = tree.search_ancestor(
@@ -207,7 +207,7 @@ def infer_atom(value, atom):
elif atom.type == 'keyword': elif atom.type == 'keyword':
# For False/True/None # For False/True/None
if atom.value in ('False', 'True', 'None'): if atom.value in ('False', 'True', 'None'):
return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)]) return ValueSet([compiled.builtin_from_name(value.inference_state, atom.value)])
elif atom.value == 'print': elif atom.value == 'print':
# print e.g. could be inferred like this in Python 2.7 # print e.g. could be inferred like this in Python 2.7
return NO_VALUES return NO_VALUES
@@ -218,17 +218,17 @@ def infer_atom(value, atom):
assert False, 'Cannot infer the keyword %s' % atom assert False, 'Cannot infer the keyword %s' % atom
elif isinstance(atom, tree.Literal): elif isinstance(atom, tree.Literal):
string = value.infer_state.compiled_subprocess.safe_literal_eval(atom.value) string = value.inference_state.compiled_subprocess.safe_literal_eval(atom.value)
return ValueSet([compiled.create_simple_object(value.infer_state, string)]) return ValueSet([compiled.create_simple_object(value.inference_state, string)])
elif atom.type == 'strings': elif atom.type == 'strings':
# Will be multiple string. # Will be multiple string.
value_set = infer_atom(value, atom.children[0]) value_set = infer_atom(value, atom.children[0])
for string in atom.children[1:]: for string in atom.children[1:]:
right = infer_atom(value, string) right = infer_atom(value, string)
value_set = _infer_comparison(value.infer_state, value, value_set, u'+', right) value_set = _infer_comparison(value.inference_state, value, value_set, u'+', right)
return value_set return value_set
elif atom.type == 'fstring': elif atom.type == 'fstring':
return compiled.get_string_value_set(value.infer_state) return compiled.get_string_value_set(value.inference_state)
else: else:
c = atom.children c = atom.children
# Parentheses without commas are not tuples. # Parentheses without commas are not tuples.
@@ -251,7 +251,7 @@ def infer_atom(value, atom):
if comp_for.type in ('comp_for', 'sync_comp_for'): if comp_for.type in ('comp_for', 'sync_comp_for'):
return ValueSet([iterable.comprehension_from_atom( return ValueSet([iterable.comprehension_from_atom(
value.infer_state, value, atom value.inference_state, value, atom
)]) )])
# It's a dict/list/tuple literal. # It's a dict/list/tuple literal.
@@ -262,19 +262,19 @@ def infer_atom(value, atom):
array_node_c = [] array_node_c = []
if c[0] == '{' and (array_node == '}' or ':' in array_node_c or if c[0] == '{' and (array_node == '}' or ':' in array_node_c or
'**' in array_node_c): '**' in array_node_c):
new_value = iterable.DictLiteralValue(value.infer_state, value, atom) new_value = iterable.DictLiteralValue(value.inference_state, value, atom)
else: else:
new_value = iterable.SequenceLiteralValue(value.infer_state, value, atom) new_value = iterable.SequenceLiteralValue(value.inference_state, value, atom)
return ValueSet([new_value]) return ValueSet([new_value])
@_limit_value_infers @_limit_value_infers
def infer_expr_stmt(value, stmt, seek_name=None): def infer_expr_stmt(value, stmt, seek_name=None):
with recursion.execution_allowed(value.infer_state, stmt) as allowed: with recursion.execution_allowed(value.inference_state, stmt) as allowed:
# Here we allow list/set to recurse under certain conditions. To make # Here we allow list/set to recurse under certain conditions. To make
# it possible to resolve stuff like list(set(list(x))), this is # it possible to resolve stuff like list(set(list(x))), this is
# necessary. # necessary.
if not allowed and value.get_root_value() == value.infer_state.builtins_module: if not allowed and value.get_root_value() == value.inference_state.builtins_module:
try: try:
instance = value.var_args.instance instance = value.var_args.instance
except AttributeError: except AttributeError:
@@ -306,7 +306,7 @@ def _infer_expr_stmt(value, stmt, seek_name=None):
if seek_name: if seek_name:
c_node = ValueualizedName(value, seek_name) c_node = ValueualizedName(value, seek_name)
value_set = check_tuple_assignments(value.infer_state, c_node, value_set) value_set = check_tuple_assignments(value.inference_state, c_node, value_set)
first_operator = next(stmt.yield_operators(), None) first_operator = next(stmt.yield_operators(), None)
if first_operator not in ('=', None) and first_operator.type == 'operator': if first_operator not in ('=', None) and first_operator.type == 'operator':
@@ -331,10 +331,10 @@ def _infer_expr_stmt(value, stmt, seek_name=None):
dct = {for_stmt.children[1].value: lazy_value.infer()} dct = {for_stmt.children[1].value: lazy_value.infer()}
with helpers.predefine_names(value, for_stmt, dct): with helpers.predefine_names(value, for_stmt, dct):
t = value.infer_node(rhs) t = value.infer_node(rhs)
left = _infer_comparison(value.infer_state, value, left, operator, t) left = _infer_comparison(value.inference_state, value, left, operator, t)
value_set = left value_set = left
else: else:
value_set = _infer_comparison(value.infer_state, value, left, operator, value_set) value_set = _infer_comparison(value.inference_state, value, left, operator, value_set)
debug.dbg('infer_expr_stmt result %s', value_set) debug.dbg('infer_expr_stmt result %s', value_set)
return value_set return value_set
@@ -358,7 +358,7 @@ def infer_or_test(value, or_test):
types = value.infer_node(right) types = value.infer_node(right)
# Otherwise continue, because of uncertainty. # Otherwise continue, because of uncertainty.
else: else:
types = _infer_comparison(value.infer_state, value, types, operator, types = _infer_comparison(value.inference_state, value, types, operator,
value.infer_node(right)) value.infer_node(right))
debug.dbg('infer_or_test types %s', types) debug.dbg('infer_or_test types %s', types)
return types return types
@@ -377,12 +377,12 @@ def infer_factor(value_set, operator):
b = value.py__bool__() b = value.py__bool__()
if b is None: # Uncertainty. if b is None: # Uncertainty.
return return
yield compiled.create_simple_object(value.infer_state, not b) yield compiled.create_simple_object(value.inference_state, not b)
else: else:
yield value yield value
def _literals_to_types(infer_state, result): def _literals_to_types(inference_state, result):
# Changes literals ('a', 1, 1.0, etc) to its type instances (str(), # Changes literals ('a', 1, 1.0, etc) to its type instances (str(),
# int(), float(), etc). # int(), float(), etc).
new_result = NO_VALUES new_result = NO_VALUES
@@ -390,27 +390,27 @@ def _literals_to_types(infer_state, result):
if is_literal(typ): if is_literal(typ):
# Literals are only valid as long as the operations are # Literals are only valid as long as the operations are
# correct. Otherwise add a value-free instance. # correct. Otherwise add a value-free instance.
cls = compiled.builtin_from_name(infer_state, typ.name.string_name) cls = compiled.builtin_from_name(inference_state, typ.name.string_name)
new_result |= cls.execute_with_values() new_result |= cls.execute_with_values()
else: else:
new_result |= ValueSet([typ]) new_result |= ValueSet([typ])
return new_result return new_result
def _infer_comparison(infer_state, value, left_values, operator, right_values): def _infer_comparison(inference_state, value, left_values, operator, right_values):
if not left_values or not right_values: if not left_values or not right_values:
# illegal slices e.g. cause left/right_result to be None # illegal slices e.g. cause left/right_result to be None
result = (left_values or NO_VALUES) | (right_values or NO_VALUES) result = (left_values or NO_VALUES) | (right_values or NO_VALUES)
return _literals_to_types(infer_state, result) return _literals_to_types(inference_state, result)
else: else:
# I don't think there's a reasonable chance that a string # I don't think there's a reasonable chance that a string
# operation is still correct, once we pass something like six # operation is still correct, once we pass something like six
# objects. # objects.
if len(left_values) * len(right_values) > 6: if len(left_values) * len(right_values) > 6:
return _literals_to_types(infer_state, left_values | right_values) return _literals_to_types(inference_state, left_values | right_values)
else: else:
return ValueSet.from_sets( return ValueSet.from_sets(
_infer_comparison_part(infer_state, value, left, operator, right) _infer_comparison_part(inference_state, value, left, operator, right)
for left in left_values for left in left_values
for right in right_values for right in right_values
) )
@@ -440,8 +440,8 @@ def _is_list(value):
return isinstance(value, iterable.Sequence) and value.array_type == 'list' return isinstance(value, iterable.Sequence) and value.array_type == 'list'
def _bool_to_value(infer_state, bool_): def _bool_to_value(inference_state, bool_):
return compiled.builtin_from_name(infer_state, force_unicode(str(bool_))) return compiled.builtin_from_name(inference_state, force_unicode(str(bool_)))
def _get_tuple_ints(value): def _get_tuple_ints(value):
@@ -461,7 +461,7 @@ def _get_tuple_ints(value):
return numbers return numbers
def _infer_comparison_part(infer_state, value, left, operator, right): def _infer_comparison_part(inference_state, value, left, operator, right):
l_is_num = is_number(left) l_is_num = is_number(left)
r_is_num = is_number(right) r_is_num = is_number(right)
if isinstance(operator, unicode): if isinstance(operator, unicode):
@@ -479,7 +479,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right):
if l_is_num and r_is_num or is_string(left) and is_string(right): if l_is_num and r_is_num or is_string(left) and is_string(right):
return ValueSet([left.execute_operation(right, str_operator)]) return ValueSet([left.execute_operation(right, str_operator)])
elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right): elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right):
return ValueSet([iterable.MergedArray(infer_state, (left, right))]) return ValueSet([iterable.MergedArray(inference_state, (left, right))])
elif str_operator == '-': elif str_operator == '-':
if l_is_num and r_is_num: if l_is_num and r_is_num:
return ValueSet([left.execute_operation(right, str_operator)]) return ValueSet([left.execute_operation(right, str_operator)])
@@ -499,18 +499,18 @@ def _infer_comparison_part(infer_state, value, left, operator, right):
if str_operator in ('is', '!=', '==', 'is not'): if str_operator in ('is', '!=', '==', 'is not'):
operation = COMPARISON_OPERATORS[str_operator] operation = COMPARISON_OPERATORS[str_operator]
bool_ = operation(left, right) bool_ = operation(left, right)
return ValueSet([_bool_to_value(infer_state, bool_)]) return ValueSet([_bool_to_value(inference_state, bool_)])
if isinstance(left, VersionInfo): if isinstance(left, VersionInfo):
version_info = _get_tuple_ints(right) version_info = _get_tuple_ints(right)
if version_info is not None: if version_info is not None:
bool_result = compiled.access.COMPARISON_OPERATORS[operator]( bool_result = compiled.access.COMPARISON_OPERATORS[operator](
infer_state.environment.version_info, inference_state.environment.version_info,
tuple(version_info) tuple(version_info)
) )
return ValueSet([_bool_to_value(infer_state, bool_result)]) return ValueSet([_bool_to_value(inference_state, bool_result)])
return ValueSet([_bool_to_value(infer_state, True), _bool_to_value(infer_state, False)]) return ValueSet([_bool_to_value(inference_state, True), _bool_to_value(inference_state, False)])
elif str_operator == 'in': elif str_operator == 'in':
return NO_VALUES return NO_VALUES
@@ -531,7 +531,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right):
return result return result
def _remove_statements(infer_state, value, stmt, name): def _remove_statements(inference_state, value, stmt, name):
""" """
This is the part where statements are being stripped. This is the part where statements are being stripped.
@@ -547,7 +547,7 @@ def _remove_statements(infer_state, value, stmt, name):
@plugin_manager.decorate() @plugin_manager.decorate()
def tree_name_to_values(infer_state, value, tree_name): def tree_name_to_values(inference_state, value, tree_name):
value_set = NO_VALUES value_set = NO_VALUES
module_node = value.get_root_value().tree_node module_node = value.get_root_value().tree_node
# First check for annotations, like: `foo: int = 3` # First check for annotations, like: `foo: int = 3`
@@ -570,15 +570,15 @@ def tree_name_to_values(infer_state, value, tree_name):
if node is None: if node is None:
node = tree_name.parent node = tree_name.parent
if node.type == 'global_stmt': if node.type == 'global_stmt':
value = infer_state.create_value(value, tree_name) value = inference_state.create_value(value, tree_name)
finder = NameFinder(infer_state, value, value, tree_name.value) finder = NameFinder(inference_state, value, value, tree_name.value)
filters = finder.get_global_filters() filters = finder.get_global_filters()
# For global_stmt lookups, we only need the first possible scope, # For global_stmt lookups, we only need the first possible scope,
# which means the function itself. # which means the function itself.
filters = [next(filters)] filters = [next(filters)]
return finder.find(filters, attribute_lookup=False) return finder.find(filters, attribute_lookup=False)
elif node.type not in ('import_from', 'import_name'): elif node.type not in ('import_from', 'import_name'):
value = infer_state.create_value(value, tree_name) value = inference_state.create_value(value, tree_name)
return infer_atom(value, tree_name) return infer_atom(value, tree_name)
typ = node.type typ = node.type
@@ -602,9 +602,9 @@ def tree_name_to_values(infer_state, value, tree_name):
is_async=node.parent.type == 'async_stmt', is_async=node.parent.type == 'async_stmt',
) )
c_node = ValueualizedName(value, tree_name) c_node = ValueualizedName(value, tree_name)
types = check_tuple_assignments(infer_state, c_node, for_types) types = check_tuple_assignments(inference_state, c_node, for_types)
elif typ == 'expr_stmt': elif typ == 'expr_stmt':
types = _remove_statements(infer_state, value, node, tree_name) types = _remove_statements(inference_state, value, node, tree_name)
elif typ == 'with_stmt': elif typ == 'with_stmt':
value_managers = value.infer_node(node.get_test_node_from_name(tree_name)) value_managers = value.infer_node(node.get_test_node_from_name(tree_name))
enter_methods = value_managers.py__getattribute__(u'__enter__') enter_methods = value_managers.py__getattribute__(u'__enter__')
@@ -628,7 +628,7 @@ def tree_name_to_values(infer_state, value, tree_name):
# We don't want to have functions/classes that are created by the same # We don't want to have functions/classes that are created by the same
# tree_node. # tree_node.
@infer_state_method_cache() @inference_state_method_cache()
def _apply_decorators(value, node): def _apply_decorators(value, node):
""" """
Returns the function, that should to be executed in the end. Returns the function, that should to be executed in the end.
@@ -636,7 +636,7 @@ def _apply_decorators(value, node):
""" """
if node.type == 'classdef': if node.type == 'classdef':
decoratee_value = ClassValue( decoratee_value = ClassValue(
value.infer_state, value.inference_state,
parent_context=value, parent_context=value,
tree_node=node tree_node=node
) )
@@ -674,7 +674,7 @@ def _apply_decorators(value, node):
return values return values
def check_tuple_assignments(infer_state, valueualized_name, value_set): def check_tuple_assignments(inference_state, valueualized_name, value_set):
""" """
Checks if tuples are assigned. Checks if tuples are assigned.
""" """
@@ -698,7 +698,7 @@ def check_tuple_assignments(infer_state, valueualized_name, value_set):
return value_set return value_set
def infer_subscript_list(infer_state, value, index): def infer_subscript_list(inference_state, value, index):
""" """
Handles slices in subscript nodes. Handles slices in subscript nodes.
""" """
@@ -724,7 +724,7 @@ def infer_subscript_list(infer_state, value, index):
return ValueSet([iterable.Slice(value, *result)]) return ValueSet([iterable.Slice(value, *result)])
elif index.type == 'subscriptlist': elif index.type == 'subscriptlist':
return ValueSet([iterable.SequenceLiteralValue(infer_state, value, index)]) return ValueSet([iterable.SequenceLiteralValue(inference_state, value, index)])
# No slices # No slices
return value.infer_node(index) return value.infer_node(index)
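The hunks above, like the rest of this commit, are a purely mechanical rename: every occurrence of the old identifier is replaced by the new one across the whole tree. For illustration only, a rename like this could be scripted roughly as sketched below; the root directory and the assumption that a plain substring replace is safe are mine, and this is not necessarily how the commit was actually produced.

    # Hedged sketch only -- not the tooling used for this commit.
    # Rewrites every *.py file under a tree, replacing the old identifier.
    # A plain substring replace is assumed to be safe here because
    # "infer_state" only occurs inside identifiers that are themselves
    # being renamed (infer_state_method_cache, _infer_state, ...).
    from pathlib import Path

    OLD, NEW = 'infer_state', 'inference_state'

    def rename_tree(root):
        for path in Path(root).rglob('*.py'):
            text = path.read_text(encoding='utf-8')
            if OLD in text:
                path.write_text(text.replace(OLD, NEW), encoding='utf-8')
                print('rewrote', path)

    if __name__ == '__main__':
        rename_tree('jedi')   # assumed project root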
@@ -1,7 +1,7 @@
import os import os
from jedi._compatibility import unicode, force_unicode, all_suffixes from jedi._compatibility import unicode, force_unicode, all_suffixes
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import ValueualizedNode from jedi.inference.base_value import ValueualizedNode
from jedi.inference.helpers import is_string from jedi.inference.helpers import is_string
from jedi.common.utils import traverse_parents from jedi.common.utils import traverse_parents
@@ -92,7 +92,7 @@ def _paths_from_list_modifications(module_value, trailer1, trailer2):
yield abs_path yield abs_path
@infer_state_method_cache(default=[]) @inference_state_method_cache(default=[])
def check_sys_path_modifications(module_value): def check_sys_path_modifications(module_value):
""" """
Detect sys.path modifications within module. Detect sys.path modifications within module.
@@ -130,20 +130,20 @@ def check_sys_path_modifications(module_value):
return added return added
def discover_buildout_paths(infer_state, script_path): def discover_buildout_paths(inference_state, script_path):
buildout_script_paths = set() buildout_script_paths = set()
for buildout_script_path in _get_buildout_script_paths(script_path): for buildout_script_path in _get_buildout_script_paths(script_path):
for path in _get_paths_from_buildout_script(infer_state, buildout_script_path): for path in _get_paths_from_buildout_script(inference_state, buildout_script_path):
buildout_script_paths.add(path) buildout_script_paths.add(path)
return buildout_script_paths return buildout_script_paths
def _get_paths_from_buildout_script(infer_state, buildout_script_path): def _get_paths_from_buildout_script(inference_state, buildout_script_path):
file_io = FileIO(buildout_script_path) file_io = FileIO(buildout_script_path)
try: try:
module_node = infer_state.parse( module_node = inference_state.parse(
file_io=file_io, file_io=file_io,
cache=True, cache=True,
cache_path=settings.cache_directory cache_path=settings.cache_directory
@@ -154,9 +154,9 @@ def _get_paths_from_buildout_script(infer_state, buildout_script_path):
from jedi.inference.value import ModuleValue from jedi.inference.value import ModuleValue
module = ModuleValue( module = ModuleValue(
infer_state, module_node, file_io, inference_state, module_node, file_io,
string_names=None, string_names=None,
code_lines=get_cached_code_lines(infer_state.grammar, buildout_script_path), code_lines=get_cached_code_lines(inference_state.grammar, buildout_script_path),
) )
for path in check_sys_path_modifications(module): for path in check_sys_path_modifications(module):
yield path yield path
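Several of the renamed helpers above are wrapped in @inference_state_method_cache(default=...). As a rough mental model only (jedi's real decorator lives in jedi.inference.cache and is keyed on the InferenceState; the recursion behaviour shown here is an assumption inferred from how `default` is passed), such a cache can be sketched like this:

    # Hedged sketch of a per-object method cache with a recursion default.
    # This is an illustration, not jedi's actual implementation.
    import functools

    _NO_DEFAULT = object()

    def method_cache(default=_NO_DEFAULT):
        def decorator(func):
            memo = {}   # (id(obj), args) -> cached result or the default

            @functools.wraps(func)
            def wrapper(obj, *args):
                key = (id(obj), args)
                if key in memo:
                    # Either a finished result, or `default` pre-seeded below,
                    # which breaks recursive re-entry of the same call.
                    return memo[key]
                if default is not _NO_DEFAULT:
                    memo[key] = default
                result = func(obj, *args)
                memo[key] = result
                return result
            return wrapper
        return decorator

Used as `@method_cache(default=[])`, a recursive call into the same method with the same arguments returns the default instead of looping.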
@@ -41,7 +41,7 @@ def usages(module_value, tree_name):
modules = set(m for m in modules if m.is_module() and not m.is_compiled()) modules = set(m for m in modules if m.is_module() and not m.is_compiled())
non_matching_usage_maps = {} non_matching_usage_maps = {}
for m in imports.get_modules_containing_name(module_value.infer_state, modules, search_name): for m in imports.get_modules_containing_name(module_value.inference_state, modules, search_name):
for name_leaf in m.tree_node.get_used_names().get(search_name, []): for name_leaf in m.tree_node.get_used_names().get(search_name, []):
new = _find_names(m, name_leaf) new = _find_names(m, name_leaf)
if any(tree_name in found_names for tree_name in new): if any(tree_name in found_names for tree_name in new):
@@ -2,7 +2,7 @@ from parso.python import tree
from jedi._compatibility import use_metaclass from jedi._compatibility import use_metaclass
from jedi import debug from jedi import debug
from jedi.inference.cache import infer_state_method_cache, CachedMetaClass from jedi.inference.cache import inference_state_method_cache, CachedMetaClass
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference import recursion from jedi.inference import recursion
from jedi.inference import docstrings from jedi.inference import docstrings
@@ -97,7 +97,7 @@ class FunctionMixin(object):
if arguments is None: if arguments is None:
arguments = AnonymousArguments() arguments = AnonymousArguments()
return FunctionExecutionValue(self.infer_state, self.parent_context, self, arguments) return FunctionExecutionValue(self.inference_state, self.parent_context, self, arguments)
def get_signatures(self): def get_signatures(self):
return [TreeSignature(f) for f in self.get_signature_functions()] return [TreeSignature(f) for f in self.get_signature_functions()]
@@ -112,14 +112,14 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla
def create(tree_node): def create(tree_node):
if value.is_class(): if value.is_class():
return MethodValue( return MethodValue(
value.infer_state, value.inference_state,
value, value,
parent_context=parent_context, parent_context=parent_context,
tree_node=tree_node tree_node=tree_node
) )
else: else:
return cls( return cls(
value.infer_state, value.inference_state,
parent_context=parent_context, parent_context=parent_context,
tree_node=tree_node tree_node=tree_node
) )
@@ -140,7 +140,7 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla
return function return function
def py__class__(self): def py__class__(self):
c, = values_from_qualified_names(self.infer_state, u'types', u'FunctionType') c, = values_from_qualified_names(self.inference_state, u'types', u'FunctionType')
return c return c
def get_default_param_value(self): def get_default_param_value(self):
@@ -151,8 +151,8 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla
class MethodValue(FunctionValue): class MethodValue(FunctionValue):
def __init__(self, infer_state, class_value, *args, **kwargs): def __init__(self, inference_state, class_value, *args, **kwargs):
super(MethodValue, self).__init__(infer_state, *args, **kwargs) super(MethodValue, self).__init__(inference_state, *args, **kwargs)
self.class_value = class_value self.class_value = class_value
def get_default_param_value(self): def get_default_param_value(self):
@@ -170,16 +170,16 @@ class MethodValue(FunctionValue):
class FunctionExecutionValue(TreeValue): class FunctionExecutionValue(TreeValue):
function_execution_filter = FunctionExecutionFilter function_execution_filter = FunctionExecutionFilter
def __init__(self, infer_state, parent_context, function_value, var_args): def __init__(self, inference_state, parent_context, function_value, var_args):
super(FunctionExecutionValue, self).__init__( super(FunctionExecutionValue, self).__init__(
infer_state, inference_state,
parent_context, parent_context,
function_value.tree_node, function_value.tree_node,
) )
self.function_value = function_value self.function_value = function_value
self.var_args = var_args self.var_args = var_args
@infer_state_method_cache(default=NO_VALUES) @inference_state_method_cache(default=NO_VALUES)
@recursion.execution_recursion_decorator() @recursion.execution_recursion_decorator()
def get_return_values(self, check_yields=False): def get_return_values(self, check_yields=False):
funcdef = self.tree_node funcdef = self.tree_node
@@ -188,7 +188,7 @@ class FunctionExecutionValue(TreeValue):
if check_yields: if check_yields:
value_set = NO_VALUES value_set = NO_VALUES
returns = get_yield_exprs(self.infer_state, funcdef) returns = get_yield_exprs(self.inference_state, funcdef)
else: else:
returns = funcdef.iter_return_stmts() returns = funcdef.iter_return_stmts()
from jedi.inference.gradual.annotation import infer_return_types from jedi.inference.gradual.annotation import infer_return_types
@@ -213,7 +213,7 @@ class FunctionExecutionValue(TreeValue):
try: try:
children = r.children children = r.children
except AttributeError: except AttributeError:
ctx = compiled.builtin_from_name(self.infer_state, u'None') ctx = compiled.builtin_from_name(self.inference_state, u'None')
value_set |= ValueSet([ctx]) value_set |= ValueSet([ctx])
else: else:
value_set |= self.infer_node(children[1]) value_set |= self.infer_node(children[1])
@@ -225,7 +225,7 @@ class FunctionExecutionValue(TreeValue):
def _get_yield_lazy_value(self, yield_expr): def _get_yield_lazy_value(self, yield_expr):
if yield_expr.type == 'keyword': if yield_expr.type == 'keyword':
# `yield` just yields None. # `yield` just yields None.
ctx = compiled.builtin_from_name(self.infer_state, u'None') ctx = compiled.builtin_from_name(self.inference_state, u'None')
yield LazyKnownValue(ctx) yield LazyKnownValue(ctx)
return return
@@ -242,7 +242,7 @@ class FunctionExecutionValue(TreeValue):
# TODO: if is_async, wrap yield statements in Awaitable/async_generator_asend # TODO: if is_async, wrap yield statements in Awaitable/async_generator_asend
for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef', for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef',
'while_stmt', 'if_stmt')) 'while_stmt', 'if_stmt'))
for y in get_yield_exprs(self.infer_state, self.tree_node)] for y in get_yield_exprs(self.inference_state, self.tree_node)]
# Calculate if the yields are placed within the same for loop. # Calculate if the yields are placed within the same for loop.
yields_order = [] yields_order = []
@@ -297,7 +297,7 @@ class FunctionExecutionValue(TreeValue):
until_position=until_position, until_position=until_position,
origin_scope=origin_scope) origin_scope=origin_scope)
@infer_state_method_cache() @inference_state_method_cache()
def get_executed_params_and_issues(self): def get_executed_params_and_issues(self):
return self.var_args.get_executed_params_and_issues(self) return self.var_args.get_executed_params_and_issues(self)
@@ -322,16 +322,16 @@ class FunctionExecutionValue(TreeValue):
""" """
Created to be used by inheritance. Created to be used by inheritance.
""" """
infer_state = self.infer_state inference_state = self.inference_state
is_coroutine = self.tree_node.parent.type in ('async_stmt', 'async_funcdef') is_coroutine = self.tree_node.parent.type in ('async_stmt', 'async_funcdef')
is_generator = bool(get_yield_exprs(infer_state, self.tree_node)) is_generator = bool(get_yield_exprs(inference_state, self.tree_node))
from jedi.inference.gradual.typing import GenericClass from jedi.inference.gradual.typing import GenericClass
if is_coroutine: if is_coroutine:
if is_generator: if is_generator:
if infer_state.environment.version_info < (3, 6): if inference_state.environment.version_info < (3, 6):
return NO_VALUES return NO_VALUES
async_generator_classes = infer_state.typing_module \ async_generator_classes = inference_state.typing_module \
.py__getattribute__('AsyncGenerator') .py__getattribute__('AsyncGenerator')
yield_values = self.merge_yield_values(is_async=True) yield_values = self.merge_yield_values(is_async=True)
@@ -343,9 +343,9 @@ class FunctionExecutionValue(TreeValue):
for c in async_generator_classes for c in async_generator_classes
).execute_annotation() ).execute_annotation()
else: else:
if infer_state.environment.version_info < (3, 5): if inference_state.environment.version_info < (3, 5):
return NO_VALUES return NO_VALUES
async_classes = infer_state.typing_module.py__getattribute__('Coroutine') async_classes = inference_state.typing_module.py__getattribute__('Coroutine')
return_values = self.get_return_values() return_values = self.get_return_values()
# Only the first generic is relevant. # Only the first generic is relevant.
generics = (return_values.py__class__(), NO_VALUES, NO_VALUES) generics = (return_values.py__class__(), NO_VALUES, NO_VALUES)
@@ -354,7 +354,7 @@ class FunctionExecutionValue(TreeValue):
).execute_annotation() ).execute_annotation()
else: else:
if is_generator: if is_generator:
return ValueSet([iterable.Generator(infer_state, self)]) return ValueSet([iterable.Generator(inference_state, self)])
else: else:
return self.get_return_values() return self.get_return_values()
@@ -379,7 +379,7 @@ class OverloadedFunctionValue(FunctionMixin, ValueWrapper):
if matched: if matched:
return value_set return value_set
if self.infer_state.is_analysis: if self.inference_state.is_analysis:
# In this case we want precision. # In this case we want precision.
return NO_VALUES return NO_VALUES
return ValueSet.from_sets(fe.infer() for fe in function_executions) return ValueSet.from_sets(fe.infer() for fe in function_executions)
@@ -411,7 +411,7 @@ def _find_overload_functions(value, tree_node):
while True: while True:
filter = ParserTreeFilter( filter = ParserTreeFilter(
value.infer_state, value.inference_state,
value, value,
until_position=tree_node.start_pos until_position=tree_node.start_pos
) )
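The `infer` logic of FunctionExecutionValue above branches on whether the function is async and whether it contains a `yield`, with version gates for the typing classes. Restated as a plain decision function (illustrative names and labels only, not jedi's API):

    # Hedged restatement of the branch structure above; the returned strings
    # are labels for illustration, not values jedi produces.
    def classify_call_result(is_coroutine, is_generator, version_info):
        if is_coroutine:
            if is_generator:
                # async def containing yield -> typing.AsyncGenerator (3.6+)
                return None if version_info < (3, 6) else 'AsyncGenerator'
            # plain async def -> typing.Coroutine around the return values (3.5+)
            return None if version_info < (3, 5) else 'Coroutine'
        if is_generator:
            return 'Generator'        # regular function containing yield
        return 'return values'        # ordinary function: its inferred returns

    assert classify_call_result(True, True, (3, 6)) == 'AsyncGenerator'
    assert classify_call_result(False, True, (2, 7)) == 'Generator'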
@@ -10,7 +10,7 @@ from jedi.inference.names import ValueName, TreeNameDefinition
from jedi.inference.base_value import Value, NO_VALUES, ValueSet, \ from jedi.inference.base_value import Value, NO_VALUES, ValueSet, \
iterator_to_value_set, ValueWrapper iterator_to_value_set, ValueWrapper
from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.arguments import AnonymousArguments, \ from jedi.inference.arguments import AnonymousArguments, \
ValuesArguments, TreeArgumentsWrapper ValuesArguments, TreeArgumentsWrapper
from jedi.inference.value.function import \ from jedi.inference.value.function import \
@@ -50,7 +50,7 @@ class AnonymousInstanceArguments(AnonymousArguments):
# executions of this function, we have all the params already. # executions of this function, we have all the params already.
return [self_param], [] return [self_param], []
executed_params = list(search_params( executed_params = list(search_params(
execution_value.infer_state, execution_value.inference_state,
execution_value, execution_value,
execution_value.tree_node execution_value.tree_node
)) ))
@@ -61,8 +61,8 @@ class AnonymousInstanceArguments(AnonymousArguments):
class AbstractInstanceValue(Value): class AbstractInstanceValue(Value):
api_type = u'instance' api_type = u'instance'
def __init__(self, infer_state, parent_context, class_value, var_args): def __init__(self, inference_state, parent_context, class_value, var_args):
super(AbstractInstanceValue, self).__init__(infer_state, parent_context) super(AbstractInstanceValue, self).__init__(inference_state, parent_context)
# Generated instances are classes that are just generated by self # Generated instances are classes that are just generated by self
# (No var_args) used. # (No var_args) used.
self.class_value = class_value self.class_value = class_value
@@ -117,7 +117,7 @@ class AbstractInstanceValue(Value):
names = self.get_function_slot_names(u'__get__') names = self.get_function_slot_names(u'__get__')
if names: if names:
if obj is None: if obj is None:
obj = compiled.builtin_from_name(self.infer_state, u'None') obj = compiled.builtin_from_name(self.inference_state, u'None')
return self.execute_function_slots(names, obj, class_value) return self.execute_function_slots(names, obj, class_value)
else: else:
return ValueSet([self]) return ValueSet([self])
@@ -168,7 +168,7 @@ class AbstractInstanceValue(Value):
for generator in self.execute_function_slots(iter_slot_names): for generator in self.execute_function_slots(iter_slot_names):
if generator.is_instance() and not generator.is_compiled(): if generator.is_instance() and not generator.is_compiled():
# `__next__` logic. # `__next__` logic.
if self.infer_state.environment.version_info.major == 2: if self.inference_state.environment.version_info.major == 2:
name = u'next' name = u'next'
else: else:
name = u'__next__' name = u'__next__'
@@ -199,7 +199,7 @@ class AbstractInstanceValue(Value):
bound_method = BoundMethod(self, function) bound_method = BoundMethod(self, function)
yield bound_method.get_function_execution(self.var_args) yield bound_method.get_function_execution(self.var_args)
@infer_state_method_cache() @inference_state_method_cache()
def create_instance_value(self, class_value, node): def create_instance_value(self, class_value, node):
if node.parent.type in ('funcdef', 'classdef'): if node.parent.type in ('funcdef', 'classdef'):
node = node.parent node = node.parent
@@ -219,7 +219,7 @@ class AbstractInstanceValue(Value):
else: else:
return bound_method.get_function_execution() return bound_method.get_function_execution()
elif scope.type == 'classdef': elif scope.type == 'classdef':
class_value = ClassValue(self.infer_state, parent_context, scope) class_value = ClassValue(self.inference_state, parent_context, scope)
return class_value return class_value
elif scope.type in ('comp_for', 'sync_comp_for'): elif scope.type in ('comp_for', 'sync_comp_for'):
# Comprehensions currently don't have a special scope in Jedi. # Comprehensions currently don't have a special scope in Jedi.
@@ -238,9 +238,9 @@ class AbstractInstanceValue(Value):
class CompiledInstance(AbstractInstanceValue): class CompiledInstance(AbstractInstanceValue):
def __init__(self, infer_state, parent_context, class_value, var_args): def __init__(self, inference_state, parent_context, class_value, var_args):
self._original_var_args = var_args self._original_var_args = var_args
super(CompiledInstance, self).__init__(infer_state, parent_context, class_value, var_args) super(CompiledInstance, self).__init__(inference_state, parent_context, class_value, var_args)
@property @property
def name(self): def name(self):
@@ -258,16 +258,16 @@ class CompiledInstance(AbstractInstanceValue):
class TreeInstance(AbstractInstanceValue): class TreeInstance(AbstractInstanceValue):
def __init__(self, infer_state, parent_context, class_value, var_args): def __init__(self, inference_state, parent_context, class_value, var_args):
# I don't think that dynamic append lookups should happen here. That # I don't think that dynamic append lookups should happen here. That
# sounds more like something that should go to py__iter__. # sounds more like something that should go to py__iter__.
if class_value.py__name__() in ['list', 'set'] \ if class_value.py__name__() in ['list', 'set'] \
and parent_context.get_root_value() == infer_state.builtins_module: and parent_context.get_root_value() == inference_state.builtins_module:
# compare the module path with the builtin name. # compare the module path with the builtin name.
if settings.dynamic_array_additions: if settings.dynamic_array_additions:
var_args = iterable.get_dynamic_array_instance(self, var_args) var_args = iterable.get_dynamic_array_instance(self, var_args)
super(TreeInstance, self).__init__(infer_state, parent_context, super(TreeInstance, self).__init__(inference_state, parent_context,
class_value, var_args) class_value, var_args)
self.tree_node = class_value.tree_node self.tree_node = class_value.tree_node
@@ -277,7 +277,7 @@ class TreeInstance(AbstractInstanceValue):
# This can recurse, if the initialization of the class includes a reference # This can recurse, if the initialization of the class includes a reference
# to itself. # to itself.
@infer_state_method_cache(default=None) @inference_state_method_cache(default=None)
def _get_annotated_class_object(self): def _get_annotated_class_object(self):
from jedi.inference.gradual.annotation import py__annotations__, \ from jedi.inference.gradual.annotation import py__annotations__, \
infer_type_vars_for_execution infer_type_vars_for_execution
@@ -313,9 +313,9 @@ class TreeInstance(AbstractInstanceValue):
class AnonymousInstance(TreeInstance): class AnonymousInstance(TreeInstance):
def __init__(self, infer_state, parent_context, class_value): def __init__(self, inference_state, parent_context, class_value):
super(AnonymousInstance, self).__init__( super(AnonymousInstance, self).__init__(
infer_state, inference_state,
parent_context, parent_context,
class_value, class_value,
var_args=AnonymousInstanceArguments(self), var_args=AnonymousInstanceArguments(self),
@@ -327,9 +327,9 @@ class AnonymousInstance(TreeInstance):
class CompiledInstanceName(compiled.CompiledName): class CompiledInstanceName(compiled.CompiledName):
def __init__(self, infer_state, instance, klass, name): def __init__(self, inference_state, instance, klass, name):
super(CompiledInstanceName, self).__init__( super(CompiledInstanceName, self).__init__(
infer_state, inference_state,
klass.parent_context, klass.parent_context,
name.string_name name.string_name
) )
@@ -361,7 +361,7 @@ class CompiledInstanceClassFilter(AbstractFilter):
def _convert(self, names): def _convert(self, names):
klass = self._class_filter.compiled_object klass = self._class_filter.compiled_object
return [ return [
CompiledInstanceName(self._instance.infer_state, self._instance, klass, n) CompiledInstanceName(self._instance.inference_state, self._instance, klass, n)
for n in names for n in names
] ]
@@ -375,7 +375,7 @@ class BoundMethod(FunctionMixin, ValueWrapper):
return True return True
def py__class__(self): def py__class__(self):
c, = values_from_qualified_names(self.infer_state, u'types', u'MethodType') c, = values_from_qualified_names(self.inference_state, u'types', u'MethodType')
return c return c
def _get_arguments(self, arguments): def _get_arguments(self, arguments):
@@ -34,7 +34,7 @@ from jedi.inference.helpers import get_int_or_none, is_string, \
predefine_names, infer_call_of_leaf, reraise_getitem_errors, \ predefine_names, infer_call_of_leaf, reraise_getitem_errors, \
SimpleGetItemNotFound SimpleGetItemNotFound
from jedi.inference.utils import safe_property, to_list from jedi.inference.utils import safe_property, to_list
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \ from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \
publish_method publish_method
from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \ from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \
@@ -44,7 +44,7 @@ from jedi.parser_utils import get_sync_comp_fors
class IterableMixin(object): class IterableMixin(object):
def py__stop_iteration_returns(self): def py__stop_iteration_returns(self):
return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')]) return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
# At the moment, safe values are simple values like "foo", 1 and not # At the moment, safe values are simple values like "foo", 1 and not
# lists/dicts. Therefore as a small speed optimization we can just do the # lists/dicts. Therefore as a small speed optimization we can just do the
@@ -66,7 +66,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
array_type = None array_type = None
def _get_wrapped_value(self): def _get_wrapped_value(self):
generator, = self.infer_state.typing_module \ generator, = self.inference_state.typing_module \
.py__getattribute__('Generator') \ .py__getattribute__('Generator') \
.execute_annotation() .execute_annotation()
return generator return generator
@@ -88,7 +88,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
def py__stop_iteration_returns(self): def py__stop_iteration_returns(self):
return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')]) return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
@property @property
def name(self): def name(self):
@@ -97,8 +97,8 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
class Generator(GeneratorBase): class Generator(GeneratorBase):
"""Handling of `yield` functions.""" """Handling of `yield` functions."""
def __init__(self, infer_state, func_execution_value): def __init__(self, inference_state, func_execution_value):
super(Generator, self).__init__(infer_state) super(Generator, self).__init__(inference_state)
self._func_execution_value = func_execution_value self._func_execution_value = func_execution_value
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
@@ -114,13 +114,13 @@ class Generator(GeneratorBase):
class CompForValue(TreeValue): class CompForValue(TreeValue):
@classmethod @classmethod
def from_comp_for(cls, parent_context, comp_for): def from_comp_for(cls, parent_context, comp_for):
return cls(parent_context.infer_state, parent_context, comp_for) return cls(parent_context.inference_state, parent_context, comp_for)
def get_filters(self, search_global=False, until_position=None, origin_scope=None): def get_filters(self, search_global=False, until_position=None, origin_scope=None):
yield ParserTreeFilter(self) yield ParserTreeFilter(self)
def comprehension_from_atom(infer_state, value, atom): def comprehension_from_atom(inference_state, value, atom):
bracket = atom.children[0] bracket = atom.children[0]
test_list_comp = atom.children[1] test_list_comp = atom.children[1]
@@ -131,7 +131,7 @@ def comprehension_from_atom(infer_state, value, atom):
sync_comp_for = sync_comp_for.children[1] sync_comp_for = sync_comp_for.children[1]
return DictComprehension( return DictComprehension(
infer_state, inference_state,
value, value,
sync_comp_for_node=sync_comp_for, sync_comp_for_node=sync_comp_for,
key_node=test_list_comp.children[0], key_node=test_list_comp.children[0],
@@ -149,7 +149,7 @@ def comprehension_from_atom(infer_state, value, atom):
sync_comp_for = sync_comp_for.children[1] sync_comp_for = sync_comp_for.children[1]
return cls( return cls(
infer_state, inference_state,
defining_value=value, defining_value=value,
sync_comp_for_node=sync_comp_for, sync_comp_for_node=sync_comp_for,
entry_node=test_list_comp.children[0], entry_node=test_list_comp.children[0],
@@ -157,7 +157,7 @@ def comprehension_from_atom(infer_state, value, atom):
class ComprehensionMixin(object): class ComprehensionMixin(object):
@infer_state_method_cache() @inference_state_method_cache()
def _get_comp_for_value(self, parent_context, comp_for): def _get_comp_for_value(self, parent_context, comp_for):
return CompForValue.from_comp_for(parent_context, comp_for) return CompForValue.from_comp_for(parent_context, comp_for)
@@ -192,7 +192,7 @@ class ComprehensionMixin(object):
else: else:
yield iterated yield iterated
@infer_state_method_cache(default=[]) @inference_state_method_cache(default=[])
@to_list @to_list
def _iterate(self): def _iterate(self):
comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node)) comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node))
@@ -224,7 +224,7 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
def _get_wrapped_value(self): def _get_wrapped_value(self):
from jedi.inference.gradual.typing import GenericClass from jedi.inference.gradual.typing import GenericClass
klass = compiled.builtin_from_name(self.infer_state, self.array_type) klass = compiled.builtin_from_name(self.inference_state, self.array_type)
c, = GenericClass(klass, self._get_generics()).execute_annotation() c, = GenericClass(klass, self._get_generics()).execute_annotation()
return c return c
@@ -232,11 +232,11 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
return None # We don't know the length, because of appends. return None # We don't know the length, because of appends.
def py__class__(self): def py__class__(self):
return compiled.builtin_from_name(self.infer_state, self.array_type) return compiled.builtin_from_name(self.inference_state, self.array_type)
@safe_property @safe_property
def parent(self): def parent(self):
return self.infer_state.builtins_module return self.inference_state.builtins_module
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
if self.array_type == 'dict': if self.array_type == 'dict':
@@ -245,9 +245,9 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
class _BaseComprehension(ComprehensionMixin): class _BaseComprehension(ComprehensionMixin):
def __init__(self, infer_state, defining_value, sync_comp_for_node, entry_node): def __init__(self, inference_state, defining_value, sync_comp_for_node, entry_node):
assert sync_comp_for_node.type == 'sync_comp_for' assert sync_comp_for_node.type == 'sync_comp_for'
super(_BaseComprehension, self).__init__(infer_state) super(_BaseComprehension, self).__init__(inference_state)
self._defining_value = defining_value self._defining_value = defining_value
self._sync_comp_for_node = sync_comp_for_node self._sync_comp_for_node = sync_comp_for_node
self._entry_node = entry_node self._entry_node = entry_node
@@ -277,9 +277,9 @@ class GeneratorComprehension(_BaseComprehension, GeneratorBase):
class DictComprehension(ComprehensionMixin, Sequence): class DictComprehension(ComprehensionMixin, Sequence):
array_type = u'dict' array_type = u'dict'
def __init__(self, infer_state, defining_value, sync_comp_for_node, key_node, value_node): def __init__(self, inference_state, defining_value, sync_comp_for_node, key_node, value_node):
assert sync_comp_for_node.type == 'sync_comp_for' assert sync_comp_for_node.type == 'sync_comp_for'
super(DictComprehension, self).__init__(infer_state) super(DictComprehension, self).__init__(inference_state)
self._defining_value = defining_value self._defining_value = defining_value
self._sync_comp_for_node = sync_comp_for_node self._sync_comp_for_node = sync_comp_for_node
self._entry_node = key_node self._entry_node = key_node
@@ -308,14 +308,14 @@ class DictComprehension(ComprehensionMixin, Sequence):
@publish_method('values') @publish_method('values')
def _imitate_values(self): def _imitate_values(self):
lazy_value = LazyKnownValues(self._dict_values()) lazy_value = LazyKnownValues(self._dict_values())
return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) return ValueSet([FakeSequence(self.inference_state, u'list', [lazy_value])])
@publish_method('items') @publish_method('items')
def _imitate_items(self): def _imitate_items(self):
lazy_values = [ lazy_values = [
LazyKnownValue( LazyKnownValue(
FakeSequence( FakeSequence(
self.infer_state, self.inference_state,
u'tuple', u'tuple',
[LazyKnownValues(key), [LazyKnownValues(key),
LazyKnownValues(value)] LazyKnownValues(value)]
@@ -324,7 +324,7 @@ class DictComprehension(ComprehensionMixin, Sequence):
for key, value in self._iterate() for key, value in self._iterate()
] ]
return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)]) return ValueSet([FakeSequence(self.inference_state, u'list', lazy_values)])
def get_mapping_item_values(self): def get_mapping_item_values(self):
return self._dict_keys(), self._dict_values() return self._dict_keys(), self._dict_values()
@@ -341,8 +341,8 @@ class SequenceLiteralValue(Sequence):
'[': u'list', '[': u'list',
'{': u'set'} '{': u'set'}
def __init__(self, infer_state, defining_value, atom): def __init__(self, inference_state, defining_value, atom):
super(SequenceLiteralValue, self).__init__(infer_state) super(SequenceLiteralValue, self).__init__(inference_state)
self.atom = atom self.atom = atom
self._defining_value = defining_value self._defining_value = defining_value
@@ -355,7 +355,7 @@ class SequenceLiteralValue(Sequence):
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
"""Here the index is an int/str. Raises IndexError/KeyError.""" """Here the index is an int/str. Raises IndexError/KeyError."""
if self.array_type == u'dict': if self.array_type == u'dict':
compiled_obj_index = compiled.create_simple_object(self.infer_state, index) compiled_obj_index = compiled.create_simple_object(self.inference_state, index)
for key, value in self.get_tree_entries(): for key, value in self.get_tree_entries():
for k in self._defining_value.infer_node(key): for k in self._defining_value.infer_node(key):
try: try:
@@ -471,27 +471,27 @@ class SequenceLiteralValue(Sequence):
class DictLiteralValue(_DictMixin, SequenceLiteralValue): class DictLiteralValue(_DictMixin, SequenceLiteralValue):
array_type = u'dict' array_type = u'dict'
def __init__(self, infer_state, defining_value, atom): def __init__(self, inference_state, defining_value, atom):
super(SequenceLiteralValue, self).__init__(infer_state) super(SequenceLiteralValue, self).__init__(inference_state)
self._defining_value = defining_value self._defining_value = defining_value
self.atom = atom self.atom = atom
@publish_method('values') @publish_method('values')
def _imitate_values(self): def _imitate_values(self):
lazy_value = LazyKnownValues(self._dict_values()) lazy_value = LazyKnownValues(self._dict_values())
return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) return ValueSet([FakeSequence(self.inference_state, u'list', [lazy_value])])
@publish_method('items') @publish_method('items')
def _imitate_items(self): def _imitate_items(self):
lazy_values = [ lazy_values = [
LazyKnownValue(FakeSequence( LazyKnownValue(FakeSequence(
self.infer_state, u'tuple', self.inference_state, u'tuple',
(LazyTreeValue(self._defining_value, key_node), (LazyTreeValue(self._defining_value, key_node),
LazyTreeValue(self._defining_value, value_node)) LazyTreeValue(self._defining_value, value_node))
)) for key_node, value_node in self.get_tree_entries() )) for key_node, value_node in self.get_tree_entries()
] ]
return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)]) return ValueSet([FakeSequence(self.inference_state, u'list', lazy_values)])
def _dict_keys(self): def _dict_keys(self):
return ValueSet.from_sets( return ValueSet.from_sets(
@@ -504,19 +504,19 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
class _FakeArray(SequenceLiteralValue): class _FakeArray(SequenceLiteralValue):
def __init__(self, infer_state, container, type): def __init__(self, inference_state, container, type):
super(SequenceLiteralValue, self).__init__(infer_state) super(SequenceLiteralValue, self).__init__(inference_state)
self.array_type = type self.array_type = type
self.atom = container self.atom = container
# TODO is this class really needed? # TODO is this class really needed?
class FakeSequence(_FakeArray): class FakeSequence(_FakeArray):
def __init__(self, infer_state, array_type, lazy_value_list): def __init__(self, inference_state, array_type, lazy_value_list):
""" """
type should be one of "tuple", "list" type should be one of "tuple", "list"
""" """
super(FakeSequence, self).__init__(infer_state, None, array_type) super(FakeSequence, self).__init__(inference_state, None, array_type)
self._lazy_value_list = lazy_value_list self._lazy_value_list = lazy_value_list
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
@@ -538,16 +538,16 @@ class FakeSequence(_FakeArray):
class FakeDict(_DictMixin, _FakeArray): class FakeDict(_DictMixin, _FakeArray):
def __init__(self, infer_state, dct): def __init__(self, inference_state, dct):
super(FakeDict, self).__init__(infer_state, dct, u'dict') super(FakeDict, self).__init__(inference_state, dct, u'dict')
self._dct = dct self._dct = dct
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
for key in self._dct: for key in self._dct:
yield LazyKnownValue(compiled.create_simple_object(self.infer_state, key)) yield LazyKnownValue(compiled.create_simple_object(self.inference_state, key))
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
if is_py3 and self.infer_state.environment.version_info.major == 2: if is_py3 and self.inference_state.environment.version_info.major == 2:
# In Python 2 bytes and unicode compare. # In Python 2 bytes and unicode compare.
if isinstance(index, bytes): if isinstance(index, bytes):
index_unicode = force_unicode(index) index_unicode = force_unicode(index)
@@ -569,7 +569,7 @@ class FakeDict(_DictMixin, _FakeArray):
@publish_method('values') @publish_method('values')
def _values(self): def _values(self):
return ValueSet([FakeSequence( return ValueSet([FakeSequence(
self.infer_state, u'tuple', self.inference_state, u'tuple',
[LazyKnownValues(self._dict_values())] [LazyKnownValues(self._dict_values())]
)]) )])
@@ -587,8 +587,8 @@ class FakeDict(_DictMixin, _FakeArray):
class MergedArray(_FakeArray): class MergedArray(_FakeArray):
def __init__(self, infer_state, arrays): def __init__(self, inference_state, arrays):
super(MergedArray, self).__init__(infer_state, arrays, arrays[-1].array_type) super(MergedArray, self).__init__(inference_state, arrays, arrays[-1].array_type)
self._arrays = arrays self._arrays = arrays
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
@@ -657,7 +657,7 @@ def check_array_additions(value, sequence):
return _check_array_additions(value, sequence) return _check_array_additions(value, sequence)
@infer_state_method_cache(default=NO_VALUES) @inference_state_method_cache(default=NO_VALUES)
@debug.increase_indent @debug.increase_indent
def _check_array_additions(value, sequence): def _check_array_additions(value, sequence):
""" """
@@ -675,7 +675,7 @@ def _check_array_additions(value, sequence):
return NO_VALUES return NO_VALUES
def find_additions(value, arglist, add_name): def find_additions(value, arglist, add_name):
params = list(arguments.TreeArguments(value.infer_state, value, arglist).unpack()) params = list(arguments.TreeArguments(value.inference_state, value, arglist).unpack())
result = set() result = set()
if add_name in ['insert']: if add_name in ['insert']:
params = params[1:] params = params[1:]
@@ -719,7 +719,7 @@ def _check_array_additions(value, sequence):
random_value = value.create_value(name) random_value = value.create_value(name)
with recursion.execution_allowed(value.infer_state, power) as allowed: with recursion.execution_allowed(value.inference_state, power) as allowed:
if allowed: if allowed:
found = infer_call_of_leaf( found = infer_call_of_leaf(
random_value, random_value,
@@ -758,7 +758,7 @@ class _ArrayInstance(HelperValueMixin):
self.var_args = var_args self.var_args = var_args
def py__class__(self): def py__class__(self):
tuple_, = self.instance.infer_state.builtins_module.py__getattribute__('tuple') tuple_, = self.instance.inference_state.builtins_module.py__getattribute__('tuple')
return tuple_ return tuple_
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
@@ -792,7 +792,7 @@ class Slice(object):
def __getattr__(self, name): def __getattr__(self, name):
if self._slice_object is None: if self._slice_object is None:
value = compiled.builtin_from_name(self._value.infer_state, 'slice') value = compiled.builtin_from_name(self._value.inference_state, 'slice')
self._slice_object, = value.execute_with_values() self._slice_object, = value.execute_with_values()
return getattr(self._slice_object, name) return getattr(self._slice_object, name)
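The Slice.__getattr__ hunk above uses a common lazy-delegation pattern: build the wrapped object only on the first attribute lookup, then forward everything to it. The same idea in generic form, independent of jedi:

    # Generic lazy proxy -- the underlying object is created on first
    # attribute access and reused afterwards. Illustrative only.
    class LazyProxy(object):
        def __init__(self, factory):
            self._factory = factory
            self._obj = None

        def __getattr__(self, name):
            # Only reached for attributes the proxy itself does not define.
            if self._obj is None:
                self._obj = self._factory()
            return getattr(self._obj, name)

    s = LazyProxy(lambda: slice(1, 10, 2))
    print(s.start, s.stop, s.step)   # 1 10 2, built lazily on first access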
@@ -39,8 +39,8 @@ py__doc__() Returns the docstring for a value.
from jedi import debug from jedi import debug
from jedi._compatibility import use_metaclass from jedi._compatibility import use_metaclass
from jedi.parser_utils import get_cached_parent_scope from jedi.parser_utils import get_cached_parent_scope
from jedi.inference.cache import infer_state_method_cache, CachedMetaClass, \ from jedi.inference.cache import inference_state_method_cache, CachedMetaClass, \
infer_state_method_generator_cache inference_state_method_generator_cache
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference.lazy_value import LazyKnownValues from jedi.inference.lazy_value import LazyKnownValues
from jedi.inference.filters import ParserTreeFilter from jedi.inference.filters import ParserTreeFilter
@@ -73,7 +73,7 @@ class ClassName(TreeNameDefinition):
# We're using a different value to infer, so we cannot call super(). # We're using a different value to infer, so we cannot call super().
from jedi.inference.syntax_tree import tree_name_to_values from jedi.inference.syntax_tree import tree_name_to_values
inferred = tree_name_to_values( inferred = tree_name_to_values(
self.parent_context.infer_state, self._name_value, self.tree_name) self.parent_context.inference_state, self._name_value, self.tree_name)
for result_value in inferred: for result_value in inferred:
if self._apply_decorators: if self._apply_decorators:
@@ -141,10 +141,10 @@ class ClassMixin(object):
from jedi.inference.value import TreeInstance from jedi.inference.value import TreeInstance
if arguments is None: if arguments is None:
arguments = ValuesArguments([]) arguments = ValuesArguments([])
return ValueSet([TreeInstance(self.infer_state, self.parent_context, self, arguments)]) return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])
def py__class__(self): def py__class__(self):
return compiled.builtin_from_name(self.infer_state, u'type') return compiled.builtin_from_name(self.inference_state, u'type')
@property @property
def name(self): def name(self):
@@ -159,7 +159,7 @@ class ClassMixin(object):
return list(value_.get_param_names())[1:] return list(value_.get_param_names())[1:]
return [] return []
@infer_state_method_generator_cache() @inference_state_method_generator_cache()
def py__mro__(self): def py__mro__(self):
mro = [self] mro = [self]
yield self yield self
@@ -214,7 +214,7 @@ class ClassMixin(object):
) )
if not is_instance: if not is_instance:
from jedi.inference.compiled import builtin_from_name from jedi.inference.compiled import builtin_from_name
type_ = builtin_from_name(self.infer_state, u'type') type_ = builtin_from_name(self.inference_state, u'type')
assert isinstance(type_, ClassValue) assert isinstance(type_, ClassValue)
if type_ != self: if type_ != self:
for instance in type_.py__call__(): for instance in type_.py__call__():
@@ -239,7 +239,7 @@ class ClassMixin(object):
class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)): class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)):
api_type = u'class' api_type = u'class'
@infer_state_method_cache() @inference_state_method_cache()
def list_type_vars(self): def list_type_vars(self):
found = [] found = []
arglist = self.tree_node.get_super_arglist() arglist = self.tree_node.get_super_arglist()
@@ -261,10 +261,10 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
arglist = self.tree_node.get_super_arglist() arglist = self.tree_node.get_super_arglist()
if arglist: if arglist:
from jedi.inference import arguments from jedi.inference import arguments
return arguments.TreeArguments(self.infer_state, self.parent_context, arglist) return arguments.TreeArguments(self.inference_state, self.parent_context, arglist)
return None return None
@infer_state_method_cache(default=()) @inference_state_method_cache(default=())
def py__bases__(self): def py__bases__(self):
args = self._get_bases_arguments() args = self._get_bases_arguments()
if args is not None: if args is not None:
@@ -273,10 +273,10 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
return lst return lst
if self.py__name__() == 'object' \ if self.py__name__() == 'object' \
and self.parent_context == self.infer_state.builtins_module: and self.parent_context == self.inference_state.builtins_module:
return [] return []
return [LazyKnownValues( return [LazyKnownValues(
self.infer_state.builtins_module.py__getattribute__('object') self.inference_state.builtins_module.py__getattribute__('object')
)] )]
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
@@ -320,7 +320,7 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
debug.dbg('Unprocessed metaclass %s', metaclass) debug.dbg('Unprocessed metaclass %s', metaclass)
return [] return []
@infer_state_method_cache(default=NO_VALUES) @inference_state_method_cache(default=NO_VALUES)
def get_metaclasses(self): def get_metaclasses(self):
args = self._get_bases_arguments() args = self._get_bases_arguments()
if args is not None: if args is not None:
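py__mro__ above is wrapped in @inference_state_method_generator_cache(), i.e. a cache for a method that is itself a generator. The idea, sketched here independently of jedi (an approximation, not the real decorator), is to record items as they are produced and let later calls replay the cached prefix before advancing the underlying generator:

    # Hedged sketch of caching a generator-returning method.
    import functools

    def generator_cache(func):
        caches = {}   # (id(obj), args) -> (produced items, underlying generator)

        @functools.wraps(func)
        def wrapper(obj, *args):
            key = (id(obj), args)
            if key not in caches:
                caches[key] = ([], func(obj, *args))
            items, gen = caches[key]

            def replay():
                i = 0
                while True:
                    if i >= len(items):
                        try:
                            items.append(next(gen))   # compute one more item
                        except StopIteration:
                            return
                    yield items[i]
                    i += 1
            return replay()
        return wrapper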
@@ -2,7 +2,7 @@ import re
import os import os
from jedi import debug from jedi import debug
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.names import ValueNameMixin, AbstractNameDefinition from jedi.inference.names import ValueNameMixin, AbstractNameDefinition
from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter
from jedi.inference import compiled from jedi.inference import compiled
@@ -27,13 +27,13 @@ class _ModuleAttributeName(AbstractNameDefinition):
def infer(self): def infer(self):
if self._string_value is not None: if self._string_value is not None:
s = self._string_value s = self._string_value
if self.parent_context.infer_state.environment.version_info.major == 2 \ if self.parent_context.inference_state.environment.version_info.major == 2 \
and not isinstance(s, bytes): and not isinstance(s, bytes):
s = s.encode('utf-8') s = s.encode('utf-8')
return ValueSet([ return ValueSet([
create_simple_object(self.parent_context.infer_state, s) create_simple_object(self.parent_context.inference_state, s)
]) ])
return compiled.get_string_value_set(self.parent_context.infer_state) return compiled.get_string_value_set(self.parent_context.inference_state)
class ModuleName(ValueNameMixin, AbstractNameDefinition): class ModuleName(ValueNameMixin, AbstractNameDefinition):
@@ -48,9 +48,9 @@ class ModuleName(ValueNameMixin, AbstractNameDefinition):
return self._name return self._name
def iter_module_names(infer_state, paths): def iter_module_names(inference_state, paths):
# Python modules/packages # Python modules/packages
for n in infer_state.compiled_subprocess.list_module_names(paths): for n in inference_state.compiled_subprocess.list_module_names(paths):
yield n yield n
for path in paths: for path in paths:
@@ -75,7 +75,7 @@ def iter_module_names(infer_state, paths):
class SubModuleDictMixin(object): class SubModuleDictMixin(object):
@infer_state_method_cache() @inference_state_method_cache()
def sub_modules_dict(self): def sub_modules_dict(self):
""" """
Lists modules in the directory of this module (if this module is a Lists modules in the directory of this module (if this module is a
@@ -87,7 +87,7 @@ class SubModuleDictMixin(object):
except AttributeError: except AttributeError:
pass pass
else: else:
mods = iter_module_names(self.infer_state, method()) mods = iter_module_names(self.inference_state, method())
for name in mods: for name in mods:
# It's obviously a relative import to the current module. # It's obviously a relative import to the current module.
names[name] = SubModuleName(self, name) names[name] = SubModuleName(self, name)
@@ -113,7 +113,7 @@ class ModuleMixin(SubModuleDictMixin):
yield star_filter yield star_filter
def py__class__(self): def py__class__(self):
c, = values_from_qualified_names(self.infer_state, u'types', u'ModuleType') c, = values_from_qualified_names(self.inference_state, u'types', u'ModuleType')
return c return c
def is_module(self): def is_module(self):
@@ -123,7 +123,7 @@ class ModuleMixin(SubModuleDictMixin):
return False return False
@property @property
@infer_state_method_cache() @inference_state_method_cache()
def name(self): def name(self):
return ModuleName(self, self._string_name) return ModuleName(self, self._string_name)
@@ -140,7 +140,7 @@ class ModuleMixin(SubModuleDictMixin):
# Remove PEP 3149 names # Remove PEP 3149 names
return re.sub(r'\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1)) return re.sub(r'\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1))
@infer_state_method_cache() @inference_state_method_cache()
def _module_attributes_dict(self): def _module_attributes_dict(self):
names = ['__package__', '__doc__', '__name__'] names = ['__package__', '__doc__', '__name__']
# All the additional module attributes are strings. # All the additional module attributes are strings.
@@ -157,7 +157,7 @@ class ModuleMixin(SubModuleDictMixin):
# I'm not sure if the star import cache is really that effective anymore # I'm not sure if the star import cache is really that effective anymore
# with all the other really fast import caches. Recheck. Also we would need # with all the other really fast import caches. Recheck. Also we would need
# to push the star imports into InferenceState.module_cache, if we reenable this. # to push the star imports into InferenceState.module_cache, if we reenable this.
@infer_state_method_cache([]) @inference_state_method_cache([])
def star_imports(self): def star_imports(self):
from jedi.inference.imports import Importer from jedi.inference.imports import Importer
@@ -165,7 +165,7 @@ class ModuleMixin(SubModuleDictMixin):
for i in self.tree_node.iter_imports(): for i in self.tree_node.iter_imports():
if i.is_star_import(): if i.is_star_import():
new = Importer( new = Importer(
self.infer_state, self.inference_state,
import_path=i.get_paths()[-1], import_path=i.get_paths()[-1],
module_value=self, module_value=self,
level=i.level level=i.level
@@ -190,9 +190,9 @@ class ModuleValue(ModuleMixin, TreeValue):
api_type = u'module' api_type = u'module'
parent_context = None parent_context = None
def __init__(self, infer_state, module_node, file_io, string_names, code_lines, is_package=False): def __init__(self, inference_state, module_node, file_io, string_names, code_lines, is_package=False):
super(ModuleValue, self).__init__( super(ModuleValue, self).__init__(
infer_state, inference_state,
parent_context=None, parent_context=None,
tree_node=module_node tree_node=module_node
) )
@@ -242,7 +242,7 @@ class ModuleValue(ModuleMixin, TreeValue):
# It is a namespace, now try to find the rest of the # It is a namespace, now try to find the rest of the
# modules on sys_path or whatever the search_path is. # modules on sys_path or whatever the search_path is.
paths = set() paths = set()
for s in self.infer_state.get_sys_path(): for s in self.inference_state.get_sys_path():
other = os.path.join(s, self.name.string_name) other = os.path.join(s, self.name.string_name)
if os.path.isdir(other): if os.path.isdir(other):
paths.add(other) paths.add(other)
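The renamed iter_module_names(inference_state, paths) above is what feeds sub_modules_dict, i.e. how Jedi completes the submodules of a package. Purely as an illustration of the idea (not the code path Jedi uses — Jedi routes this through inference_state.compiled_subprocess and its own directory scanning), the same kind of listing can be approximated with the standard library's pkgutil:

# Rough stdlib-only approximation of listing importable names on a set of
# paths, the job iter_module_names does for sub_modules_dict.
import pkgutil
import sys

def list_module_names(paths):
    seen = set()
    for finder, name, is_pkg in pkgutil.iter_modules(paths):
        if name not in seen:
            seen.add(name)
            yield name

# Example: a few top-level modules/packages reachable from sys.path.
print(sorted(list_module_names(sys.path))[:5])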

@@ -1,4 +1,4 @@
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import inference_state_method_cache
from jedi.inference.filters import DictFilter from jedi.inference.filters import DictFilter
from jedi.inference.names import ValueNameMixin, AbstractNameDefinition from jedi.inference.names import ValueNameMixin, AbstractNameDefinition
from jedi.inference.base_value import Value from jedi.inference.base_value import Value
@@ -25,9 +25,9 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
api_type = u'module' api_type = u'module'
parent_context = None parent_context = None
def __init__(self, infer_state, fullname, paths): def __init__(self, inference_state, fullname, paths):
super(ImplicitNamespaceValue, self).__init__(infer_state, parent_context=None) super(ImplicitNamespaceValue, self).__init__(inference_state, parent_context=None)
self.infer_state = infer_state self.inference_state = inference_state
self._fullname = fullname self._fullname = fullname
self._paths = paths self._paths = paths
@@ -35,7 +35,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
yield DictFilter(self.sub_modules_dict()) yield DictFilter(self.sub_modules_dict())
@property @property
@infer_state_method_cache() @inference_state_method_cache()
def name(self): def name(self):
string_name = self.py__package__()[-1] string_name = self.py__package__()[-1]
return ImplicitNSName(self, string_name) return ImplicitNSName(self, string_name)
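ImplicitNamespaceValue models PEP 420 implicit namespace packages: directories without an __init__.py whose __path__ can span several sys.path entries, which is why the constructor takes a fullname plus a list of paths. A minimal runtime demonstration of that kind of package (Python 3 only, unrelated to the commit itself):

# Build a namespace package 'nspkg' that lives in two sys.path entries at
# once; note there is no __init__.py anywhere.
import importlib
import os
import sys
import tempfile

root = tempfile.mkdtemp()
for site in ('site_a', 'site_b'):
    os.makedirs(os.path.join(root, site, 'nspkg'))
    sys.path.insert(0, os.path.join(root, site))
importlib.invalidate_caches()

nspkg = importlib.import_module('nspkg')
print(list(nspkg.__path__))   # both .../nspkg directories show up here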

@@ -3,19 +3,19 @@ def import_module(callback):
Handle "magic" Flask extension imports: Handle "magic" Flask extension imports:
``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``. ``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``.
""" """
def wrapper(infer_state, import_names, module_value, *args, **kwargs): def wrapper(inference_state, import_names, module_value, *args, **kwargs):
if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'): if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
# New style. # New style.
ipath = (u'flask_' + import_names[2]), ipath = (u'flask_' + import_names[2]),
value_set = callback(infer_state, ipath, None, *args, **kwargs) value_set = callback(inference_state, ipath, None, *args, **kwargs)
if value_set: if value_set:
return value_set return value_set
value_set = callback(infer_state, (u'flaskext',), None, *args, **kwargs) value_set = callback(inference_state, (u'flaskext',), None, *args, **kwargs)
return callback( return callback(
infer_state, inference_state,
(u'flaskext', import_names[2]), (u'flaskext', import_names[2]),
next(iter(value_set)), next(iter(value_set)),
*args, **kwargs *args, **kwargs
) )
return callback(infer_state, import_names, module_value, *args, **kwargs) return callback(inference_state, import_names, module_value, *args, **kwargs)
return wrapper return wrapper
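The wrapper above encodes a fixed fallback order for the historic flask.ext namespace. Stated on its own as a small illustrative helper (not part of Jedi's API):

# For ``import flask.ext.foo`` try the new-style name first, then the old
# flaskext namespace; anything else is passed through unchanged.
def flask_ext_candidates(import_names):
    if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
        extension = import_names[2]
        return [('flask_' + extension,), ('flaskext', extension)]
    return [tuple(import_names)]

print(flask_ext_candidates(('flask', 'ext', 'sqlalchemy')))
# [('flask_sqlalchemy',), ('flaskext', 'sqlalchemy')]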

@@ -114,7 +114,7 @@ def execute(callback):
except AttributeError: except AttributeError:
pass pass
else: else:
if value.parent_context == value.infer_state.builtins_module: if value.parent_context == value.inference_state.builtins_module:
module_name = 'builtins' module_name = 'builtins'
elif value.parent_context is not None and value.parent_context.is_module(): elif value.parent_context is not None and value.parent_context.is_module():
module_name = value.parent_context.py__name__() module_name = value.parent_context.py__name__()
@@ -148,7 +148,7 @@ def execute(callback):
return wrapper return wrapper
def _follow_param(infer_state, arguments, index): def _follow_param(inference_state, arguments, index):
try: try:
key, lazy_value = list(arguments.unpack())[index] key, lazy_value = list(arguments.unpack())[index]
except IndexError: except IndexError:
@@ -158,7 +158,7 @@ def _follow_param(infer_state, arguments, index):
def argument_clinic(string, want_obj=False, want_value=False, def argument_clinic(string, want_obj=False, want_value=False,
want_arguments=False, want_infer_state=False, want_arguments=False, want_inference_state=False,
want_callback=False): want_callback=False):
""" """
Works like Argument Clinic (PEP 436), to validate function params. Works like Argument Clinic (PEP 436), to validate function params.
@@ -177,8 +177,8 @@ def argument_clinic(string, want_obj=False, want_value=False,
kwargs['value'] = arguments.value kwargs['value'] = arguments.value
if want_obj: if want_obj:
kwargs['obj'] = obj kwargs['obj'] = obj
if want_infer_state: if want_inference_state:
kwargs['infer_state'] = obj.infer_state kwargs['inference_state'] = obj.inference_state
if want_arguments: if want_arguments:
kwargs['arguments'] = arguments kwargs['arguments'] = arguments
if want_callback: if want_callback:
@@ -202,9 +202,9 @@ def builtins_property(objects, types, obj, arguments):
return lazy_value.infer().py__call__(arguments=ValuesArguments([objects])) return lazy_value.infer().py__call__(arguments=ValuesArguments([objects]))
@argument_clinic('iterator[, default], /', want_infer_state=True) @argument_clinic('iterator[, default], /', want_inference_state=True)
def builtins_next(iterators, defaults, infer_state): def builtins_next(iterators, defaults, inference_state):
if infer_state.environment.version_info.major == 2: if inference_state.environment.version_info.major == 2:
name = 'next' name = 'next'
else: else:
name = '__next__' name = '__next__'
@@ -245,8 +245,8 @@ def builtins_type(objects, bases, dicts):
class SuperInstance(LazyValueWrapper): class SuperInstance(LazyValueWrapper):
"""To be used like the object ``super`` returns.""" """To be used like the object ``super`` returns."""
def __init__(self, infer_state, instance): def __init__(self, inference_state, instance):
self.infer_state = infer_state self.inference_state = inference_state
self._instance = instance # Corresponds to super().__self__ self._instance = instance # Corresponds to super().__self__
def _get_bases(self): def _get_bases(self):
@@ -274,7 +274,7 @@ def builtins_super(types, objects, value):
instance = value.var_args.instance instance = value.var_args.instance
# TODO if a class is given it doesn't have to be the direct super # TODO if a class is given it doesn't have to be the direct super
# class, it can be an ancestor from long ago. # class, it can be an ancestor from long ago.
return ValueSet({SuperInstance(instance.infer_state, instance)}) return ValueSet({SuperInstance(instance.inference_state, instance)})
return NO_VALUES return NO_VALUES
@@ -312,12 +312,12 @@ def builtins_reversed(sequences, obj, arguments):
# necessary, because `reversed` is a function and autocompletion # necessary, because `reversed` is a function and autocompletion
# would fail in certain cases like `reversed(x).__iter__` if we # would fail in certain cases like `reversed(x).__iter__` if we
# just returned the result directly. # just returned the result directly.
seq, = obj.infer_state.typing_module.py__getattribute__('Iterator').execute_with_values() seq, = obj.inference_state.typing_module.py__getattribute__('Iterator').execute_with_values()
return ValueSet([ReversedObject(seq, list(reversed(ordered)))]) return ValueSet([ReversedObject(seq, list(reversed(ordered)))])
@argument_clinic('obj, type, /', want_arguments=True, want_infer_state=True) @argument_clinic('obj, type, /', want_arguments=True, want_inference_state=True)
def builtins_isinstance(objects, types, arguments, infer_state): def builtins_isinstance(objects, types, arguments, inference_state):
bool_results = set() bool_results = set()
for o in objects: for o in objects:
cls = o.py__class__() cls = o.py__class__()
@@ -336,7 +336,7 @@ def builtins_isinstance(objects, types, arguments, infer_state):
if cls_or_tup.is_class(): if cls_or_tup.is_class():
bool_results.add(cls_or_tup in mro) bool_results.add(cls_or_tup in mro)
elif cls_or_tup.name.string_name == 'tuple' \ elif cls_or_tup.name.string_name == 'tuple' \
and cls_or_tup.get_root_value() == infer_state.builtins_module: and cls_or_tup.get_root_value() == inference_state.builtins_module:
# Check for tuples. # Check for tuples.
classes = ValueSet.from_sets( classes = ValueSet.from_sets(
lazy_value.infer() lazy_value.infer()
@@ -353,7 +353,7 @@ def builtins_isinstance(objects, types, arguments, infer_state):
analysis.add(lazy_value.value, 'type-error-isinstance', node, message) analysis.add(lazy_value.value, 'type-error-isinstance', node, message)
return ValueSet( return ValueSet(
compiled.builtin_from_name(infer_state, force_unicode(str(b))) compiled.builtin_from_name(inference_state, force_unicode(str(b)))
for b in bool_results for b in bool_results
) )
@@ -430,18 +430,18 @@ def collections_namedtuple(obj, arguments, callback):
inferring the result. inferring the result.
""" """
infer_state = obj.infer_state inference_state = obj.inference_state
# Process arguments # Process arguments
name = u'jedi_unknown_namedtuple' name = u'jedi_unknown_namedtuple'
for c in _follow_param(infer_state, arguments, 0): for c in _follow_param(inference_state, arguments, 0):
x = get_str_or_none(c) x = get_str_or_none(c)
if x is not None: if x is not None:
name = force_unicode(x) name = force_unicode(x)
break break
# TODO here we only use one of the types, we should use all. # TODO here we only use one of the types, we should use all.
param_values = _follow_param(infer_state, arguments, 1) param_values = _follow_param(inference_state, arguments, 1)
if not param_values: if not param_values:
return NO_VALUES return NO_VALUES
_fields = list(param_values)[0] _fields = list(param_values)[0]
@@ -470,16 +470,16 @@ def collections_namedtuple(obj, arguments, callback):
) )
# Parse source code # Parse source code
module = infer_state.grammar.parse(code) module = inference_state.grammar.parse(code)
generated_class = next(module.iter_classdefs()) generated_class = next(module.iter_classdefs())
parent_context = ModuleValue( parent_context = ModuleValue(
infer_state, module, inference_state, module,
file_io=None, file_io=None,
string_names=None, string_names=None,
code_lines=parso.split_lines(code, keepends=True), code_lines=parso.split_lines(code, keepends=True),
) )
return ValueSet([ClassValue(infer_state, parent_context, generated_class)]) return ValueSet([ClassValue(inference_state, parent_context, generated_class)])
class PartialObject(object): class PartialObject(object):
@@ -571,7 +571,7 @@ def _random_choice(sequences):
def _dataclass(obj, arguments, callback): def _dataclass(obj, arguments, callback):
for c in _follow_param(obj.infer_state, arguments, 0): for c in _follow_param(obj.inference_state, arguments, 0):
if c.is_class(): if c.is_class():
return ValueSet([DataclassWrapper(c)]) return ValueSet([DataclassWrapper(c)])
else: else:
@@ -645,7 +645,7 @@ class ItemGetterCallable(ValueWrapper):
value_set |= item_value_set.get_item(lazy_values[0].infer(), None) value_set |= item_value_set.get_item(lazy_values[0].infer(), None)
else: else:
value_set |= ValueSet([iterable.FakeSequence( value_set |= ValueSet([iterable.FakeSequence(
self._wrapped_value.infer_state, self._wrapped_value.inference_state,
'list', 'list',
[ [
LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None)) LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None))
@@ -698,7 +698,7 @@ def _create_string_input_function(func):
s = get_str_or_none(value) s = get_str_or_none(value)
if s is not None: if s is not None:
s = func(s) s = func(s)
yield compiled.create_simple_object(value.infer_state, s) yield compiled.create_simple_object(value.inference_state, s)
values = ValueSet(iterate()) values = ValueSet(iterate())
if values: if values:
return values return values
@@ -724,7 +724,7 @@ def _os_path_join(args_set, callback):
string += force_unicode(s) string += force_unicode(s)
is_first = False is_first = False
else: else:
return ValueSet([compiled.create_simple_object(sequence.infer_state, string)]) return ValueSet([compiled.create_simple_object(sequence.inference_state, string)])
return callback() return callback()
@@ -803,7 +803,7 @@ def get_metaclass_filters(func):
class EnumInstance(LazyValueWrapper): class EnumInstance(LazyValueWrapper):
def __init__(self, cls, name): def __init__(self, cls, name):
self.infer_state = cls.infer_state self.inference_state = cls.inference_state
self._cls = cls # Corresponds to super().__self__ self._cls = cls # Corresponds to super().__self__
self._name = name self._name = name
self.tree_node = self._name.tree_name self.tree_node = self._name.tree_name
@@ -818,7 +818,7 @@ class EnumInstance(LazyValueWrapper):
def get_filters(self, search_global=False, position=None, origin_scope=None): def get_filters(self, search_global=False, position=None, origin_scope=None):
yield DictFilter(dict( yield DictFilter(dict(
name=compiled.create_simple_object(self.infer_state, self._name.string_name).name, name=compiled.create_simple_object(self.inference_state, self._name.string_name).name,
value=self._name, value=self._name,
)) ))
for f in self._get_wrapped_value().get_filters(): for f in self._get_wrapped_value().get_filters():
@@ -826,10 +826,10 @@ class EnumInstance(LazyValueWrapper):
def tree_name_to_values(func): def tree_name_to_values(func):
def wrapper(infer_state, value, tree_name): def wrapper(inference_state, value, tree_name):
if tree_name.value == 'sep' and value.is_module() and value.py__name__() == 'os.path': if tree_name.value == 'sep' and value.is_module() and value.py__name__() == 'os.path':
return ValueSet({ return ValueSet({
compiled.create_simple_object(infer_state, os.path.sep), compiled.create_simple_object(inference_state, os.path.sep),
}) })
return func(infer_state, value, tree_name) return func(inference_state, value, tree_name)
return wrapper return wrapper

@@ -13,8 +13,8 @@ sys.path.append('a' +* '/thirdparty')
#? ['inference'] #? ['inference']
import inference import inference
#? ['infer_state_function_cache'] #? ['inference_state_function_cache']
inference.infer_state_fu inference.inference_state_fu
# Those don't work because dirname and abspath are not properly understood. # Those don't work because dirname and abspath are not properly understood.
##? ['jedi_'] ##? ['jedi_']
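The #? lines above are Jedi's own integration-test markers, as I understand the suite: each one lists the completions expected at the cursor at the end of the following line, so this hunk simply updates the expected name to inference_state_function_cache. The same kind of expectation can be written against the public API shown elsewhere in this diff (hedged sketch; str.join stands in for the original target and it assumes jedi is importable):

import jedi

# Completions at the end of the source; 'join' must be among them.
completions = jedi.Script("''.jo").completions()
assert 'join' in [c.name for c in completions]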

@@ -162,10 +162,10 @@ def cwd_tmpdir(monkeypatch, tmpdir):
@pytest.fixture @pytest.fixture
def infer_state(Script): def inference_state(Script):
return Script('')._infer_state return Script('')._inference_state
@pytest.fixture @pytest.fixture
def same_process_infer_state(Script): def same_process_inference_state(Script):
return Script('', environment=InterpreterEnvironment())._infer_state return Script('', environment=InterpreterEnvironment())._inference_state
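A hypothetical test, not part of the commit, showing how the renamed fixture is consumed; the attribute access mirrors test_builtin_loading further down in this diff:

def test_builtins_str_resolves(inference_state):
    # The builtins module known to the InferenceState should expose ``str``
    # as exactly one value.
    string, = inference_state.builtins_module.py__getattribute__(u'str')
    assert string.name.string_name == 'str'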

@@ -212,7 +212,7 @@ class IntegrationTestCase(object):
def run_goto_definitions(self, compare_cb, environment): def run_goto_definitions(self, compare_cb, environment):
script = self.script(environment) script = self.script(environment)
infer_state = script._infer_state inference_state = script._inference_state
def comparison(definition): def comparison(definition):
suffix = '()' if definition.type == 'instance' else '' suffix = '()' if definition.type == 'instance' else ''
@@ -232,13 +232,13 @@ class IntegrationTestCase(object):
user_value = user_value.get_function_execution() user_value = user_value.get_function_execution()
element.parent = user_value.tree_node element.parent = user_value.tree_node
results = convert_values( results = convert_values(
infer_state.infer_element(user_value, element), inference_state.infer_element(user_value, element),
) )
if not results: if not results:
raise Exception('Could not resolve %s on line %s' raise Exception('Could not resolve %s on line %s'
% (match.string, self.line_nr - 1)) % (match.string, self.line_nr - 1))
should_be |= set(Definition(infer_state, r.name) for r in results) should_be |= set(Definition(inference_state, r.name) for r in results)
debug.dbg('Finished getting types', color='YELLOW') debug.dbg('Finished getting types', color='YELLOW')
# Because the objects have different ids, `repr`, then compare. # Because the objects have different ids, `repr`, then compare.

@@ -42,10 +42,10 @@ def test_versions(version):
assert env.get_sys_path() assert env.get_sys_path()
def test_load_module(infer_state): def test_load_module(inference_state):
access_path = infer_state.compiled_subprocess.load_module( access_path = inference_state.compiled_subprocess.load_module(
dotted_name=u'math', dotted_name=u'math',
sys_path=infer_state.get_sys_path() sys_path=inference_state.get_sys_path()
) )
name, access_handle = access_path.accesses[0] name, access_handle = access_path.accesses[0]
@@ -55,31 +55,31 @@ def test_load_module(infer_state):
access_handle.py__mro__() access_handle.py__mro__()
def test_error_in_environment(infer_state, Script, environment): def test_error_in_environment(inference_state, Script, environment):
if isinstance(environment, InterpreterEnvironment): if isinstance(environment, InterpreterEnvironment):
pytest.skip("We don't catch these errors at the moment.") pytest.skip("We don't catch these errors at the moment.")
# Provoke an error to show how Jedi can recover from it. # Provoke an error to show how Jedi can recover from it.
with pytest.raises(jedi.InternalError): with pytest.raises(jedi.InternalError):
infer_state.compiled_subprocess._test_raise_error(KeyboardInterrupt) inference_state.compiled_subprocess._test_raise_error(KeyboardInterrupt)
# The second time it should raise an InternalError again. # The second time it should raise an InternalError again.
with pytest.raises(jedi.InternalError): with pytest.raises(jedi.InternalError):
infer_state.compiled_subprocess._test_raise_error(KeyboardInterrupt) inference_state.compiled_subprocess._test_raise_error(KeyboardInterrupt)
# Jedi should still work. # Jedi should still work.
def_, = Script('str').goto_definitions() def_, = Script('str').goto_definitions()
assert def_.name == 'str' assert def_.name == 'str'
def test_stdout_in_subprocess(infer_state, Script): def test_stdout_in_subprocess(inference_state, Script):
infer_state.compiled_subprocess._test_print(stdout='.') inference_state.compiled_subprocess._test_print(stdout='.')
Script('1').goto_definitions() Script('1').goto_definitions()
def test_killed_subprocess(infer_state, Script, environment): def test_killed_subprocess(inference_state, Script, environment):
if isinstance(environment, InterpreterEnvironment): if isinstance(environment, InterpreterEnvironment):
pytest.skip("We cannot kill our own process") pytest.skip("We cannot kill our own process")
# Just kill the subprocess. # Just kill the subprocess.
infer_state.compiled_subprocess._compiled_subprocess._get_process().kill() inference_state.compiled_subprocess._compiled_subprocess._get_process().kill()
# Since the process was terminated (and nobody knows about it) the first # Since the process was terminated (and nobody knows about it) the first
# Jedi call fails. # Jedi call fails.
with pytest.raises(jedi.InternalError): with pytest.raises(jedi.InternalError):

@@ -13,12 +13,12 @@ def test_django_default_project(Script):
) )
c, = script.completions() c, = script.completions()
assert c.name == "SomeModel" assert c.name == "SomeModel"
assert script._infer_state.project._django is True assert script._inference_state.project._django is True
def test_interpreter_project_path(): def test_interpreter_project_path():
# Run from anywhere it should be the cwd. # Run from anywhere it should be the cwd.
dir = os.path.join(root_dir, 'test') dir = os.path.join(root_dir, 'test')
with set_cwd(dir): with set_cwd(dir):
project = Interpreter('', [locals()])._infer_state.project project = Interpreter('', [locals()])._inference_state.project
assert project._path == dir assert project._path == dir

@@ -17,7 +17,7 @@ def test_add_dynamic_mods(Script):
# Other fictional modules in another place in the fs. # Other fictional modules in another place in the fs.
src2 = 'from .. import setup; setup.r(1)' src2 = 'from .. import setup; setup.r(1)'
script = Script(src1, path='../setup.py') script = Script(src1, path='../setup.py')
imports.load_module(script._infer_state, os.path.abspath(fname), src2) imports.load_module(script._inference_state, os.path.abspath(fname), src2)
result = script.goto_definitions() result = script.goto_definitions()
assert len(result) == 1 assert len(result) == 1
assert result[0].description == 'class int' assert result[0].description == 'class int'

@@ -63,7 +63,7 @@ def test_sys_path_with_modifications(Script):
""") """)
path = os.path.abspath(os.path.join(os.curdir, 'module_name.py')) path = os.path.abspath(os.path.join(os.curdir, 'module_name.py'))
paths = Script(code, path=path)._infer_state.get_sys_path() paths = Script(code, path=path)._inference_state.get_sys_path()
assert '/tmp/.buildout/eggs/important_package.egg' in paths assert '/tmp/.buildout/eggs/important_package.egg' in paths

@@ -11,8 +11,8 @@ from jedi.inference.compiled.access import DirectObjectAccess
from jedi.inference.gradual.conversion import _stub_to_python_value_set from jedi.inference.gradual.conversion import _stub_to_python_value_set
def test_simple(infer_state, environment): def test_simple(inference_state, environment):
obj = compiled.create_simple_object(infer_state, u'_str_') obj = compiled.create_simple_object(inference_state, u'_str_')
upper, = obj.py__getattribute__(u'upper') upper, = obj.py__getattribute__(u'upper')
objs = list(upper.execute_with_values()) objs = list(upper.execute_with_values())
assert len(objs) == 1 assert len(objs) == 1
@@ -23,15 +23,15 @@ def test_simple(infer_state, environment):
assert objs[0].name.string_name == expected assert objs[0].name.string_name == expected
def test_builtin_loading(infer_state): def test_builtin_loading(inference_state):
string, = infer_state.builtins_module.py__getattribute__(u'str') string, = inference_state.builtins_module.py__getattribute__(u'str')
from_name, = string.py__getattribute__(u'__init__') from_name, = string.py__getattribute__(u'__init__')
assert from_name.tree_node assert from_name.tree_node
assert not from_name.py__doc__() # It's a stub assert not from_name.py__doc__() # It's a stub
def test_next_docstr(infer_state): def test_next_docstr(inference_state):
next_ = compiled.builtin_from_name(infer_state, u'next') next_ = compiled.builtin_from_name(inference_state, u'next')
assert next_.tree_node is not None assert next_.tree_node is not None
assert next_.py__doc__() == '' # It's a stub assert next_.py__doc__() == '' # It's a stub
for non_stub in _stub_to_python_value_set(next_): for non_stub in _stub_to_python_value_set(next_):
@@ -47,12 +47,12 @@ def test_parse_function_doc_illegal_docstr():
assert ('', '') == compiled.value._parse_function_doc(docstr) assert ('', '') == compiled.value._parse_function_doc(docstr)
def test_doc(infer_state): def test_doc(inference_state):
""" """
Even CompiledObject docs always return empty docstrings - not None, that's Even CompiledObject docs always return empty docstrings - not None, that's
just a Jedi API definition. just a Jedi API definition.
""" """
str_ = compiled.create_simple_object(infer_state, u'') str_ = compiled.create_simple_object(inference_state, u'')
# Equals `''.__getnewargs__` # Equals `''.__getnewargs__`
obj, = str_.py__getattribute__(u'__getnewargs__') obj, = str_.py__getattribute__(u'__getnewargs__')
assert obj.py__doc__() == '' assert obj.py__doc__() == ''
@@ -103,7 +103,7 @@ def test_dict_values(Script, environment):
def test_getitem_on_none(Script): def test_getitem_on_none(Script):
script = Script('None[1j]') script = Script('None[1j]')
assert not script.goto_definitions() assert not script.goto_definitions()
issue, = script._infer_state.analysis issue, = script._inference_state.analysis
assert issue.name == 'type-error-not-subscriptable' assert issue.name == 'type-error-not-subscriptable'
@@ -122,7 +122,7 @@ def _return_int():
('ret_int', '_return_int', 'test.test_inference.test_compiled'), ('ret_int', '_return_int', 'test.test_inference.test_compiled'),
] ]
) )
def test_parent_context(same_process_infer_state, attribute, expected_name, expected_parent): def test_parent_context(same_process_inference_state, attribute, expected_name, expected_parent):
import decimal import decimal
class C: class C:
@@ -135,8 +135,8 @@ def test_parent_context(same_process_infer_state, attribute, expected_name, expe
ret_int = _return_int ret_int = _return_int
o = compiled.CompiledObject( o = compiled.CompiledObject(
same_process_infer_state, same_process_inference_state,
DirectObjectAccess(same_process_infer_state, C) DirectObjectAccess(same_process_inference_state, C)
) )
x, = o.py__getattribute__(attribute) x, = o.py__getattribute__(attribute)
assert x.py__name__() == expected_name assert x.py__name__() == expected_name
@@ -163,9 +163,9 @@ def test_parent_context(same_process_infer_state, attribute, expected_name, expe
(Counter("").most_common, ['Counter', 'most_common']), (Counter("").most_common, ['Counter', 'most_common']),
] ]
) )
def test_qualified_names(same_process_infer_state, obj, expected_names): def test_qualified_names(same_process_inference_state, obj, expected_names):
o = compiled.CompiledObject( o = compiled.CompiledObject(
same_process_infer_state, same_process_inference_state,
DirectObjectAccess(same_process_infer_state, obj) DirectObjectAccess(same_process_inference_state, obj)
) )
assert o.get_qualified_names() == tuple(expected_names) assert o.get_qualified_names() == tuple(expected_names)

@@ -66,7 +66,7 @@ def test_instance_doc(Script):
assert defs[0].docstring() == 'Docstring of `TestClass`.' assert defs[0].docstring() == 'Docstring of `TestClass`.'
@unittest.skip('need infer_state class for that') @unittest.skip('need inference_state class for that')
def test_attribute_docstring(Script): def test_attribute_docstring(Script):
defs = Script(""" defs = Script("""
x = None x = None
@@ -75,7 +75,7 @@ def test_attribute_docstring(Script):
assert defs[0].docstring() == 'Docstring of `x`.' assert defs[0].docstring() == 'Docstring of `x`.'
@unittest.skip('need infer_state class for that') @unittest.skip('need inference_state class for that')
def test_multiple_docstrings(Script): def test_multiple_docstrings(Script):
defs = Script(""" defs = Script("""
def func(): def func():

@@ -43,12 +43,12 @@ pkg_zip_path = os.path.join(os.path.dirname(__file__),
'pkg.zip') 'pkg.zip')
def test_find_module_package_zipped(Script, infer_state, environment): def test_find_module_package_zipped(Script, inference_state, environment):
sys_path = environment.get_sys_path() + [pkg_zip_path] sys_path = environment.get_sys_path() + [pkg_zip_path]
script = Script('import pkg; pkg.mod', sys_path=sys_path) script = Script('import pkg; pkg.mod', sys_path=sys_path)
assert len(script.completions()) == 1 assert len(script.completions()) == 1
file_io, is_package = infer_state.compiled_subprocess.get_module_info( file_io, is_package = inference_state.compiled_subprocess.get_module_info(
sys_path=sys_path, sys_path=sys_path,
string=u'pkg', string=u'pkg',
full_name=u'pkg' full_name=u'pkg'
@@ -84,7 +84,7 @@ def test_find_module_package_zipped(Script, infer_state, environment):
] ]
) )
def test_correct_zip_package_behavior(Script, infer_state, environment, code, def test_correct_zip_package_behavior(Script, inference_state, environment, code,
file, package, path, skip_python2): file, package, path, skip_python2):
sys_path = environment.get_sys_path() + [pkg_zip_path] sys_path = environment.get_sys_path() + [pkg_zip_path]
pkg, = Script(code, sys_path=sys_path).goto_definitions() pkg, = Script(code, sys_path=sys_path).goto_definitions()
@@ -96,13 +96,13 @@ def test_correct_zip_package_behavior(Script, infer_state, environment, code,
assert value.py__path__() == [os.path.join(pkg_zip_path, path)] assert value.py__path__() == [os.path.join(pkg_zip_path, path)]
def test_find_module_not_package_zipped(Script, infer_state, environment): def test_find_module_not_package_zipped(Script, inference_state, environment):
path = os.path.join(os.path.dirname(__file__), 'zipped_imports/not_pkg.zip') path = os.path.join(os.path.dirname(__file__), 'zipped_imports/not_pkg.zip')
sys_path = environment.get_sys_path() + [path] sys_path = environment.get_sys_path() + [path]
script = Script('import not_pkg; not_pkg.val', sys_path=sys_path) script = Script('import not_pkg; not_pkg.val', sys_path=sys_path)
assert len(script.completions()) == 1 assert len(script.completions()) == 1
file_io, is_package = infer_state.compiled_subprocess.get_module_info( file_io, is_package = inference_state.compiled_subprocess.get_module_info(
sys_path=sys_path, sys_path=sys_path,
string=u'not_pkg', string=u'not_pkg',
full_name=u'not_pkg' full_name=u'not_pkg'
@@ -310,16 +310,16 @@ def test_compiled_import_none(monkeypatch, Script):
(os.path.join(THIS_DIR, '__init__.py'), True, ('ok', 'lala', 'x', 'test_imports')), (os.path.join(THIS_DIR, '__init__.py'), True, ('ok', 'lala', 'x', 'test_imports')),
] ]
) )
def test_get_modules_containing_name(infer_state, path, goal, is_package): def test_get_modules_containing_name(inference_state, path, goal, is_package):
module = imports._load_python_module( module = imports._load_python_module(
infer_state, inference_state,
FileIO(path), FileIO(path),
import_names=('ok', 'lala', 'x'), import_names=('ok', 'lala', 'x'),
is_package=is_package, is_package=is_package,
) )
assert module assert module
input_module, found_module = imports.get_modules_containing_name( input_module, found_module = imports.get_modules_containing_name(
infer_state, inference_state,
[module], [module],
'string_that_only_exists_here' 'string_that_only_exists_here'
) )
@@ -337,9 +337,9 @@ def test_get_modules_containing_name(infer_state, path, goal, is_package):
('/foo/bar/__init__.py', ('foo', 'bar'), True, ('foo', 'bar')), ('/foo/bar/__init__.py', ('foo', 'bar'), True, ('foo', 'bar')),
] ]
) )
def test_load_module_from_path(infer_state, path, base_names, is_package, names): def test_load_module_from_path(inference_state, path, base_names, is_package, names):
file_io = KnownContentFileIO(path, '') file_io = KnownContentFileIO(path, '')
m = imports._load_module_from_path(infer_state, file_io, base_names) m = imports._load_module_from_path(inference_state, file_io, base_names)
assert m.is_package == is_package assert m.is_package == is_package
assert m.string_names == names assert m.string_names == names
@@ -437,8 +437,8 @@ def test_pre_defined_imports_module(Script, environment, name):
module = Script('', path=path)._get_module() module = Script('', path=path)._get_module()
assert module.string_names == (name,) assert module.string_names == (name,)
assert module.infer_state.builtins_module.py__file__() != path assert module.inference_state.builtins_module.py__file__() != path
assert module.infer_state.typing_module.py__file__() != path assert module.inference_state.typing_module.py__file__() != path
@pytest.mark.parametrize('name', ('builtins', 'typing')) @pytest.mark.parametrize('name', ('builtins', 'typing'))
@@ -454,8 +454,8 @@ def test_import_needed_modules_by_jedi(Script, environment, tmpdir, name):
sys_path=[tmpdir.strpath] + environment.get_sys_path(), sys_path=[tmpdir.strpath] + environment.get_sys_path(),
) )
module, = script.goto_definitions() module, = script.goto_definitions()
assert module._infer_state.builtins_module.py__file__() != module_path assert module._inference_state.builtins_module.py__file__() != module_path
assert module._infer_state.typing_module.py__file__() != module_path assert module._inference_state.typing_module.py__file__() != module_path
def test_import_with_semicolon(Script): def test_import_with_semicolon(Script):

@@ -1,9 +1,9 @@
from textwrap import dedent from textwrap import dedent
def get_definition_and_infer_state(Script, source): def get_definition_and_inference_state(Script, source):
first, = Script(dedent(source)).goto_definitions() first, = Script(dedent(source)).goto_definitions()
return first._name._value, first._infer_state return first._name._value, first._inference_state
def test_function_execution(Script): def test_function_execution(Script):
@@ -16,7 +16,7 @@ def test_function_execution(Script):
def x(): def x():
return str() return str()
x""" x"""
func, infer_state = get_definition_and_infer_state(Script, s) func, inference_state = get_definition_and_inference_state(Script, s)
# Now just use the internals of the result (easiest way to get a fully # Now just use the internals of the result (easiest way to get a fully
# usable function). # usable function).
# Should return the same result both times. # Should return the same result both times.
@@ -29,6 +29,6 @@ def test_class_mro(Script):
class X(object): class X(object):
pass pass
X""" X"""
cls, infer_state = get_definition_and_infer_state(Script, s) cls, inference_state = get_definition_and_inference_state(Script, s)
mro = cls.py__mro__() mro = cls.py__mro__()
assert [c.name.string_name for c in mro] == ['X', 'object'] assert [c.name.string_name for c in mro] == ['X', 'object']
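Taken together, the hunks are one mechanical substring rename. Purely as an illustration (this is not necessarily how the commit was produced), such a rename can be scripted and double-checked; the replacement is safe to re-run because 'inference_state' does not itself contain the substring 'infer_state':

import pathlib

OLD, NEW = 'infer_state', 'inference_state'

def rename_everywhere(root='.'):
    # Rewrite every .py file under root that still uses the old identifier.
    for path in pathlib.Path(root).rglob('*.py'):
        text = path.read_text(encoding='utf-8')
        if OLD in text:
            path.write_text(text.replace(OLD, NEW), encoding='utf-8')

def leftovers(root='.'):
    # Sanity check after the rewrite: nothing should still mention OLD.
    return [str(p) for p in pathlib.Path(root).rglob('*.py')
            if OLD in p.read_text(encoding='utf-8')]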