diff --git a/jedi/api/__init__.py b/jedi/api/__init__.py index 62213efd..b3e1121b 100644 --- a/jedi/api/__init__.py +++ b/jedi/api/__init__.py @@ -111,11 +111,11 @@ class Script(object): # TODO deprecate and remove sys_path from the Script API. if sys_path is not None: project._sys_path = sys_path - self._infer_state = InferenceState( + self._inference_state = InferenceState( project, environment=environment, script_path=self.path ) debug.speed('init') - self._module_node, source = self._infer_state.parse_and_get_code( + self._module_node, source = self._inference_state.parse_and_get_code( code=source, path=self.path, encoding=encoding, @@ -156,7 +156,7 @@ class Script(object): is_package = False if self.path is not None: import_names, is_p = transform_path_to_dotted( - self._infer_state.get_sys_path(add_parent_paths=False), + self._inference_state.get_sys_path(add_parent_paths=False), self.path ) if import_names is not None: @@ -170,7 +170,7 @@ class Script(object): if self.path is not None and self.path.endswith('.pyi'): # We are in a stub file. Try to load the stub properly. stub_module = load_proper_stub_module( - self._infer_state, + self._inference_state, file_io, names, self._module_node @@ -182,21 +182,21 @@ class Script(object): names = ('__main__',) module = ModuleValue( - self._infer_state, self._module_node, file_io, + self._inference_state, self._module_node, file_io, string_names=names, code_lines=self._code_lines, is_package=is_package, ) if names[0] not in ('builtins', '__builtin__', 'typing'): # These modules are essential for Jedi, so don't overwrite them. - self._infer_state.module_cache.add(names, ValueSet([module])) + self._inference_state.module_cache.add(names, ValueSet([module])) return module def __repr__(self): return '<%s: %s %r>' % ( self.__class__.__name__, repr(self._orig_path), - self._infer_state.environment, + self._inference_state.environment, ) def completions(self): @@ -209,7 +209,7 @@ class Script(object): """ with debug.increase_indent_cm('completions'): completion = Completion( - self._infer_state, self._get_module(), self._code_lines, + self._inference_state, self._get_module(), self._code_lines, self._pos, self.call_signatures ) return completion.completions() @@ -239,16 +239,16 @@ class Script(object): if leaf is None: return [] - value = self._infer_state.create_value(self._get_module(), leaf) + value = self._inference_state.create_value(self._get_module(), leaf) - values = helpers.infer_goto_definition(self._infer_state, value, leaf) + values = helpers.infer_goto_definition(self._inference_state, value, leaf) values = convert_values( values, only_stubs=only_stubs, prefer_stubs=prefer_stubs, ) - defs = [classes.Definition(self._infer_state, c.name) for c in values] + defs = [classes.Definition(self._inference_state, c.name) for c in values] # The additional set here allows the definitions to become unique in an # API sense. In the internals we want to separate more things than in # the API. @@ -299,8 +299,8 @@ class Script(object): # Without a name we really just want to jump to the result e.g. # executed by `foo()`, if the cursor is after `)`.
return self.goto_definitions(only_stubs=only_stubs, prefer_stubs=prefer_stubs) - value = self._infer_state.create_value(self._get_module(), tree_name) - names = list(self._infer_state.goto(value, tree_name)) + value = self._inference_state.create_value(self._get_module(), tree_name) + names = list(self._inference_state.goto(value, tree_name)) if follow_imports: names = filter_follow_imports(names, lambda name: name.is_import()) @@ -310,7 +310,7 @@ class Script(object): prefer_stubs=prefer_stubs, ) - defs = [classes.Definition(self._infer_state, d) for d in set(names)] + defs = [classes.Definition(self._inference_state, d) for d in set(names)] return helpers.sorted_definitions(defs) def usages(self, additional_module_paths=(), **kwargs): @@ -342,7 +342,7 @@ class Script(object): names = usages.usages(self._get_module(), tree_name) - definitions = [classes.Definition(self._infer_state, n) for n in names] + definitions = [classes.Definition(self._inference_state, n) for n in names] if not include_builtins: definitions = [d for d in definitions if not d.in_builtin_module()] return helpers.sorted_definitions(definitions) @@ -368,12 +368,12 @@ class Script(object): if call_details is None: return [] - value = self._infer_state.create_value( + value = self._inference_state.create_value( self._get_module(), call_details.bracket_leaf ) definitions = helpers.cache_call_signatures( - self._infer_state, + self._inference_state, value, call_details.bracket_leaf, self._code_lines, @@ -383,19 +383,19 @@ class Script(object): # TODO here we use stubs instead of the actual values. We should use # the signatures from stubs, but the actual values, probably?! - return [classes.CallSignature(self._infer_state, signature, call_details) + return [classes.CallSignature(self._inference_state, signature, call_details) for signature in definitions.get_signatures()] def _analysis(self): - self._infer_state.is_analysis = True - self._infer_state.analysis_modules = [self._module_node] + self._inference_state.is_analysis = True + self._inference_state.analysis_modules = [self._module_node] module = self._get_module() try: for node in get_executable_nodes(self._module_node): value = module.create_value(node) if node.type in ('funcdef', 'classdef'): # Resolve the decorators. 
- tree_name_to_values(self._infer_state, value, node.children[1]) + tree_name_to_values(self._inference_state, value, node.children[1]) elif isinstance(node, tree.Import): import_names = set(node.get_defined_names()) if node.is_nested(): @@ -409,16 +409,16 @@ class Script(object): unpack_tuple_to_dict(value, types, testlist) else: if node.type == 'name': - defs = self._infer_state.goto_definitions(value, node) + defs = self._inference_state.goto_definitions(value, node) else: defs = infer_call_of_leaf(value, node) try_iter_content(defs) - self._infer_state.reset_recursion_limitations() + self._inference_state.reset_recursion_limitations() - ana = [a for a in self._infer_state.analysis if self.path == a.path] + ana = [a for a in self._inference_state.analysis if self.path == a.path] return sorted(set(ana), key=lambda x: x.line) finally: - self._infer_state.is_analysis = False + self._inference_state.is_analysis = False class Interpreter(Script): @@ -467,11 +467,11 @@ class Interpreter(Script): super(Interpreter, self).__init__(source, environment=environment, _project=Project(os.getcwd()), **kwds) self.namespaces = namespaces - self._infer_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default + self._inference_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default def _get_module(self): return interpreter.MixedModuleValue( - self._infer_state, + self._inference_state, self._module_node, self.namespaces, file_io=KnownContentFileIO(self.path, self._code), @@ -514,7 +514,7 @@ def names(source=None, path=None, encoding='utf-8', all_scopes=False, module_value = script._get_module() defs = [ classes.Definition( - script._infer_state, + script._inference_state, create_name(name) ) for name in get_module_names(script._module_node, all_scopes) ] diff --git a/jedi/api/classes.py b/jedi/api/classes.py index ee0ee5dc..b0decea9 100644 --- a/jedi/api/classes.py +++ b/jedi/api/classes.py @@ -25,7 +25,7 @@ def _sort_names_by_start_pos(names): return sorted(names, key=lambda s: s.start_pos or (0, 0)) -def defined_names(infer_state, value): +def defined_names(inference_state, value): """ List sub-definitions (e.g., methods in class). @@ -34,11 +34,11 @@ def defined_names(infer_state, value): """ filter = next(value.get_filters(search_global=True)) names = [name for name in filter.values()] - return [Definition(infer_state, n) for n in _sort_names_by_start_pos(names)] + return [Definition(inference_state, n) for n in _sort_names_by_start_pos(names)] def _values_to_definitions(values): - return [Definition(c.infer_state, c.name) for c in values] + return [Definition(c.inference_state, c.name) for c in values] class BaseDefinition(object): @@ -62,8 +62,8 @@ class BaseDefinition(object): 'argparse._ActionsContainer': 'argparse.ArgumentParser', }.items()) - def __init__(self, infer_state, name): - self._infer_state = infer_state + def __init__(self, inference_state, name): + self._inference_state = inference_state self._name = name """ An instance of :class:`parso.python.tree.Name` subclass. @@ -306,7 +306,7 @@ class BaseDefinition(object): only_stubs=only_stubs, prefer_stubs=prefer_stubs, ) - return [self if n == self._name else Definition(self._infer_state, n) + return [self if n == self._name else Definition(self._inference_state, n) for n in names] def infer(self, **kwargs): # Python 2... 
@@ -329,7 +329,7 @@ class BaseDefinition(object): prefer_stubs=prefer_stubs, ) resulting_names = [c.name for c in values] - return [self if n == self._name else Definition(self._infer_state, n) + return [self if n == self._name else Definition(self._inference_state, n) for n in resulting_names] @property @@ -346,7 +346,7 @@ class BaseDefinition(object): for value in self._name.infer(): for signature in value.get_signatures(): return [ - Definition(self._infer_state, n) + Definition(self._inference_state, n) for n in signature.get_param_names(resolve_stars=True) ] @@ -366,7 +366,7 @@ class BaseDefinition(object): if isinstance(value, FunctionExecutionValue): value = value.function_value - return Definition(self._infer_state, value.name) + return Definition(self._inference_state, value.name) def __repr__(self): return "<%s %sname=%r, description=%r>" % ( @@ -396,7 +396,7 @@ class BaseDefinition(object): return ''.join(lines[start_index:index + after + 1]) def get_signatures(self): - return [Signature(self._infer_state, s) for s in self._name.infer().get_signatures()] + return [Signature(self._inference_state, s) for s in self._name.infer().get_signatures()] def execute(self): return _values_to_definitions(self._name.infer().execute_with_values()) @@ -407,8 +407,8 @@ class Completion(BaseDefinition): `Completion` objects are returned from :meth:`api.Script.completions`. They provide additional information about a completion. """ - def __init__(self, infer_state, name, stack, like_name_length): - super(Completion, self).__init__(infer_state, name) + def __init__(self, inference_state, name, stack, like_name_length): + super(Completion, self).__init__(inference_state, name) self._like_name_length = like_name_length self._stack = stack @@ -512,8 +512,8 @@ class Definition(BaseDefinition): *Definition* objects are returned from :meth:`api.Script.goto_assignments` or :meth:`api.Script.goto_definitions`. """ - def __init__(self, infer_state, definition): - super(Definition, self).__init__(infer_state, definition) + def __init__(self, inference_state, definition): + super(Definition, self).__init__(inference_state, definition) @property def description(self): @@ -588,7 +588,7 @@ class Definition(BaseDefinition): """ defs = self._name.infer() return sorted( - unite(defined_names(self._infer_state, d) for d in defs), + unite(defined_names(self._inference_state, d) for d in defs), key=lambda s: s._name.start_pos or (0, 0) ) @@ -606,13 +606,13 @@ class Definition(BaseDefinition): return self._name.start_pos == other._name.start_pos \ and self.module_path == other.module_path \ and self.name == other.name \ - and self._infer_state == other._infer_state + and self._inference_state == other._inference_state def __ne__(self, other): return not self.__eq__(other) def __hash__(self): - return hash((self._name.start_pos, self.module_path, self.name, self._infer_state)) + return hash((self._name.start_pos, self.module_path, self.name, self._inference_state)) class Signature(Definition): @@ -621,8 +621,8 @@ class Signature(Definition): It knows what functions you are currently in. e.g. `isinstance(` would return the `isinstance` function. without `(` it would return nothing. 
""" - def __init__(self, infer_state, signature): - super(Signature, self).__init__(infer_state, signature.name) + def __init__(self, inference_state, signature): + super(Signature, self).__init__(inference_state, signature.name) self._signature = signature @property @@ -630,7 +630,7 @@ class Signature(Definition): """ :return list of ParamDefinition: """ - return [ParamDefinition(self._infer_state, n) + return [ParamDefinition(self._inference_state, n) for n in self._signature.get_param_names(resolve_stars=True)] def to_string(self): @@ -644,8 +644,8 @@ class CallSignature(Signature): return the `isinstance` function with its params. Without `(` it would return nothing. """ - def __init__(self, infer_state, signature, call_details): - super(CallSignature, self).__init__(infer_state, signature) + def __init__(self, inference_state, signature, call_details): + super(CallSignature, self).__init__(inference_state, signature) self._call_details = call_details self._signature = signature diff --git a/jedi/api/completion.py b/jedi/api/completion.py index 91054db1..bc87728c 100644 --- a/jedi/api/completion.py +++ b/jedi/api/completion.py @@ -28,7 +28,7 @@ def get_call_signature_param_names(call_signatures): yield p._name -def filter_names(infer_state, completion_names, stack, like_name): +def filter_names(inference_state, completion_names, stack, like_name): comp_dct = {} if settings.case_insensitive_completion: like_name = like_name.lower() @@ -39,7 +39,7 @@ def filter_names(infer_state, completion_names, stack, like_name): if string.startswith(like_name): new = classes.Completion( - infer_state, + inference_state, name, stack, len(like_name) @@ -85,8 +85,8 @@ def get_flow_scope_node(module_node, position): class Completion: - def __init__(self, infer_state, module, code_lines, position, call_signatures_callback): - self._infer_state = infer_state + def __init__(self, inference_state, module, code_lines, position, call_signatures_callback): + self._inference_state = inference_state self._module_value = module self._module_node = module.tree_node self._code_lines = code_lines @@ -104,7 +104,7 @@ class Completion: string, start_leaf = _extract_string_while_in_string(leaf, self._position) if string is not None: completions = list(file_name_completions( - self._infer_state, self._module_value, start_leaf, string, + self._inference_state, self._module_value, start_leaf, string, self._like_name, self._call_signatures_callback, self._code_lines, self._original_position )) @@ -113,7 +113,7 @@ class Completion: completion_names = self._get_value_completions(leaf) - completions = filter_names(self._infer_state, completion_names, + completions = filter_names(self._inference_state, completion_names, self.stack, self._like_name) return sorted(completions, key=lambda x: (x.name.startswith('__'), @@ -135,7 +135,7 @@ class Completion: - In params (also lambda): no completion before = """ - grammar = self._infer_state.grammar + grammar = self._inference_state.grammar self.stack = stack = None try: @@ -234,14 +234,14 @@ class Completion: def _get_keyword_completion_names(self, allowed_transitions): for k in allowed_transitions: if isinstance(k, str) and k.isalpha(): - yield keywords.KeywordName(self._infer_state, k) + yield keywords.KeywordName(self._inference_state, k) def _global_completions(self): value = get_user_scope(self._module_value, self._position) debug.dbg('global completion scope: %s', value) flow_scope_node = get_flow_scope_node(self._module_node, self._position) filters = get_global_filters( - 
self._infer_state, + self._inference_state, value, self._position, origin_scope=flow_scope_node @@ -253,7 +253,7 @@ class Completion: def _trailer_completions(self, previous_leaf): user_value = get_user_scope(self._module_value, self._position) - inferred_value = self._infer_state.create_value( + inferred_value = self._inference_state.create_value( self._module_value, previous_leaf ) values = infer_call_of_leaf(inferred_value, previous_leaf) @@ -276,8 +276,8 @@ class Completion: def _get_importer_names(self, names, level=0, only_modules=True): names = [n.value for n in names] - i = imports.Importer(self._infer_state, names, self._module_value, level) - return i.completion_names(self._infer_state, only_modules=only_modules) + i = imports.Importer(self._inference_state, names, self._module_value, level) + return i.completion_names(self._inference_state, only_modules=only_modules) def _get_class_value_completions(self, is_function=True): """ diff --git a/jedi/api/environment.py b/jedi/api/environment.py index bd4cebcb..f84114cd 100644 --- a/jedi/api/environment.py +++ b/jedi/api/environment.py @@ -109,8 +109,8 @@ class Environment(_BaseEnvironment): version = '.'.join(str(i) for i in self.version_info) return '<%s: %s in %s>' % (self.__class__.__name__, version, self.path) - def get_infer_state_subprocess(self, infer_state): - return InferenceStateSubprocess(infer_state, self._get_subprocess()) + def get_inference_state_subprocess(self, inference_state): + return InferenceStateSubprocess(inference_state, self._get_subprocess()) @memoize_method def get_sys_path(self): @@ -140,8 +140,8 @@ class SameEnvironment(_SameEnvironmentMixin, Environment): class InterpreterEnvironment(_SameEnvironmentMixin, _BaseEnvironment): - def get_infer_state_subprocess(self, infer_state): - return InferenceStateSameProcess(infer_state) + def get_inference_state_subprocess(self, inference_state): + return InferenceStateSameProcess(inference_state) def get_sys_path(self): return sys.path diff --git a/jedi/api/file_name.py b/jedi/api/file_name.py index 11974bf9..9a5f5b11 100644 --- a/jedi/api/file_name.py +++ b/jedi/api/file_name.py @@ -7,7 +7,7 @@ from jedi.inference.helpers import get_str_or_none from jedi.parser_utils import get_string_quote -def file_name_completions(infer_state, module_value, start_leaf, string, +def file_name_completions(inference_state, module_value, start_leaf, string, like_name, call_signatures_callback, code_lines, position): # First we want to find out what can actually be changed as a name. 
like_name_length = len(os.path.basename(string) + like_name) @@ -30,7 +30,7 @@ def file_name_completions(infer_state, module_value, start_leaf, string, is_in_os_path_join = False else: string = to_be_added + string - base_path = os.path.join(infer_state.project._path, string) + base_path = os.path.join(inference_state.project._path, string) try: listed = os.listdir(base_path) except FileNotFoundError: @@ -53,8 +53,8 @@ def file_name_completions(infer_state, module_value, start_leaf, string, name += os.path.sep yield classes.Completion( - infer_state, - FileName(infer_state, name[len(must_start_with) - like_name_length:]), + inference_state, + FileName(inference_state, name[len(must_start_with) - like_name_length:]), stack=None, like_name_length=like_name_length ) diff --git a/jedi/api/helpers.py b/jedi/api/helpers.py index 1688dba9..018d63a9 100644 --- a/jedi/api/helpers.py +++ b/jedi/api/helpers.py @@ -136,11 +136,11 @@ def get_stack_at_position(grammar, code_lines, leaf, pos): ) -def infer_goto_definition(infer_state, value, leaf): +def infer_goto_definition(inference_state, value, leaf): if leaf.type == 'name': # In case of a name we can just use goto_definition which does all the # magic itself. - return infer_state.goto_definitions(value, leaf) + return inference_state.goto_definitions(value, leaf) parent = leaf.parent definitions = NO_VALUES @@ -154,7 +154,7 @@ def infer_goto_definition(infer_state, value, leaf): # e.g. `"foo"` or `1.0` return infer_atom(value, leaf) elif leaf.type in ('fstring_string', 'fstring_start', 'fstring_end'): - return get_string_value_set(infer_state) + return get_string_value_set(inference_state) return definitions @@ -376,7 +376,7 @@ def get_call_signature_details(module, position): @call_signature_time_cache("call_signatures_validity") -def cache_call_signatures(infer_state, value, bracket_leaf, code_lines, user_pos): +def cache_call_signatures(inference_state, value, bracket_leaf, code_lines, user_pos): """This function calculates the cache key.""" line_index = user_pos[0] - 1 @@ -391,7 +391,7 @@ def cache_call_signatures(infer_state, value, bracket_leaf, code_lines, user_pos else: yield (module_path, before_bracket, bracket_leaf.start_pos) yield infer_goto_definition( - infer_state, + inference_state, value, bracket_leaf.get_previous_leaf(), ) diff --git a/jedi/api/interpreter.py b/jedi/api/interpreter.py index f7b932bf..147a8d48 100644 --- a/jedi/api/interpreter.py +++ b/jedi/api/interpreter.py @@ -9,9 +9,9 @@ from jedi.inference.compiled.access import create_access_path from jedi.inference.base_value import ValueWrapper -def _create(infer_state, obj): +def _create(inference_state, obj): return compiled.create_from_access_path( - infer_state, create_access_path(infer_state, obj) + inference_state, create_access_path(inference_state, obj) ) @@ -23,9 +23,9 @@ class NamespaceObject(object): class MixedModuleValue(ValueWrapper): type = 'mixed_module' - def __init__(self, infer_state, tree_module, namespaces, file_io, code_lines): + def __init__(self, inference_state, tree_module, namespaces, file_io, code_lines): module_value = ModuleValue( - infer_state, tree_module, + inference_state, tree_module, file_io=file_io, string_names=('__main__',), code_lines=code_lines @@ -38,7 +38,7 @@ class MixedModuleValue(ValueWrapper): yield filter for namespace_obj in self._namespace_objects: - compiled_object = _create(self.infer_state, namespace_obj) + compiled_object = _create(self.inference_state, namespace_obj) mixed_object = mixed.MixedObject( 
compiled_object=compiled_object, tree_value=self._wrapped_value diff --git a/jedi/api/keywords.py b/jedi/api/keywords.py index e1ce9dc9..822f80ff 100644 --- a/jedi/api/keywords.py +++ b/jedi/api/keywords.py @@ -15,24 +15,24 @@ except ImportError: pydoc_topics = None -def get_operator(infer_state, string, pos): - return Keyword(infer_state, string, pos) +def get_operator(inference_state, string, pos): + return Keyword(inference_state, string, pos) class KeywordName(AbstractArbitraryName): api_type = u'keyword' def infer(self): - return [Keyword(self.infer_state, self.string_name, (0, 0))] + return [Keyword(self.inference_state, self.string_name, (0, 0))] class Keyword(object): api_type = u'keyword' - def __init__(self, infer_state, name, pos): - self.name = KeywordName(infer_state, name) + def __init__(self, inference_state, name, pos): + self.name = KeywordName(inference_state, name) self.start_pos = pos - self.parent = infer_state.builtins_module + self.parent = inference_state.builtins_module @property def names(self): diff --git a/jedi/api/project.py b/jedi/api/project.py index 8c387ae5..f39ba90f 100644 --- a/jedi/api/project.py +++ b/jedi/api/project.py @@ -7,7 +7,7 @@ from jedi.api.environment import SameEnvironment, \ from jedi.api.exceptions import WrongVersion from jedi._compatibility import force_unicode from jedi.inference.sys_path import discover_buildout_paths -from jedi.inference.cache import infer_state_as_method_param_cache +from jedi.inference.cache import inference_state_as_method_param_cache from jedi.common.utils import traverse_parents _CONFIG_FOLDER = '.jedi' @@ -77,8 +77,8 @@ class Project(object): py2_comp(path, **kwargs) - @infer_state_as_method_param_cache() - def _get_base_sys_path(self, infer_state, environment=None): + @inference_state_as_method_param_cache() + def _get_base_sys_path(self, inference_state, environment=None): if self._sys_path is not None: return self._sys_path @@ -93,8 +93,8 @@ class Project(object): pass return sys_path - @infer_state_as_method_param_cache() - def _get_sys_path(self, infer_state, environment=None, add_parent_paths=True): + @inference_state_as_method_param_cache() + def _get_sys_path(self, inference_state, environment=None, add_parent_paths=True): """ Keep this method private for all users of jedi. However internally this one is used like a public method. 
@@ -102,15 +102,15 @@ class Project(object): suffixed = [] prefixed = [] - sys_path = list(self._get_base_sys_path(infer_state, environment)) + sys_path = list(self._get_base_sys_path(inference_state, environment)) if self._smart_sys_path: prefixed.append(self._path) - if infer_state.script_path is not None: - suffixed += discover_buildout_paths(infer_state, infer_state.script_path) + if inference_state.script_path is not None: + suffixed += discover_buildout_paths(inference_state, inference_state.script_path) if add_parent_paths: - traversed = list(traverse_parents(infer_state.script_path)) + traversed = list(traverse_parents(inference_state.script_path)) # AFAIK some libraries have imports like `foo.foo.bar`, which # leads to the conclusion to by default prefer longer paths diff --git a/jedi/common/value.py b/jedi/common/value.py index df52db03..c96b06e4 100644 --- a/jedi/common/value.py +++ b/jedi/common/value.py @@ -1,6 +1,6 @@ class BaseValue(object): - def __init__(self, infer_state, parent_context=None): - self.infer_state = infer_state + def __init__(self, inference_state, parent_context=None): + self.inference_state = inference_state self.parent_context = parent_context def get_root_value(self): diff --git a/jedi/inference/__init__.py b/jedi/inference/__init__.py index c12cbbeb..eeb78d88 100644 --- a/jedi/inference/__init__.py +++ b/jedi/inference/__init__.py @@ -15,7 +15,7 @@ Type inference of Python code in |jedi| is based on three assumptions: The actual algorithm is based on a principle I call lazy type inference. That said, the typical entry point for static analysis is calling ``infer_expr_stmt``. There's separate logic for autocompletion in the API, the -infer_state is all about inferring an expression. +inference_state is all about inferring an expression. TODO this paragraph is not what jedi does anymore, it's similar, but not the same. 
@@ -72,7 +72,7 @@ from jedi import parser_utils from jedi.inference.utils import unite from jedi.inference import imports from jedi.inference import recursion -from jedi.inference.cache import infer_state_function_cache +from jedi.inference.cache import inference_state_function_cache from jedi.inference import helpers from jedi.inference.names import TreeNameDefinition, ParamName from jedi.inference.base_value import ValueualizedName, ValueualizedNode, \ @@ -91,7 +91,7 @@ class InferenceState(object): environment = project.get_environment() self.environment = environment self.script_path = script_path - self.compiled_subprocess = environment.get_infer_state_subprocess(self) + self.compiled_subprocess = environment.get_inference_state_subprocess(self) self.grammar = environment.get_grammar() self.latest_grammar = parso.load_grammar(version='3.7') @@ -128,7 +128,7 @@ class InferenceState(object): return value_set @property - @infer_state_function_cache() + @inference_state_function_cache() def builtins_module(self): module_name = u'builtins' if self.environment.version_info.major == 2: @@ -137,7 +137,7 @@ class InferenceState(object): return builtins_module @property - @infer_state_function_cache() + @inference_state_function_cache() def typing_module(self): typing_module, = self.import_module((u'typing',)) return typing_module @@ -233,7 +233,7 @@ class InferenceState(object): return infer_node(value, element) return self._infer_element_cached(value, element) - @infer_state_function_cache(default=NO_VALUES) + @inference_state_function_cache(default=NO_VALUES) def _infer_element_cached(self, value, element): return infer_node(value, element) diff --git a/jedi/inference/analysis.py b/jedi/inference/analysis.py index 2fa3af7a..8aa2b63c 100644 --- a/jedi/inference/analysis.py +++ b/jedi/inference/analysis.py @@ -87,7 +87,7 @@ def add(node_value, error_name, node, message=None, typ=Error, payload=None): module_path = module_value.py__file__() issue_instance = typ(error_name, module_path, node.start_pos, message) debug.warning(str(issue_instance), format=False) - node_value.infer_state.analysis.append(issue_instance) + node_value.inference_state.analysis.append(issue_instance) return issue_instance @@ -149,7 +149,7 @@ def _check_for_exception_catch(node_value, jedi_name, exception, payload=None): for python_cls in exception.mro(): if cls.py__name__() == python_cls.__name__ \ - and cls.parent_context == cls.infer_state.builtins_module: + and cls.parent_context == cls.inference_state.builtins_module: return True return False @@ -192,7 +192,7 @@ def _check_for_exception_catch(node_value, jedi_name, exception, payload=None): arglist = trailer.children[1] assert arglist.type == 'arglist' from jedi.inference.arguments import TreeArguments - args = list(TreeArguments(node_value.infer_state, node_value, arglist).unpack()) + args = list(TreeArguments(node_value.inference_state, node_value, arglist).unpack()) # Arguments should be very simple assert len(args) == 2 diff --git a/jedi/inference/arguments.py b/jedi/inference/arguments.py index ec43178e..34803e51 100644 --- a/jedi/inference/arguments.py +++ b/jedi/inference/arguments.py @@ -11,7 +11,7 @@ from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \ from jedi.inference.names import ParamName, TreeNameDefinition from jedi.inference.base_value import NO_VALUES, ValueSet, ValueualizedNode from jedi.inference.value import iterable -from jedi.inference.cache import infer_state_as_method_param_cache +from jedi.inference.cache import 
inference_state_as_method_param_cache from jedi.inference.param import get_executed_params_and_issues, ExecutedParam @@ -59,7 +59,7 @@ def repack_with_argument_clinic(string, keep_arguments_param=False, keep_callbac kwargs.pop('callback', None) try: args += tuple(_iterate_argument_clinic( - value.infer_state, + value.inference_state, arguments, clinic_args )) @@ -72,7 +72,7 @@ def repack_with_argument_clinic(string, keep_arguments_param=False, keep_callbac return decorator -def _iterate_argument_clinic(infer_state, arguments, parameters): +def _iterate_argument_clinic(inference_state, arguments, parameters): """Uses a list with argument clinic information (see PEP 436).""" iterator = PushBackIterator(arguments.unpack()) for i, (name, optional, allow_kwargs, stars) in enumerate(parameters): @@ -84,7 +84,7 @@ def _iterate_argument_clinic(infer_state, arguments, parameters): break lazy_values.append(argument) - yield ValueSet([iterable.FakeSequence(infer_state, u'tuple', lazy_values)]) + yield ValueSet([iterable.FakeSequence(inference_state, u'tuple', lazy_values)]) lazy_values continue elif stars == 2: @@ -161,7 +161,7 @@ class AnonymousArguments(AbstractArguments): def get_executed_params_and_issues(self, execution_value): from jedi.inference.dynamic import search_params return search_params( - execution_value.infer_state, + execution_value.inference_state, execution_value, execution_value.tree_node ), [] @@ -198,17 +198,17 @@ def unpack_arglist(arglist): class TreeArguments(AbstractArguments): - def __init__(self, infer_state, value, argument_node, trailer=None): + def __init__(self, inference_state, value, argument_node, trailer=None): """ :param argument_node: May be an argument_node or a list of nodes. """ self.argument_node = argument_node self.value = value - self._infer_state = infer_state + self._inference_state = inference_state self.trailer = trailer # Can be None, e.g. in a class definition. 
@classmethod - @infer_state_as_method_param_cache() + @inference_state_as_method_param_cache() def create_cached(cls, *args, **kwargs): return cls(*args, **kwargs) @@ -241,7 +241,7 @@ class TreeArguments(AbstractArguments): if sync_comp_for.type == 'comp_for': sync_comp_for = sync_comp_for.children[1] comp = iterable.GeneratorComprehension( - self._infer_state, + self._inference_state, defining_value=self.value, sync_comp_for_node=sync_comp_for, entry_node=el.children[0], diff --git a/jedi/inference/base_value.py b/jedi/inference/base_value.py index 7084e1fc..69e151f5 100644 --- a/jedi/inference/base_value.py +++ b/jedi/inference/base_value.py @@ -16,7 +16,7 @@ from jedi.parser_utils import clean_scope_docstring from jedi.common import BaseValueSet, BaseValue from jedi.inference.helpers import SimpleGetItemNotFound from jedi.inference.utils import safe_property -from jedi.inference.cache import infer_state_as_method_param_cache +from jedi.inference.cache import inference_state_as_method_param_cache from jedi.cache import memoize_method _sentinel = object() @@ -31,17 +31,17 @@ class HelperValueMixin(object): value = value.parent_context @classmethod - @infer_state_as_method_param_cache() + @inference_state_as_method_param_cache() def create_cached(cls, *args, **kwargs): return cls(*args, **kwargs) def execute(self, arguments): - return self.infer_state.execute(self, arguments=arguments) + return self.inference_state.execute(self, arguments=arguments) def execute_with_values(self, *value_list): from jedi.inference.arguments import ValuesArguments arguments = ValuesArguments([ValueSet([value]) for value in value_list]) - return self.infer_state.execute(self, arguments) + return self.inference_state.execute(self, arguments) def execute_annotation(self): return self.execute_with_values() @@ -64,7 +64,7 @@ class HelperValueMixin(object): if name_value is None: name_value = self from jedi.inference import finder - f = finder.NameFinder(self.infer_state, self, name_value, name_or_str, + f = finder.NameFinder(self.inference_state, self, name_value, name_or_str, position, analysis_errors=analysis_errors) if search_global: filters = f.get_global_filters() @@ -81,10 +81,10 @@ class HelperValueMixin(object): return await_value_set.execute_with_values() def infer_node(self, node): - return self.infer_state.infer_element(self, node) + return self.inference_state.infer_element(self, node) def create_value(self, node, node_is_value=False, node_is_object=False): - return self.infer_state.create_value(self, node, node_is_value, node_is_object) + return self.inference_state.create_value(self, node, node_is_value, node_is_object) def iterate(self, valueualized_node=None, is_async=False): debug.dbg('iterate %s', self) @@ -239,8 +239,8 @@ class _ValueWrapperBase(HelperValueMixin): return CompiledValueName(self, wrapped_name.string_name) @classmethod - @infer_state_as_method_param_cache() - def create_cached(cls, infer_state, *args, **kwargs): + @inference_state_as_method_param_cache() + def create_cached(cls, inference_state, *args, **kwargs): return cls(*args, **kwargs) def __getattr__(self, name): @@ -271,8 +271,8 @@ class ValueWrapper(_ValueWrapperBase): class TreeValue(Value): - def __init__(self, infer_state, parent_context, tree_node): - super(TreeValue, self).__init__(infer_state, parent_context) + def __init__(self, inference_state, parent_context, tree_node): + super(TreeValue, self).__init__(inference_state, parent_context) self.predefined_names = {} self.tree_node = tree_node @@ -398,7 +398,7 @@ class 
ValueSet(BaseValueSet): ) def execute(self, arguments): - return ValueSet.from_sets(c.infer_state.execute(c, arguments) for c in self._set) + return ValueSet.from_sets(c.inference_state.execute(c, arguments) for c in self._set) def execute_with_values(self, *args, **kwargs): return ValueSet.from_sets(c.execute_with_values(*args, **kwargs) for c in self._set) diff --git a/jedi/inference/cache.py b/jedi/inference/cache.py index 36c51d01..839726ac 100644 --- a/jedi/inference/cache.py +++ b/jedi/inference/cache.py @@ -10,7 +10,7 @@ _NO_DEFAULT = object() _RECURSION_SENTINEL = object() -def _memoize_default(default=_NO_DEFAULT, infer_state_is_first_arg=False, second_arg_is_infer_state=False): +def _memoize_default(default=_NO_DEFAULT, inference_state_is_first_arg=False, second_arg_is_inference_state=False): """ This is a typical memoization decorator, BUT there is one difference: To prevent recursion it sets defaults. @@ -21,12 +21,12 @@ def _memoize_default(default=_NO_DEFAULT, infer_state_is_first_arg=False, second def func(function): def wrapper(obj, *args, **kwargs): # TODO These checks are kind of ugly and slow. - if infer_state_is_first_arg: + if inference_state_is_first_arg: cache = obj.memoize_cache - elif second_arg_is_infer_state: + elif second_arg_is_inference_state: cache = args[0].memoize_cache # needed for meta classes else: - cache = obj.infer_state.memoize_cache + cache = obj.inference_state.memoize_cache try: memo = cache[function] @@ -47,23 +47,23 @@ def _memoize_default(default=_NO_DEFAULT, infer_state_is_first_arg=False, second return func -def infer_state_function_cache(default=_NO_DEFAULT): +def inference_state_function_cache(default=_NO_DEFAULT): def decorator(func): - return _memoize_default(default=default, infer_state_is_first_arg=True)(func) + return _memoize_default(default=default, inference_state_is_first_arg=True)(func) return decorator -def infer_state_method_cache(default=_NO_DEFAULT): +def inference_state_method_cache(default=_NO_DEFAULT): def decorator(func): return _memoize_default(default=default)(func) return decorator -def infer_state_as_method_param_cache(): +def inference_state_as_method_param_cache(): def decorator(call): - return _memoize_default(second_arg_is_infer_state=True)(call) + return _memoize_default(second_arg_is_inference_state=True)(call) return decorator @@ -74,19 +74,19 @@ class CachedMetaClass(type): class initializations. Either you do it this way or with decorators, but with decorators you lose class access (isinstance, etc). """ - @infer_state_as_method_param_cache() + @inference_state_as_method_param_cache() def __call__(self, *args, **kwargs): return super(CachedMetaClass, self).__call__(*args, **kwargs) -def infer_state_method_generator_cache(): +def inference_state_method_generator_cache(): """ This is a special memoizer. It memoizes generators and also checks for recursion errors and returns no further iterator elements in that case.
""" def func(function): def wrapper(obj, *args, **kwargs): - cache = obj.infer_state.memoize_cache + cache = obj.inference_state.memoize_cache try: memo = cache[function] except KeyError: diff --git a/jedi/inference/compiled/__init__.py b/jedi/inference/compiled/__init__.py index 5df74795..beef9523 100644 --- a/jedi/inference/compiled/__init__.py +++ b/jedi/inference/compiled/__init__.py @@ -4,8 +4,8 @@ from jedi.inference.compiled.value import CompiledObject, CompiledName, \ from jedi.inference.base_value import ValueWrapper, LazyValueWrapper -def builtin_from_name(infer_state, string): - typing_builtins_module = infer_state.builtins_module +def builtin_from_name(inference_state, string): + typing_builtins_module = inference_state.builtins_module if string in ('None', 'True', 'False'): builtins, = typing_builtins_module.non_stub_value_set filter_ = next(builtins.get_filters()) @@ -18,7 +18,7 @@ def builtin_from_name(infer_state, string): class CompiledValue(LazyValueWrapper): def __init__(self, compiled_obj): - self.infer_state = compiled_obj.infer_state + self.inference_state = compiled_obj.inference_state self._compiled_obj = compiled_obj def __getattribute__(self, name): @@ -29,36 +29,36 @@ class CompiledValue(LazyValueWrapper): def _get_wrapped_value(self): instance, = builtin_from_name( - self.infer_state, self._compiled_obj.name.string_name).execute_with_values() + self.inference_state, self._compiled_obj.name.string_name).execute_with_values() return instance def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self._compiled_obj) -def create_simple_object(infer_state, obj): +def create_simple_object(inference_state, obj): """ Only allows creations of objects that are easily picklable across Python versions. """ assert type(obj) in (int, float, str, bytes, unicode, slice, complex, bool), obj compiled_obj = create_from_access_path( - infer_state, - infer_state.compiled_subprocess.create_simple_object(obj) + inference_state, + inference_state.compiled_subprocess.create_simple_object(obj) ) return CompiledValue(compiled_obj) -def get_string_value_set(infer_state): - return builtin_from_name(infer_state, u'str').execute_with_values() +def get_string_value_set(inference_state): + return builtin_from_name(inference_state, u'str').execute_with_values() -def load_module(infer_state, dotted_name, **kwargs): +def load_module(inference_state, dotted_name, **kwargs): # Temporary, some tensorflow builtins cannot be loaded, so it's tried again # and again and it's really slow. if dotted_name.startswith('tensorflow.'): return None - access_path = infer_state.compiled_subprocess.load_module(dotted_name=dotted_name, **kwargs) + access_path = inference_state.compiled_subprocess.load_module(dotted_name=dotted_name, **kwargs) if access_path is None: return None - return create_from_access_path(infer_state, access_path) + return create_from_access_path(inference_state, access_path) diff --git a/jedi/inference/compiled/access.py b/jedi/inference/compiled/access.py index 5e6bb476..8d948ffa 100644 --- a/jedi/inference/compiled/access.py +++ b/jedi/inference/compiled/access.py @@ -109,8 +109,8 @@ def compiled_objects_cache(attribute_name): Caching the id has the advantage that an object doesn't need to be hashable. """ - def wrapper(infer_state, obj, parent_context=None): - cache = getattr(infer_state, attribute_name) + def wrapper(inference_state, obj, parent_context=None): + cache = getattr(inference_state, attribute_name) # Do a very cheap form of caching here. 
key = id(obj) try: @@ -119,9 +119,9 @@ def compiled_objects_cache(attribute_name): except KeyError: # TODO wuaaaarrghhhhhhhh if attribute_name == 'mixed_cache': - result = func(infer_state, obj, parent_context) + result = func(inference_state, obj, parent_context) else: - result = func(infer_state, obj) + result = func(inference_state, obj) # Need to cache all of them, otherwise the id could be overwritten. cache[key] = result, obj, parent_context return result @@ -130,11 +130,11 @@ def compiled_objects_cache(attribute_name): return decorator -def create_access(infer_state, obj): - return infer_state.compiled_subprocess.get_or_create_access_handle(obj) +def create_access(inference_state, obj): + return inference_state.compiled_subprocess.get_or_create_access_handle(obj) -def load_module(infer_state, dotted_name, sys_path): +def load_module(inference_state, dotted_name, sys_path): temp, sys.path = sys.path, sys_path try: __import__(dotted_name) @@ -154,7 +154,7 @@ def load_module(infer_state, dotted_name, sys_path): # Just access the cache after import, because of #59 as well as the very # complicated import structure of Python. module = sys.modules[dotted_name] - return create_access_path(infer_state, module) + return create_access_path(inference_state, module) class AccessPath(object): @@ -171,8 +171,8 @@ class AccessPath(object): self.accesses = value -def create_access_path(infer_state, obj): - access = create_access(infer_state, obj) +def create_access_path(inference_state, obj): + access = create_access(inference_state, obj) return AccessPath(access.get_access_path_tuples()) @@ -193,18 +193,18 @@ def get_api_type(obj): class DirectObjectAccess(object): - def __init__(self, infer_state, obj): - self._infer_state = infer_state + def __init__(self, inference_state, obj): + self._inference_state = inference_state self._obj = obj def __repr__(self): return '%s(%s)' % (self.__class__.__name__, self.get_repr()) def _create_access(self, obj): - return create_access(self._infer_state, obj) + return create_access(self._inference_state, obj) def _create_access_path(self, obj): - return create_access_path(self._infer_state, obj) + return create_access_path(self._inference_state, obj) def py__bool__(self): return bool(self._obj) @@ -376,7 +376,7 @@ class DirectObjectAccess(object): return get_api_type(self._obj) def get_access_path_tuples(self): - accesses = [create_access(self._infer_state, o) for o in self._get_objects_path()] + accesses = [create_access(self._inference_state, o) for o in self._get_objects_path()] return [(access.py__name__(), access) for access in accesses] def _get_objects_path(self): diff --git a/jedi/inference/compiled/mixed.py b/jedi/inference/compiled/mixed.py index 0bc64625..8dd42130 100644 --- a/jedi/inference/compiled/mixed.py +++ b/jedi/inference/compiled/mixed.py @@ -15,7 +15,7 @@ from jedi.file_io import FileIO from jedi.inference.base_value import ValueSet, ValueWrapper from jedi.inference.helpers import SimpleGetItemNotFound from jedi.inference.value import ModuleValue -from jedi.inference.cache import infer_state_function_cache +from jedi.inference.cache import inference_state_function_cache from jedi.inference.compiled.getattr_static import getattr_static from jedi.inference.compiled.access import compiled_objects_cache, \ ALLOWED_GETITEM_TYPES, get_api_type @@ -48,7 +48,7 @@ class MixedObject(ValueWrapper): self.access_handle = compiled_object.access_handle def get_filters(self, *args, **kwargs): - yield MixedObjectFilter(self.infer_state, self) + yield 
MixedObjectFilter(self.inference_state, self) def get_signatures(self): # Prefer `inspect.signature` over somehow analyzing Python code. It @@ -105,9 +105,9 @@ class MixedName(compiled.CompiledName): values = [None] for access in access_paths: values = ValueSet.from_sets( - _create(self._infer_state, access, parent_context=c) + _create(self._inference_state, access, parent_context=c) if c is None or isinstance(c, MixedObject) - else ValueSet({create_cached_compiled_object(c.infer_state, access, c)}) + else ValueSet({create_cached_compiled_object(c.inference_state, access, c)}) for c in values ) return values @@ -121,9 +121,9 @@ class MixedObjectFilter(compiled.CompiledObjectFilter): name_class = MixedName -@infer_state_function_cache() -def _load_module(infer_state, path): - module_node = infer_state.parse( +@inference_state_function_cache() +def _load_module(inference_state, path): + module_node = inference_state.parse( path=path, cache=True, diff_cache=settings.fast_parser, @@ -131,7 +131,7 @@ def _load_module(infer_state, path): ).get_root_node() # python_module = inspect.getmodule(python_object) # TODO we should actually make something like this possible. - #infer_state.modules[python_module.__name__] = module_node + #inference_state.modules[python_module.__name__] = module_node return module_node @@ -155,7 +155,7 @@ def _get_object_to_check(python_object): raise TypeError # Prevents computation of `repr` within inspect. -def _find_syntax_node_name(infer_state, python_object): +def _find_syntax_node_name(inference_state, python_object): original_object = python_object try: python_object = _get_object_to_check(python_object) @@ -168,13 +168,13 @@ def _find_syntax_node_name(infer_state, python_object): return None file_io = FileIO(path) - module_node = _load_module(infer_state, path) + module_node = _load_module(inference_state, path) if inspect.ismodule(python_object): # We don't need to check names for modules, because there's not really # a way to write a module in a module in Python (and also __name__ can # be something like ``email.utils``). - code_lines = get_cached_code_lines(infer_state.grammar, path) + code_lines = get_cached_code_lines(inference_state.grammar, path) return module_node, module_node, file_io, code_lines try: @@ -214,7 +214,7 @@ def _find_syntax_node_name(infer_state, python_object): if line_names: names = line_names - code_lines = get_cached_code_lines(infer_state.grammar, path) + code_lines = get_cached_code_lines(inference_state.grammar, path) # It's really hard to actually get the right definition, here as a last # resort we just return the last one. This chance might lead to odd # completions at some points but will lead to mostly correct type @@ -230,9 +230,9 @@ def _find_syntax_node_name(infer_state, python_object): @compiled_objects_cache('mixed_cache') -def _create(infer_state, access_handle, parent_context, *args): +def _create(inference_state, access_handle, parent_context, *args): compiled_object = create_cached_compiled_object( - infer_state, + inference_state, access_handle, parent_context=parent_context and parent_context.compiled_object ) @@ -240,7 +240,7 @@ def _create(infer_state, access_handle, parent_context, *args): # TODO accessing this is bad, but it probably doesn't matter that much, # because we're working with interpreteters only here. 
python_object = access_handle.access._obj - result = _find_syntax_node_name(infer_state, python_object) + result = _find_syntax_node_name(inference_state, python_object) if result is None: # TODO Care about generics from stuff like `[1]` and don't return like this. if type(python_object) in (dict, list, tuple): @@ -257,14 +257,14 @@ def _create(infer_state, access_handle, parent_context, *args): name = compiled_object.get_root_value().py__name__() string_names = tuple(name.split('.')) module_value = ModuleValue( - infer_state, module_node, + inference_state, module_node, file_io=file_io, string_names=string_names, code_lines=code_lines, is_package=hasattr(compiled_object, 'py__path__'), ) if name is not None: - infer_state.module_cache.add(string_names, ValueSet([module_value])) + inference_state.module_cache.add(string_names, ValueSet([module_value])) else: if parent_context.tree_node.get_root_node() != module_node: # This happens e.g. when __module__ is wrong, or when using diff --git a/jedi/inference/compiled/subprocess/__init__.py b/jedi/inference/compiled/subprocess/__init__.py index 323bdad7..bd7c801b 100644 --- a/jedi/inference/compiled/subprocess/__init__.py +++ b/jedi/inference/compiled/subprocess/__init__.py @@ -71,9 +71,9 @@ def _cleanup_process(process, thread): class _InferenceStateProcess(object): - def __init__(self, infer_state): - self._infer_state_weakref = weakref.ref(infer_state) - self._infer_state_id = id(infer_state) + def __init__(self, inference_state): + self._inference_state_weakref = weakref.ref(inference_state) + self._inference_state_id = id(inference_state) self._handles = {} def get_or_create_access_handle(self, obj): @@ -81,7 +81,7 @@ class _InferenceStateProcess(object): try: return self.get_access_handle(id_) except KeyError: - access = DirectObjectAccess(self._infer_state_weakref(), obj) + access = DirectObjectAccess(self._inference_state_weakref(), obj) handle = AccessHandle(self, access, id_) self.set_access_handle(handle) return handle @@ -100,12 +100,12 @@ class InferenceStateSameProcess(_InferenceStateProcess): This is necessary for the Interpreter process. 
""" def __getattr__(self, name): - return partial(_get_function(name), self._infer_state_weakref()) + return partial(_get_function(name), self._inference_state_weakref()) class InferenceStateSubprocess(_InferenceStateProcess): - def __init__(self, infer_state, compiled_subprocess): - super(InferenceStateSubprocess, self).__init__(infer_state) + def __init__(self, inference_state, compiled_subprocess): + super(InferenceStateSubprocess, self).__init__(inference_state) self._used = False self._compiled_subprocess = compiled_subprocess @@ -116,7 +116,7 @@ class InferenceStateSubprocess(_InferenceStateProcess): self._used = True result = self._compiled_subprocess.run( - self._infer_state_weakref(), + self._inference_state_weakref(), func, args=args, kwargs=kwargs, @@ -148,7 +148,7 @@ class InferenceStateSubprocess(_InferenceStateProcess): def __del__(self): if self._used and not self._compiled_subprocess.is_crashed: - self._compiled_subprocess.delete_infer_state(self._infer_state_id) + self._compiled_subprocess.delete_inference_state(self._inference_state_id) class CompiledSubprocess(object): @@ -158,7 +158,7 @@ class CompiledSubprocess(object): def __init__(self, executable): self._executable = executable - self._infer_state_deletion_queue = queue.deque() + self._inference_state_deletion_queue = queue.deque() self._cleanup_callable = lambda: None def __repr__(self): @@ -205,18 +205,18 @@ class CompiledSubprocess(object): t) return process - def run(self, infer_state, function, args=(), kwargs={}): - # Delete old infer_states. + def run(self, inference_state, function, args=(), kwargs={}): + # Delete old inference_states. while True: try: - infer_state_id = self._infer_state_deletion_queue.pop() + inference_state_id = self._inference_state_deletion_queue.pop() except IndexError: break else: - self._send(infer_state_id, None) + self._send(inference_state_id, None) assert callable(function) - return self._send(id(infer_state), function, args, kwargs) + return self._send(id(inference_state), function, args, kwargs) def get_sys_path(self): return self._send(None, functions.get_sys_path, (), {}) @@ -225,7 +225,7 @@ class CompiledSubprocess(object): self.is_crashed = True self._cleanup_callable() - def _send(self, infer_state_id, function, args=(), kwargs={}): + def _send(self, inference_state_id, function, args=(), kwargs={}): if self.is_crashed: raise InternalError("The subprocess %s has crashed." % self._executable) @@ -233,7 +233,7 @@ class CompiledSubprocess(object): # Python 2 compatibility kwargs = {force_unicode(key): value for key, value in kwargs.items()} - data = infer_state_id, function, args, kwargs + data = inference_state_id, function, args, kwargs try: pickle_dump(data, self._get_process().stdin, self._pickle_protocol) except (socket.error, IOError) as e: @@ -272,59 +272,59 @@ class CompiledSubprocess(object): raise result return result - def delete_infer_state(self, infer_state_id): + def delete_inference_state(self, inference_state_id): """ - Currently we are not deleting infer_state instantly. They only get + Currently we are not deleting inference_state instantly. They only get deleted once the subprocess is used again. It would probably a better solution to move all of this into a thread. However, the memory usage - of a single infer_state shouldn't be that high. + of a single inference_state shouldn't be that high. """ - # With an argument - the infer_state gets deleted. 
- self._infer_state_deletion_queue.append(infer_state_id) + # With an argument - the inference_state gets deleted. + self._inference_state_deletion_queue.append(inference_state_id) class Listener(object): def __init__(self, pickle_protocol): - self._infer_states = {} + self._inference_states = {} # TODO refactor so we don't need to process anymore just handle # controlling. self._process = _InferenceStateProcess(Listener) self._pickle_protocol = pickle_protocol - def _get_infer_state(self, function, infer_state_id): + def _get_inference_state(self, function, inference_state_id): from jedi.inference import InferenceState try: - infer_state = self._infer_states[infer_state_id] + inference_state = self._inference_states[inference_state_id] except KeyError: from jedi.api.environment import InterpreterEnvironment - infer_state = InferenceState( + inference_state = InferenceState( # The project is not actually needed. Nothing should need to # access it. project=None, environment=InterpreterEnvironment() ) - self._infer_states[infer_state_id] = infer_state - return infer_state + self._inference_states[inference_state_id] = inference_state + return inference_state - def _run(self, infer_state_id, function, args, kwargs): - if infer_state_id is None: + def _run(self, inference_state_id, function, args, kwargs): + if inference_state_id is None: return function(*args, **kwargs) elif function is None: - del self._infer_states[infer_state_id] + del self._inference_states[inference_state_id] else: - infer_state = self._get_infer_state(function, infer_state_id) + inference_state = self._get_inference_state(function, inference_state_id) # Exchange all handles args = list(args) for i, arg in enumerate(args): if isinstance(arg, AccessHandle): - args[i] = infer_state.compiled_subprocess.get_access_handle(arg.id) + args[i] = inference_state.compiled_subprocess.get_access_handle(arg.id) for key, value in kwargs.items(): if isinstance(value, AccessHandle): - kwargs[key] = infer_state.compiled_subprocess.get_access_handle(value.id) + kwargs[key] = inference_state.compiled_subprocess.get_access_handle(value.id) - return function(infer_state, *args, **kwargs) + return function(inference_state, *args, **kwargs) def listen(self): stdout = sys.stdout diff --git a/jedi/inference/compiled/subprocess/functions.py b/jedi/inference/compiled/subprocess/functions.py index fd883c3d..71749346 100644 --- a/jedi/inference/compiled/subprocess/functions.py +++ b/jedi/inference/compiled/subprocess/functions.py @@ -12,20 +12,20 @@ def get_sys_path(): return list(map(cast_path, sys.path)) -def load_module(infer_state, **kwargs): - return access.load_module(infer_state, **kwargs) +def load_module(inference_state, **kwargs): + return access.load_module(inference_state, **kwargs) -def get_compiled_method_return(infer_state, id, attribute, *args, **kwargs): - handle = infer_state.compiled_subprocess.get_access_handle(id) +def get_compiled_method_return(inference_state, id, attribute, *args, **kwargs): + handle = inference_state.compiled_subprocess.get_access_handle(id) return getattr(handle.access, attribute)(*args, **kwargs) -def create_simple_object(infer_state, obj): - return access.create_access_path(infer_state, obj) +def create_simple_object(inference_state, obj): + return access.create_access_path(inference_state, obj) -def get_module_info(infer_state, sys_path=None, full_name=None, **kwargs): +def get_module_info(inference_state, sys_path=None, full_name=None, **kwargs): """ Returns Tuple[Union[NamespaceInfo, FileIO, None], 
Optional[bool]] """ @@ -40,25 +40,25 @@ def get_module_info(infer_state, sys_path=None, full_name=None, **kwargs): sys.path = temp -def list_module_names(infer_state, search_path): +def list_module_names(inference_state, search_path): return [ force_unicode(name) for module_loader, name, is_pkg in iter_modules(search_path) ] -def get_builtin_module_names(infer_state): +def get_builtin_module_names(inference_state): return list(map(force_unicode, sys.builtin_module_names)) -def _test_raise_error(infer_state, exception_type): +def _test_raise_error(inference_state, exception_type): """ Raise an error to simulate certain problems for unit tests. """ raise exception_type -def _test_print(infer_state, stderr=None, stdout=None): +def _test_print(inference_state, stderr=None, stdout=None): """ Force some prints in the subprocesses. This exists for unit tests. """ @@ -82,5 +82,5 @@ def _get_init_path(directory_path): return None -def safe_literal_eval(infer_state, value): +def safe_literal_eval(inference_state, value): return parser_utils.safe_literal_eval(value) diff --git a/jedi/inference/compiled/value.py b/jedi/inference/compiled/value.py index 4a149f9f..03a1ddba 100644 --- a/jedi/inference/compiled/value.py +++ b/jedi/inference/compiled/value.py @@ -14,7 +14,7 @@ from jedi.inference.names import AbstractNameDefinition, ValueNameMixin, \ from jedi.inference.base_value import Value, ValueSet, NO_VALUES from jedi.inference.lazy_value import LazyKnownValue from jedi.inference.compiled.access import _sentinel -from jedi.inference.cache import infer_state_function_cache +from jedi.inference.cache import inference_state_function_cache from jedi.inference.helpers import reraise_getitem_errors from jedi.inference.signature import BuiltinSignature @@ -41,15 +41,15 @@ class CheckAttribute(object): class CompiledObject(Value): - def __init__(self, infer_state, access_handle, parent_context=None): - super(CompiledObject, self).__init__(infer_state, parent_context) + def __init__(self, inference_state, access_handle, parent_context=None): + super(CompiledObject, self).__init__(inference_state, parent_context) self.access_handle = access_handle def py__call__(self, arguments): return_annotation = self.access_handle.get_return_annotation() if return_annotation is not None: # TODO the return annotation may also be a string. 
- return create_from_access_path(self.infer_state, return_annotation).execute_annotation() + return create_from_access_path(self.inference_state, return_annotation).execute_annotation() try: self.access_handle.getattr_paths(u'__call__') @@ -59,26 +59,26 @@ class CompiledObject(Value): if self.access_handle.is_class(): from jedi.inference.value import CompiledInstance return ValueSet([ - CompiledInstance(self.infer_state, self.parent_context, self, arguments) + CompiledInstance(self.inference_state, self.parent_context, self, arguments) ]) else: return ValueSet(self._execute_function(arguments)) @CheckAttribute() def py__class__(self): - return create_from_access_path(self.infer_state, self.access_handle.py__class__()) + return create_from_access_path(self.inference_state, self.access_handle.py__class__()) @CheckAttribute() def py__mro__(self): return (self,) + tuple( - create_from_access_path(self.infer_state, access) + create_from_access_path(self.inference_state, access) for access in self.access_handle.py__mro__accesses() ) @CheckAttribute() def py__bases__(self): return tuple( - create_from_access_path(self.infer_state, access) + create_from_access_path(self.inference_state, access) for access in self.access_handle.py__bases__() ) @@ -178,7 +178,7 @@ class CompiledObject(Value): search_global shouldn't change the fact that there's one dict, this way there's only one `object`. """ - return CompiledObjectFilter(self.infer_state, self, is_instance) + return CompiledObjectFilter(self.inference_state, self, is_instance) @CheckAttribute(u'__getitem__') def py__simple_getitem__(self, index): @@ -187,7 +187,7 @@ class CompiledObject(Value): if access is None: return NO_VALUES - return ValueSet([create_from_access_path(self.infer_state, access)]) + return ValueSet([create_from_access_path(self.inference_state, access)]) def py__getitem__(self, index_value_set, valueualized_node): all_access_paths = self.access_handle.py__getitem__all_values() @@ -196,7 +196,7 @@ class CompiledObject(Value): # object. return super(CompiledObject, self).py__getitem__(index_value_set, valueualized_node) return ValueSet( - create_from_access_path(self.infer_state, access) + create_from_access_path(self.inference_state, access) for access in all_access_paths ) @@ -215,7 +215,7 @@ class CompiledObject(Value): return for access in access_path_list: - yield LazyKnownValue(create_from_access_path(self.infer_state, access)) + yield LazyKnownValue(create_from_access_path(self.inference_state, access)) def py__name__(self): return self.access_handle.py__name__() @@ -237,12 +237,12 @@ class CompiledObject(Value): try: # TODO wtf is this? this is exactly the same as the thing # below. It uses getattr as well. 
- self.infer_state.builtins_module.access_handle.getattr_paths(name) + self.inference_state.builtins_module.access_handle.getattr_paths(name) except AttributeError: continue else: - bltn_obj = builtin_from_name(self.infer_state, name) - for result in self.infer_state.execute(bltn_obj, params): + bltn_obj = builtin_from_name(self.inference_state, name) + for result in self.inference_state.execute(bltn_obj, params): yield result for type_ in docstrings.infer_return_types(self): yield type_ @@ -257,20 +257,20 @@ class CompiledObject(Value): def execute_operation(self, other, operator): return create_from_access_path( - self.infer_state, + self.inference_state, self.access_handle.execute_operation(other.access_handle, operator) ) def negate(self): - return create_from_access_path(self.infer_state, self.access_handle.negate()) + return create_from_access_path(self.inference_state, self.access_handle.negate()) def get_metaclasses(self): return NO_VALUES class CompiledName(AbstractNameDefinition): - def __init__(self, infer_state, parent_context, name): - self._infer_state = infer_state + def __init__(self, inference_state, parent_context, name): + self._inference_state = inference_state self.parent_context = parent_context self.string_name = name @@ -296,7 +296,7 @@ class CompiledName(AbstractNameDefinition): @underscore_memoization def infer(self): return ValueSet([_create_from_name( - self._infer_state, self.parent_context, self.string_name + self._inference_state, self.parent_context, self.string_name )]) @@ -322,12 +322,12 @@ class SignatureParamName(ParamNameInterface, AbstractNameDefinition): def infer(self): p = self._signature_param - infer_state = self.parent_context.infer_state + inference_state = self.parent_context.inference_state values = NO_VALUES if p.has_default: - values = ValueSet([create_from_access_path(infer_state, p.default)]) + values = ValueSet([create_from_access_path(inference_state, p.default)]) if p.has_annotation: - annotation = create_from_access_path(infer_state, p.annotation) + annotation = create_from_access_path(inference_state, p.annotation) values |= annotation.execute_with_values() return values @@ -364,8 +364,8 @@ class EmptyCompiledName(AbstractNameDefinition): completions, just give Jedi the option to return this object. It infers to nothing. """ - def __init__(self, infer_state, name): - self.parent_context = infer_state.builtins_module + def __init__(self, inference_state, name): + self.parent_context = inference_state.builtins_module self.string_name = name def infer(self): @@ -375,8 +375,8 @@ class EmptyCompiledName(AbstractNameDefinition): class CompiledObjectFilter(AbstractFilter): name_class = CompiledName - def __init__(self, infer_state, compiled_object, is_instance=False): - self._infer_state = infer_state + def __init__(self, inference_state, compiled_object, is_instance=False): + self._inference_state = inference_state self.compiled_object = compiled_object self.is_instance = is_instance @@ -399,7 +399,7 @@ class CompiledObjectFilter(AbstractFilter): # Always use unicode objects in Python 2 from here. 
name = force_unicode(name) - if (is_descriptor and not self._infer_state.allow_descriptor_getattr) or not has_attribute: + if (is_descriptor and not self._inference_state.allow_descriptor_getattr) or not has_attribute: return [self._get_cached_name(name, is_empty=True)] if self.is_instance and name not in dir_callback(): @@ -409,7 +409,7 @@ class CompiledObjectFilter(AbstractFilter): @memoize_method def _get_cached_name(self, name, is_empty=False): if is_empty: - return EmptyCompiledName(self._infer_state, name) + return EmptyCompiledName(self._inference_state, name) else: return self._create_name(name) @@ -426,12 +426,12 @@ class CompiledObjectFilter(AbstractFilter): # ``dir`` doesn't include the type names. if not self.is_instance and needs_type_completions: - for filter in builtin_from_name(self._infer_state, u'type').get_filters(): + for filter in builtin_from_name(self._inference_state, u'type').get_filters(): names += filter.values() return names def _create_name(self, name): - return self.name_class(self._infer_state, self.compiled_object, name) + return self.name_class(self._inference_state, self.compiled_object, name) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self.compiled_object) @@ -507,7 +507,7 @@ def _parse_function_doc(doc): return param_str, ret -def _create_from_name(infer_state, compiled_object, name): +def _create_from_name(inference_state, compiled_object, name): access_paths = compiled_object.access_handle.getattr_paths(name, default=None) parent_context = compiled_object if parent_context.is_class(): @@ -516,26 +516,26 @@ def _create_from_name(infer_state, compiled_object, name): value = None for access_path in access_paths: value = create_cached_compiled_object( - infer_state, access_path, parent_context=value + inference_state, access_path, parent_context=value ) return value def _normalize_create_args(func): """The cache doesn't care about keyword vs. 
normal args.""" - def wrapper(infer_state, obj, parent_context=None): - return func(infer_state, obj, parent_context) + def wrapper(inference_state, obj, parent_context=None): + return func(inference_state, obj, parent_context) return wrapper -def create_from_access_path(infer_state, access_path): +def create_from_access_path(inference_state, access_path): parent_context = None for name, access in access_path.accesses: - parent_context = create_cached_compiled_object(infer_state, access, parent_context) + parent_context = create_cached_compiled_object(inference_state, access, parent_context) return parent_context @_normalize_create_args -@infer_state_function_cache() -def create_cached_compiled_object(infer_state, access_handle, parent_context): - return CompiledObject(infer_state, access_handle, parent_context) +@inference_state_function_cache() +def create_cached_compiled_object(inference_state, access_handle, parent_context): + return CompiledObject(inference_state, access_handle, parent_context) diff --git a/jedi/inference/docstrings.py b/jedi/inference/docstrings.py index d52df6de..e5e1241b 100644 --- a/jedi/inference/docstrings.py +++ b/jedi/inference/docstrings.py @@ -24,7 +24,7 @@ from parso import parse, ParserSyntaxError from jedi._compatibility import u from jedi import debug from jedi.inference.utils import indent_block -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.base_value import iterator_to_value_set, ValueSet, \ NO_VALUES from jedi.inference.lazy_value import LazyKnownValues @@ -205,7 +205,7 @@ def _infer_for_statement_string(module_value, string): # will be impossible to use `...` (Ellipsis) as a token. Docstring types # don't need to conform with the current grammar. debug.dbg('Parse docstring code %s', string, color='BLUE') - grammar = module_value.infer_state.latest_grammar + grammar = module_value.inference_state.latest_grammar try: module = grammar.parse(code.format(indent_block(string)), error_recovery=False) except ParserSyntaxError: @@ -223,7 +223,7 @@ def _infer_for_statement_string(module_value, string): from jedi.inference.value import FunctionValue function_value = FunctionValue( - module_value.infer_state, + module_value.inference_state, module_value, funcdef ) @@ -243,12 +243,12 @@ def _execute_types_in_stmt(module_value, stmt): """ definitions = module_value.infer_node(stmt) return ValueSet.from_sets( - _execute_array_values(module_value.infer_state, d) + _execute_array_values(module_value.inference_state, d) for d in definitions ) -def _execute_array_values(infer_state, array): +def _execute_array_values(inference_state, array): """ Tuples indicate that there's not just one return value, but the listed ones. `(str, int)` means that it returns a tuple with both types. 
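For illustration, the `(str, int)` convention described in the docstring above corresponds to user code like the following hypothetical snippet (the Sphinx-style `:rtype:` field is assumed here as one of the docstring formats this module parses; `read_entry` is a made-up function, not part of the patch):

```python
def read_entry(line):
    """Parse a ``name=count`` line.

    :rtype: (str, int)
    """
    name, count = line.split('=')
    return name, int(count)


name, count = read_entry('retries=3')
print(name, count)  # -> retries 3
```

Completing on the result of such a call is what the tuple handling supports: the `(str, int)` annotation is treated as a fake sequence whose items infer to `str` and `int` respectively.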
@@ -258,16 +258,16 @@ def _execute_array_values(infer_state, array): values = [] for lazy_value in array.py__iter__(): objects = ValueSet.from_sets( - _execute_array_values(infer_state, typ) + _execute_array_values(inference_state, typ) for typ in lazy_value.infer() ) values.append(LazyKnownValues(objects)) - return {FakeSequence(infer_state, array.array_type, values)} + return {FakeSequence(inference_state, array.array_type, values)} else: return array.execute_annotation() -@infer_state_method_cache() +@inference_state_method_cache() def infer_param(execution_value, param): from jedi.inference.value.instance import InstanceArguments from jedi.inference.value import FunctionExecutionValue @@ -294,7 +294,7 @@ def infer_param(execution_value, param): return types -@infer_state_method_cache() +@inference_state_method_cache() @iterator_to_value_set def infer_return_types(function_value): def search_return_in_docstr(code): diff --git a/jedi/inference/dynamic.py b/jedi/inference/dynamic.py index e37bc9ce..41d9ef64 100644 --- a/jedi/inference/dynamic.py +++ b/jedi/inference/dynamic.py @@ -19,7 +19,7 @@ It works as follows: from jedi import settings from jedi import debug -from jedi.inference.cache import infer_state_function_cache +from jedi.inference.cache import inference_state_function_cache from jedi.inference import imports from jedi.inference.arguments import TreeArguments from jedi.inference.param import create_default_params @@ -39,12 +39,12 @@ class DynamicExecutedParams(object): Simulates being a parameter while actually just being multiple params. """ - def __init__(self, infer_state, executed_params): - self.infer_state = infer_state + def __init__(self, inference_state, executed_params): + self.inference_state = inference_state self._executed_params = executed_params def infer(self): - with recursion.execution_allowed(self.infer_state, self) as allowed: + with recursion.execution_allowed(self.inference_state, self) as allowed: # We need to catch recursions that may occur, because an # anonymous functions can create an anonymous parameter that is # more or less self referencing. @@ -54,7 +54,7 @@ class DynamicExecutedParams(object): @debug.increase_indent -def search_params(infer_state, execution_value, funcdef): +def search_params(inference_state, execution_value, funcdef): """ A dynamic search for param values. If you try to complete a type: @@ -70,7 +70,7 @@ def search_params(infer_state, execution_value, funcdef): if not settings.dynamic_params: return create_default_params(execution_value, funcdef) - infer_state.dynamic_params_depth += 1 + inference_state.dynamic_params_depth += 1 try: path = execution_value.get_root_value().py__file__() if path is not None and is_stdlib_path(path): @@ -91,7 +91,7 @@ def search_params(infer_state, execution_value, funcdef): try: module_value = execution_value.get_root_value() function_executions = _search_function_executions( - infer_state, + inference_state, module_value, funcdef, string_name=string_name, @@ -101,7 +101,7 @@ def search_params(infer_state, execution_value, funcdef): function_execution.get_executed_params_and_issues()[0] for function_execution in function_executions )) - params = [DynamicExecutedParams(infer_state, executed_params) + params = [DynamicExecutedParams(inference_state, executed_params) for executed_params in zipped_params] # Inferes the ExecutedParams to types. 
else: @@ -110,12 +110,12 @@ def search_params(infer_state, execution_value, funcdef): debug.dbg('Dynamic param result finished', color='MAGENTA') return params finally: - infer_state.dynamic_params_depth -= 1 + inference_state.dynamic_params_depth -= 1 -@infer_state_function_cache(default=None) +@inference_state_function_cache(default=None) @to_list -def _search_function_executions(infer_state, module_value, funcdef, string_name): +def _search_function_executions(inference_state, module_value, funcdef, string_name): """ Returns a list of param names. """ @@ -129,7 +129,7 @@ def _search_function_executions(infer_state, module_value, funcdef, string_name) found_executions = False i = 0 for for_mod_value in imports.get_modules_containing_name( - infer_state, [module_value], string_name): + inference_state, [module_value], string_name): if not isinstance(module_value, ModuleValue): return for name, trailer in _get_possible_nodes(for_mod_value, string_name): @@ -138,12 +138,12 @@ def _search_function_executions(infer_state, module_value, funcdef, string_name) # This is a simple way to stop Jedi's dynamic param recursion # from going wild: The deeper Jedi's in the recursion, the less # code should be inferred. - if i * infer_state.dynamic_params_depth > MAX_PARAM_SEARCHES: + if i * inference_state.dynamic_params_depth > MAX_PARAM_SEARCHES: return - random_value = infer_state.create_value(for_mod_value, name) + random_value = inference_state.create_value(for_mod_value, name) for function_execution in _check_name_for_execution( - infer_state, random_value, compare_node, name, trailer): + inference_state, random_value, compare_node, name, trailer): found_executions = True yield function_execution @@ -178,17 +178,17 @@ def _get_possible_nodes(module_value, func_string_name): yield name, trailer -def _check_name_for_execution(infer_state, value, compare_node, name, trailer): +def _check_name_for_execution(inference_state, value, compare_node, name, trailer): from jedi.inference.value.function import FunctionExecutionValue def create_func_excs(): arglist = trailer.children[1] if arglist == ')': arglist = None - args = TreeArguments(infer_state, value, arglist, trailer) + args = TreeArguments(inference_state, value, arglist, trailer) if value_node.type == 'classdef': created_instance = instance.TreeInstance( - infer_state, + inference_state, v.parent_context, v, args @@ -198,7 +198,7 @@ def _check_name_for_execution(infer_state, value, compare_node, name, trailer): else: yield v.get_function_execution(args) - for v in infer_state.goto_definitions(value, name): + for v in inference_state.goto_definitions(value, name): value_node = v.tree_node if compare_node == value_node: for func_execution in create_func_excs(): @@ -219,9 +219,9 @@ def _check_name_for_execution(infer_state, value, compare_node, name, trailer): execution_value = next(create_func_excs()) for name, trailer in _get_possible_nodes(module_value, params[0].string_name): if value_node.start_pos < name.start_pos < value_node.end_pos: - random_value = infer_state.create_value(execution_value, name) + random_value = inference_state.create_value(execution_value, name) iterator = _check_name_for_execution( - infer_state, + inference_state, random_value, compare_node, name, diff --git a/jedi/inference/filters.py b/jedi/inference/filters.py index c90c986e..9223cef6 100644 --- a/jedi/inference/filters.py +++ b/jedi/inference/filters.py @@ -235,7 +235,7 @@ class _BuiltinMappedMethod(Value): def __init__(self, builtin_value, method, builtin_func): 
super(_BuiltinMappedMethod, self).__init__( - builtin_value.infer_state, + builtin_value.inference_state, parent_context=builtin_value ) self._method = method @@ -260,7 +260,7 @@ class SpecialMethodFilter(DictFilter): def __init__(self, parent_context, string_name, value, builtin_value): callable_, python_version = value if python_version is not None and \ - python_version != parent_context.infer_state.environment.version_info.major: + python_version != parent_context.inference_state.environment.version_info.major: raise KeyError self.parent_context = parent_context @@ -327,8 +327,8 @@ class _AttributeOverwriteMixin(object): class LazyAttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, LazyValueWrapper)): - def __init__(self, infer_state): - self.infer_state = infer_state + def __init__(self, inference_state): + self.inference_state = inference_state class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, @@ -344,7 +344,7 @@ def publish_method(method_name, python_version_match=None): return decorator -def get_global_filters(infer_state, value, until_position, origin_scope): +def get_global_filters(inference_state, value, until_position, origin_scope): """ Returns all filters in order of priority for name resolution. @@ -363,7 +363,7 @@ def get_global_filters(infer_state, value, until_position, origin_scope): >>> scope >>> value = script._get_module().create_value(scope) - >>> filters = list(get_global_filters(value.infer_state, value, (4, 0), None)) + >>> filters = list(get_global_filters(value.inference_state, value, (4, 0), None)) First we get the names from the function scope. @@ -407,4 +407,4 @@ def get_global_filters(infer_state, value, until_position, origin_scope): value = value.parent_context # Add builtins to the global scope. - yield next(infer_state.builtins_module.get_filters()) + yield next(inference_state.builtins_module.get_filters()) diff --git a/jedi/inference/finder.py b/jedi/inference/finder.py index 7a6a3daf..c3b1e7a7 100644 --- a/jedi/inference/finder.py +++ b/jedi/inference/finder.py @@ -33,9 +33,9 @@ from jedi.inference.gradual.conversion import convert_values class NameFinder(object): - def __init__(self, infer_state, value, name_value, name_or_str, + def __init__(self, inference_state, value, name_value, name_or_str, position=None, analysis_errors=True): - self._infer_state = infer_state + self._inference_state = inference_state # Make sure that it's not just a syntax tree node. self._value = value self._name_value = name_value @@ -113,7 +113,7 @@ class NameFinder(object): if lambdef is None or position < lambdef.children[-2].start_pos: position = ancestor.start_pos - return get_global_filters(self._infer_state, self._value, position, origin_scope) + return get_global_filters(self._inference_state, self._value, position, origin_scope) def get_value_filters(self): origin_scope = self._get_origin_scope() @@ -171,7 +171,7 @@ class NameFinder(object): def _check_getattr(self, inst): """Checks for both __getattr__ and __getattribute__ methods""" # str is important, because it shouldn't be `Name`! - name = compiled.create_simple_object(self._infer_state, self._string_name) + name = compiled.create_simple_object(self._inference_state, self._string_name) # This is a little bit special. `__getattribute__` is in Python # executed before `__getattr__`. 
But: I know no use case, where @@ -263,7 +263,7 @@ def _check_isinstance_type(value, element, search_name): # arglist stuff arglist = trailer.children[1] - args = TreeArguments(value.infer_state, value, arglist, trailer) + args = TreeArguments(value.inference_state, value, arglist, trailer) param_list = list(args.unpack()) # Disallow keyword arguments assert len(param_list) == 2 @@ -273,7 +273,7 @@ def _check_isinstance_type(value, element, search_name): is_instance_call = helpers.call_of_leaf(lazy_value_object.data) # Do a simple get_code comparison. They should just have the same code, # and everything will be all right. - normalize = value.infer_state.grammar._normalize + normalize = value.inference_state.grammar._normalize assert normalize(is_instance_call) == normalize(call) except AssertionError: return None diff --git a/jedi/inference/flow_analysis.py b/jedi/inference/flow_analysis.py index 9d05339d..60732482 100644 --- a/jedi/inference/flow_analysis.py +++ b/jedi/inference/flow_analysis.py @@ -106,7 +106,7 @@ def _break_check(value, value_scope, flow_scope, node): def _check_if(value, node): - with execution_allowed(value.infer_state, node) as allowed: + with execution_allowed(value.inference_state, node) as allowed: if not allowed: return UNSURE diff --git a/jedi/inference/gradual/annotation.py b/jedi/inference/gradual/annotation.py index 69da15a0..8fd686b9 100644 --- a/jedi/inference/gradual/annotation.py +++ b/jedi/inference/gradual/annotation.py @@ -10,7 +10,7 @@ import re from parso import ParserSyntaxError, parse from jedi._compatibility import force_unicode -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \ AbstractAnnotatedClass @@ -60,7 +60,7 @@ def _infer_annotation_string(value, string, index=None): def _get_forward_reference_node(value, string): try: - new_node = value.infer_state.grammar.parse( + new_node = value.inference_state.grammar.parse( force_unicode(string), start_symbol='eval_input', error_recovery=False @@ -106,21 +106,21 @@ def _split_comment_param_declaration(decl_text): return params -@infer_state_method_cache() +@inference_state_method_cache() def infer_param(execution_value, param): values = _infer_param(execution_value, param) - infer_state = execution_value.infer_state + inference_state = execution_value.inference_state if param.star_count == 1: - tuple_ = builtin_from_name(infer_state, 'tuple') + tuple_ = builtin_from_name(inference_state, 'tuple') return ValueSet([GenericClass( tuple_, generics=(values,), ) for c in values]) elif param.star_count == 2: - dct = builtin_from_name(infer_state, 'dict') + dct = builtin_from_name(inference_state, 'dict') return ValueSet([GenericClass( dct, - generics=(ValueSet([builtin_from_name(infer_state, 'str')]), values), + generics=(ValueSet([builtin_from_name(inference_state, 'str')]), values), ) for c in values]) pass return values @@ -190,7 +190,7 @@ def py__annotations__(funcdef): return dct -@infer_state_method_cache() +@inference_state_method_cache() def infer_return_types(function_execution_value): """ Infers the type of a function's return value, diff --git a/jedi/inference/gradual/conversion.py b/jedi/inference/gradual/conversion.py index 291480b9..93a0b631 100644 --- a/jedi/inference/gradual/conversion.py +++ b/jedi/inference/gradual/conversion.py @@ -87,11 +87,11 @@ def _load_stub_module(module): return module 
from jedi.inference.gradual.typeshed import _try_to_load_stub_cached return _try_to_load_stub_cached( - module.infer_state, + module.inference_state, import_names=module.string_names, python_value_set=ValueSet([module]), parent_module_value=None, - sys_path=module.infer_state.get_sys_path(), + sys_path=module.inference_state.get_sys_path(), ) diff --git a/jedi/inference/gradual/typeshed.py b/jedi/inference/gradual/typeshed.py index 42bb75e0..a57b622b 100644 --- a/jedi/inference/gradual/typeshed.py +++ b/jedi/inference/gradual/typeshed.py @@ -89,9 +89,9 @@ def _cache_stub_file_map(version_info): def import_module_decorator(func): @wraps(func) - def wrapper(infer_state, import_names, parent_module_value, sys_path, prefer_stubs): + def wrapper(inference_state, import_names, parent_module_value, sys_path, prefer_stubs): try: - python_value_set = infer_state.module_cache.get(import_names) + python_value_set = inference_state.module_cache.get(import_names) except KeyError: if parent_module_value is not None and parent_module_value.is_stub(): parent_module_values = parent_module_value.non_stub_value_set @@ -104,19 +104,19 @@ def import_module_decorator(func): # ``os``. python_parent = next(iter(parent_module_values)) if python_parent is None: - python_parent, = infer_state.import_module(('os',), prefer_stubs=False) + python_parent, = inference_state.import_module(('os',), prefer_stubs=False) python_value_set = python_parent.py__getattribute__('path') else: python_value_set = ValueSet.from_sets( - func(infer_state, import_names, p, sys_path,) + func(inference_state, import_names, p, sys_path,) for p in parent_module_values ) - infer_state.module_cache.add(import_names, python_value_set) + inference_state.module_cache.add(import_names, python_value_set) if not prefer_stubs: return python_value_set - stub = _try_to_load_stub_cached(infer_state, import_names, python_value_set, + stub = _try_to_load_stub_cached(inference_state, import_names, python_value_set, parent_module_value, sys_path) if stub is not None: return ValueSet([stub]) @@ -125,21 +125,21 @@ def import_module_decorator(func): return wrapper -def _try_to_load_stub_cached(infer_state, import_names, *args, **kwargs): +def _try_to_load_stub_cached(inference_state, import_names, *args, **kwargs): try: - return infer_state.stub_module_cache[import_names] + return inference_state.stub_module_cache[import_names] except KeyError: pass # TODO is this needed? where are the exceptions coming from that make this # necessary? Just remove this line. - infer_state.stub_module_cache[import_names] = None - infer_state.stub_module_cache[import_names] = result = \ - _try_to_load_stub(infer_state, import_names, *args, **kwargs) + inference_state.stub_module_cache[import_names] = None + inference_state.stub_module_cache[import_names] = result = \ + _try_to_load_stub(inference_state, import_names, *args, **kwargs) return result -def _try_to_load_stub(infer_state, import_names, python_value_set, +def _try_to_load_stub(inference_state, import_names, python_value_set, parent_module_value, sys_path): """ Trying to load a stub for a set of import_names. 
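The caching pattern used by `_try_to_load_stub_cached` above can be summarized by a minimal, standalone sketch (not Jedi's code; `cached_load`, `stub_cache` and `loader` are hypothetical names): the cache is keyed by the import names, and a `None` placeholder is stored before computing, so a re-entrant attempt to load the same stub gets `None` back instead of recursing.

```python
def cached_load(cache, key, loader):
    try:
        return cache[key]      # a cached None means "failed or still in progress"
    except KeyError:
        pass
    cache[key] = None          # placeholder guards against re-entry
    cache[key] = result = loader(key)
    return result


stub_cache = {}
print(cached_load(stub_cache, ('os', 'path'),
                  lambda names: '<stub for %s>' % '.'.join(names)))
print(stub_cache)  # {('os', 'path'): '<stub for os.path>'}
```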
@@ -150,7 +150,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set, if parent_module_value is None and len(import_names) > 1: try: parent_module_value = _try_to_load_stub_cached( - infer_state, import_names[:-1], NO_VALUES, + inference_state, import_names[:-1], NO_VALUES, parent_module_value=None, sys_path=sys_path) except KeyError: pass @@ -161,7 +161,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set, for p in sys_path: init = os.path.join(p, *import_names) + '-stubs' + os.path.sep + '__init__.pyi' m = _try_to_load_stub_from_file( - infer_state, + inference_state, python_value_set, file_io=FileIO(init), import_names=import_names, @@ -185,7 +185,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set, for file_path in file_paths: m = _try_to_load_stub_from_file( - infer_state, + inference_state, python_value_set, # The file path should end with .pyi file_io=FileIO(file_path), @@ -195,7 +195,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set, return m # 3. Try to load typeshed - m = _load_from_typeshed(infer_state, python_value_set, parent_module_value, import_names) + m = _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names) if m is not None: return m @@ -216,7 +216,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set, for p in check_path: m = _try_to_load_stub_from_file( - infer_state, + inference_state, python_value_set, file_io=FileIO(os.path.join(p, *names_for_path) + '.pyi'), import_names=import_names, @@ -229,11 +229,11 @@ def _try_to_load_stub(infer_state, import_names, python_value_set, return None -def _load_from_typeshed(infer_state, python_value_set, parent_module_value, import_names): +def _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names): import_name = import_names[-1] map_ = None if len(import_names) == 1: - map_ = _cache_stub_file_map(infer_state.grammar.version_info) + map_ = _cache_stub_file_map(inference_state.grammar.version_info) import_name = _IMPORT_MAP.get(import_name, import_name) elif isinstance(parent_module_value, StubModuleValue): if not parent_module_value.is_package: @@ -247,16 +247,16 @@ def _load_from_typeshed(infer_state, python_value_set, parent_module_value, impo path = map_.get(import_name) if path is not None: return _try_to_load_stub_from_file( - infer_state, + inference_state, python_value_set, file_io=FileIO(path), import_names=import_names, ) -def _try_to_load_stub_from_file(infer_state, python_value_set, file_io, import_names): +def _try_to_load_stub_from_file(inference_state, python_value_set, file_io, import_names): try: - stub_module_node = infer_state.parse( + stub_module_node = inference_state.parse( file_io=file_io, cache=True, use_latest_grammar=True @@ -266,24 +266,24 @@ def _try_to_load_stub_from_file(infer_state, python_value_set, file_io, import_n return None else: return create_stub_module( - infer_state, python_value_set, stub_module_node, file_io, + inference_state, python_value_set, stub_module_node, file_io, import_names ) -def create_stub_module(infer_state, python_value_set, stub_module_node, file_io, import_names): +def create_stub_module(inference_state, python_value_set, stub_module_node, file_io, import_names): if import_names == ('typing',): module_cls = TypingModuleWrapper else: module_cls = StubModuleValue file_name = os.path.basename(file_io.path) stub_module_value = module_cls( - python_value_set, infer_state, stub_module_node, + python_value_set, 
inference_state, stub_module_node, file_io=file_io, string_names=import_names, # The code was loaded with latest_grammar, so use # that. - code_lines=get_cached_code_lines(infer_state.latest_grammar, file_io.path), + code_lines=get_cached_code_lines(inference_state.latest_grammar, file_io.path), is_package=file_name == '__init__.pyi', ) return stub_module_value diff --git a/jedi/inference/gradual/typing.py b/jedi/inference/gradual/typing.py index e301f4a7..f9809521 100644 --- a/jedi/inference/gradual/typing.py +++ b/jedi/inference/gradual/typing.py @@ -7,7 +7,7 @@ This file deals with all the typing.py cases. """ from jedi._compatibility import unicode, force_unicode from jedi import debug -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.compiled import builtin_from_name from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \ iterator_to_value_set, ValueWrapper, LazyValueWrapper @@ -45,8 +45,8 @@ class TypingName(AbstractTreeName): class _BaseTypingValue(Value): - def __init__(self, infer_state, parent_context, tree_name): - super(_BaseTypingValue, self).__init__(infer_state, parent_context) + def __init__(self, inference_state, parent_context, tree_name): + super(_BaseTypingValue, self).__init__(inference_state, parent_context) self._tree_name = tree_name @property @@ -71,7 +71,7 @@ class _BaseTypingValue(Value): # TODO this is obviously not correct, but at least gives us a class if # we have none. Some of these objects don't really have a base class in # typeshed. - return builtin_from_name(self.infer_state, u'object') + return builtin_from_name(self.inference_state, u'object') @property def name(self): @@ -87,39 +87,39 @@ class TypingModuleName(NameWrapper): def _remap(self): name = self.string_name - infer_state = self.parent_context.infer_state + inference_state = self.parent_context.inference_state try: actual = _TYPE_ALIAS_TYPES[name] except KeyError: pass else: - yield TypeAlias.create_cached(infer_state, self.parent_context, self.tree_name, actual) + yield TypeAlias.create_cached(inference_state, self.parent_context, self.tree_name, actual) return if name in _PROXY_CLASS_TYPES: - yield TypingClassValue.create_cached(infer_state, self.parent_context, self.tree_name) + yield TypingClassValue.create_cached(inference_state, self.parent_context, self.tree_name) elif name in _PROXY_TYPES: - yield TypingValue.create_cached(infer_state, self.parent_context, self.tree_name) + yield TypingValue.create_cached(inference_state, self.parent_context, self.tree_name) elif name == 'runtime': # We don't want anything here, not sure what this function is # supposed to do, since it just appears in the stubs and shouldn't # have any effects there (because it's never executed). return elif name == 'TypeVar': - yield TypeVarClass.create_cached(infer_state, self.parent_context, self.tree_name) + yield TypeVarClass.create_cached(inference_state, self.parent_context, self.tree_name) elif name == 'Any': - yield Any.create_cached(infer_state, self.parent_context, self.tree_name) + yield Any.create_cached(inference_state, self.parent_context, self.tree_name) elif name == 'TYPE_CHECKING': # This is needed for e.g. imports that are only available for type # checking or are in cycles. The user can then check this variable. 
- yield builtin_from_name(infer_state, u'True') + yield builtin_from_name(inference_state, u'True') elif name == 'overload': - yield OverloadFunction.create_cached(infer_state, self.parent_context, self.tree_name) + yield OverloadFunction.create_cached(inference_state, self.parent_context, self.tree_name) elif name == 'NewType': - yield NewTypeFunction.create_cached(infer_state, self.parent_context, self.tree_name) + yield NewTypeFunction.create_cached(inference_state, self.parent_context, self.tree_name) elif name == 'cast': # TODO implement cast - yield CastFunction.create_cached(infer_state, self.parent_context, self.tree_name) + yield CastFunction.create_cached(inference_state, self.parent_context, self.tree_name) elif name == 'TypedDict': # TODO doesn't even exist in typeshed/typing.py, yet. But will be # added soon. @@ -139,8 +139,8 @@ class TypingModuleFilterWrapper(FilterWrapper): class _WithIndexBase(_BaseTypingValue): - def __init__(self, infer_state, parent_context, name, index_value, value_of_index): - super(_WithIndexBase, self).__init__(infer_state, parent_context, name) + def __init__(self, inference_state, parent_context, name, index_value, value_of_index): + super(_WithIndexBase, self).__init__(inference_state, parent_context, name) self._index_value = index_value self._value_of_index = value_of_index @@ -164,7 +164,7 @@ class TypingValueWithIndex(_WithIndexBase): # Optional is basically just saying it's either None or the actual # type. return self.gather_annotation_classes().execute_annotation() \ - | ValueSet([builtin_from_name(self.infer_state, u'None')]) + | ValueSet([builtin_from_name(self.inference_state, u'None')]) elif string_name == 'Type': # The type is actually already given in the index_value return ValueSet([self._index_value]) @@ -174,7 +174,7 @@ class TypingValueWithIndex(_WithIndexBase): cls = globals()[string_name] return ValueSet([cls( - self.infer_state, + self.inference_state, self.parent_context, self._tree_name, self._index_value, @@ -194,7 +194,7 @@ class TypingValue(_BaseTypingValue): def py__getitem__(self, index_value_set, valueualized_node): return ValueSet( self.index_class.create_cached( - self.infer_state, + self.inference_state, self.parent_context, self._tree_name, index_value, @@ -206,7 +206,7 @@ class TypingValue(_BaseTypingValue): class _TypingClassMixin(object): def py__bases__(self): return [LazyKnownValues( - self.infer_state.builtins_module.py__getattribute__('object') + self.inference_state.builtins_module.py__getattribute__('object') )] def get_metaclasses(self): @@ -246,7 +246,7 @@ def _iter_over_arguments(maybe_tuple_value, defining_value): class TypeAlias(LazyValueWrapper): def __init__(self, parent_context, origin_tree_name, actual): - self.infer_state = parent_context.infer_state + self.inference_state = parent_context.inference_state self.parent_context = parent_context self._origin_tree_name = origin_tree_name self._actual = actual # e.g. builtins.list @@ -263,13 +263,13 @@ class TypeAlias(LazyValueWrapper): def _get_wrapped_value(self): module_name, class_name = self._actual.split('.') - if self.infer_state.environment.version_info.major == 2 and module_name == 'builtins': + if self.inference_state.environment.version_info.major == 2 and module_name == 'builtins': module_name = '__builtin__' - # TODO use infer_state.import_module? + # TODO use inference_state.import_module? 
from jedi.inference.imports import Importer module, = Importer( - self.infer_state, [module_name], self.infer_state.builtins_module + self.inference_state, [module_name], self.inference_state.builtins_module ).follow() classes = module.py__getattribute__(class_name) # There should only be one, because it's code that we control. @@ -358,7 +358,7 @@ class TypeVarClass(_BaseTypingValue): return NO_VALUES return ValueSet([TypeVar.create_cached( - self.infer_state, + self.inference_state, self.parent_context, self._tree_name, var_name, @@ -382,7 +382,7 @@ class TypeVarClass(_BaseTypingValue): return None else: safe_value = method(default=None) - if self.infer_state.environment.version_info.major == 2: + if self.inference_state.environment.version_info.major == 2: if isinstance(safe_value, bytes): return force_unicode(safe_value) if isinstance(safe_value, (str, unicode)): @@ -391,8 +391,8 @@ class TypeVarClass(_BaseTypingValue): class TypeVar(_BaseTypingValue): - def __init__(self, infer_state, parent_context, tree_name, var_name, unpacked_args): - super(TypeVar, self).__init__(infer_state, parent_context, tree_name) + def __init__(self, inference_state, parent_context, tree_name, var_name, unpacked_args): + super(TypeVar, self).__init__(inference_state, parent_context, tree_name) self._var_name = var_name self._constraints_lazy_values = [] @@ -469,7 +469,7 @@ class NewTypeFunction(_BaseTypingValue): return NO_VALUES return ValueSet( NewType( - self.infer_state, + self.inference_state, valueualized_node.value, valueualized_node.node, second_arg.infer(), @@ -477,8 +477,8 @@ class NewTypeFunction(_BaseTypingValue): class NewType(Value): - def __init__(self, infer_state, parent_context, tree_node, type_value_set): - super(NewType, self).__init__(infer_state, parent_context) + def __init__(self, inference_state, parent_context, tree_node, type_value_set): + super(NewType, self).__init__(inference_state, parent_context) self._type_value_set = type_value_set self.tree_node = tree_node @@ -643,7 +643,7 @@ class LazyGenericClass(AbstractAnnotatedClass): self._index_value = index_value self._value_of_index = value_of_index - @infer_state_method_cache() + @inference_state_method_cache() def get_generics(self): return list(_iter_over_arguments(self._index_value, self._value_of_index)) @@ -668,7 +668,7 @@ class LazyAnnotatedBaseClass(object): if isinstance(base, AbstractAnnotatedClass): # Here we have to recalculate the given types. yield GenericClass.create_cached( - base.infer_state, + base.inference_state, base._wrapped_value, tuple(self._remap_type_vars(base)), ) @@ -703,5 +703,5 @@ class InstanceWrapper(ValueWrapper): except IndexError: pass elif cls.py__name__() == 'Iterator': - return ValueSet([builtin_from_name(self.infer_state, u'None')]) + return ValueSet([builtin_from_name(self.inference_state, u'None')]) return self._wrapped_value.py__stop_iteration_returns() diff --git a/jedi/inference/gradual/utils.py b/jedi/inference/gradual/utils.py index 500df0e7..470ed659 100644 --- a/jedi/inference/gradual/utils.py +++ b/jedi/inference/gradual/utils.py @@ -3,7 +3,7 @@ import os from jedi.inference.gradual.typeshed import TYPESHED_PATH, create_stub_module -def load_proper_stub_module(infer_state, file_io, import_names, module_node): +def load_proper_stub_module(inference_state, file_io, import_names, module_node): """ This function is given a random .pyi file and should return the proper module. 
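As a rough, hypothetical sketch of the job described in the docstring above, turning a `.pyi` path into the dotted import names that the runtime module is then imported under might look as follows (`dotted_names_for_stub` is an invented helper, not part of the patch; the real code handles more cases such as `-stubs` directories and typeshed layouts):

```python
import os


def dotted_names_for_stub(stub_path, sys_path_entries):
    # POSIX-style paths are assumed for brevity.
    for base in sys_path_entries:
        prefix = base.rstrip(os.sep) + os.sep
        if stub_path.startswith(prefix):
            parts = os.path.relpath(stub_path, base)[:-len('.pyi')].split(os.sep)
            if parts[-1] == '__init__':   # package stubs are __init__.pyi
                parts = parts[:-1]
            return tuple(parts)
    return None


print(dotted_names_for_stub('/site-packages/pkg/mod.pyi', ['/site-packages']))
# -> ('pkg', 'mod')
```

With those names in hand, the body shown below imports the non-stub module (`prefer_stubs=False`) and wraps it together with the parsed `.pyi` into a stub module value.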
@@ -20,13 +20,13 @@ def load_proper_stub_module(infer_state, file_io, import_names, module_node): import_names = import_names[:-1] if import_names is not None: - actual_value_set = infer_state.import_module(import_names, prefer_stubs=False) + actual_value_set = inference_state.import_module(import_names, prefer_stubs=False) if not actual_value_set: return None stub = create_stub_module( - infer_state, actual_value_set, module_node, file_io, import_names + inference_state, actual_value_set, module_node, file_io, import_names ) - infer_state.stub_module_cache[import_names] = stub + inference_state.stub_module_cache[import_names] = stub return stub return None diff --git a/jedi/inference/helpers.py b/jedi/inference/helpers.py index 2c99c95d..58079a4c 100644 --- a/jedi/inference/helpers.py +++ b/jedi/inference/helpers.py @@ -66,7 +66,7 @@ def infer_call_of_leaf(value, leaf, cut_own_trailer=False): trailer = leaf.parent if trailer.type == 'fstring': from jedi.inference import compiled - return compiled.get_string_value_set(value.infer_state) + return compiled.get_string_value_set(value.inference_state) # The leaf may not be the last or first child, because there exist three # different trailers: `( x )`, `[ x ]` and `.x`. In the first two examples @@ -195,7 +195,7 @@ def predefine_names(value, flow_scope, dct): def is_string(value): - if value.infer_state.environment.version_info.major == 2: + if value.inference_state.environment.version_info.major == 2: str_classes = (unicode, bytes) else: str_classes = (unicode,) @@ -265,5 +265,5 @@ def parse_dotted_names(nodes, is_import_from, until_node=None): return level, names -def values_from_qualified_names(infer_state, *names): - return infer_state.import_module(names[:-1]).py__getattribute__(names[-1]) +def values_from_qualified_names(inference_state, *names): + return inference_state.import_module(names[:-1]).py__getattribute__(names[-1]) diff --git a/jedi/inference/imports.py b/jedi/inference/imports.py index a431f511..73d7ff0f 100644 --- a/jedi/inference/imports.py +++ b/jedi/inference/imports.py @@ -28,7 +28,7 @@ from jedi.inference import helpers from jedi.inference import compiled from jedi.inference import analysis from jedi.inference.utils import unite -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.names import ImportName, SubModuleName from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.inference.gradual.typeshed import import_module_decorator @@ -56,13 +56,13 @@ class ModuleCache(object): # This memoization is needed, because otherwise we will infinitely loop on # certain imports. 
-@infer_state_method_cache(default=NO_VALUES) +@inference_state_method_cache(default=NO_VALUES) def infer_import(value, tree_name, is_goto=False): module_value = value.get_root_value() import_node = search_ancestor(tree_name, 'import_name', 'import_from') import_path = import_node.get_path_for_name(tree_name) from_import_name = None - infer_state = value.infer_state + inference_state = value.inference_state try: from_names = import_node.get_from_names() except AttributeError: @@ -75,7 +75,7 @@ def infer_import(value, tree_name, is_goto=False): from_import_name = import_path[-1] import_path = from_names - importer = Importer(infer_state, tuple(import_path), + importer = Importer(inference_state, tuple(import_path), module_value, import_node.level) types = importer.follow() @@ -101,7 +101,7 @@ def infer_import(value, tree_name, is_goto=False): if not types: path = import_path + [from_import_name] - importer = Importer(infer_state, tuple(path), + importer = Importer(inference_state, tuple(path), module_value, import_node.level) types = importer.follow() # goto only accepts `Name` @@ -183,7 +183,7 @@ def _level_to_base_import_path(project_path, directory, level): class Importer(object): - def __init__(self, infer_state, import_path, module_value, level=0): + def __init__(self, inference_state, import_path, module_value, level=0): """ An implementation similar to ``__import__``. Use `follow` to actually follow the imports. @@ -197,7 +197,7 @@ class Importer(object): :param import_path: List of namespaces (strings or Names). """ debug.speed('import %s %s' % (import_path, module_value)) - self._infer_state = infer_state + self._inference_state = inference_state self.level = level self.module_value = module_value @@ -233,7 +233,7 @@ class Importer(object): directory = os.path.dirname(path) base_import_path, base_directory = _level_to_base_import_path( - self._infer_state.project._path, directory, level, + self._inference_state.project._path, directory, level, ) if base_directory is None: # Everything is lost, the relative import does point @@ -265,11 +265,11 @@ class Importer(object): return self._fixed_sys_path sys_path_mod = ( - self._infer_state.get_sys_path() + self._inference_state.get_sys_path() + sys_path.check_sys_path_modifications(self.module_value) ) - if self._infer_state.environment.version_info.major == 2: + if self._inference_state.environment.version_info.major == 2: file_path = self.module_value.py__file__() if file_path is not None: # Python2 uses an old strange way of importing relative imports. 
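The `@inference_state_method_cache(default=NO_VALUES)` decorator on `infer_import` earlier in this file's diff is what breaks the circular-import loops mentioned in the comment there. A toy, self-contained sketch of that idea (not the real `jedi.inference.cache` implementation, whose internals differ; `state_cached`, `FakeState` and `follow` are invented names) looks roughly like this:

```python
from functools import wraps
from weakref import WeakKeyDictionary

_caches = WeakKeyDictionary()  # one memo dict per inference state


def state_cached(default=None):
    """Memoize per state; a re-entrant call gets ``default`` instead of recursing."""
    def decorator(func):
        @wraps(func)
        def wrapper(state, *args):
            cache = _caches.setdefault(state, {})
            key = (func, args)
            if key in cache:
                return cache[key]      # may still be the placeholder
            cache[key] = default       # placeholder breaks the cycle
            cache[key] = result = func(state, *args)
            return result
        return wrapper
    return decorator


class FakeState(object):
    pass


@state_cached(default=frozenset())
def follow(state, name):
    # 'a' and 'b' import each other; without the cache this would never end.
    other = 'b' if name == 'a' else 'a'
    return follow(state, other) | {'value-of-' + name}


print(follow(FakeState(), 'a'))  # both values, no infinite recursion
```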
@@ -290,7 +290,7 @@ class Importer(object): value_set = [None] for i, name in enumerate(self.import_path): value_set = ValueSet.from_sets([ - self._infer_state.import_module( + self._inference_state.import_module( import_names[:i+1], parent_module_value, sys_path @@ -311,12 +311,12 @@ class Importer(object): # add builtin module names if search_path is None and in_module is None: names += [ImportName(self.module_value, name) - for name in self._infer_state.compiled_subprocess.get_builtin_module_names()] + for name in self._inference_state.compiled_subprocess.get_builtin_module_names()] if search_path is None: search_path = self._sys_path_with_modifications() - for name in iter_module_names(self._infer_state, search_path): + for name in iter_module_names(self._inference_state, search_path): if in_module is None: n = ImportName(self.module_value, name) else: @@ -324,7 +324,7 @@ class Importer(object): names.append(n) return names - def completion_names(self, infer_state, only_modules=False): + def completion_names(self, inference_state, only_modules=False): """ :param only_modules: Indicates wheter it's possible to import a definition that is not defined in a module. @@ -374,12 +374,12 @@ class Importer(object): @plugin_manager.decorate() @import_module_decorator -def import_module(infer_state, import_names, parent_module_value, sys_path): +def import_module(inference_state, import_names, parent_module_value, sys_path): """ This method is very similar to importlib's `_gcd_import`. """ if import_names[0] in settings.auto_import_modules: - module = _load_builtin_module(infer_state, import_names, sys_path) + module = _load_builtin_module(inference_state, import_names, sys_path) if module is None: return NO_VALUES return ValueSet([module]) @@ -388,7 +388,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path): if parent_module_value is None: # Override the sys.path. It works only good that way. # Injecting the path directly into `find_module` did not work. - file_io_or_ns, is_pkg = infer_state.compiled_subprocess.get_module_info( + file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info( string=import_names[-1], full_name=module_name, sys_path=sys_path, @@ -409,7 +409,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path): # not important to be correct. 
if not isinstance(path, list): path = [path] - file_io_or_ns, is_pkg = infer_state.compiled_subprocess.get_module_info( + file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info( string=import_names[-1], path=path, full_name=module_name, @@ -423,17 +423,17 @@ def import_module(infer_state, import_names, parent_module_value, sys_path): if isinstance(file_io_or_ns, ImplicitNSInfo): from jedi.inference.value.namespace import ImplicitNamespaceValue module = ImplicitNamespaceValue( - infer_state, + inference_state, fullname=file_io_or_ns.name, paths=file_io_or_ns.paths, ) elif file_io_or_ns is None: - module = _load_builtin_module(infer_state, import_names, sys_path) + module = _load_builtin_module(inference_state, import_names, sys_path) if module is None: return NO_VALUES else: module = _load_python_module( - infer_state, file_io_or_ns, sys_path, + inference_state, file_io_or_ns, sys_path, import_names=import_names, is_package=is_pkg, ) @@ -445,14 +445,14 @@ def import_module(infer_state, import_names, parent_module_value, sys_path): return ValueSet([module]) -def _load_python_module(infer_state, file_io, sys_path=None, +def _load_python_module(inference_state, file_io, sys_path=None, import_names=None, is_package=False): try: - return infer_state.module_cache.get_from_path(file_io.path) + return inference_state.module_cache.get_from_path(file_io.path) except KeyError: pass - module_node = infer_state.parse( + module_node = inference_state.parse( file_io=file_io, cache=True, diff_cache=settings.fast_parser, @@ -461,21 +461,21 @@ def _load_python_module(infer_state, file_io, sys_path=None, from jedi.inference.value import ModuleValue return ModuleValue( - infer_state, module_node, + inference_state, module_node, file_io=file_io, string_names=import_names, - code_lines=get_cached_code_lines(infer_state.grammar, file_io.path), + code_lines=get_cached_code_lines(inference_state.grammar, file_io.path), is_package=is_package, ) -def _load_builtin_module(infer_state, import_names=None, sys_path=None): +def _load_builtin_module(inference_state, import_names=None, sys_path=None): if sys_path is None: - sys_path = infer_state.get_sys_path() + sys_path = inference_state.get_sys_path() dotted_name = '.'.join(import_names) assert dotted_name is not None - module = compiled.load_module(infer_state, dotted_name=dotted_name, sys_path=sys_path) + module = compiled.load_module(inference_state, dotted_name=dotted_name, sys_path=sys_path) if module is None: # The file might raise an ImportError e.g. and therefore not be # importable. @@ -483,13 +483,13 @@ def _load_builtin_module(infer_state, import_names=None, sys_path=None): return module -def _load_module_from_path(infer_state, file_io, base_names): +def _load_module_from_path(inference_state, file_io, base_names): """ This should pretty much only be used for get_modules_containing_name. It's here to ensure that a random path is still properly loaded into the Jedi module structure. 
""" - e_sys_path = infer_state.get_sys_path() + e_sys_path = inference_state.get_sys_path() path = file_io.path if base_names: module_name = os.path.basename(path) @@ -503,16 +503,16 @@ def _load_module_from_path(infer_state, file_io, base_names): import_names, is_package = sys_path.transform_path_to_dotted(e_sys_path, path) module = _load_python_module( - infer_state, file_io, + inference_state, file_io, sys_path=e_sys_path, import_names=import_names, is_package=is_package, ) - infer_state.module_cache.add(import_names, ValueSet([module])) + inference_state.module_cache.add(import_names, ValueSet([module])) return module -def get_modules_containing_name(infer_state, modules, name): +def get_modules_containing_name(inference_state, modules, name): """ Search a name in the directories of modules. """ @@ -530,7 +530,7 @@ def get_modules_containing_name(infer_state, modules, name): if name not in code: return None new_file_io = KnownContentFileIO(file_io.path, code) - m = _load_module_from_path(infer_state, new_file_io, base_names) + m = _load_module_from_path(inference_state, new_file_io, base_names) if isinstance(m, compiled.CompiledObject): return None return m diff --git a/jedi/inference/names.py b/jedi/inference/names.py index 87fbec74..dd55f6e7 100644 --- a/jedi/inference/names.py +++ b/jedi/inference/names.py @@ -66,10 +66,10 @@ class AbstractArbitraryName(AbstractNameDefinition): """ is_value_name = False - def __init__(self, infer_state, string): - self.infer_state = infer_state + def __init__(self, inference_state, string): + self.inference_state = inference_state self.string_name = string - self.parent_context = infer_state.builtins_module + self.parent_context = inference_state.builtins_module def infer(self): return NO_VALUES @@ -103,7 +103,7 @@ class AbstractTreeName(AbstractNameDefinition): return parent_names + (self.tree_name.value,) def goto(self, **kwargs): - return self.parent_context.infer_state.goto(self.parent_context, self.tree_name, **kwargs) + return self.parent_context.inference_state.goto(self.parent_context, self.tree_name, **kwargs) def is_import(self): imp = search_ancestor(self.tree_name, 'import_from', 'import_name') @@ -157,7 +157,7 @@ class TreeNameDefinition(AbstractTreeName): # Refactor this, should probably be here. 
from jedi.inference.syntax_tree import tree_name_to_values parent = self.parent_context - return tree_name_to_values(parent.infer_state, parent, self.tree_name) + return tree_name_to_values(parent.inference_state, parent, self.tree_name) @property def api_type(self): @@ -346,7 +346,7 @@ class ImportName(AbstractNameDefinition): def infer(self): from jedi.inference.imports import Importer m = self._from_module_value - return Importer(m.infer_state, [self.string_name], m, level=self._level).follow() + return Importer(m.inference_state, [self.string_name], m, level=self._level).follow() def goto(self): return [m.name for m in self.infer()] diff --git a/jedi/inference/param.py b/jedi/inference/param.py index 11ecedc1..e7d1e1a6 100644 --- a/jedi/inference/param.py +++ b/jedi/inference/param.py @@ -145,13 +145,13 @@ def get_executed_params_and_issues(execution_value, arguments): var_arg_iterator.push_back((key, argument)) break lazy_value_list.append(argument) - seq = iterable.FakeSequence(execution_value.infer_state, u'tuple', lazy_value_list) + seq = iterable.FakeSequence(execution_value.inference_state, u'tuple', lazy_value_list) result_arg = LazyKnownValue(seq) elif param.star_count == 2: if argument is not None: too_many_args(argument) # **kwargs param - dct = iterable.FakeDict(execution_value.infer_state, dict(non_matching_keys)) + dct = iterable.FakeDict(execution_value.inference_state, dict(non_matching_keys)) result_arg = LazyKnownValue(dct) non_matching_keys = {} else: @@ -235,11 +235,11 @@ def _error_argument_count(funcdef, actual_count): def _create_default_param(execution_value, param): if param.star_count == 1: result_arg = LazyKnownValue( - iterable.FakeSequence(execution_value.infer_state, u'tuple', []) + iterable.FakeSequence(execution_value.inference_state, u'tuple', []) ) elif param.star_count == 2: result_arg = LazyKnownValue( - iterable.FakeDict(execution_value.infer_state, {}) + iterable.FakeDict(execution_value.inference_state, {}) ) elif param.default is None: result_arg = LazyUnknownValue() diff --git a/jedi/inference/parser_cache.py b/jedi/inference/parser_cache.py index 95116d40..c9b9b2bd 100644 --- a/jedi/inference/parser_cache.py +++ b/jedi/inference/parser_cache.py @@ -1,6 +1,6 @@ -from jedi.inference.cache import infer_state_function_cache +from jedi.inference.cache import inference_state_function_cache -@infer_state_function_cache() -def get_yield_exprs(infer_state, funcdef): +@inference_state_function_cache() +def get_yield_exprs(inference_state, funcdef): return list(funcdef.iter_yield_exprs()) diff --git a/jedi/inference/recursion.py b/jedi/inference/recursion.py index 1d40dfd8..97cbcfda 100644 --- a/jedi/inference/recursion.py +++ b/jedi/inference/recursion.py @@ -56,12 +56,12 @@ class RecursionDetector(object): @contextmanager -def execution_allowed(infer_state, node): +def execution_allowed(inference_state, node): """ A decorator to detect recursions in statements. In a recursion a statement at the same place, in the same module may not be executed two times. 
""" - pushed_nodes = infer_state.recursion_detector.pushed_nodes + pushed_nodes = inference_state.recursion_detector.pushed_nodes if node in pushed_nodes: debug.warning('catched stmt recursion: %s @%s', node, @@ -78,7 +78,7 @@ def execution_allowed(infer_state, node): def execution_recursion_decorator(default=NO_VALUES): def decorator(func): def wrapper(self, **kwargs): - detector = self.infer_state.execution_recursion_detector + detector = self.inference_state.execution_recursion_detector limit_reached = detector.push_execution(self) try: if limit_reached: @@ -96,8 +96,8 @@ class ExecutionRecursionDetector(object): """ Catches recursions of executions. """ - def __init__(self, infer_state): - self._infer_state = infer_state + def __init__(self, inference_state): + self._inference_state = inference_state self._recursion_level = 0 self._parent_execution_funcs = [] @@ -117,7 +117,7 @@ class ExecutionRecursionDetector(object): module = execution.get_root_value() - if module == self._infer_state.builtins_module: + if module == self._inference_state.builtins_module: # We have control over builtins so we know they are not recursing # like crazy. Therefore we just let them execute always, because # they usually just help a lot with getting good results. diff --git a/jedi/inference/star_args.py b/jedi/inference/star_args.py index 4fcd8049..3ee70787 100644 --- a/jedi/inference/star_args.py +++ b/jedi/inference/star_args.py @@ -40,7 +40,7 @@ def _iter_nodes_for_param(param_name): values = _to_callables(value, trailer) args = TreeArguments.create_cached( - execution_value.infer_state, + execution_value.inference_state, value=value, argument_node=trailer.children[1], trailer=trailer, diff --git a/jedi/inference/syntax_tree.py b/jedi/inference/syntax_tree.py index 97e52567..3d2efd9c 100644 --- a/jedi/inference/syntax_tree.py +++ b/jedi/inference/syntax_tree.py @@ -23,7 +23,7 @@ from jedi.inference.value import TreeInstance from jedi.inference.finder import NameFinder from jedi.inference.helpers import is_string, is_literal, is_number from jedi.inference.compiled.access import COMPARISON_OPERATORS -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.gradual.stub_value import VersionInfo from jedi.inference.gradual import annotation from jedi.inference.value.decorator import Decoratee @@ -41,14 +41,14 @@ def _limit_value_infers(func): """ def wrapper(value, *args, **kwargs): n = value.tree_node - infer_state = value.infer_state + inference_state = value.inference_state try: - infer_state.inferred_element_counts[n] += 1 - if infer_state.inferred_element_counts[n] > 300: + inference_state.inferred_element_counts[n] += 1 + if inference_state.inferred_element_counts[n] > 300: debug.warning('In value %s there were too many inferences.', n) return NO_VALUES except KeyError: - infer_state.inferred_element_counts[n] = 1 + inference_state.inferred_element_counts[n] = 1 return func(value, *args, **kwargs) return wrapper @@ -70,7 +70,7 @@ def _py__stop_iteration_returns(generators): @_limit_value_infers def infer_node(value, element): debug.dbg('infer_node %s@%s in %s', element, element.start_pos, value) - infer_state = value.infer_state + inference_state = value.inference_state typ = element.type if typ in ('name', 'number', 'string', 'atom', 'strings', 'keyword', 'fstring'): return infer_atom(value, element) @@ -91,7 +91,7 @@ def infer_node(value, element): if trailer == '**': # has a power operation. 
right = value.infer_node(children[i + 1]) value_set = _infer_comparison( - infer_state, + inference_state, value, value_set, trailer, @@ -105,7 +105,7 @@ def infer_node(value, element): return value_set elif typ in ('testlist_star_expr', 'testlist',): # The implicit tuple in statements. - return ValueSet([iterable.SequenceLiteralValue(infer_state, value, element)]) + return ValueSet([iterable.SequenceLiteralValue(inference_state, value, element)]) elif typ in ('not_test', 'factor'): value_set = value.infer_node(element.children[-1]) for operator in element.children[:-1]: @@ -122,7 +122,7 @@ def infer_node(value, element): if element.value not in ('.', '...'): origin = element.parent raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin)) - return ValueSet([compiled.builtin_from_name(infer_state, u'Ellipsis')]) + return ValueSet([compiled.builtin_from_name(inference_state, u'Ellipsis')]) elif typ == 'dotted_name': value_set = infer_atom(value, element.children[0]) for next_name in element.children[2::2]: @@ -158,7 +158,7 @@ def infer_trailer(value, atom_values, trailer): if trailer_op == '[': trailer_op, node, _ = trailer.children return atom_values.get_item( - infer_subscript_list(value.infer_state, value, node), + infer_subscript_list(value.inference_state, value, node), ValueualizedNode(value, trailer) ) else: @@ -170,7 +170,7 @@ def infer_trailer(value, atom_values, trailer): ) else: assert trailer_op == '(', 'trailer_op is actually %s' % trailer_op - args = arguments.TreeArguments(value.infer_state, value, node, trailer) + args = arguments.TreeArguments(value.inference_state, value, node, trailer) return atom_values.execute(args) @@ -183,7 +183,7 @@ def infer_atom(value, atom): if atom.type == 'name': if atom.value in ('True', 'False', 'None'): # Python 2... - return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)]) + return ValueSet([compiled.builtin_from_name(value.inference_state, atom.value)]) # This is the first global lookup. stmt = tree.search_ancestor( @@ -207,7 +207,7 @@ def infer_atom(value, atom): elif atom.type == 'keyword': # For False/True/None if atom.value in ('False', 'True', 'None'): - return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)]) + return ValueSet([compiled.builtin_from_name(value.inference_state, atom.value)]) elif atom.value == 'print': # print e.g. could be inferred like this in Python 2.7 return NO_VALUES @@ -218,17 +218,17 @@ def infer_atom(value, atom): assert False, 'Cannot infer the keyword %s' % atom elif isinstance(atom, tree.Literal): - string = value.infer_state.compiled_subprocess.safe_literal_eval(atom.value) - return ValueSet([compiled.create_simple_object(value.infer_state, string)]) + string = value.inference_state.compiled_subprocess.safe_literal_eval(atom.value) + return ValueSet([compiled.create_simple_object(value.inference_state, string)]) elif atom.type == 'strings': # Will be multiple string. value_set = infer_atom(value, atom.children[0]) for string in atom.children[1:]: right = infer_atom(value, string) - value_set = _infer_comparison(value.infer_state, value, value_set, u'+', right) + value_set = _infer_comparison(value.inference_state, value, value_set, u'+', right) return value_set elif atom.type == 'fstring': - return compiled.get_string_value_set(value.infer_state) + return compiled.get_string_value_set(value.inference_state) else: c = atom.children # Parentheses without commas are not tuples. 
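        # e.g. ``(1)`` just infers the inner expression, while ``(1,)`` and
        # ``(1, 2)`` are handled further down as tuple literals.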
@@ -251,7 +251,7 @@ def infer_atom(value, atom): if comp_for.type in ('comp_for', 'sync_comp_for'): return ValueSet([iterable.comprehension_from_atom( - value.infer_state, value, atom + value.inference_state, value, atom )]) # It's a dict/list/tuple literal. @@ -262,19 +262,19 @@ def infer_atom(value, atom): array_node_c = [] if c[0] == '{' and (array_node == '}' or ':' in array_node_c or '**' in array_node_c): - new_value = iterable.DictLiteralValue(value.infer_state, value, atom) + new_value = iterable.DictLiteralValue(value.inference_state, value, atom) else: - new_value = iterable.SequenceLiteralValue(value.infer_state, value, atom) + new_value = iterable.SequenceLiteralValue(value.inference_state, value, atom) return ValueSet([new_value]) @_limit_value_infers def infer_expr_stmt(value, stmt, seek_name=None): - with recursion.execution_allowed(value.infer_state, stmt) as allowed: + with recursion.execution_allowed(value.inference_state, stmt) as allowed: # Here we allow list/set to recurse under certain conditions. To make # it possible to resolve stuff like list(set(list(x))), this is # necessary. - if not allowed and value.get_root_value() == value.infer_state.builtins_module: + if not allowed and value.get_root_value() == value.inference_state.builtins_module: try: instance = value.var_args.instance except AttributeError: @@ -306,7 +306,7 @@ def _infer_expr_stmt(value, stmt, seek_name=None): if seek_name: c_node = ValueualizedName(value, seek_name) - value_set = check_tuple_assignments(value.infer_state, c_node, value_set) + value_set = check_tuple_assignments(value.inference_state, c_node, value_set) first_operator = next(stmt.yield_operators(), None) if first_operator not in ('=', None) and first_operator.type == 'operator': @@ -331,10 +331,10 @@ def _infer_expr_stmt(value, stmt, seek_name=None): dct = {for_stmt.children[1].value: lazy_value.infer()} with helpers.predefine_names(value, for_stmt, dct): t = value.infer_node(rhs) - left = _infer_comparison(value.infer_state, value, left, operator, t) + left = _infer_comparison(value.inference_state, value, left, operator, t) value_set = left else: - value_set = _infer_comparison(value.infer_state, value, left, operator, value_set) + value_set = _infer_comparison(value.inference_state, value, left, operator, value_set) debug.dbg('infer_expr_stmt result %s', value_set) return value_set @@ -358,7 +358,7 @@ def infer_or_test(value, or_test): types = value.infer_node(right) # Otherwise continue, because of uncertainty. else: - types = _infer_comparison(value.infer_state, value, types, operator, + types = _infer_comparison(value.inference_state, value, types, operator, value.infer_node(right)) debug.dbg('infer_or_test types %s', types) return types @@ -377,12 +377,12 @@ def infer_factor(value_set, operator): b = value.py__bool__() if b is None: # Uncertainty. return - yield compiled.create_simple_object(value.infer_state, not b) + yield compiled.create_simple_object(value.inference_state, not b) else: yield value -def _literals_to_types(infer_state, result): +def _literals_to_types(inference_state, result): # Changes literals ('a', 1, 1.0, etc) to its type instances (str(), # int(), float(), etc). new_result = NO_VALUES @@ -390,27 +390,27 @@ def _literals_to_types(infer_state, result): if is_literal(typ): # Literals are only valid as long as the operations are # correct. Otherwise add a value-free instance. 
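            # (e.g. the literal ``1`` is replaced by an ``int()`` instance,
            # ``'a'`` by a ``str()`` instance)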
- cls = compiled.builtin_from_name(infer_state, typ.name.string_name) + cls = compiled.builtin_from_name(inference_state, typ.name.string_name) new_result |= cls.execute_with_values() else: new_result |= ValueSet([typ]) return new_result -def _infer_comparison(infer_state, value, left_values, operator, right_values): +def _infer_comparison(inference_state, value, left_values, operator, right_values): if not left_values or not right_values: # illegal slices e.g. cause left/right_result to be None result = (left_values or NO_VALUES) | (right_values or NO_VALUES) - return _literals_to_types(infer_state, result) + return _literals_to_types(inference_state, result) else: # I don't think there's a reasonable chance that a string # operation is still correct, once we pass something like six # objects. if len(left_values) * len(right_values) > 6: - return _literals_to_types(infer_state, left_values | right_values) + return _literals_to_types(inference_state, left_values | right_values) else: return ValueSet.from_sets( - _infer_comparison_part(infer_state, value, left, operator, right) + _infer_comparison_part(inference_state, value, left, operator, right) for left in left_values for right in right_values ) @@ -440,8 +440,8 @@ def _is_list(value): return isinstance(value, iterable.Sequence) and value.array_type == 'list' -def _bool_to_value(infer_state, bool_): - return compiled.builtin_from_name(infer_state, force_unicode(str(bool_))) +def _bool_to_value(inference_state, bool_): + return compiled.builtin_from_name(inference_state, force_unicode(str(bool_))) def _get_tuple_ints(value): @@ -461,7 +461,7 @@ def _get_tuple_ints(value): return numbers -def _infer_comparison_part(infer_state, value, left, operator, right): +def _infer_comparison_part(inference_state, value, left, operator, right): l_is_num = is_number(left) r_is_num = is_number(right) if isinstance(operator, unicode): @@ -479,7 +479,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right): if l_is_num and r_is_num or is_string(left) and is_string(right): return ValueSet([left.execute_operation(right, str_operator)]) elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right): - return ValueSet([iterable.MergedArray(infer_state, (left, right))]) + return ValueSet([iterable.MergedArray(inference_state, (left, right))]) elif str_operator == '-': if l_is_num and r_is_num: return ValueSet([left.execute_operation(right, str_operator)]) @@ -499,18 +499,18 @@ def _infer_comparison_part(infer_state, value, left, operator, right): if str_operator in ('is', '!=', '==', 'is not'): operation = COMPARISON_OPERATORS[str_operator] bool_ = operation(left, right) - return ValueSet([_bool_to_value(infer_state, bool_)]) + return ValueSet([_bool_to_value(inference_state, bool_)]) if isinstance(left, VersionInfo): version_info = _get_tuple_ints(right) if version_info is not None: bool_result = compiled.access.COMPARISON_OPERATORS[operator]( - infer_state.environment.version_info, + inference_state.environment.version_info, tuple(version_info) ) - return ValueSet([_bool_to_value(infer_state, bool_result)]) + return ValueSet([_bool_to_value(inference_state, bool_result)]) - return ValueSet([_bool_to_value(infer_state, True), _bool_to_value(infer_state, False)]) + return ValueSet([_bool_to_value(inference_state, True), _bool_to_value(inference_state, False)]) elif str_operator == 'in': return NO_VALUES @@ -531,7 +531,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right): return result -def 
_remove_statements(infer_state, value, stmt, name): +def _remove_statements(inference_state, value, stmt, name): """ This is the part where statements are being stripped. @@ -547,7 +547,7 @@ def _remove_statements(infer_state, value, stmt, name): @plugin_manager.decorate() -def tree_name_to_values(infer_state, value, tree_name): +def tree_name_to_values(inference_state, value, tree_name): value_set = NO_VALUES module_node = value.get_root_value().tree_node # First check for annotations, like: `foo: int = 3` @@ -570,15 +570,15 @@ def tree_name_to_values(infer_state, value, tree_name): if node is None: node = tree_name.parent if node.type == 'global_stmt': - value = infer_state.create_value(value, tree_name) - finder = NameFinder(infer_state, value, value, tree_name.value) + value = inference_state.create_value(value, tree_name) + finder = NameFinder(inference_state, value, value, tree_name.value) filters = finder.get_global_filters() # For global_stmt lookups, we only need the first possible scope, # which means the function itself. filters = [next(filters)] return finder.find(filters, attribute_lookup=False) elif node.type not in ('import_from', 'import_name'): - value = infer_state.create_value(value, tree_name) + value = inference_state.create_value(value, tree_name) return infer_atom(value, tree_name) typ = node.type @@ -602,9 +602,9 @@ def tree_name_to_values(infer_state, value, tree_name): is_async=node.parent.type == 'async_stmt', ) c_node = ValueualizedName(value, tree_name) - types = check_tuple_assignments(infer_state, c_node, for_types) + types = check_tuple_assignments(inference_state, c_node, for_types) elif typ == 'expr_stmt': - types = _remove_statements(infer_state, value, node, tree_name) + types = _remove_statements(inference_state, value, node, tree_name) elif typ == 'with_stmt': value_managers = value.infer_node(node.get_test_node_from_name(tree_name)) enter_methods = value_managers.py__getattribute__(u'__enter__') @@ -628,7 +628,7 @@ def tree_name_to_values(infer_state, value, tree_name): # We don't want to have functions/classes that are created by the same # tree_node. -@infer_state_method_cache() +@inference_state_method_cache() def _apply_decorators(value, node): """ Returns the function, that should to be executed in the end. @@ -636,7 +636,7 @@ def _apply_decorators(value, node): """ if node.type == 'classdef': decoratee_value = ClassValue( - value.infer_state, + value.inference_state, parent_context=value, tree_node=node ) @@ -674,7 +674,7 @@ def _apply_decorators(value, node): return values -def check_tuple_assignments(infer_state, valueualized_name, value_set): +def check_tuple_assignments(inference_state, valueualized_name, value_set): """ Checks if tuples are assigned. """ @@ -698,7 +698,7 @@ def check_tuple_assignments(infer_state, valueualized_name, value_set): return value_set -def infer_subscript_list(infer_state, value, index): +def infer_subscript_list(inference_state, value, index): """ Handles slices in subscript nodes. 
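    Roughly: ``x[1:3]`` yields a ``Slice``; a ``subscriptlist`` such as
    ``x[a, b]`` is treated like the implicit tuple ``(a, b)``; a plain index
    ``x[i]`` simply infers ``i`` (see the branches below).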
""" @@ -724,7 +724,7 @@ def infer_subscript_list(infer_state, value, index): return ValueSet([iterable.Slice(value, *result)]) elif index.type == 'subscriptlist': - return ValueSet([iterable.SequenceLiteralValue(infer_state, value, index)]) + return ValueSet([iterable.SequenceLiteralValue(inference_state, value, index)]) # No slices return value.infer_node(index) diff --git a/jedi/inference/sys_path.py b/jedi/inference/sys_path.py index e1729655..35b393e5 100644 --- a/jedi/inference/sys_path.py +++ b/jedi/inference/sys_path.py @@ -1,7 +1,7 @@ import os from jedi._compatibility import unicode, force_unicode, all_suffixes -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.base_value import ValueualizedNode from jedi.inference.helpers import is_string from jedi.common.utils import traverse_parents @@ -92,7 +92,7 @@ def _paths_from_list_modifications(module_value, trailer1, trailer2): yield abs_path -@infer_state_method_cache(default=[]) +@inference_state_method_cache(default=[]) def check_sys_path_modifications(module_value): """ Detect sys.path modifications within module. @@ -130,20 +130,20 @@ def check_sys_path_modifications(module_value): return added -def discover_buildout_paths(infer_state, script_path): +def discover_buildout_paths(inference_state, script_path): buildout_script_paths = set() for buildout_script_path in _get_buildout_script_paths(script_path): - for path in _get_paths_from_buildout_script(infer_state, buildout_script_path): + for path in _get_paths_from_buildout_script(inference_state, buildout_script_path): buildout_script_paths.add(path) return buildout_script_paths -def _get_paths_from_buildout_script(infer_state, buildout_script_path): +def _get_paths_from_buildout_script(inference_state, buildout_script_path): file_io = FileIO(buildout_script_path) try: - module_node = infer_state.parse( + module_node = inference_state.parse( file_io=file_io, cache=True, cache_path=settings.cache_directory @@ -154,9 +154,9 @@ def _get_paths_from_buildout_script(infer_state, buildout_script_path): from jedi.inference.value import ModuleValue module = ModuleValue( - infer_state, module_node, file_io, + inference_state, module_node, file_io, string_names=None, - code_lines=get_cached_code_lines(infer_state.grammar, buildout_script_path), + code_lines=get_cached_code_lines(inference_state.grammar, buildout_script_path), ) for path in check_sys_path_modifications(module): yield path diff --git a/jedi/inference/usages.py b/jedi/inference/usages.py index 932e6d56..91891c41 100644 --- a/jedi/inference/usages.py +++ b/jedi/inference/usages.py @@ -41,7 +41,7 @@ def usages(module_value, tree_name): modules = set(m for m in modules if m.is_module() and not m.is_compiled()) non_matching_usage_maps = {} - for m in imports.get_modules_containing_name(module_value.infer_state, modules, search_name): + for m in imports.get_modules_containing_name(module_value.inference_state, modules, search_name): for name_leaf in m.tree_node.get_used_names().get(search_name, []): new = _find_names(m, name_leaf) if any(tree_name in found_names for tree_name in new): diff --git a/jedi/inference/value/function.py b/jedi/inference/value/function.py index bd998e8e..567114a0 100644 --- a/jedi/inference/value/function.py +++ b/jedi/inference/value/function.py @@ -2,7 +2,7 @@ from parso.python import tree from jedi._compatibility import use_metaclass from jedi import debug -from jedi.inference.cache import infer_state_method_cache, 
CachedMetaClass +from jedi.inference.cache import inference_state_method_cache, CachedMetaClass from jedi.inference import compiled from jedi.inference import recursion from jedi.inference import docstrings @@ -97,7 +97,7 @@ class FunctionMixin(object): if arguments is None: arguments = AnonymousArguments() - return FunctionExecutionValue(self.infer_state, self.parent_context, self, arguments) + return FunctionExecutionValue(self.inference_state, self.parent_context, self, arguments) def get_signatures(self): return [TreeSignature(f) for f in self.get_signature_functions()] @@ -112,14 +112,14 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla def create(tree_node): if value.is_class(): return MethodValue( - value.infer_state, + value.inference_state, value, parent_context=parent_context, tree_node=tree_node ) else: return cls( - value.infer_state, + value.inference_state, parent_context=parent_context, tree_node=tree_node ) @@ -140,7 +140,7 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla return function def py__class__(self): - c, = values_from_qualified_names(self.infer_state, u'types', u'FunctionType') + c, = values_from_qualified_names(self.inference_state, u'types', u'FunctionType') return c def get_default_param_value(self): @@ -151,8 +151,8 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla class MethodValue(FunctionValue): - def __init__(self, infer_state, class_value, *args, **kwargs): - super(MethodValue, self).__init__(infer_state, *args, **kwargs) + def __init__(self, inference_state, class_value, *args, **kwargs): + super(MethodValue, self).__init__(inference_state, *args, **kwargs) self.class_value = class_value def get_default_param_value(self): @@ -170,16 +170,16 @@ class MethodValue(FunctionValue): class FunctionExecutionValue(TreeValue): function_execution_filter = FunctionExecutionFilter - def __init__(self, infer_state, parent_context, function_value, var_args): + def __init__(self, inference_state, parent_context, function_value, var_args): super(FunctionExecutionValue, self).__init__( - infer_state, + inference_state, parent_context, function_value.tree_node, ) self.function_value = function_value self.var_args = var_args - @infer_state_method_cache(default=NO_VALUES) + @inference_state_method_cache(default=NO_VALUES) @recursion.execution_recursion_decorator() def get_return_values(self, check_yields=False): funcdef = self.tree_node @@ -188,7 +188,7 @@ class FunctionExecutionValue(TreeValue): if check_yields: value_set = NO_VALUES - returns = get_yield_exprs(self.infer_state, funcdef) + returns = get_yield_exprs(self.inference_state, funcdef) else: returns = funcdef.iter_return_stmts() from jedi.inference.gradual.annotation import infer_return_types @@ -213,7 +213,7 @@ class FunctionExecutionValue(TreeValue): try: children = r.children except AttributeError: - ctx = compiled.builtin_from_name(self.infer_state, u'None') + ctx = compiled.builtin_from_name(self.inference_state, u'None') value_set |= ValueSet([ctx]) else: value_set |= self.infer_node(children[1]) @@ -225,7 +225,7 @@ class FunctionExecutionValue(TreeValue): def _get_yield_lazy_value(self, yield_expr): if yield_expr.type == 'keyword': # `yield` just yields None. 
- ctx = compiled.builtin_from_name(self.infer_state, u'None') + ctx = compiled.builtin_from_name(self.inference_state, u'None') yield LazyKnownValue(ctx) return @@ -242,7 +242,7 @@ class FunctionExecutionValue(TreeValue): # TODO: if is_async, wrap yield statements in Awaitable/async_generator_asend for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef', 'while_stmt', 'if_stmt')) - for y in get_yield_exprs(self.infer_state, self.tree_node)] + for y in get_yield_exprs(self.inference_state, self.tree_node)] # Calculate if the yields are placed within the same for loop. yields_order = [] @@ -297,7 +297,7 @@ class FunctionExecutionValue(TreeValue): until_position=until_position, origin_scope=origin_scope) - @infer_state_method_cache() + @inference_state_method_cache() def get_executed_params_and_issues(self): return self.var_args.get_executed_params_and_issues(self) @@ -322,16 +322,16 @@ class FunctionExecutionValue(TreeValue): """ Created to be used by inheritance. """ - infer_state = self.infer_state + inference_state = self.inference_state is_coroutine = self.tree_node.parent.type in ('async_stmt', 'async_funcdef') - is_generator = bool(get_yield_exprs(infer_state, self.tree_node)) + is_generator = bool(get_yield_exprs(inference_state, self.tree_node)) from jedi.inference.gradual.typing import GenericClass if is_coroutine: if is_generator: - if infer_state.environment.version_info < (3, 6): + if inference_state.environment.version_info < (3, 6): return NO_VALUES - async_generator_classes = infer_state.typing_module \ + async_generator_classes = inference_state.typing_module \ .py__getattribute__('AsyncGenerator') yield_values = self.merge_yield_values(is_async=True) @@ -343,9 +343,9 @@ class FunctionExecutionValue(TreeValue): for c in async_generator_classes ).execute_annotation() else: - if infer_state.environment.version_info < (3, 5): + if inference_state.environment.version_info < (3, 5): return NO_VALUES - async_classes = infer_state.typing_module.py__getattribute__('Coroutine') + async_classes = inference_state.typing_module.py__getattribute__('Coroutine') return_values = self.get_return_values() # Only the first generic is relevant. generics = (return_values.py__class__(), NO_VALUES, NO_VALUES) @@ -354,7 +354,7 @@ class FunctionExecutionValue(TreeValue): ).execute_annotation() else: if is_generator: - return ValueSet([iterable.Generator(infer_state, self)]) + return ValueSet([iterable.Generator(inference_state, self)]) else: return self.get_return_values() @@ -379,7 +379,7 @@ class OverloadedFunctionValue(FunctionMixin, ValueWrapper): if matched: return value_set - if self.infer_state.is_analysis: + if self.inference_state.is_analysis: # In this case we want precision. 
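            # (reached only when no overload matched; during analysis, report
            # nothing rather than merging every overload's return values)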
return NO_VALUES return ValueSet.from_sets(fe.infer() for fe in function_executions) @@ -411,7 +411,7 @@ def _find_overload_functions(value, tree_node): while True: filter = ParserTreeFilter( - value.infer_state, + value.inference_state, value, until_position=tree_node.start_pos ) diff --git a/jedi/inference/value/instance.py b/jedi/inference/value/instance.py index 3dea7708..78339710 100644 --- a/jedi/inference/value/instance.py +++ b/jedi/inference/value/instance.py @@ -10,7 +10,7 @@ from jedi.inference.names import ValueName, TreeNameDefinition from jedi.inference.base_value import Value, NO_VALUES, ValueSet, \ iterator_to_value_set, ValueWrapper from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.arguments import AnonymousArguments, \ ValuesArguments, TreeArgumentsWrapper from jedi.inference.value.function import \ @@ -50,7 +50,7 @@ class AnonymousInstanceArguments(AnonymousArguments): # executions of this function, we have all the params already. return [self_param], [] executed_params = list(search_params( - execution_value.infer_state, + execution_value.inference_state, execution_value, execution_value.tree_node )) @@ -61,8 +61,8 @@ class AnonymousInstanceArguments(AnonymousArguments): class AbstractInstanceValue(Value): api_type = u'instance' - def __init__(self, infer_state, parent_context, class_value, var_args): - super(AbstractInstanceValue, self).__init__(infer_state, parent_context) + def __init__(self, inference_state, parent_context, class_value, var_args): + super(AbstractInstanceValue, self).__init__(inference_state, parent_context) # Generated instances are classes that are just generated by self # (No var_args) used. self.class_value = class_value @@ -117,7 +117,7 @@ class AbstractInstanceValue(Value): names = self.get_function_slot_names(u'__get__') if names: if obj is None: - obj = compiled.builtin_from_name(self.infer_state, u'None') + obj = compiled.builtin_from_name(self.inference_state, u'None') return self.execute_function_slots(names, obj, class_value) else: return ValueSet([self]) @@ -168,7 +168,7 @@ class AbstractInstanceValue(Value): for generator in self.execute_function_slots(iter_slot_names): if generator.is_instance() and not generator.is_compiled(): # `__next__` logic. - if self.infer_state.environment.version_info.major == 2: + if self.inference_state.environment.version_info.major == 2: name = u'next' else: name = u'__next__' @@ -199,7 +199,7 @@ class AbstractInstanceValue(Value): bound_method = BoundMethod(self, function) yield bound_method.get_function_execution(self.var_args) - @infer_state_method_cache() + @inference_state_method_cache() def create_instance_value(self, class_value, node): if node.parent.type in ('funcdef', 'classdef'): node = node.parent @@ -219,7 +219,7 @@ class AbstractInstanceValue(Value): else: return bound_method.get_function_execution() elif scope.type == 'classdef': - class_value = ClassValue(self.infer_state, parent_context, scope) + class_value = ClassValue(self.inference_state, parent_context, scope) return class_value elif scope.type in ('comp_for', 'sync_comp_for'): # Comprehensions currently don't have a special scope in Jedi. 
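A plain-Python refresher on the descriptor behaviour that the ``__get__`` slot handling in the hunks above models (illustrative example, not Jedi code; ``Ten`` and ``A`` are made-up names):

    class Ten(object):
        # A descriptor: attribute access on the owning class or its instances
        # goes through __get__.
        def __get__(self, obj, objtype=None):
            return 10

    class A(object):
        x = Ten()

    print(A().x)   # 10 -- instance access, obj is the A() instance
    print(A.x)     # 10 -- class access, obj is None (Jedi passes its 'None'
                   #       builtin in that case, as seen above)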
@@ -238,9 +238,9 @@ class AbstractInstanceValue(Value): class CompiledInstance(AbstractInstanceValue): - def __init__(self, infer_state, parent_context, class_value, var_args): + def __init__(self, inference_state, parent_context, class_value, var_args): self._original_var_args = var_args - super(CompiledInstance, self).__init__(infer_state, parent_context, class_value, var_args) + super(CompiledInstance, self).__init__(inference_state, parent_context, class_value, var_args) @property def name(self): @@ -258,16 +258,16 @@ class CompiledInstance(AbstractInstanceValue): class TreeInstance(AbstractInstanceValue): - def __init__(self, infer_state, parent_context, class_value, var_args): + def __init__(self, inference_state, parent_context, class_value, var_args): # I don't think that dynamic append lookups should happen here. That # sounds more like something that should go to py__iter__. if class_value.py__name__() in ['list', 'set'] \ - and parent_context.get_root_value() == infer_state.builtins_module: + and parent_context.get_root_value() == inference_state.builtins_module: # compare the module path with the builtin name. if settings.dynamic_array_additions: var_args = iterable.get_dynamic_array_instance(self, var_args) - super(TreeInstance, self).__init__(infer_state, parent_context, + super(TreeInstance, self).__init__(inference_state, parent_context, class_value, var_args) self.tree_node = class_value.tree_node @@ -277,7 +277,7 @@ class TreeInstance(AbstractInstanceValue): # This can recurse, if the initialization of the class includes a reference # to itself. - @infer_state_method_cache(default=None) + @inference_state_method_cache(default=None) def _get_annotated_class_object(self): from jedi.inference.gradual.annotation import py__annotations__, \ infer_type_vars_for_execution @@ -313,9 +313,9 @@ class TreeInstance(AbstractInstanceValue): class AnonymousInstance(TreeInstance): - def __init__(self, infer_state, parent_context, class_value): + def __init__(self, inference_state, parent_context, class_value): super(AnonymousInstance, self).__init__( - infer_state, + inference_state, parent_context, class_value, var_args=AnonymousInstanceArguments(self), @@ -327,9 +327,9 @@ class AnonymousInstance(TreeInstance): class CompiledInstanceName(compiled.CompiledName): - def __init__(self, infer_state, instance, klass, name): + def __init__(self, inference_state, instance, klass, name): super(CompiledInstanceName, self).__init__( - infer_state, + inference_state, klass.parent_context, name.string_name ) @@ -361,7 +361,7 @@ class CompiledInstanceClassFilter(AbstractFilter): def _convert(self, names): klass = self._class_filter.compiled_object return [ - CompiledInstanceName(self._instance.infer_state, self._instance, klass, n) + CompiledInstanceName(self._instance.inference_state, self._instance, klass, n) for n in names ] @@ -375,7 +375,7 @@ class BoundMethod(FunctionMixin, ValueWrapper): return True def py__class__(self): - c, = values_from_qualified_names(self.infer_state, u'types', u'MethodType') + c, = values_from_qualified_names(self.inference_state, u'types', u'MethodType') return c def _get_arguments(self, arguments): diff --git a/jedi/inference/value/iterable.py b/jedi/inference/value/iterable.py index 39c8fb16..69e6432f 100644 --- a/jedi/inference/value/iterable.py +++ b/jedi/inference/value/iterable.py @@ -34,7 +34,7 @@ from jedi.inference.helpers import get_int_or_none, is_string, \ predefine_names, infer_call_of_leaf, reraise_getitem_errors, \ SimpleGetItemNotFound from 
jedi.inference.utils import safe_property, to_list -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \ publish_method from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \ @@ -44,7 +44,7 @@ from jedi.parser_utils import get_sync_comp_fors class IterableMixin(object): def py__stop_iteration_returns(self): - return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')]) + return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')]) # At the moment, safe values are simple values like "foo", 1 and not # lists/dicts. Therefore as a small speed optimization we can just do the @@ -66,7 +66,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin): array_type = None def _get_wrapped_value(self): - generator, = self.infer_state.typing_module \ + generator, = self.inference_state.typing_module \ .py__getattribute__('Generator') \ .execute_annotation() return generator @@ -88,7 +88,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin): return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) def py__stop_iteration_returns(self): - return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')]) + return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')]) @property def name(self): @@ -97,8 +97,8 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin): class Generator(GeneratorBase): """Handling of `yield` functions.""" - def __init__(self, infer_state, func_execution_value): - super(Generator, self).__init__(infer_state) + def __init__(self, inference_state, func_execution_value): + super(Generator, self).__init__(inference_state) self._func_execution_value = func_execution_value def py__iter__(self, valueualized_node=None): @@ -114,13 +114,13 @@ class Generator(GeneratorBase): class CompForValue(TreeValue): @classmethod def from_comp_for(cls, parent_context, comp_for): - return cls(parent_context.infer_state, parent_context, comp_for) + return cls(parent_context.inference_state, parent_context, comp_for) def get_filters(self, search_global=False, until_position=None, origin_scope=None): yield ParserTreeFilter(self) -def comprehension_from_atom(infer_state, value, atom): +def comprehension_from_atom(inference_state, value, atom): bracket = atom.children[0] test_list_comp = atom.children[1] @@ -131,7 +131,7 @@ def comprehension_from_atom(infer_state, value, atom): sync_comp_for = sync_comp_for.children[1] return DictComprehension( - infer_state, + inference_state, value, sync_comp_for_node=sync_comp_for, key_node=test_list_comp.children[0], @@ -149,7 +149,7 @@ def comprehension_from_atom(infer_state, value, atom): sync_comp_for = sync_comp_for.children[1] return cls( - infer_state, + inference_state, defining_value=value, sync_comp_for_node=sync_comp_for, entry_node=test_list_comp.children[0], @@ -157,7 +157,7 @@ def comprehension_from_atom(infer_state, value, atom): class ComprehensionMixin(object): - @infer_state_method_cache() + @inference_state_method_cache() def _get_comp_for_value(self, parent_context, comp_for): return CompForValue.from_comp_for(parent_context, comp_for) @@ -192,7 +192,7 @@ class ComprehensionMixin(object): else: yield iterated - @infer_state_method_cache(default=[]) + @inference_state_method_cache(default=[]) @to_list def _iterate(self): comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node)) @@ -224,7 +224,7 @@ 
class Sequence(LazyAttributeOverwrite, IterableMixin): def _get_wrapped_value(self): from jedi.inference.gradual.typing import GenericClass - klass = compiled.builtin_from_name(self.infer_state, self.array_type) + klass = compiled.builtin_from_name(self.inference_state, self.array_type) c, = GenericClass(klass, self._get_generics()).execute_annotation() return c @@ -232,11 +232,11 @@ class Sequence(LazyAttributeOverwrite, IterableMixin): return None # We don't know the length, because of appends. def py__class__(self): - return compiled.builtin_from_name(self.infer_state, self.array_type) + return compiled.builtin_from_name(self.inference_state, self.array_type) @safe_property def parent(self): - return self.infer_state.builtins_module + return self.inference_state.builtins_module def py__getitem__(self, index_value_set, valueualized_node): if self.array_type == 'dict': @@ -245,9 +245,9 @@ class Sequence(LazyAttributeOverwrite, IterableMixin): class _BaseComprehension(ComprehensionMixin): - def __init__(self, infer_state, defining_value, sync_comp_for_node, entry_node): + def __init__(self, inference_state, defining_value, sync_comp_for_node, entry_node): assert sync_comp_for_node.type == 'sync_comp_for' - super(_BaseComprehension, self).__init__(infer_state) + super(_BaseComprehension, self).__init__(inference_state) self._defining_value = defining_value self._sync_comp_for_node = sync_comp_for_node self._entry_node = entry_node @@ -277,9 +277,9 @@ class GeneratorComprehension(_BaseComprehension, GeneratorBase): class DictComprehension(ComprehensionMixin, Sequence): array_type = u'dict' - def __init__(self, infer_state, defining_value, sync_comp_for_node, key_node, value_node): + def __init__(self, inference_state, defining_value, sync_comp_for_node, key_node, value_node): assert sync_comp_for_node.type == 'sync_comp_for' - super(DictComprehension, self).__init__(infer_state) + super(DictComprehension, self).__init__(inference_state) self._defining_value = defining_value self._sync_comp_for_node = sync_comp_for_node self._entry_node = key_node @@ -308,14 +308,14 @@ class DictComprehension(ComprehensionMixin, Sequence): @publish_method('values') def _imitate_values(self): lazy_value = LazyKnownValues(self._dict_values()) - return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) + return ValueSet([FakeSequence(self.inference_state, u'list', [lazy_value])]) @publish_method('items') def _imitate_items(self): lazy_values = [ LazyKnownValue( FakeSequence( - self.infer_state, + self.inference_state, u'tuple', [LazyKnownValues(key), LazyKnownValues(value)] @@ -324,7 +324,7 @@ class DictComprehension(ComprehensionMixin, Sequence): for key, value in self._iterate() ] - return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)]) + return ValueSet([FakeSequence(self.inference_state, u'list', lazy_values)]) def get_mapping_item_values(self): return self._dict_keys(), self._dict_values() @@ -341,8 +341,8 @@ class SequenceLiteralValue(Sequence): '[': u'list', '{': u'set'} - def __init__(self, infer_state, defining_value, atom): - super(SequenceLiteralValue, self).__init__(infer_state) + def __init__(self, inference_state, defining_value, atom): + super(SequenceLiteralValue, self).__init__(inference_state) self.atom = atom self._defining_value = defining_value @@ -355,7 +355,7 @@ class SequenceLiteralValue(Sequence): def py__simple_getitem__(self, index): """Here the index is an int/str. 
Raises IndexError/KeyError.""" if self.array_type == u'dict': - compiled_obj_index = compiled.create_simple_object(self.infer_state, index) + compiled_obj_index = compiled.create_simple_object(self.inference_state, index) for key, value in self.get_tree_entries(): for k in self._defining_value.infer_node(key): try: @@ -471,27 +471,27 @@ class SequenceLiteralValue(Sequence): class DictLiteralValue(_DictMixin, SequenceLiteralValue): array_type = u'dict' - def __init__(self, infer_state, defining_value, atom): - super(SequenceLiteralValue, self).__init__(infer_state) + def __init__(self, inference_state, defining_value, atom): + super(SequenceLiteralValue, self).__init__(inference_state) self._defining_value = defining_value self.atom = atom @publish_method('values') def _imitate_values(self): lazy_value = LazyKnownValues(self._dict_values()) - return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) + return ValueSet([FakeSequence(self.inference_state, u'list', [lazy_value])]) @publish_method('items') def _imitate_items(self): lazy_values = [ LazyKnownValue(FakeSequence( - self.infer_state, u'tuple', + self.inference_state, u'tuple', (LazyTreeValue(self._defining_value, key_node), LazyTreeValue(self._defining_value, value_node)) )) for key_node, value_node in self.get_tree_entries() ] - return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)]) + return ValueSet([FakeSequence(self.inference_state, u'list', lazy_values)]) def _dict_keys(self): return ValueSet.from_sets( @@ -504,19 +504,19 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue): class _FakeArray(SequenceLiteralValue): - def __init__(self, infer_state, container, type): - super(SequenceLiteralValue, self).__init__(infer_state) + def __init__(self, inference_state, container, type): + super(SequenceLiteralValue, self).__init__(inference_state) self.array_type = type self.atom = container # TODO is this class really needed? class FakeSequence(_FakeArray): - def __init__(self, infer_state, array_type, lazy_value_list): + def __init__(self, inference_state, array_type, lazy_value_list): """ type should be one of "tuple", "list" """ - super(FakeSequence, self).__init__(infer_state, None, array_type) + super(FakeSequence, self).__init__(inference_state, None, array_type) self._lazy_value_list = lazy_value_list def py__simple_getitem__(self, index): @@ -538,16 +538,16 @@ class FakeSequence(_FakeArray): class FakeDict(_DictMixin, _FakeArray): - def __init__(self, infer_state, dct): - super(FakeDict, self).__init__(infer_state, dct, u'dict') + def __init__(self, inference_state, dct): + super(FakeDict, self).__init__(inference_state, dct, u'dict') self._dct = dct def py__iter__(self, valueualized_node=None): for key in self._dct: - yield LazyKnownValue(compiled.create_simple_object(self.infer_state, key)) + yield LazyKnownValue(compiled.create_simple_object(self.inference_state, key)) def py__simple_getitem__(self, index): - if is_py3 and self.infer_state.environment.version_info.major == 2: + if is_py3 and self.inference_state.environment.version_info.major == 2: # In Python 2 bytes and unicode compare. 
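            # (e.g. a lookup with b'foo' should also hit an entry stored under
            # u'foo', so the index is normalized before the key comparison)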
if isinstance(index, bytes): index_unicode = force_unicode(index) @@ -569,7 +569,7 @@ class FakeDict(_DictMixin, _FakeArray): @publish_method('values') def _values(self): return ValueSet([FakeSequence( - self.infer_state, u'tuple', + self.inference_state, u'tuple', [LazyKnownValues(self._dict_values())] )]) @@ -587,8 +587,8 @@ class FakeDict(_DictMixin, _FakeArray): class MergedArray(_FakeArray): - def __init__(self, infer_state, arrays): - super(MergedArray, self).__init__(infer_state, arrays, arrays[-1].array_type) + def __init__(self, inference_state, arrays): + super(MergedArray, self).__init__(inference_state, arrays, arrays[-1].array_type) self._arrays = arrays def py__iter__(self, valueualized_node=None): @@ -657,7 +657,7 @@ def check_array_additions(value, sequence): return _check_array_additions(value, sequence) -@infer_state_method_cache(default=NO_VALUES) +@inference_state_method_cache(default=NO_VALUES) @debug.increase_indent def _check_array_additions(value, sequence): """ @@ -675,7 +675,7 @@ def _check_array_additions(value, sequence): return NO_VALUES def find_additions(value, arglist, add_name): - params = list(arguments.TreeArguments(value.infer_state, value, arglist).unpack()) + params = list(arguments.TreeArguments(value.inference_state, value, arglist).unpack()) result = set() if add_name in ['insert']: params = params[1:] @@ -719,7 +719,7 @@ def _check_array_additions(value, sequence): random_value = value.create_value(name) - with recursion.execution_allowed(value.infer_state, power) as allowed: + with recursion.execution_allowed(value.inference_state, power) as allowed: if allowed: found = infer_call_of_leaf( random_value, @@ -758,7 +758,7 @@ class _ArrayInstance(HelperValueMixin): self.var_args = var_args def py__class__(self): - tuple_, = self.instance.infer_state.builtins_module.py__getattribute__('tuple') + tuple_, = self.instance.inference_state.builtins_module.py__getattribute__('tuple') return tuple_ def py__iter__(self, valueualized_node=None): @@ -792,7 +792,7 @@ class Slice(object): def __getattr__(self, name): if self._slice_object is None: - value = compiled.builtin_from_name(self._value.infer_state, 'slice') + value = compiled.builtin_from_name(self._value.inference_state, 'slice') self._slice_object, = value.execute_with_values() return getattr(self._slice_object, name) diff --git a/jedi/inference/value/klass.py b/jedi/inference/value/klass.py index 0a966ec6..067fc42d 100644 --- a/jedi/inference/value/klass.py +++ b/jedi/inference/value/klass.py @@ -39,8 +39,8 @@ py__doc__() Returns the docstring for a value. from jedi import debug from jedi._compatibility import use_metaclass from jedi.parser_utils import get_cached_parent_scope -from jedi.inference.cache import infer_state_method_cache, CachedMetaClass, \ - infer_state_method_generator_cache +from jedi.inference.cache import inference_state_method_cache, CachedMetaClass, \ + inference_state_method_generator_cache from jedi.inference import compiled from jedi.inference.lazy_value import LazyKnownValues from jedi.inference.filters import ParserTreeFilter @@ -73,7 +73,7 @@ class ClassName(TreeNameDefinition): # We're using a different value to infer, so we cannot call super(). 
from jedi.inference.syntax_tree import tree_name_to_values inferred = tree_name_to_values( - self.parent_context.infer_state, self._name_value, self.tree_name) + self.parent_context.inference_state, self._name_value, self.tree_name) for result_value in inferred: if self._apply_decorators: @@ -141,10 +141,10 @@ class ClassMixin(object): from jedi.inference.value import TreeInstance if arguments is None: arguments = ValuesArguments([]) - return ValueSet([TreeInstance(self.infer_state, self.parent_context, self, arguments)]) + return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)]) def py__class__(self): - return compiled.builtin_from_name(self.infer_state, u'type') + return compiled.builtin_from_name(self.inference_state, u'type') @property def name(self): @@ -159,7 +159,7 @@ class ClassMixin(object): return list(value_.get_param_names())[1:] return [] - @infer_state_method_generator_cache() + @inference_state_method_generator_cache() def py__mro__(self): mro = [self] yield self @@ -214,7 +214,7 @@ class ClassMixin(object): ) if not is_instance: from jedi.inference.compiled import builtin_from_name - type_ = builtin_from_name(self.infer_state, u'type') + type_ = builtin_from_name(self.inference_state, u'type') assert isinstance(type_, ClassValue) if type_ != self: for instance in type_.py__call__(): @@ -239,7 +239,7 @@ class ClassMixin(object): class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)): api_type = u'class' - @infer_state_method_cache() + @inference_state_method_cache() def list_type_vars(self): found = [] arglist = self.tree_node.get_super_arglist() @@ -261,10 +261,10 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase arglist = self.tree_node.get_super_arglist() if arglist: from jedi.inference import arguments - return arguments.TreeArguments(self.infer_state, self.parent_context, arglist) + return arguments.TreeArguments(self.inference_state, self.parent_context, arglist) return None - @infer_state_method_cache(default=()) + @inference_state_method_cache(default=()) def py__bases__(self): args = self._get_bases_arguments() if args is not None: @@ -273,10 +273,10 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase return lst if self.py__name__() == 'object' \ - and self.parent_context == self.infer_state.builtins_module: + and self.parent_context == self.inference_state.builtins_module: return [] return [LazyKnownValues( - self.infer_state.builtins_module.py__getattribute__('object') + self.inference_state.builtins_module.py__getattribute__('object') )] def py__getitem__(self, index_value_set, valueualized_node): @@ -320,7 +320,7 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase debug.dbg('Unprocessed metaclass %s', metaclass) return [] - @infer_state_method_cache(default=NO_VALUES) + @inference_state_method_cache(default=NO_VALUES) def get_metaclasses(self): args = self._get_bases_arguments() if args is not None: diff --git a/jedi/inference/value/module.py b/jedi/inference/value/module.py index f0268471..4332a249 100644 --- a/jedi/inference/value/module.py +++ b/jedi/inference/value/module.py @@ -2,7 +2,7 @@ import re import os from jedi import debug -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.names import ValueNameMixin, AbstractNameDefinition from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, 
DictFilter, MergedFilter from jedi.inference import compiled @@ -27,13 +27,13 @@ class _ModuleAttributeName(AbstractNameDefinition): def infer(self): if self._string_value is not None: s = self._string_value - if self.parent_context.infer_state.environment.version_info.major == 2 \ + if self.parent_context.inference_state.environment.version_info.major == 2 \ and not isinstance(s, bytes): s = s.encode('utf-8') return ValueSet([ - create_simple_object(self.parent_context.infer_state, s) + create_simple_object(self.parent_context.inference_state, s) ]) - return compiled.get_string_value_set(self.parent_context.infer_state) + return compiled.get_string_value_set(self.parent_context.inference_state) class ModuleName(ValueNameMixin, AbstractNameDefinition): @@ -48,9 +48,9 @@ class ModuleName(ValueNameMixin, AbstractNameDefinition): return self._name -def iter_module_names(infer_state, paths): +def iter_module_names(inference_state, paths): # Python modules/packages - for n in infer_state.compiled_subprocess.list_module_names(paths): + for n in inference_state.compiled_subprocess.list_module_names(paths): yield n for path in paths: @@ -75,7 +75,7 @@ def iter_module_names(infer_state, paths): class SubModuleDictMixin(object): - @infer_state_method_cache() + @inference_state_method_cache() def sub_modules_dict(self): """ Lists modules in the directory of this module (if this module is a @@ -87,7 +87,7 @@ class SubModuleDictMixin(object): except AttributeError: pass else: - mods = iter_module_names(self.infer_state, method()) + mods = iter_module_names(self.inference_state, method()) for name in mods: # It's obviously a relative import to the current module. names[name] = SubModuleName(self, name) @@ -113,7 +113,7 @@ class ModuleMixin(SubModuleDictMixin): yield star_filter def py__class__(self): - c, = values_from_qualified_names(self.infer_state, u'types', u'ModuleType') + c, = values_from_qualified_names(self.inference_state, u'types', u'ModuleType') return c def is_module(self): @@ -123,7 +123,7 @@ class ModuleMixin(SubModuleDictMixin): return False @property - @infer_state_method_cache() + @inference_state_method_cache() def name(self): return ModuleName(self, self._string_name) @@ -140,7 +140,7 @@ class ModuleMixin(SubModuleDictMixin): # Remove PEP 3149 names return re.sub(r'\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1)) - @infer_state_method_cache() + @inference_state_method_cache() def _module_attributes_dict(self): names = ['__package__', '__doc__', '__name__'] # All the additional module attributes are strings. @@ -157,7 +157,7 @@ class ModuleMixin(SubModuleDictMixin): # I'm not sure if the star import cache is really that effective anymore # with all the other really fast import caches. Recheck. Also we would need # to push the star imports into InferenceState.module_cache, if we reenable this. 
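The ``inference_state_method_cache`` decorator being renamed throughout these hunks memoizes a method once per ``InferenceState``. A minimal runnable sketch of the idea (simplified and hypothetical -- ``_FakeInferenceState``, ``_Thing`` and the exact keying are illustrative, not Jedi's actual ``cache.py``):

    from functools import wraps

    class _FakeInferenceState(object):
        # Hypothetical stand-in: just the cache dict the decorator needs.
        def __init__(self):
            self.memoize_cache = {}

    def inference_state_method_cache(default=None):
        def decorator(method):
            @wraps(method)
            def wrapper(self, *args):
                # One memo dict per state and per method, keyed by (obj, args).
                memo = self.inference_state.memoize_cache.setdefault(method, {})
                key = (self, args)
                if key in memo:
                    return memo[key]
                memo[key] = default          # recursion guard, like the real default=
                memo[key] = result = method(self, *args)
                return result
            return wrapper
        return decorator

    class _Thing(object):
        def __init__(self, inference_state):
            self.inference_state = inference_state

        @inference_state_method_cache(default=())
        def expensive(self, n):
            print('computing', n)
            return n * n

    state = _FakeInferenceState()
    thing = _Thing(state)
    thing.expensive(3)   # computes and caches
    thing.expensive(3)   # served from the per-state cache, no recomputation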
- @infer_state_method_cache([]) + @inference_state_method_cache([]) def star_imports(self): from jedi.inference.imports import Importer @@ -165,7 +165,7 @@ class ModuleMixin(SubModuleDictMixin): for i in self.tree_node.iter_imports(): if i.is_star_import(): new = Importer( - self.infer_state, + self.inference_state, import_path=i.get_paths()[-1], module_value=self, level=i.level @@ -190,9 +190,9 @@ class ModuleValue(ModuleMixin, TreeValue): api_type = u'module' parent_context = None - def __init__(self, infer_state, module_node, file_io, string_names, code_lines, is_package=False): + def __init__(self, inference_state, module_node, file_io, string_names, code_lines, is_package=False): super(ModuleValue, self).__init__( - infer_state, + inference_state, parent_context=None, tree_node=module_node ) @@ -242,7 +242,7 @@ class ModuleValue(ModuleMixin, TreeValue): # It is a namespace, now try to find the rest of the # modules on sys_path or whatever the search_path is. paths = set() - for s in self.infer_state.get_sys_path(): + for s in self.inference_state.get_sys_path(): other = os.path.join(s, self.name.string_name) if os.path.isdir(other): paths.add(other) diff --git a/jedi/inference/value/namespace.py b/jedi/inference/value/namespace.py index 92920e38..d662e1bc 100644 --- a/jedi/inference/value/namespace.py +++ b/jedi/inference/value/namespace.py @@ -1,4 +1,4 @@ -from jedi.inference.cache import infer_state_method_cache +from jedi.inference.cache import inference_state_method_cache from jedi.inference.filters import DictFilter from jedi.inference.names import ValueNameMixin, AbstractNameDefinition from jedi.inference.base_value import Value @@ -25,9 +25,9 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin): api_type = u'module' parent_context = None - def __init__(self, infer_state, fullname, paths): - super(ImplicitNamespaceValue, self).__init__(infer_state, parent_context=None) - self.infer_state = infer_state + def __init__(self, inference_state, fullname, paths): + super(ImplicitNamespaceValue, self).__init__(inference_state, parent_context=None) + self.inference_state = inference_state self._fullname = fullname self._paths = paths @@ -35,7 +35,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin): yield DictFilter(self.sub_modules_dict()) @property - @infer_state_method_cache() + @inference_state_method_cache() def name(self): string_name = self.py__package__()[-1] return ImplicitNSName(self, string_name) diff --git a/jedi/plugins/flask.py b/jedi/plugins/flask.py index 49fa6d19..6444a6e0 100644 --- a/jedi/plugins/flask.py +++ b/jedi/plugins/flask.py @@ -3,19 +3,19 @@ def import_module(callback): Handle "magic" Flask extension imports: ``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``. """ - def wrapper(infer_state, import_names, module_value, *args, **kwargs): + def wrapper(inference_state, import_names, module_value, *args, **kwargs): if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'): # New style. 
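            # e.g. ``flask.ext.foo``: try ``flask_foo`` first, then fall back
            # to ``flaskext.foo`` below.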
ipath = (u'flask_' + import_names[2]), - value_set = callback(infer_state, ipath, None, *args, **kwargs) + value_set = callback(inference_state, ipath, None, *args, **kwargs) if value_set: return value_set - value_set = callback(infer_state, (u'flaskext',), None, *args, **kwargs) + value_set = callback(inference_state, (u'flaskext',), None, *args, **kwargs) return callback( - infer_state, + inference_state, (u'flaskext', import_names[2]), next(iter(value_set)), *args, **kwargs ) - return callback(infer_state, import_names, module_value, *args, **kwargs) + return callback(inference_state, import_names, module_value, *args, **kwargs) return wrapper diff --git a/jedi/plugins/stdlib.py b/jedi/plugins/stdlib.py index 48884ee4..60e2ee81 100644 --- a/jedi/plugins/stdlib.py +++ b/jedi/plugins/stdlib.py @@ -114,7 +114,7 @@ def execute(callback): except AttributeError: pass else: - if value.parent_context == value.infer_state.builtins_module: + if value.parent_context == value.inference_state.builtins_module: module_name = 'builtins' elif value.parent_context is not None and value.parent_context.is_module(): module_name = value.parent_context.py__name__() @@ -148,7 +148,7 @@ def execute(callback): return wrapper -def _follow_param(infer_state, arguments, index): +def _follow_param(inference_state, arguments, index): try: key, lazy_value = list(arguments.unpack())[index] except IndexError: @@ -158,7 +158,7 @@ def _follow_param(infer_state, arguments, index): def argument_clinic(string, want_obj=False, want_value=False, - want_arguments=False, want_infer_state=False, + want_arguments=False, want_inference_state=False, want_callback=False): """ Works like Argument Clinic (PEP 436), to validate function params. @@ -177,8 +177,8 @@ def argument_clinic(string, want_obj=False, want_value=False, kwargs['value'] = arguments.value if want_obj: kwargs['obj'] = obj - if want_infer_state: - kwargs['infer_state'] = obj.infer_state + if want_inference_state: + kwargs['inference_state'] = obj.inference_state if want_arguments: kwargs['arguments'] = arguments if want_callback: @@ -202,9 +202,9 @@ def builtins_property(objects, types, obj, arguments): return lazy_value.infer().py__call__(arguments=ValuesArguments([objects])) -@argument_clinic('iterator[, default], /', want_infer_state=True) -def builtins_next(iterators, defaults, infer_state): - if infer_state.environment.version_info.major == 2: +@argument_clinic('iterator[, default], /', want_inference_state=True) +def builtins_next(iterators, defaults, inference_state): + if inference_state.environment.version_info.major == 2: name = 'next' else: name = '__next__' @@ -245,8 +245,8 @@ def builtins_type(objects, bases, dicts): class SuperInstance(LazyValueWrapper): """To be used like the object ``super`` returns.""" - def __init__(self, infer_state, instance): - self.infer_state = infer_state + def __init__(self, inference_state, instance): + self.inference_state = inference_state self._instance = instance # Corresponds to super().__self__ def _get_bases(self): @@ -274,7 +274,7 @@ def builtins_super(types, objects, value): instance = value.var_args.instance # TODO if a class is given it doesn't have to be the direct super # class, it can be an anecestor from long ago. 
- return ValueSet({SuperInstance(instance.infer_state, instance)}) + return ValueSet({SuperInstance(instance.inference_state, instance)}) return NO_VALUES @@ -312,12 +312,12 @@ def builtins_reversed(sequences, obj, arguments): # necessary, because `reversed` is a function and autocompletion # would fail in certain cases like `reversed(x).__iter__` if we # just returned the result directly. - seq, = obj.infer_state.typing_module.py__getattribute__('Iterator').execute_with_values() + seq, = obj.inference_state.typing_module.py__getattribute__('Iterator').execute_with_values() return ValueSet([ReversedObject(seq, list(reversed(ordered)))]) -@argument_clinic('obj, type, /', want_arguments=True, want_infer_state=True) -def builtins_isinstance(objects, types, arguments, infer_state): +@argument_clinic('obj, type, /', want_arguments=True, want_inference_state=True) +def builtins_isinstance(objects, types, arguments, inference_state): bool_results = set() for o in objects: cls = o.py__class__() @@ -336,7 +336,7 @@ def builtins_isinstance(objects, types, arguments, infer_state): if cls_or_tup.is_class(): bool_results.add(cls_or_tup in mro) elif cls_or_tup.name.string_name == 'tuple' \ - and cls_or_tup.get_root_value() == infer_state.builtins_module: + and cls_or_tup.get_root_value() == inference_state.builtins_module: # Check for tuples. classes = ValueSet.from_sets( lazy_value.infer() @@ -353,7 +353,7 @@ def builtins_isinstance(objects, types, arguments, infer_state): analysis.add(lazy_value.value, 'type-error-isinstance', node, message) return ValueSet( - compiled.builtin_from_name(infer_state, force_unicode(str(b))) + compiled.builtin_from_name(inference_state, force_unicode(str(b))) for b in bool_results ) @@ -430,18 +430,18 @@ def collections_namedtuple(obj, arguments, callback): inferring the result. """ - infer_state = obj.infer_state + inference_state = obj.inference_state # Process arguments name = u'jedi_unknown_namedtuple' - for c in _follow_param(infer_state, arguments, 0): + for c in _follow_param(inference_state, arguments, 0): x = get_str_or_none(c) if x is not None: name = force_unicode(x) break # TODO here we only use one of the types, we should use all. 
- param_values = _follow_param(infer_state, arguments, 1) + param_values = _follow_param(inference_state, arguments, 1) if not param_values: return NO_VALUES _fields = list(param_values)[0] @@ -470,16 +470,16 @@ def collections_namedtuple(obj, arguments, callback): ) # Parse source code - module = infer_state.grammar.parse(code) + module = inference_state.grammar.parse(code) generated_class = next(module.iter_classdefs()) parent_context = ModuleValue( - infer_state, module, + inference_state, module, file_io=None, string_names=None, code_lines=parso.split_lines(code, keepends=True), ) - return ValueSet([ClassValue(infer_state, parent_context, generated_class)]) + return ValueSet([ClassValue(inference_state, parent_context, generated_class)]) class PartialObject(object): @@ -571,7 +571,7 @@ def _random_choice(sequences): def _dataclass(obj, arguments, callback): - for c in _follow_param(obj.infer_state, arguments, 0): + for c in _follow_param(obj.inference_state, arguments, 0): if c.is_class(): return ValueSet([DataclassWrapper(c)]) else: @@ -645,7 +645,7 @@ class ItemGetterCallable(ValueWrapper): value_set |= item_value_set.get_item(lazy_values[0].infer(), None) else: value_set |= ValueSet([iterable.FakeSequence( - self._wrapped_value.infer_state, + self._wrapped_value.inference_state, 'list', [ LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None)) @@ -698,7 +698,7 @@ def _create_string_input_function(func): s = get_str_or_none(value) if s is not None: s = func(s) - yield compiled.create_simple_object(value.infer_state, s) + yield compiled.create_simple_object(value.inference_state, s) values = ValueSet(iterate()) if values: return values @@ -724,7 +724,7 @@ def _os_path_join(args_set, callback): string += force_unicode(s) is_first = False else: - return ValueSet([compiled.create_simple_object(sequence.infer_state, string)]) + return ValueSet([compiled.create_simple_object(sequence.inference_state, string)]) return callback() @@ -803,7 +803,7 @@ def get_metaclass_filters(func): class EnumInstance(LazyValueWrapper): def __init__(self, cls, name): - self.infer_state = cls.infer_state + self.inference_state = cls.inference_state self._cls = cls # Corresponds to super().__self__ self._name = name self.tree_node = self._name.tree_name @@ -818,7 +818,7 @@ class EnumInstance(LazyValueWrapper): def get_filters(self, search_global=False, position=None, origin_scope=None): yield DictFilter(dict( - name=compiled.create_simple_object(self.infer_state, self._name.string_name).name, + name=compiled.create_simple_object(self.inference_state, self._name.string_name).name, value=self._name, )) for f in self._get_wrapped_value().get_filters(): @@ -826,10 +826,10 @@ class EnumInstance(LazyValueWrapper): def tree_name_to_values(func): - def wrapper(infer_state, value, tree_name): + def wrapper(inference_state, value, tree_name): if tree_name.value == 'sep' and value.is_module() and value.py__name__() == 'os.path': return ValueSet({ - compiled.create_simple_object(infer_state, os.path.sep), + compiled.create_simple_object(inference_state, os.path.sep), }) - return func(infer_state, value, tree_name) + return func(inference_state, value, tree_name) return wrapper diff --git a/test/completion/sys_path.py b/test/completion/sys_path.py index ce88a4c3..dd597fa4 100644 --- a/test/completion/sys_path.py +++ b/test/completion/sys_path.py @@ -13,8 +13,8 @@ sys.path.append('a' +* '/thirdparty') #? ['inference'] import inference -#? ['infer_state_function_cache'] -inference.infer_state_fu +#? 
['inference_state_function_cache'] +inference.inference_state_fu # Those don't work because dirname and abspath are not properly understood. ##? ['jedi_'] diff --git a/test/conftest.py b/test/conftest.py index dcb9a0eb..35ee31e2 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -162,10 +162,10 @@ def cwd_tmpdir(monkeypatch, tmpdir): @pytest.fixture -def infer_state(Script): - return Script('')._infer_state +def inference_state(Script): + return Script('')._inference_state @pytest.fixture -def same_process_infer_state(Script): - return Script('', environment=InterpreterEnvironment())._infer_state +def same_process_inference_state(Script): + return Script('', environment=InterpreterEnvironment())._inference_state diff --git a/test/run.py b/test/run.py index 1b5868e2..f898e133 100755 --- a/test/run.py +++ b/test/run.py @@ -212,7 +212,7 @@ class IntegrationTestCase(object): def run_goto_definitions(self, compare_cb, environment): script = self.script(environment) - infer_state = script._infer_state + inference_state = script._inference_state def comparison(definition): suffix = '()' if definition.type == 'instance' else '' @@ -232,13 +232,13 @@ class IntegrationTestCase(object): user_value = user_value.get_function_execution() element.parent = user_value.tree_node results = convert_values( - infer_state.infer_element(user_value, element), + inference_state.infer_element(user_value, element), ) if not results: raise Exception('Could not resolve %s on line %s' % (match.string, self.line_nr - 1)) - should_be |= set(Definition(infer_state, r.name) for r in results) + should_be |= set(Definition(inference_state, r.name) for r in results) debug.dbg('Finished getting types', color='YELLOW') # Because the objects have different ids, `repr`, then compare. diff --git a/test/test_api/test_environment.py b/test/test_api/test_environment.py index 64c13021..4917341f 100644 --- a/test/test_api/test_environment.py +++ b/test/test_api/test_environment.py @@ -42,10 +42,10 @@ def test_versions(version): assert env.get_sys_path() -def test_load_module(infer_state): - access_path = infer_state.compiled_subprocess.load_module( +def test_load_module(inference_state): + access_path = inference_state.compiled_subprocess.load_module( dotted_name=u'math', - sys_path=infer_state.get_sys_path() + sys_path=inference_state.get_sys_path() ) name, access_handle = access_path.accesses[0] @@ -55,31 +55,31 @@ def test_load_module(infer_state): access_handle.py__mro__() -def test_error_in_environment(infer_state, Script, environment): +def test_error_in_environment(inference_state, Script, environment): if isinstance(environment, InterpreterEnvironment): pytest.skip("We don't catch these errors at the moment.") # Provoke an error to show how Jedi can recover from it. with pytest.raises(jedi.InternalError): - infer_state.compiled_subprocess._test_raise_error(KeyboardInterrupt) + inference_state.compiled_subprocess._test_raise_error(KeyboardInterrupt) # The second time it should raise an InternalError again. with pytest.raises(jedi.InternalError): - infer_state.compiled_subprocess._test_raise_error(KeyboardInterrupt) + inference_state.compiled_subprocess._test_raise_error(KeyboardInterrupt) # Jedi should still work. 
def_, = Script('str').goto_definitions() assert def_.name == 'str' -def test_stdout_in_subprocess(infer_state, Script): - infer_state.compiled_subprocess._test_print(stdout='.') +def test_stdout_in_subprocess(inference_state, Script): + inference_state.compiled_subprocess._test_print(stdout='.') Script('1').goto_definitions() -def test_killed_subprocess(infer_state, Script, environment): +def test_killed_subprocess(inference_state, Script, environment): if isinstance(environment, InterpreterEnvironment): pytest.skip("We cannot kill our own process") # Just kill the subprocess. - infer_state.compiled_subprocess._compiled_subprocess._get_process().kill() + inference_state.compiled_subprocess._compiled_subprocess._get_process().kill() # Since the process was terminated (and nobody knows about it) the first # Jedi call fails. with pytest.raises(jedi.InternalError): diff --git a/test/test_api/test_project.py b/test/test_api/test_project.py index b940912a..6750774a 100644 --- a/test/test_api/test_project.py +++ b/test/test_api/test_project.py @@ -13,12 +13,12 @@ def test_django_default_project(Script): ) c, = script.completions() assert c.name == "SomeModel" - assert script._infer_state.project._django is True + assert script._inference_state.project._django is True def test_interpreter_project_path(): # Run from anywhere it should be the cwd. dir = os.path.join(root_dir, 'test') with set_cwd(dir): - project = Interpreter('', [locals()])._infer_state.project + project = Interpreter('', [locals()])._inference_state.project assert project._path == dir diff --git a/test/test_api/test_settings.py b/test/test_api/test_settings.py index e0e862e6..f6629719 100644 --- a/test/test_api/test_settings.py +++ b/test/test_api/test_settings.py @@ -17,7 +17,7 @@ def test_add_dynamic_mods(Script): # Other fictional modules in another place in the fs. src2 = 'from .. 
import setup; setup.r(1)' script = Script(src1, path='../setup.py') - imports.load_module(script._infer_state, os.path.abspath(fname), src2) + imports.load_module(script._inference_state, os.path.abspath(fname), src2) result = script.goto_definitions() assert len(result) == 1 assert result[0].description == 'class int' diff --git a/test/test_inference/test_buildout_detection.py b/test/test_inference/test_buildout_detection.py index 0fad419b..cd495d3c 100644 --- a/test/test_inference/test_buildout_detection.py +++ b/test/test_inference/test_buildout_detection.py @@ -63,7 +63,7 @@ def test_sys_path_with_modifications(Script): """) path = os.path.abspath(os.path.join(os.curdir, 'module_name.py')) - paths = Script(code, path=path)._infer_state.get_sys_path() + paths = Script(code, path=path)._inference_state.get_sys_path() assert '/tmp/.buildout/eggs/important_package.egg' in paths diff --git a/test/test_inference/test_compiled.py b/test/test_inference/test_compiled.py index 04dac687..d8635286 100644 --- a/test/test_inference/test_compiled.py +++ b/test/test_inference/test_compiled.py @@ -11,8 +11,8 @@ from jedi.inference.compiled.access import DirectObjectAccess from jedi.inference.gradual.conversion import _stub_to_python_value_set -def test_simple(infer_state, environment): - obj = compiled.create_simple_object(infer_state, u'_str_') +def test_simple(inference_state, environment): + obj = compiled.create_simple_object(inference_state, u'_str_') upper, = obj.py__getattribute__(u'upper') objs = list(upper.execute_with_values()) assert len(objs) == 1 @@ -23,15 +23,15 @@ def test_simple(infer_state, environment): assert objs[0].name.string_name == expected -def test_builtin_loading(infer_state): - string, = infer_state.builtins_module.py__getattribute__(u'str') +def test_builtin_loading(inference_state): + string, = inference_state.builtins_module.py__getattribute__(u'str') from_name, = string.py__getattribute__(u'__init__') assert from_name.tree_node assert not from_name.py__doc__() # It's a stub -def test_next_docstr(infer_state): - next_ = compiled.builtin_from_name(infer_state, u'next') +def test_next_docstr(inference_state): + next_ = compiled.builtin_from_name(inference_state, u'next') assert next_.tree_node is not None assert next_.py__doc__() == '' # It's a stub for non_stub in _stub_to_python_value_set(next_): @@ -47,12 +47,12 @@ def test_parse_function_doc_illegal_docstr(): assert ('', '') == compiled.value._parse_function_doc(docstr) -def test_doc(infer_state): +def test_doc(inference_state): """ Even CompiledObject docs always return empty docstrings - not None, that's just a Jedi API definition. 
""" - str_ = compiled.create_simple_object(infer_state, u'') + str_ = compiled.create_simple_object(inference_state, u'') # Equals `''.__getnewargs__` obj, = str_.py__getattribute__(u'__getnewargs__') assert obj.py__doc__() == '' @@ -103,7 +103,7 @@ def test_dict_values(Script, environment): def test_getitem_on_none(Script): script = Script('None[1j]') assert not script.goto_definitions() - issue, = script._infer_state.analysis + issue, = script._inference_state.analysis assert issue.name == 'type-error-not-subscriptable' @@ -122,7 +122,7 @@ def _return_int(): ('ret_int', '_return_int', 'test.test_inference.test_compiled'), ] ) -def test_parent_context(same_process_infer_state, attribute, expected_name, expected_parent): +def test_parent_context(same_process_inference_state, attribute, expected_name, expected_parent): import decimal class C: @@ -135,8 +135,8 @@ def test_parent_context(same_process_infer_state, attribute, expected_name, expe ret_int = _return_int o = compiled.CompiledObject( - same_process_infer_state, - DirectObjectAccess(same_process_infer_state, C) + same_process_inference_state, + DirectObjectAccess(same_process_inference_state, C) ) x, = o.py__getattribute__(attribute) assert x.py__name__() == expected_name @@ -163,9 +163,9 @@ def test_parent_context(same_process_infer_state, attribute, expected_name, expe (Counter("").most_common, ['Counter', 'most_common']), ] ) -def test_qualified_names(same_process_infer_state, obj, expected_names): +def test_qualified_names(same_process_inference_state, obj, expected_names): o = compiled.CompiledObject( - same_process_infer_state, - DirectObjectAccess(same_process_infer_state, obj) + same_process_inference_state, + DirectObjectAccess(same_process_inference_state, obj) ) assert o.get_qualified_names() == tuple(expected_names) diff --git a/test/test_inference/test_docstring.py b/test/test_inference/test_docstring.py index fea25a36..83e86818 100644 --- a/test/test_inference/test_docstring.py +++ b/test/test_inference/test_docstring.py @@ -66,7 +66,7 @@ def test_instance_doc(Script): assert defs[0].docstring() == 'Docstring of `TestClass`.' -@unittest.skip('need infer_state class for that') +@unittest.skip('need inference_state class for that') def test_attribute_docstring(Script): defs = Script(""" x = None @@ -75,7 +75,7 @@ def test_attribute_docstring(Script): assert defs[0].docstring() == 'Docstring of `x`.' 
-@unittest.skip('need infer_state class for that') +@unittest.skip('need inference_state class for that') def test_multiple_docstrings(Script): defs = Script(""" def func(): diff --git a/test/test_inference/test_imports.py b/test/test_inference/test_imports.py index f918a75a..7e09b958 100644 --- a/test/test_inference/test_imports.py +++ b/test/test_inference/test_imports.py @@ -43,12 +43,12 @@ pkg_zip_path = os.path.join(os.path.dirname(__file__), 'pkg.zip') -def test_find_module_package_zipped(Script, infer_state, environment): +def test_find_module_package_zipped(Script, inference_state, environment): sys_path = environment.get_sys_path() + [pkg_zip_path] script = Script('import pkg; pkg.mod', sys_path=sys_path) assert len(script.completions()) == 1 - file_io, is_package = infer_state.compiled_subprocess.get_module_info( + file_io, is_package = inference_state.compiled_subprocess.get_module_info( sys_path=sys_path, string=u'pkg', full_name=u'pkg' @@ -84,7 +84,7 @@ def test_find_module_package_zipped(Script, infer_state, environment): ] ) -def test_correct_zip_package_behavior(Script, infer_state, environment, code, +def test_correct_zip_package_behavior(Script, inference_state, environment, code, file, package, path, skip_python2): sys_path = environment.get_sys_path() + [pkg_zip_path] pkg, = Script(code, sys_path=sys_path).goto_definitions() @@ -96,13 +96,13 @@ def test_correct_zip_package_behavior(Script, infer_state, environment, code, assert value.py__path__() == [os.path.join(pkg_zip_path, path)] -def test_find_module_not_package_zipped(Script, infer_state, environment): +def test_find_module_not_package_zipped(Script, inference_state, environment): path = os.path.join(os.path.dirname(__file__), 'zipped_imports/not_pkg.zip') sys_path = environment.get_sys_path() + [path] script = Script('import not_pkg; not_pkg.val', sys_path=sys_path) assert len(script.completions()) == 1 - file_io, is_package = infer_state.compiled_subprocess.get_module_info( + file_io, is_package = inference_state.compiled_subprocess.get_module_info( sys_path=sys_path, string=u'not_pkg', full_name=u'not_pkg' @@ -310,16 +310,16 @@ def test_compiled_import_none(monkeypatch, Script): (os.path.join(THIS_DIR, '__init__.py'), True, ('ok', 'lala', 'x', 'test_imports')), ] ) -def test_get_modules_containing_name(infer_state, path, goal, is_package): +def test_get_modules_containing_name(inference_state, path, goal, is_package): module = imports._load_python_module( - infer_state, + inference_state, FileIO(path), import_names=('ok', 'lala', 'x'), is_package=is_package, ) assert module input_module, found_module = imports.get_modules_containing_name( - infer_state, + inference_state, [module], 'string_that_only_exists_here' ) @@ -337,9 +337,9 @@ def test_get_modules_containing_name(infer_state, path, goal, is_package): ('/foo/bar/__init__.py', ('foo', 'bar'), True, ('foo', 'bar')), ] ) -def test_load_module_from_path(infer_state, path, base_names, is_package, names): +def test_load_module_from_path(inference_state, path, base_names, is_package, names): file_io = KnownContentFileIO(path, '') - m = imports._load_module_from_path(infer_state, file_io, base_names) + m = imports._load_module_from_path(inference_state, file_io, base_names) assert m.is_package == is_package assert m.string_names == names @@ -437,8 +437,8 @@ def test_pre_defined_imports_module(Script, environment, name): module = Script('', path=path)._get_module() assert module.string_names == (name,) - assert module.infer_state.builtins_module.py__file__() != 
path - assert module.infer_state.typing_module.py__file__() != path + assert module.inference_state.builtins_module.py__file__() != path + assert module.inference_state.typing_module.py__file__() != path @pytest.mark.parametrize('name', ('builtins', 'typing')) @@ -454,8 +454,8 @@ def test_import_needed_modules_by_jedi(Script, environment, tmpdir, name): sys_path=[tmpdir.strpath] + environment.get_sys_path(), ) module, = script.goto_definitions() - assert module._infer_state.builtins_module.py__file__() != module_path - assert module._infer_state.typing_module.py__file__() != module_path + assert module._inference_state.builtins_module.py__file__() != module_path + assert module._inference_state.typing_module.py__file__() != module_path def test_import_with_semicolon(Script): diff --git a/test/test_inference/test_representation.py b/test/test_inference/test_representation.py index e89f5fcf..a602b900 100644 --- a/test/test_inference/test_representation.py +++ b/test/test_inference/test_representation.py @@ -1,9 +1,9 @@ from textwrap import dedent -def get_definition_and_infer_state(Script, source): +def get_definition_and_inference_state(Script, source): first, = Script(dedent(source)).goto_definitions() - return first._name._value, first._infer_state + return first._name._value, first._inference_state def test_function_execution(Script): @@ -16,7 +16,7 @@ def test_function_execution(Script): def x(): return str() x""" - func, infer_state = get_definition_and_infer_state(Script, s) + func, inference_state = get_definition_and_inference_state(Script, s) # Now just use the internals of the result (easiest way to get a fully # usable function). # Should return the same result both times. @@ -29,6 +29,6 @@ def test_class_mro(Script): class X(object): pass X""" - cls, infer_state = get_definition_and_infer_state(Script, s) + cls, inference_state = get_definition_and_inference_state(Script, s) mro = cls.py__mro__() assert [c.name.string_name for c in mro] == ['X', 'object']
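
The hunks above (and throughout this patch) are a one-to-one rename of the private ``infer_state`` spelling to ``inference_state`` across the inference layer, the plugins and the test suite; no behaviour changes. The sketch below is only an illustration of the new spelling as seen from outside, assuming a Jedi checkout with this patch applied: ``Script._inference_state`` and ``InferenceState`` are private API and may change again, and the attributes used here (``environment``, ``get_sys_path``) are taken from hunks in this patch rather than from any public contract.

    import jedi

    # The cache decorators were renamed the same way, e.g.
    #   from jedi.inference.cache import inference_state_method_cache
    # (previously infer_state_method_cache).

    script = jedi.Script('import json\njson.loads')

    # Was script._infer_state before this patch; still a private attribute.
    state = script._inference_state

    # The InferenceState carries the environment, project and module cache
    # used by the public methods (completions(), goto_definitions(), ...).
    print(state.environment)
    print(state.get_sys_path()[:3])

Test code that relied on the old pytest fixtures needs the same rename: ``infer_state`` becomes ``inference_state`` and ``same_process_infer_state`` becomes ``same_process_inference_state`` (see the ``test/conftest.py`` hunk above).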