
Some more value -> context renames

Dave Halter
2019-08-21 09:31:23 +02:00
parent 55c08e06ab
commit 02c96b37db
7 changed files with 33 additions and 33 deletions

File 1 of 7

@@ -371,13 +371,13 @@ class Script(object):
         if call_details is None:
             return []

-        value = self._inference_state.create_context(
+        context = self._inference_state.create_context(
             self._get_module_context(),
             call_details.bracket_leaf
         )
         definitions = helpers.cache_call_signatures(
             self._inference_state,
-            value,
+            context,
             call_details.bracket_leaf,
             self._code_lines,
             self._pos
@@ -395,26 +395,26 @@ class Script(object):
         module = self._get_module_context()
         try:
             for node in get_executable_nodes(self._module_node):
-                value = module.create_context(node)
+                context = module.create_context(node)
                 if node.type in ('funcdef', 'classdef'):
                     # Resolve the decorators.
-                    tree_name_to_values(self._inference_state, value, node.children[1])
+                    tree_name_to_values(self._inference_state, context, node.children[1])
                 elif isinstance(node, tree.Import):
                     import_names = set(node.get_defined_names())
                     if node.is_nested():
                         import_names |= set(path[-1] for path in node.get_paths())
                     for n in import_names:
-                        imports.infer_import(value, n)
+                        imports.infer_import(context, n)
                 elif node.type == 'expr_stmt':
-                    types = value.infer_node(node)
+                    types = context.infer_node(node)
                     for testlist in node.children[:-1:2]:
                         # Iterate tuples.
-                        unpack_tuple_to_dict(value, types, testlist)
+                        unpack_tuple_to_dict(context, types, testlist)
                 else:
                     if node.type == 'name':
-                        defs = self._inference_state.goto_definitions(value, node)
+                        defs = self._inference_state.goto_definitions(context, node)
                     else:
-                        defs = infer_call_of_leaf(value, node)
+                        defs = infer_call_of_leaf(context, node)
                     try_iter_content(defs)
             self._inference_state.reset_recursion_limitations()
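The two hunks above rename the context used for signature help and for the static analysis pass. For orientation, the signature-help path is reachable through the public API; a minimal usage sketch, assuming the pre-0.16 interface (Script(source, line, column) and call_signatures()), not part of this commit:

    import jedi

    source = "def add(a, b):\n    return a + b\n\nadd(1, "
    # Cursor at the end of the last line, inside the open call to add().
    script = jedi.Script(source, line=4, column=7)
    for signature in script.call_signatures():
        # Each signature describes the call under the cursor, here add(a, b).
        print(signature.name, [p.name for p in signature.params])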

File 2 of 7

@@ -254,10 +254,10 @@ class Completion:
     def _trailer_completions(self, previous_leaf):
         user_value = get_user_context(self._module_context, self._position)
-        inferred_value = self._inference_state.create_context(
+        inferred_context = self._inference_state.create_context(
             self._module_context, previous_leaf
         )
-        values = infer_call_of_leaf(inferred_value, previous_leaf)
+        values = infer_call_of_leaf(inferred_context, previous_leaf)
         completion_names = []
         debug.dbg('trailer completion values: %s', values, color='MAGENTA')
         for value in values:
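_trailer_completions() is the attribute-completion case, i.e. completing after a dot. A small usage sketch, again assuming the pre-0.16 public API (Script(...).completions()), illustrative only:

    import jedi

    # Completing "os.pa" exercises the trailer-completion path shown above.
    script = jedi.Script("import os\nos.pa", line=2, column=5)
    print([c.name for c in script.completions()])  # e.g. ['pardir', 'path', ...]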

File 3 of 7

@@ -136,23 +136,23 @@ def get_stack_at_position(grammar, code_lines, leaf, pos):
     )


-def infer_goto_definition(inference_state, value, leaf):
+def infer_goto_definition(inference_state, context, leaf):
     if leaf.type == 'name':
         # In case of a name we can just use goto_definition which does all the
         # magic itself.
-        return inference_state.goto_definitions(value, leaf)
+        return inference_state.goto_definitions(context, leaf)

     parent = leaf.parent
     definitions = NO_VALUES
     if parent.type == 'atom':
         # e.g. `(a + b)`
-        definitions = value.infer_node(leaf.parent)
+        definitions = context.infer_node(leaf.parent)
     elif parent.type == 'trailer':
         # e.g. `a()`
-        definitions = infer_call_of_leaf(value, leaf)
+        definitions = infer_call_of_leaf(context, leaf)
     elif isinstance(leaf, tree.Literal):
         # e.g. `"foo"` or `1.0`
-        return infer_atom(value, leaf)
+        return infer_atom(context, leaf)
     elif leaf.type in ('fstring_string', 'fstring_start', 'fstring_end'):
         return get_string_value_set(inference_state)
     return definitions
@@ -376,7 +376,7 @@ def get_call_signature_details(module, position):


 @call_signature_time_cache("call_signatures_validity")
-def cache_call_signatures(inference_state, value, bracket_leaf, code_lines, user_pos):
+def cache_call_signatures(inference_state, context, bracket_leaf, code_lines, user_pos):
     """This function calculates the cache key."""
     line_index = user_pos[0] - 1
@@ -385,13 +385,13 @@ def cache_call_signatures(inference_state, value, bracket_leaf, code_lines, user
     whole = ''.join(other_lines + [before_cursor])
     before_bracket = re.match(r'.*\(', whole, re.DOTALL)

-    module_path = value.get_root_context().py__file__()
+    module_path = context.get_root_context().py__file__()
     if module_path is None:
         yield None  # Don't cache!
     else:
         yield (module_path, before_bracket, bracket_leaf.start_pos)
     yield infer_goto_definition(
         inference_state,
-        value,
+        context,
         bracket_leaf.get_previous_leaf(),
     )
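infer_goto_definition() resolves the leaf left of the bracket when cache_call_signatures() builds its cache key, and the same branches serve definition lookups on names, literals and trailers such as `a()`. A hedged sketch of the user-facing lookup, assuming the pre-0.16 public API:

    import jedi

    # Definition lookup on a plain name; string literals and call trailers
    # take the other branches of infer_goto_definition().
    script = jedi.Script("x = 'foo'\nx", line=2, column=1)
    print([d.name for d in script.goto_definitions()])  # typically ['str']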

File 4 of 7

@@ -217,10 +217,10 @@ def _check_name_for_execution(inference_state, context, compare_node, name, trai
                 execution_context = next(create_func_excs(value))
                 for name, trailer in _get_possible_nodes(module_context, params[0].string_name):
                     if value_node.start_pos < name.start_pos < value_node.end_pos:
-                        random_value = execution_context.create_context(name)
+                        random_context = execution_context.create_context(name)
                         iterator = _check_name_for_execution(
                             inference_state,
-                            random_value,
+                            random_context,
                             compare_node,
                             name,
                             trailer
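_check_name_for_execution() belongs to jedi's dynamic parameter search: when a parameter has no annotation, jedi looks for call sites of the function (following simple wrappers, which is what the recursion above handles) and infers the parameter from the arguments it finds. A rough sketch of the user-visible effect, pre-0.16 API assumed:

    import jedi

    source = (
        "def double(a):\n"
        "    return a\n"
        "\n"
        "double(3)\n"
    )
    # With the cursor on `a` inside the function body, its type comes from
    # the call site double(3) found by the dynamic search.
    script = jedi.Script(source, line=2, column=12)
    print([d.name for d in script.goto_definitions()])  # typically ['int']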

File 5 of 7

@@ -359,8 +359,8 @@ def get_global_filters(inference_state, context, until_position, origin_scope):
     >>> scope = next(module_node.iter_funcdefs())
     >>> scope
     <Function: func@3-5>
-    >>> value = script._get_module_context().create_context(scope)
-    >>> filters = list(get_global_filters(value.inference_state, value, (4, 0), None))
+    >>> context = script._get_module_context().create_context(scope)
+    >>> filters = list(get_global_filters(context.inference_state, context, (4, 0), None))

     First we get the names from the function scope.

File 6 of 7

@@ -44,7 +44,7 @@ def deep_ast_copy(obj):
     return new_obj


-def infer_call_of_leaf(value, leaf, cut_own_trailer=False):
+def infer_call_of_leaf(context, leaf, cut_own_trailer=False):
     """
     Creates a "call" node that consist of all ``trailer`` and ``power``
     objects. E.g. if you call it with ``append``::
@@ -66,15 +66,15 @@ def infer_call_of_leaf(value, leaf, cut_own_trailer=False):
     trailer = leaf.parent
     if trailer.type == 'fstring':
         from jedi.inference import compiled
-        return compiled.get_string_value_set(value.inference_state)
+        return compiled.get_string_value_set(context.inference_state)

     # The leaf may not be the last or first child, because there exist three
     # different trailers: `( x )`, `[ x ]` and `.x`. In the first two examples
     # we should not match anything more than x.
     if trailer.type != 'trailer' or leaf not in (trailer.children[0], trailer.children[-1]):
         if trailer.type == 'atom':
-            return value.infer_node(trailer)
-        return value.infer_node(leaf)
+            return context.infer_node(trailer)
+        return context.infer_node(leaf)

     power = trailer.parent
     index = power.children.index(trailer)
@@ -99,10 +99,10 @@ def infer_call_of_leaf(value, leaf, cut_own_trailer=False):
         base = trailers[0]
         trailers = trailers[1:]

-    values = value.infer_node(base)
+    values = context.infer_node(base)
     from jedi.inference.syntax_tree import infer_trailer
     for trailer in trailers:
-        values = infer_trailer(value, values, trailer)
+        values = infer_trailer(context, values, trailer)
     return values
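The ``trailer``/``power`` terminology in infer_call_of_leaf() comes from the parso syntax tree, where an attribute or call chain is one node with a sequence of trailer children. A small sketch with parso (the parser jedi uses); node names depend on the grammar version, so treat the printed types as indicative:

    import parso

    # Parse a chained call and look at the node shapes infer_call_of_leaf()
    # walks over when given the `append` leaf.
    module = parso.parse("list([]).append(3)")
    leaf = module.get_first_leaf()
    while leaf.value != 'append':
        leaf = leaf.get_next_leaf()
    chain = leaf.parent.parent   # trailer -> the whole power/atom_expr chain
    print(chain.type)            # 'atom_expr' (older grammars call it 'power')
    print([c.type for c in chain.children])  # ['name', 'trailer', 'trailer', 'trailer']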

File 7 of 7

@@ -772,8 +772,8 @@ class _ArrayInstance(HelperValueMixin):


 class Slice(object):
-    def __init__(self, python_value, start, stop, step):
-        self._python_value = python_value
+    def __init__(self, python_context, start, stop, step):
+        self._python_context = python_context
         self._slice_object = None
         # All of them are either a Precedence or None.
         self._start = start
@@ -782,7 +782,7 @@ class Slice(object):

     def __getattr__(self, name):
         if self._slice_object is None:
-            value = compiled.builtin_from_name(self._python_value.inference_state, 'slice')
+            value = compiled.builtin_from_name(self._python_context.inference_state, 'slice')
             self._slice_object, = value.execute_with_values()
         return getattr(self._slice_object, name)
@@ -796,7 +796,7 @@ class Slice(object):
         if element is None:
             return None
-        result = self._python_value.infer_node(element)
+        result = self._python_context.infer_node(element)
         if len(result) != 1:
             # For simplicity, we want slices to be clear defined with just
             # one type. Otherwise we will return an empty slice object.
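The Slice wrapper above only stores the inferred start/stop/step and lazily builds the builtin slice instance that builtin_from_name(..., 'slice') stands for; attribute access is then delegated to it. Plain Python for reference, no jedi involved:

    # The builtin object that Slice.__getattr__() ultimately delegates to.
    s = slice(1, 10, 2)
    print(s.start, s.stop, s.step)   # 1 10 2
    print(list(range(20))[s])        # [1, 3, 5, 7, 9]
    print(s.indices(20))             # (1, 10, 2), clamped to a length of 20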