Merge branch 'parso'

This commit is contained in:
Dave Halter
2018-06-30 14:27:30 +02:00
8 changed files with 44 additions and 81 deletions
+8
View File
@@ -3,6 +3,14 @@
Changelog Changelog
--------- ---------
0.12.1 (2018-06-30)
+++++++++++++++++++
- This release forces you to upgrade parso. If you don't, nothing will work
anymore. Otherwise changes should be limited to bug fixes. Unfortunately Jedi
still uses a few internals of parso that make it hard to keep compatibility
over multiple releases. Parso >=0.3.0 is going to be needed.
0.12.0 (2018-04-15) 0.12.0 (2018-04-15)
+++++++++++++++++++ +++++++++++++++++++
+1 -1
View File
@@ -36,7 +36,7 @@ As you see Jedi is pretty simple and allows you to concentrate on writing a
good text editor, while still having very good IDE features for Python. good text editor, while still having very good IDE features for Python.
""" """
__version__ = '0.12.0' __version__ = '0.12.1'
from jedi.api import Script, Interpreter, set_debug_function, \ from jedi.api import Script, Interpreter, set_debug_function, \
preload_module, names preload_module, names
+3 -2
View File
@@ -404,8 +404,9 @@ class Completion(BaseDefinition):
append = '(' append = '('
if self._name.api_type == 'param' and self._stack is not None: if self._name.api_type == 'param' and self._stack is not None:
node_names = list(self._stack.get_node_names(self._evaluator.grammar._pgen_grammar)) nonterminals = [stack_node.nonterminal for stack_node in self._stack]
if 'trailer' in node_names and 'argument' not in node_names: if 'trailer' in nonterminals and 'argument' not in nonterminals:
# TODO this doesn't work for nested calls.
append += '=' append += '='
name = self._name.string_name name = self._name.string_name
+23 -21
View File
@@ -1,4 +1,4 @@
from parso.python import token from parso.python.token import PythonTokenTypes
from parso.python import tree from parso.python import tree
from parso.tree import search_ancestor, Leaf from parso.tree import search_ancestor, Leaf
@@ -122,11 +122,11 @@ class Completion:
grammar = self._evaluator.grammar grammar = self._evaluator.grammar
try: try:
self.stack = helpers.get_stack_at_position( self.stack = stack = helpers.get_stack_at_position(
grammar, self._code_lines, self._module_node, self._position grammar, self._code_lines, self._module_node, self._position
) )
except helpers.OnErrorLeaf as e: except helpers.OnErrorLeaf as e:
self.stack = None self.stack = stack = None
if e.error_leaf.value == '.': if e.error_leaf.value == '.':
# After ErrorLeaf's that are dots, we will not do any # After ErrorLeaf's that are dots, we will not do any
# completions since this probably just confuses the user. # completions since this probably just confuses the user.
@@ -135,10 +135,10 @@ class Completion:
return self._global_completions() return self._global_completions()
allowed_keywords, allowed_tokens = \ allowed_transitions = \
helpers.get_possible_completion_types(grammar._pgen_grammar, self.stack) list(stack._allowed_transition_names_and_token_types())
if 'if' in allowed_keywords: if 'if' in allowed_transitions:
leaf = self._module_node.get_leaf_for_position(self._position, include_prefixes=True) leaf = self._module_node.get_leaf_for_position(self._position, include_prefixes=True)
previous_leaf = leaf.get_previous_leaf() previous_leaf = leaf.get_previous_leaf()
@@ -164,50 +164,52 @@ class Completion:
# Compare indents # Compare indents
if stmt.start_pos[1] == indent: if stmt.start_pos[1] == indent:
if type_ == 'if_stmt': if type_ == 'if_stmt':
allowed_keywords += ['elif', 'else'] allowed_transitions += ['elif', 'else']
elif type_ == 'try_stmt': elif type_ == 'try_stmt':
allowed_keywords += ['except', 'finally', 'else'] allowed_transitions += ['except', 'finally', 'else']
elif type_ == 'for_stmt': elif type_ == 'for_stmt':
allowed_keywords.append('else') allowed_transitions.append('else')
completion_names = list(self._get_keyword_completion_names(allowed_keywords)) completion_names = list(self._get_keyword_completion_names(allowed_transitions))
if token.NAME in allowed_tokens or token.INDENT in allowed_tokens: if any(t in allowed_transitions for t in (PythonTokenTypes.NAME,
PythonTokenTypes.INDENT)):
# This means that we actually have to do type inference. # This means that we actually have to do type inference.
symbol_names = list(self.stack.get_node_names(grammar._pgen_grammar)) nonterminals = [stack_node.nonterminal for stack_node in stack]
nodes = list(self.stack.get_nodes()) nodes = [node for stack_node in stack for node in stack_node.nodes]
if nodes and nodes[-1] in ('as', 'def', 'class'): if nodes and nodes[-1] in ('as', 'def', 'class'):
# No completions for ``with x as foo`` and ``import x as foo``. # No completions for ``with x as foo`` and ``import x as foo``.
# Also true for defining names as a class or function. # Also true for defining names as a class or function.
return list(self._get_class_context_completions(is_function=True)) return list(self._get_class_context_completions(is_function=True))
elif "import_stmt" in symbol_names: elif "import_stmt" in nonterminals:
level, names = self._parse_dotted_names(nodes, "import_from" in symbol_names) level, names = self._parse_dotted_names(nodes, "import_from" in nonterminals)
only_modules = not ("import_from" in symbol_names and 'import' in nodes) only_modules = not ("import_from" in nonterminals and 'import' in nodes)
completion_names += self._get_importer_names( completion_names += self._get_importer_names(
names, names,
level, level,
only_modules=only_modules, only_modules=only_modules,
) )
elif symbol_names[-1] in ('trailer', 'dotted_name') and nodes[-1] == '.': elif nonterminals[-1] in ('trailer', 'dotted_name') and nodes[-1] == '.':
dot = self._module_node.get_leaf_for_position(self._position) dot = self._module_node.get_leaf_for_position(self._position)
completion_names += self._trailer_completions(dot.get_previous_leaf()) completion_names += self._trailer_completions(dot.get_previous_leaf())
else: else:
completion_names += self._global_completions() completion_names += self._global_completions()
completion_names += self._get_class_context_completions(is_function=False) completion_names += self._get_class_context_completions(is_function=False)
if 'trailer' in symbol_names: if 'trailer' in nonterminals:
call_signatures = self._call_signatures_method() call_signatures = self._call_signatures_method()
completion_names += get_call_signature_param_names(call_signatures) completion_names += get_call_signature_param_names(call_signatures)
return completion_names return completion_names
def _get_keyword_completion_names(self, keywords_): def _get_keyword_completion_names(self, allowed_transitions):
for k in keywords_: for k in allowed_transitions:
yield keywords.KeywordName(self._evaluator, k) if isinstance(k, str) and k.isalpha():
yield keywords.KeywordName(self._evaluator, k)
def _global_completions(self): def _global_completions(self):
context = get_user_scope(self._module_context, self._position) context = get_user_scope(self._module_context, self._position)
+1 -53
View File
@@ -12,7 +12,6 @@ from jedi._compatibility import u
from jedi.evaluate.syntax_tree import eval_atom from jedi.evaluate.syntax_tree import eval_atom
from jedi.evaluate.helpers import evaluate_call_of_leaf from jedi.evaluate.helpers import evaluate_call_of_leaf
from jedi.evaluate.compiled import get_string_context_set from jedi.evaluate.compiled import get_string_context_set
from jedi.evaluate.base_context import ContextSet
from jedi.cache import call_signature_time_cache from jedi.cache import call_signature_time_cache
@@ -127,61 +126,10 @@ def get_stack_at_position(grammar, code_lines, module_node, pos):
try: try:
p.parse(tokens=tokenize_without_endmarker(code)) p.parse(tokens=tokenize_without_endmarker(code))
except EndMarkerReached: except EndMarkerReached:
return Stack(p.pgen_parser.stack) return p.stack
raise SystemError("This really shouldn't happen. There's a bug in Jedi.") raise SystemError("This really shouldn't happen. There's a bug in Jedi.")
class Stack(list):
def get_node_names(self, grammar):
for dfa, state, (node_number, nodes) in self:
yield grammar.number2symbol[node_number]
def get_nodes(self):
for dfa, state, (node_number, nodes) in self:
for node in nodes:
yield node
def get_possible_completion_types(pgen_grammar, stack):
def add_results(label_index):
try:
grammar_labels.append(inversed_tokens[label_index])
except KeyError:
try:
keywords.append(inversed_keywords[label_index])
except KeyError:
t, v = pgen_grammar.labels[label_index]
assert t >= 256
# See if it's a symbol and if we're in its first set
inversed_keywords
itsdfa = pgen_grammar.dfas[t]
itsstates, itsfirst = itsdfa
for first_label_index in itsfirst.keys():
add_results(first_label_index)
inversed_keywords = dict((v, k) for k, v in pgen_grammar.keywords.items())
inversed_tokens = dict((v, k) for k, v in pgen_grammar.tokens.items())
keywords = []
grammar_labels = []
def scan_stack(index):
dfa, state, node = stack[index]
states, first = dfa
arcs = states[state]
for label_index, new_state in arcs:
if label_index == 0:
# An accepting state, check the stack below.
scan_stack(index - 1)
else:
add_results(label_index)
scan_stack(-1)
return keywords, grammar_labels
def evaluate_goto_definition(evaluator, context, leaf): def evaluate_goto_definition(evaluator, context, leaf):
if leaf.type == 'name': if leaf.type == 'name':
# In case of a name we can just use goto_definition which does all the # In case of a name we can just use goto_definition which does all the
+1 -1
View File
@@ -1 +1 @@
parso>=0.2.0 parso>=0.3.0
+4
View File
@@ -137,3 +137,7 @@ def test_async(Script, environment):
names = [c.name for c in comps] names = [c.name for c in comps]
assert 'foo' in names assert 'foo' in names
assert 'hey' in names assert 'hey' in names
def test_with_stmt_error_recovery(Script):
assert Script('with open('') as foo: foo.\na', line=1).completions()
+3 -3
View File
@@ -8,7 +8,9 @@ deps =
docopt docopt
# colorama for colored debug output # colorama for colored debug output
colorama colorama
-rrequirements.txt # Overwrite the parso version (only used sometimes).
git+https://github.com/davidhalter/parso.git
# -rrequirements.txt
passenv = JEDI_TEST_ENVIRONMENT passenv = JEDI_TEST_ENVIRONMENT
setenv = setenv =
# https://github.com/tomchristie/django-rest-framework/issues/1957 # https://github.com/tomchristie/django-rest-framework/issues/1957
@@ -23,8 +25,6 @@ setenv =
env36: JEDI_TEST_ENVIRONMENT=36 env36: JEDI_TEST_ENVIRONMENT=36
env37: JEDI_TEST_ENVIRONMENT=37 env37: JEDI_TEST_ENVIRONMENT=37
commands = commands =
# Overwrite the parso version (only used sometimes).
# pip install git+https://github.com/davidhalter/parso.git
py.test {posargs:jedi test} py.test {posargs:jedi test}
[testenv:py27] [testenv:py27]
deps = deps =