
More test fixes.

Dave Halter
2016-05-31 01:12:07 +02:00
parent c12dbe0b9e
commit ad8d730a57
6 changed files with 27 additions and 17 deletions

View File

@@ -136,7 +136,7 @@ class Completion:
         try:
             stack = helpers.get_stack_at_position(grammar, self._source, self._module, pos)
         except helpers.OnErrorLeaf:
-            return []
+            return self._simple_complete(completion_parts)
 
         allowed_keywords, allowed_tokens = \
             helpers.get_possible_completion_types(grammar, stack)

View File

@@ -98,6 +98,7 @@ def get_stack_at_position(grammar, source, module, pos):
     if pos <= user_stmt.start_pos:
         try:
             leaf = user_stmt.get_previous_leaf()
+            print(user_stmt, leaf)
         except IndexError:
             pass
         else:

View File

@@ -414,23 +414,24 @@ class ParserWithRecovery(Parser):
         #print('err', token.tok_name[typ], repr(value), start_pos, len(stack), index)
         if self._stack_removal(grammar, stack, arcs, index + 1, value, start_pos):
-            #add_token_callback(typ, value, prefix, start_pos)
-            pass
+            add_token_callback(typ, value, prefix, start_pos)
+            if typ == INDENT:
+                # For every deleted INDENT we have to delete a DEDENT as well.
+                # Otherwise the parser will get into trouble and DEDENT too early.
+                self._omit_dedent_list.append(self._indent_counter)
         else:
             #error_leaf = ErrorToken(self.position_modifier, value, start_pos, prefix)
             #stack = [(None, [error_leaf])]
             # TODO document the shizzle!
             #self._error_statements.append(ErrorStatement(stack, None, None,
             #                              self.position_modifier, error_leaf.end_pos))
-            error_leaf = pt.ErrorLeaf(self.position_modifier, value, start_pos, prefix)
+            error_leaf = pt.ErrorLeaf(self.position_modifier, typ, value, start_pos, prefix)
             stack[-1][2][1].append(error_leaf)
+        return
 
-        if typ == INDENT:
-            # For every deleted INDENT we have to delete a DEDENT as well.
-            # Otherwise the parser will get into trouble and DEDENT too early.
-            self._omit_dedent_list.append(self._indent_counter)
+        '''
         if value in ('import', 'class', 'def', 'try', 'while', 'return', '\n'):
             # Those can always be new statements.
             add_token_callback(typ, value, prefix, start_pos)
@@ -445,6 +446,7 @@ class ParserWithRecovery(Parser):
             # anyway (compile.c does that for Python), because Python's grammar
             # doesn't stop you from defining `continue` in a module, etc.
             add_token_callback(typ, value, prefix, start_pos)
+        '''
 
     def _stack_removal(self, grammar, stack, arcs, start_index, value, start_pos):
         def clear_names(children):
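
The stack[-1][2][1].append(error_leaf) call in the hunk above depends on the layout of the pgen2 parser stack. As a hedged, standalone sketch (the exact tuple shape is an assumption for illustration, not taken from jedi's source), each stack entry can be pictured as a (dfa, state, node) triple whose node is a (symbol, children) pair:

# Standalone sketch, not jedi code: assumed shape of a pgen2-style parser stack,
# to make sense of stack[-1][2][1].append(error_leaf) in the hunk above.
dfa, state = object(), 0                   # placeholders for the real DFA and state
node = ('simple_stmt', [])                 # (symbol name, children list)
stack = [(dfa, state, node)]

error_leaf = '<ErrorLeaf NAME foo>'        # stands in for pt.ErrorLeaf(...)
stack[-1][2][1].append(error_leaf)         # same indexing error_recovery uses
print(stack[-1][2][1])                     # ['<ErrorLeaf NAME foo>']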

View File

@@ -121,11 +121,9 @@ class PgenParser(object):
                 break
         else:
             # We never broke out -- EOF is too soon -- Unfinished statement.
-            # TODO the arcs argument [] is not correctly defined.
             self.error_recovery(self.grammar, self.stack, [], type_, value,
                                 start_pos, prefix, self.addtoken)
-            # Add the ENDMARKER again.
-            if not self.addtoken(type_, value, prefix, start_pos):
+            # However, the error recovery might have added the token again, if
+            # the stack is empty, we're fine.
+            if self.stack:
                 raise InternalParseError("incomplete input", type_, value, start_pos)
         return self.rootnode

View File

@@ -43,6 +43,7 @@ from jedi import common
 from jedi._compatibility import (Python3Method, encoding, is_py3, utf8_repr,
                                  literal_eval, use_metaclass, unicode)
 from jedi import cache
+from jedi.parser import token
 
 
 def is_node(node, *symbol_names):
@@ -653,9 +654,17 @@ class ErrorLeaf(Leaf):
"""
TODO doc
"""
__slots__ = ()
__slots__ = ('original_type')
type = 'error_leaf'
def __init__(self, position_modifier, original_type, value, start_pos, prefix=''):
super(ErrorLeaf, self).__init__(position_modifier, value, start_pos, prefix)
self.original_type = original_type
def __repr__(self):
token_type = token.tok_name[self.original_type]
return "<%s: %s, %s)>" % (type(self).__name__, token_type, repr(self.value))
class IsScopeMeta(type):
def __instancecheck__(self, other):
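
For context on the hunk above: the new original_type slot lets an ErrorLeaf remember which token type it replaces, and __repr__ turns that number back into a readable name. A minimal standalone sketch of the same idea, using the stdlib token module rather than jedi.parser.token (assumed to expose a comparable tok_name mapping); it mirrors, but is not, the committed code:

# Minimal sketch mirroring the ErrorLeaf change above; not the jedi implementation.
import token  # stdlib; assumed comparable to jedi.parser.token's tok_name/NAME

class Leaf(object):
    def __init__(self, value, start_pos, prefix=''):
        self.value = value
        self.start_pos = start_pos
        self.prefix = prefix

class ErrorLeaf(Leaf):
    type = 'error_leaf'

    def __init__(self, original_type, value, start_pos, prefix=''):
        super(ErrorLeaf, self).__init__(value, start_pos, prefix)
        self.original_type = original_type  # token type this leaf stands in for

    def __repr__(self):
        token_type = token.tok_name[self.original_type]
        return '<%s: %s, %r>' % (type(self).__name__, token_type, self.value)

print(ErrorLeaf(token.NAME, 'foo', (3, 0)))  # <ErrorLeaf: NAME, 'foo'>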

View File

@@ -10,7 +10,7 @@ import os
 import sys
 
 from jedi import Interpreter
-from jedi.api.helpers import completion_parts
+from jedi.api.helpers import get_completion_parts
 from jedi.parser.user_context import UserContext
@@ -73,7 +73,7 @@ def setup_readline(namespace_module=__main__):
                     interpreter = Interpreter(text, [namespace_module.__dict__])
                     path = UserContext(text, (1, len(text))).get_path_until_cursor()
-                    path, dot, like = completion_parts(path)
+                    path, dot, like = get_completion_parts(path)
                     before = text[:len(text) - len(like)]
                     completions = interpreter.completions()
                 finally: