Fix a few issues caused by the refactoring.

Dave Halter
2016-05-29 19:49:35 +02:00
parent feef45f4bb
commit daa68b66ad
8 changed files with 60 additions and 33 deletions

View File

@@ -50,7 +50,7 @@ def check_error_statements(module, pos):
return None, 0, False, False
def get_code(code, start_pos, end_pos):
def _get_code(code, start_pos, end_pos):
"""
:param start_pos: is where the code starts.
"""
@@ -61,7 +61,7 @@ def get_code(code, start_pos, end_pos):
lines[-1] = lines[-1][:end_pos[1]]
# Remove first line indentation.
lines[0] = lines[0][start_pos[1]:]
return ''.join(lines)
return '\n'.join(lines)
def get_user_or_error_stmt(module, position):
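
Note on the two hunks above: get_code becomes the private _get_code, and the final join switches from '' to '\n', which suggests the sliced lines no longer carry their trailing newlines. A minimal sketch of such a position-slicing helper; the helper name and the splitlines() call are assumptions, not this repository's code:

    def get_code_between(source, start_pos, end_pos):
        # Hypothetical helper; start_pos/end_pos are (line, column) with 1-based lines.
        lines = source.splitlines()            # lines come back without trailing newlines
        lines = lines[start_pos[0] - 1:end_pos[0]]
        lines[-1] = lines[-1][:end_pos[1]]     # cut everything after the end column
        lines[0] = lines[0][start_pos[1]:]     # drop text before the start column
        return '\n'.join(lines)                # '' would glue the lines together

    print(get_code_between("def f():\n    return 1\n", (1, 0), (2, 12)))
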
@@ -82,6 +82,9 @@ def get_stack_at_position(grammar, source, module, pos):
"""
user_stmt = get_user_or_error_stmt(module, pos)
if user_stmt is not None and user_stmt.type in ('indent', 'dedent'):
code = ''
else:
if user_stmt is None:
user_stmt = module.get_leaf_for_position(pos, include_prefixes=True)
# Only if we're in front of the leaf we want to get the stack,
@@ -98,9 +101,8 @@ def get_stack_at_position(grammar, source, module, pos):
else:
user_stmt = get_user_or_error_stmt(module, leaf.start_pos)
print(user_stmt.start_pos, pos)
code = get_code(source, user_stmt.start_pos, pos)
code = _get_code(source, user_stmt.start_pos, pos)
# Remove whitespace at the end. Necessary, because the tokenizer will parse
# an error token (there's no new line at the end in our case). This doesn't
# alter any truth about the valid tokens at that position.
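
The hunks above make get_stack_at_position fall back to empty code when the element at the position is an 'indent' or 'dedent' leaf (there is no source text to re-tokenize for those), and switch the call to the renamed _get_code. A rough illustration of that guard with a stand-in leaf type rather than the project's real node classes:

    from collections import namedtuple

    FakeLeaf = namedtuple('FakeLeaf', 'type start_pos')   # stand-in for a parser leaf

    def code_for_stack(leaf, source, pos, slice_code):
        # Indent/dedent tokens carry no text of their own, so use empty code.
        if leaf is not None and leaf.type in ('indent', 'dedent'):
            return ''
        return slice_code(source, leaf.start_pos, pos)

    print(repr(code_for_stack(FakeLeaf('dedent', (3, 0)), 'if x:\n    y\nz', (3, 0), None)))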

View File

@@ -26,7 +26,8 @@ def deep_ast_copy(obj, parent=None, new_elements=None):
new_children = []
for child in obj.children:
typ = child.type
if typ in ('whitespace', 'operator', 'keyword', 'number', 'string'):
if typ in ('whitespace', 'operator', 'keyword', 'number', 'string',
'indent', 'dedent'):
# At the moment we're not actually copying those primitive
# elements, because there's really no need to. The parents are
# obviously wrong, but that's not an issue.
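
deep_ast_copy now treats 'indent' and 'dedent' leaves like the other primitive leaf types and reuses them instead of copying. A simplified sketch of that dispatch, assuming nodes expose .type, .parent and .children; this is not the actual deep_ast_copy implementation:

    import copy

    PRIMITIVE_TYPES = ('whitespace', 'operator', 'keyword', 'number', 'string',
                       'indent', 'dedent')

    def copy_tree(node, parent=None):
        if getattr(node, 'type', None) in PRIMITIVE_TYPES:
            # Reuse primitive leaves as-is; their .parent may still point into
            # the old tree, which (per the comment above) is acceptable.
            return node
        new = copy.copy(node)
        new.parent = parent
        new.children = [copy_tree(child, parent=new) for child in node.children]
        return new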

View File

@@ -33,7 +33,7 @@ def _evaluate_for_annotation(evaluator, annotation):
if (isinstance(definition, CompiledObject) and
isinstance(definition.obj, str)):
try:
p = Parser(load_grammar(), definition.obj, start='eval_input')
p = Parser(load_grammar(), definition.obj, start_symbol='eval_input')
element = p.get_parsed_node()
except ParseError:
debug.warning('Annotation not parsed: %s' % definition.obj)
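
The keyword argument for the parser's start symbol was renamed from start to start_symbol; 'eval_input' parses the string annotation as a single expression. As a standard-library analogy (not this project's parser), the same idea looks like:

    import ast

    def parse_str_annotation(text):
        # mode='eval' restricts parsing to one expression, much like the
        # 'eval_input' start symbol above.
        try:
            return ast.parse(text, mode='eval').body
        except SyntaxError:
            return None

    print(type(parse_str_annotation('List[int]')).__name__)   # Subscript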

View File

@@ -113,9 +113,15 @@ class ErrorStatement(object):
of the stack at which time its parents don't yet exist.
"""
start_pos = self.start_pos
for c in root_node.children:
try:
children = root_node.children
except AttributeError:
self.parent = root_node
else:
for c in children:
if c.start_pos < start_pos <= c.end_pos:
return self.set_parent(c)
self.set_parent(c)
return
self.parent = root_node
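
The rewritten parent search above uses EAFP: a leaf root_node has no .children, so the AttributeError fallback assigns it as the parent directly; otherwise the first child whose span covers start_pos is handed to set_parent. A stand-alone sketch of that pattern with stand-in nodes (not the repository's ErrorStatement or tree classes):

    def attach_to(node, root, start_pos):
        # Hypothetical helper mirroring the try/except AttributeError pattern above.
        try:
            children = root.children
        except AttributeError:
            node.parent = root                      # root is a leaf: attach directly
            return
        for child in children:
            if child.start_pos < start_pos <= child.end_pos:
                attach_to(node, child, start_pos)   # descend into the covering child
                return
        node.parent = root                          # no child covers the position
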
@@ -288,6 +294,10 @@ class Parser(object):
return pt.Number(self.position_modifier, value, start_pos, prefix)
elif type in (NEWLINE, ENDMARKER):
return pt.Whitespace(self.position_modifier, value, start_pos, prefix)
elif type == INDENT:
return pt.Indent(self.position_modifier, value, start_pos, prefix)
elif type == DEDENT:
return pt.Dedent(self.position_modifier, value, start_pos, prefix)
else:
return pt.Operator(self.position_modifier, value, start_pos, prefix)
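
The token-to-leaf conversion above now emits dedicated Indent and Dedent leaves for INDENT/DEDENT tokens instead of letting them fall through to Operator. A toy dispatch showing the same idea, with the standard library's token numbers and throwaway classes standing in for the project's pt module:

    from token import NUMBER, NEWLINE, ENDMARKER, INDENT, DEDENT

    class Number: pass
    class Whitespace: pass
    class Indent: pass
    class Dedent: pass
    class Operator: pass

    LEAF_CLASSES = {NUMBER: Number, NEWLINE: Whitespace, ENDMARKER: Whitespace,
                    INDENT: Indent, DEDENT: Dedent}

    def leaf_class_for(token_type):
        # Unmapped token types fall back to Operator, like the final else above.
        return LEAF_CLASSES.get(token_type, Operator)

    print(leaf_class_for(DEDENT).__name__)   # Dedent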

View File

@@ -121,7 +121,8 @@ class PgenParser(object):
break
else:
# We never broke out -- EOF is too soon -- Unfinished statement.
self.error_recovery(self.grammar, self.stack, type_, value,
# TODO the arcs argument [] is not correctly defined.
self.error_recovery(self.grammar, self.stack, [], type_, value,
start_pos, prefix, self.addtoken)
# Add the ENDMARKER again.
if not self.addtoken(type_, value, prefix, start_pos):
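
The call above now passes an explicit arcs argument to error_recovery ([] for the moment, per the TODO). A hypothetical signature sketch matching that call; the project's real recovery hook lives elsewhere and may differ:

    def error_recovery(grammar, stack, arcs, typ, value, start_pos, prefix, add_token):
        # Minimal stand-in: report where parsing hit EOF instead of recovering.
        # arcs would normally describe the transitions allowed at the failure
        # point; the call site above passes [] because that data isn't wired up yet.
        print('unfinished statement at %s, last token %r' % (start_pos, value))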

View File

@@ -433,6 +433,16 @@ class String(Literal):
__slots__ = ()
class Indent(Leaf):
type = 'indent'
__slots__ = ()
class Dedent(Leaf):
type = 'dedent'
__slots__ = ()
class Operator(Leaf):
type = 'operator'
__slots__ = ()
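
The new Indent and Dedent leaf classes give those tokens their own .type strings, which is what the ('indent', 'dedent') checks elsewhere in this commit key on. A toy check with stand-in classes (not the project's tree module):

    class Leaf:
        __slots__ = ()
        type = 'leaf'

    class Indent(Leaf):
        type = 'indent'
        __slots__ = ()

    class Dedent(Leaf):
        type = 'dedent'
        __slots__ = ()

    print([leaf.type for leaf in (Indent(), Dedent())])   # ['indent', 'dedent']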

View File

@@ -238,7 +238,10 @@ class V:
V(1).b()
#? int()
V(1).c()
#? []
#?
V(1).d()
# Only keywords should be possible to complete.
#? ['is', 'in', 'not', 'and', 'or', 'if']
V(1).d()

View File

@@ -32,5 +32,5 @@ Just because there's a def keyword, doesn't mean it should not be able to
complete to definition.
"""
definition = 0
#? ['definition', 'def']
#? ['definition']
str(def