Fix a few issues caused by the refactoring.

Dave Halter
2016-05-29 19:49:35 +02:00
parent feef45f4bb
commit daa68b66ad
8 changed files with 60 additions and 33 deletions


@@ -50,7 +50,7 @@ def check_error_statements(module, pos):
     return None, 0, False, False


-def get_code(code, start_pos, end_pos):
+def _get_code(code, start_pos, end_pos):
     """
     :param code_start_pos: is where the code starts.
     """
@@ -61,7 +61,7 @@ def get_code(code, start_pos, end_pos):
     lines[-1] = lines[-1][:end_pos[1]]
     # Remove first line indentation.
     lines[0] = lines[0][start_pos[1]:]
-    return ''.join(lines)
+    return '\n'.join(lines)


 def get_user_or_error_stmt(module, position):
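
The two hunks above only show the tail of the renamed helper. As a rough, self-contained sketch, assuming the source is split with splitlines() (no trailing newlines, which is what makes the '\n' separator necessary) and that positions are 1-based (row, column) tuples, _get_code plausibly amounts to:

def _get_code(code, start_pos, end_pos):
    # Assumed convention: rows are 1-based, columns are 0-based.
    lines = code.splitlines()[start_pos[0] - 1:end_pos[0]]
    # Cut the last line at the end column.
    lines[-1] = lines[-1][:end_pos[1]]
    # Remove first line indentation.
    lines[0] = lines[0][start_pos[1]:]
    return '\n'.join(lines)

# e.g. _get_code("def f(x):\n    return x\n", (1, 4), (2, 12))
# -> "f(x):\n    return x"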
@@ -82,29 +82,31 @@ def get_stack_at_position(grammar, source, module, pos):
     """
     user_stmt = get_user_or_error_stmt(module, pos)
-    if user_stmt is None:
-        user_stmt = module.get_leaf_for_position(pos, include_prefixes=True)
-        # Only if were in front of the leaf we want to get the stack,
-        # because after there's probably a newline or whatever that would
-        # be actually tokenized and is not just prefix.
-        if pos <= user_stmt.start_pos:
-            leaf = user_stmt.get_previous_leaf()
-            for error_stmt in reversed(module.error_statements):
-                if leaf.start_pos <= error_stmt.start_pos <= user_stmt.start_pos:
-                    # The leaf appears not to be the last leaf. It's actually an
-                    # error statement.
-                    user_stmt = error_stmt
-                    break
-            else:
-                user_stmt = get_user_or_error_stmt(module, leaf.start_pos)
+    if user_stmt is not None and user_stmt.type in ('indent', 'dedent'):
+        code = ''
+    else:
+        if user_stmt is None:
+            user_stmt = module.get_leaf_for_position(pos, include_prefixes=True)
+            # Only if were in front of the leaf we want to get the stack,
+            # because after there's probably a newline or whatever that would
+            # be actually tokenized and is not just prefix.
+            if pos <= user_stmt.start_pos:
+                leaf = user_stmt.get_previous_leaf()
+                for error_stmt in reversed(module.error_statements):
+                    if leaf.start_pos <= error_stmt.start_pos <= user_stmt.start_pos:
+                        # The leaf appears not to be the last leaf. It's actually an
+                        # error statement.
+                        user_stmt = error_stmt
+                        break
+                else:
+                    user_stmt = get_user_or_error_stmt(module, leaf.start_pos)

-    print(user_stmt.start_pos, pos)
-    code = get_code(source, user_stmt.start_pos, pos)
-    # Remove whitespace at the end. Necessary, because the tokenizer will parse
-    # an error token (there's no new line at the end in our case). This doesn't
-    # alter any truth about the valid tokens at that position.
-    code = code.strip()
+        print(user_stmt.start_pos, pos)
+        code = _get_code(source, user_stmt.start_pos, pos)
+        # Remove whitespace at the end. Necessary, because the tokenizer will parse
+        # an error token (there's no new line at the end in our case). This doesn't
+        # alter any truth about the valid tokens at that position.
+        code = code.strip()


 class EndMarkerReached(Exception):
     pass
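
The new ('indent', 'dedent') branch skips the code slice entirely. A quick standard-library illustration (plain tokenize, not jedi code) of the reasoning behind it: a DEDENT token carries no source text at all and an INDENT token is only leading whitespace, so there is nothing meaningful to re-tokenize at such a position.

import io
import tokenize

source = "if x:\n    y = 1\n"
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    if tok.type in (tokenize.INDENT, tokenize.DEDENT):
        # Prints: INDENT '    ' (2, 0)  and  DEDENT '' (3, 0)
        print(tokenize.tok_name[tok.type], repr(tok.string), tok.start)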


@@ -26,7 +26,8 @@ def deep_ast_copy(obj, parent=None, new_elements=None):
         new_children = []
         for child in obj.children:
             typ = child.type
-            if typ in ('whitespace', 'operator', 'keyword', 'number', 'string'):
+            if typ in ('whitespace', 'operator', 'keyword', 'number', 'string',
+                       'indent', 'dedent'):
                 # At the moment we're not actually copying those primitive
                 # elements, because there's really no need to. The parents are
                 # obviously wrong, but that's not an issue.


@@ -33,7 +33,7 @@ def _evaluate_for_annotation(evaluator, annotation):
         if (isinstance(definition, CompiledObject) and
                 isinstance(definition.obj, str)):
             try:
-                p = Parser(load_grammar(), definition.obj, start='eval_input')
+                p = Parser(load_grammar(), definition.obj, start_symbol='eval_input')
                 element = p.get_parsed_node()
             except ParseError:
                 debug.warning('Annotation not parsed: %s' % definition.obj)


@@ -113,11 +113,17 @@ class ErrorStatement(object):
         of the stack at which time its parents don't yet exist..
         """
         start_pos = self.start_pos
-        for c in root_node.children:
-            if c.start_pos < start_pos <= c.end_pos:
-                return self.set_parent(c)
+        try:
+            children = root_node.children
+        except AttributeError:
+            self.parent = root_node
+        else:
+            for c in children:
+                if c.start_pos < start_pos <= c.end_pos:
+                    self.set_parent(c)
+                    return

         self.parent = root_node
class ErrorToken(tree.LeafWithNewLines): class ErrorToken(tree.LeafWithNewLines):
@@ -288,6 +294,10 @@ class Parser(object):
             return pt.Number(self.position_modifier, value, start_pos, prefix)
         elif type in (NEWLINE, ENDMARKER):
             return pt.Whitespace(self.position_modifier, value, start_pos, prefix)
+        elif type == INDENT:
+            return pt.Indent(self.position_modifier, value, start_pos, prefix)
+        elif type == DEDENT:
+            return pt.Dedent(self.position_modifier, value, start_pos, prefix)
         else:
             return pt.Operator(self.position_modifier, value, start_pos, prefix)


@@ -121,7 +121,8 @@ class PgenParser(object):
                     break
             else:
                 # We never broke out -- EOF is too soon -- Unfinished statement.
-                self.error_recovery(self.grammar, self.stack, type_, value,
+                # TODO the arcs argument [] is not correctly defined.
+                self.error_recovery(self.grammar, self.stack, [], type_, value,
                                     start_pos, prefix, self.addtoken)
         # Add the ENDMARKER again.
         if not self.addtoken(type_, value, prefix, start_pos):
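
For reference, the updated call implies an error-recovery callback of roughly this shape; the signature is inferred only from the arguments passed at the call site, and the new TODO already flags the [] arcs value as a placeholder:

def error_recovery(grammar, stack, arcs, typ, value, start_pos, prefix,
                   add_token_callback):
    # Stub only: the real recovery logic is supplied by the parser that
    # registers this callback; raising makes accidental use obvious.
    raise NotImplementedError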


@@ -433,6 +433,16 @@ class String(Literal):
     __slots__ = ()


+class Indent(Leaf):
+    type = 'indent'
+    __slots__ = ()
+
+
+class Dedent(Leaf):
+    type = 'dedent'
+    __slots__ = ()
+
+
 class Operator(Leaf):
     type = 'operator'
     __slots__ = ()
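
A self-contained sketch (not jedi's actual Leaf, which, as the parser hunk above shows, also takes a position modifier) of the pattern the two new classes follow: the base leaf owns the storage via __slots__, and each subclass only overrides the class-level type tag, declaring an empty __slots__ so instances never grow a __dict__.

class Leaf(object):
    __slots__ = ('value', 'start_pos', 'prefix')
    type = None

    def __init__(self, value, start_pos, prefix=''):
        self.value = value
        self.start_pos = start_pos
        self.prefix = prefix


class Indent(Leaf):
    type = 'indent'
    __slots__ = ()


class Dedent(Leaf):
    type = 'dedent'
    __slots__ = ()


assert Dedent('', (3, 0)).type == 'dedent'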


@@ -238,7 +238,10 @@ class V:
 V(1).b()
 #? int()
 V(1).c()
-#? []
+#?
+V(1).d()
+# Only keywords should be possible to complete.
+#? ['is', 'in', 'not', 'and', 'or', 'if']
 V(1).d()


@@ -32,5 +32,5 @@ Just because there's a def keyword, doesn't mean it should not be able to
 complete to definition.
 """
 definition = 0
-#? ['definition', 'def']
+#? ['definition']
 str(def