forked from VimPlug/jedi
Progress and actually passing a few tests.
@@ -20,7 +20,6 @@ import re
 from jedi.parser import tree as pt
 from jedi.parser import tokenize
 from jedi.parser import token
 from jedi.parser.token import (DEDENT, INDENT, ENDMARKER, NEWLINE, NUMBER,
                                STRING, OP, ERRORTOKEN)
 from jedi.parser.pgen2.pgen import generate_grammar
@@ -75,14 +74,13 @@ class ErrorStatement(object):
         )

     @property
-    def next_start_pos(self):
+    def end_pos(self):
         s = self._next_start_pos
         return s[0] + self._position_modifier.line, s[1]

     @property
-    def first_pos(self):
-        first_type, nodes = self.stack[0]
-        return nodes[0].start_pos
+    def start_pos(self):
+        return next(self._iter_nodes()).start_pos

     @property
     def first_type(self):
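The new end_pos property simply shifts the stored "next statement starts here" tuple by the module's line offset. A tiny standalone sketch of that arithmetic; PositionShift and ErrorStatementSketch are hypothetical stand-ins, not jedi's real classes:

# Sketch of how ErrorStatement.end_pos combines the stored tuple with the
# position modifier's line offset (PositionShift is a stand-in, not jedi's class).
class PositionShift:
    def __init__(self, line=0):
        self.line = line


class ErrorStatementSketch:
    def __init__(self, next_start_pos, position_modifier):
        self._next_start_pos = next_start_pos
        self._position_modifier = position_modifier

    @property
    def end_pos(self):
        s = self._next_start_pos
        return s[0] + self._position_modifier.line, s[1]


err = ErrorStatementSketch((3, 4), PositionShift(line=10))
print(err.end_pos)   # (13, 4)
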
@@ -96,8 +94,15 @@ class ErrorStatement(object):
                 return True
         return False

-    def get_code(self):
-        return ''.join(node.get_code() for _, nodes in self.stack for node in nodes)
+    def _iter_nodes(self):
+        for _, nodes in self.stack:
+            for node in nodes:
+                yield node
+
+    def get_code(self, include_prefix=True):
+        iterator = self._iter_nodes()
+        first = next(iterator)
+        return first.get_code(include_prefix=include_prefix) + ''.join(node.get_code() for node in iterator)


 class ParserSyntaxError(object):
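The _iter_nodes/get_code pair flattens the (type, nodes) pairs on the error statement's stack and treats the first node specially so its prefix can be dropped. A standalone sketch of that pattern, using hypothetical FakeNode/FakeErrorStatement stand-ins rather than jedi's real tree classes:

# Minimal stand-in for a parser tree node; hypothetical, not jedi's real class.
class FakeNode:
    def __init__(self, prefix, value):
        self.prefix = prefix
        self.value = value

    def get_code(self, include_prefix=True):
        return (self.prefix + self.value) if include_prefix else self.value


class FakeErrorStatement:
    """Mimics the _iter_nodes/get_code pattern from the hunk above."""
    def __init__(self, stack):
        self.stack = stack  # list of (grammar_type, [nodes]) pairs

    def _iter_nodes(self):
        for _, nodes in self.stack:
            for node in nodes:
                yield node

    def get_code(self, include_prefix=True):
        iterator = self._iter_nodes()
        first = next(iterator)
        rest = ''.join(node.get_code() for node in iterator)
        return first.get_code(include_prefix=include_prefix) + rest


stack = [('simple_stmt', [FakeNode(' ', 'foo'), FakeNode(' ', '=')])]
err = FakeErrorStatement(stack)
print(err.get_code())                      # ' foo ='
print(err.get_code(include_prefix=False))  # 'foo ='
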
@@ -144,7 +149,7 @@ class Parser(object):

         self._used_names = {}
         self._scope_names_stack = [{}]
-        self._error_statement_stacks = []
+        self._error_statements = []
         self._last_failed_start_pos = (0, 0)
         self._global_names = []

@@ -164,20 +169,19 @@ class Parser(object):

         self._start_symbol = start_symbol
         self._grammar = grammar
-        self._tokenizer = tokenizer
-        if tokenizer is None:
-            self._tokenizer = tokenize.source_tokens(source, use_exact_op_types=True)

         self._parsed = None

         if start_parsing:
-            self.parse()
+            if tokenizer is None:
+                tokenizer = tokenize.source_tokens(source, use_exact_op_types=True)
+            self.parse(tokenizer)

-    def parse(self):
+    def parse(self, tokenizer):
         if self._parsed is not None:
             return self._parsed

-        self._parsed = self.pgen_parser.parse(self._tokenize(self._tokenizer))
+        self._parsed = self.pgen_parser.parse(self._tokenize(tokenizer))

         if self._start_symbol == 'file_input' != self._parsed.type:
             # If there's only one statement, we get back a non-module. That's
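With this change the tokenizer is no longer stored on the instance: __init__ builds one only when parsing starts immediately, and parse() receives it as an argument. A rough sketch of that control flow; SketchParser and make_tokens are illustrative names, not jedi APIs:

# Simplified sketch of the new control flow (not the real jedi classes).
# Assumption: a tokenizer is any one-shot iterable of tokens.
def make_tokens(source):
    # stand-in for tokenize.source_tokens(source, use_exact_op_types=True)
    return iter(source.split())


class SketchParser:
    def __init__(self, source, tokenizer=None, start_parsing=True):
        self._parsed = None
        if start_parsing:
            if tokenizer is None:
                tokenizer = make_tokens(source)
            self.parse(tokenizer)

    def parse(self, tokenizer):
        if self._parsed is not None:
            return self._parsed
        # the real code feeds tokens through pgen_parser.parse(self._tokenize(tokenizer))
        self._parsed = list(tokenizer)
        return self._parsed


p = SketchParser("a = 1")
print(p._parsed)   # ['a', '=', '1']
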
@@ -198,7 +202,7 @@ class Parser(object):
                 raise ParseError
             yield typ, value, prefix, start_pos

-    def error_recovery(self, grammar, stack, typ, value, start_pos, prefix,
+    def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix,
                        add_token_callback):
         raise ParseError

@@ -308,7 +312,6 @@ class Parser(object):
                 endmarker._start_pos = newline._start_pos
                 break


 class ParserWithRecovery(Parser):
     """
     This class is used to parse a Python file, it then divides them into a
@@ -340,7 +343,7 @@ class ParserWithRecovery(Parser):
         self.module.used_names = self._used_names
         self.module.path = module_path
         self.module.global_names = self._global_names
-        self.module.error_statement_stacks = self._error_statement_stacks
+        self.module.error_statements = self._error_statements

     def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix,
                        add_token_callback):
@@ -427,7 +430,7 @@ class ParserWithRecovery(Parser):
                 self._scope_names_stack.pop()
         if failed_stack:
             err = ErrorStatement(failed_stack, arcs, value, self.position_modifier, start_pos)
-            self._error_statement_stacks.append(err)
+            self._error_statements.append(err)

         self._last_failed_start_pos = start_pos

@@ -45,8 +45,8 @@ class FastModule(tree.Module):
         return [name for m in self.modules for name in m.global_names]

     @property
-    def error_statement_stacks(self):
-        return [e for m in self.modules for e in m.error_statement_stacks]
+    def error_statements(self):
+        return [e for m in self.modules for e in m.error_statements]

     def __repr__(self):
         return "<fast.%s: %s@%s-%s>" % (type(self).__name__, self.name,
@@ -59,8 +59,8 @@ class FastModule(tree.Module):
     def global_names(self, value):
         pass

-    @error_statement_stacks.setter
-    def error_statement_stacks(self, value):
+    @error_statements.setter
+    def error_statements(self, value):
         pass

     @used_names.setter
@@ -38,6 +38,7 @@ from inspect import cleandoc
 from itertools import chain
 import textwrap

+from jedi import common
 from jedi._compatibility import (Python3Method, encoding, is_py3, utf8_repr,
                                  literal_eval, use_metaclass, unicode)
 from jedi import cache
@@ -196,6 +197,28 @@ class Base(object):
     def nodes_to_execute(self, last_added=False):
         raise NotImplementedError()

+    def get_code_with_error_statements(self, include_prefix=False):
+        module = self.get_parent_until()
+        source = self.get_code(include_prefix=include_prefix)
+        start_pos, end_pos = self.start_pos, self.end_pos
+        # Check for error statements that are inside the node.
+        error_statements = [
+            e for e in module.error_statements
+            if start_pos <= e.start_pos and end_pos >= e.end_pos
+        ]
+        lines = common.splitlines(source)
+        # Note: Error statements must not be sorted. The positions are only
+        # correct if we insert them the way that they were tokenized.
+        for error_statement in error_statements:
+            line_index = error_statement.start_pos[0] - start_pos[0]
+
+            line = lines[line_index]
+            index = error_statement.start_pos[1]
+            line = line[:index] + error_statement.get_code() + line[index:]
+            lines[line_index] = line
+
+        return '\n'.join(lines)
+

 class Leaf(Base):
     __slots__ = ('position_modifier', 'value', 'parent', '_start_pos', 'prefix')
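get_code_with_error_statements re-inserts the source text of recovered-away error statements at their recorded (line, column) positions inside the code produced by the healthy nodes. A self-contained illustration of that splice; ErrorStub and the local splitlines are stand-ins for jedi's ErrorStatement and common.splitlines, not the real APIs:

# Standalone illustration of the splice done in get_code_with_error_statements.
def splitlines(source):
    # stand-in for jedi.common.splitlines
    return source.split('\n')


class ErrorStub:
    # Hypothetical: carries only what the loop above needs, a start_pos
    # (line, column) and the raw code of the dropped statement.
    def __init__(self, start_pos, code):
        self.start_pos = start_pos
        self._code = code

    def get_code(self):
        return self._code


def code_with_errors(source, node_start_pos, error_statements):
    lines = splitlines(source)
    for error_statement in error_statements:
        line_index = error_statement.start_pos[0] - node_start_pos[0]
        line = lines[line_index]
        index = error_statement.start_pos[1]
        lines[line_index] = line[:index] + error_statement.get_code() + line[index:]
    return '\n'.join(lines)


# The recovered tree kept 'x = 1' and 'y = 2' but dropped the broken 'def f(' on line 2.
source = "x = 1\n\ny = 2"
errors = [ErrorStub(start_pos=(2, 0), code="def f(")]
print(code_with_errors(source, (1, 0), errors))   # x = 1 / def f( / y = 2
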
@@ -246,10 +269,13 @@ class Leaf(Base):
         except AttributeError:  # A Leaf doesn't have children.
             return node

-    def get_code(self, normalized=False):
+    def get_code(self, normalized=False, include_prefix=True):
         if normalized:
             return self.value
-        return self.prefix + self.value
+        if include_prefix:
+            return self.prefix + self.value
+        else:
+            return self.value

     def next_sibling(self):
         """
@@ -304,11 +330,11 @@ class LeafWithNewLines(Leaf):
         end_pos_col = len(lines[-1])
         return end_pos_line, end_pos_col


     @utf8_repr
     def __repr__(self):
         return "<%s: %r>" % (type(self).__name__, self.value)


 class Whitespace(LeafWithNewLines):
     """Contains NEWLINE and ENDMARKER tokens."""
     __slots__ = ()
@@ -452,9 +478,13 @@ class BaseNode(Base):
     def end_pos(self):
         return self.children[-1].end_pos

-    def get_code(self, normalized=False):
-        # TODO implement normalized (dependin on context).
-        return "".join(c.get_code(normalized) for c in self.children)
+    def get_code(self, normalized=False, include_prefix=True):
+        # TODO implement normalized (depending on context).
+        if include_prefix:
+            return "".join(c.get_code(normalized) for c in self.children)
+        else:
+            first = self.children[0].get_code(include_prefix=False)
+            return first + "".join(c.get_code(normalized) for c in self.children[1:])

     @Python3Method
     def name_for_position(self, position):
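Leaf.get_code and BaseNode.get_code now accept include_prefix so a caller can drop the leading whitespace/comment prefix of only the first leaf; later children keep their prefixes, otherwise the reassembled code would lose the spacing between tokens. A minimal mimic of the pair (MiniLeaf/MiniNode are not the real jedi classes):

# Minimal mimic of the Leaf/BaseNode get_code(include_prefix=...) pair above.
class MiniLeaf:
    def __init__(self, prefix, value):
        self.prefix = prefix
        self.value = value

    def get_code(self, include_prefix=True):
        return (self.prefix + self.value) if include_prefix else self.value


class MiniNode:
    def __init__(self, children):
        self.children = children

    def get_code(self, include_prefix=True):
        if include_prefix:
            return "".join(c.get_code() for c in self.children)
        first = self.children[0].get_code(include_prefix=False)
        return first + "".join(c.get_code() for c in self.children[1:])


node = MiniNode([MiniLeaf('    ', 'return'), MiniLeaf(' ', 'x')])
print(repr(node.get_code()))                      # '    return x'
print(repr(node.get_code(include_prefix=False)))  # 'return x'
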
@@ -468,6 +498,16 @@ class BaseNode(Base):
                     return result
         return None

+    def get_leaf_for_position(self, position):
+        for c in self.children:
+            if c.start_pos <= position <= c.end_pos:
+                try:
+                    return c.get_leaf_for_position(position)
+                except AttributeError:
+                    return c
+
+        raise ValueError("Position does not exist.")
+
     @Python3Method
     def get_statement_for_position(self, pos):
         for c in self.children:
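get_leaf_for_position descends into whichever child's [start_pos, end_pos] range contains the position and relies on AttributeError to detect that it reached a leaf, since leaves have no get_leaf_for_position of their own. A small sketch with hypothetical PosLeaf/PosNode classes, comparing (line, column) tuples lexicographically as jedi does:

# Sketch of the containment-based descent in get_leaf_for_position.
class PosLeaf:
    def __init__(self, start_pos, end_pos, value):
        self.start_pos, self.end_pos, self.value = start_pos, end_pos, value


class PosNode:
    def __init__(self, children):
        self.children = children
        self.start_pos = children[0].start_pos
        self.end_pos = children[-1].end_pos

    def get_leaf_for_position(self, position):
        for c in self.children:
            if c.start_pos <= position <= c.end_pos:
                try:
                    return c.get_leaf_for_position(position)
                except AttributeError:   # c has no children: it is a leaf
                    return c
        raise ValueError("Position does not exist.")


tree = PosNode([PosLeaf((1, 0), (1, 3), 'def'), PosLeaf((1, 3), (1, 5), ' f(')])
print(tree.get_leaf_for_position((1, 4)).value)   # ' f('
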
@@ -633,7 +673,7 @@ class Module(Scope):
     of a module.
     """
     __slots__ = ('path', 'global_names', 'used_names', '_name',
-                 'error_statement_stacks')
+                 'error_statements')
     type = 'file_input'

     def __init__(self, children):