forked from VimPlug/jedi
Start moving stuff to the parser.
@@ -27,3 +27,11 @@ class ParserSyntaxError(Exception):
         self.position = position
 
 
+class Parser(object):
+    AST_MAPPING = {}
+
+    def __init__(self, grammar, tokens, start_symbol='file_input'):
+        self._grammar = grammar
+        self._start_symbol = start_symbol
+        self._parsed = None
+
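The import added in the next hunk (Parser as BaseParser from jedi.parser.parser) shows where this new base class lives; at this point it only centralizes shared state and the class-level AST_MAPPING hook that language-specific parsers are meant to override. A minimal, self-contained sketch of that override pattern follows; PythonParser and the string values are stand-ins for illustration, not jedi code:

# Sketch only: PythonParser and the mapping values are stand-ins, not jedi code.
class Parser(object):
    AST_MAPPING = {}   # base default: no special tree classes registered


class PythonParser(Parser):
    # A language-specific subclass replaces the class attribute wholesale.
    AST_MAPPING = {'expr_stmt': 'ExprStmt', 'classdef': 'Class'}


print(Parser.AST_MAPPING)        # -> {}
print(PythonParser.AST_MAPPING)  # -> {'expr_stmt': 'ExprStmt', 'classdef': 'Class'}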
@@ -5,10 +5,10 @@ from jedi.parser import tokenize
 from jedi.parser.token import (DEDENT, INDENT, ENDMARKER, NEWLINE, NUMBER,
                                STRING, tok_name)
 from jedi.parser.pgen2.parse import PgenParser
-from jedi.parser.parser import ParserSyntaxError
+from jedi.parser.parser import ParserSyntaxError, Parser as BaseParser
 
 
-class Parser(object):
+class Parser(BaseParser):
     AST_MAPPING = {
         'expr_stmt': tree.ExprStmt,
         'classdef': tree.Class,
@@ -41,6 +41,7 @@ class Parser(object):
 
     def __init__(self, grammar, source, start_symbol='file_input',
                  tokens=None, start_parsing=True):
+        super(Parser, self).__init__(grammar, tokens, start_symbol)
         # Todo Remove start_parsing (with False)
 
         self.source = source
@@ -50,11 +51,6 @@ class Parser(object):
             source += '\n'
             self._added_newline = True
 
-        self._start_symbol = start_symbol
-        self._grammar = grammar
-
-        self._parsed = None
-
         if start_parsing:
             if tokens is None:
                 tokens = tokenize.source_tokens(source, use_exact_op_types=True)
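Taken together, the last three hunks make the existing Python-level Parser inherit from the new base class and obtain the shared attributes through super() instead of assigning them itself (the assignments removed in the final hunk). A self-contained sketch of the resulting shape, using stand-in class names and dummy arguments rather than real grammar and token objects:

# Stand-in classes mirroring the refactor's shape; not jedi's actual modules.
class BaseParser(object):
    AST_MAPPING = {}

    def __init__(self, grammar, tokens, start_symbol='file_input'):
        self._grammar = grammar
        self._start_symbol = start_symbol
        self._parsed = None


class PythonParser(BaseParser):
    def __init__(self, grammar, source, start_symbol='file_input',
                 tokens=None, start_parsing=True):
        # Shared state now comes from the base class...
        super(PythonParser, self).__init__(grammar, tokens, start_symbol)
        # ...so only the subclass-specific pieces are set here.
        self.source = source


p = PythonParser(grammar=object(), source='x = 1\n')
assert p._grammar is not None and p._start_symbol == 'file_input'
assert p._parsed is None and p.source == 'x = 1\n'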