mirror of https://github.com/davidhalter/parso.git
Change the default of use_exact_op_types in the tokenizer.
```diff
@@ -118,7 +118,7 @@ class Grammar(object):
             tokenize_lines[-1] += '\n'
             tokenize_lines.append('')
 
-        tokens = self._tokenizer(tokenize_lines, use_exact_op_types=True)
+        tokens = self._tokenizer(tokenize_lines)
 
         p = self._parser(self._pgen_grammar, error_recovery=error_recovery, start_symbol=start_symbol)
         root_node = p.parse(tokens=tokens)
```
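Because the tokenizer now defaults to exact operator types, the explicit `use_exact_op_types=True` argument is redundant and is dropped. A minimal sketch of the padding plus the shortened call, assuming `generate_tokens` is the callable bound to `self._tokenizer` and that `splitlines` lives in `parso.utils` (both assumptions, not confirmed by this diff):

```python
from parso.tokenize import generate_tokens
from parso.utils import splitlines          # assumed location of the helper

source = 'x = 1'                            # no trailing newline
tokenize_lines = splitlines(source, keepends=True)

# Pad the line list exactly as the hunk does: terminate the final line
# and append an empty sentinel so the tokenizer sees a complete last line.
tokenize_lines[-1] += '\n'
tokenize_lines.append('')

tokens = generate_tokens(tokenize_lines)    # exact op types by default now
```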
```diff
@@ -13,7 +13,7 @@ from parso import tokenize
 class ParserGenerator(object):
     def __init__(self, bnf_text):
         self._bnf_text = bnf_text
-        self.generator = tokenize.source_tokens(bnf_text)
+        self.generator = tokenize.source_tokens(bnf_text, use_exact_op_types=False)
         self._gettoken()  # Initialize lookahead
         self.dfas, self.startsymbol = self._parse()
         self.first = {}  # map from symbol name to set of tokens
```
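The parser generator is the one caller that keeps the old behaviour: it tokenizes grammar (BNF) text and matches punctuation such as `:` and `|` against the generic OP type, which exact types would never satisfy. A hedged sketch of that matching style, modeled on the CPython pgen this class descends from; `_expect` and the attribute names are hypothetical:

```python
import token
from parso import tokenize

class ParserGenerator(object):
    def __init__(self, bnf_text):
        # Exact op types would report ':' as COLON (stdlib naming assumed),
        # so the generic-OP comparison in _expect would never match.
        self.generator = tokenize.source_tokens(
            bnf_text, use_exact_op_types=False)
        self._gettoken()  # Initialize lookahead

    def _gettoken(self):
        # TokenInfo unpacks into (type, string, start_pos, prefix).
        self.type, self.value, self.begin, self.prefix = next(self.generator)

    def _expect(self, value):
        # Hypothetical helper: grammar punctuation is matched as the
        # generic token.OP type plus a string value, e.g. _expect(':').
        if self.type != token.OP or self.value != value:
            raise SyntaxError('expected %r, got %r' % (value, self.value))
        self._gettoken()
```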
```diff
@@ -308,7 +308,7 @@ class DiffParser(object):
         is_first_token = True
         omitted_first_indent = False
         indents = []
-        tokens = generate_tokens(lines, use_exact_op_types=True)
+        tokens = generate_tokens(lines)
         stack = self._active_parser.pgen_parser.stack
         for typ, string, start_pos, prefix in tokens:
             start_pos = start_pos[0] + line_offset, start_pos[1]
```
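Here too the explicit `True` becomes redundant. The loop that consumes the tokens re-bases their positions from the tokenized slice into whole-file coordinates; a minimal standalone sketch of that arithmetic, with `line_offset` as a stand-in value:

```python
from parso.tokenize import generate_tokens

lines = ['x = 1\n', 'y = 2\n']
line_offset = 10   # hypothetical: this slice starts at line 11 of the file

for typ, string, start_pos, prefix in generate_tokens(lines):
    # Tokens are positioned relative to `lines`; shifting the row by
    # line_offset maps them back into the coordinates of the whole file.
    start_pos = start_pos[0] + line_offset, start_pos[1]
    print(start_pos, repr(string))
```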
```diff
@@ -204,13 +204,13 @@ class TokenInfo(namedtuple('Token', ['type', 'string', 'start_pos', 'prefix'])):
         return self.start_pos[0], self.start_pos[1] + len(self.string)
 
 
-def source_tokens(source, use_exact_op_types=False):
+def source_tokens(source, use_exact_op_types=True):
     """Generate tokens from the source code (string)."""
     lines = splitlines(source, keepends=True)
     return generate_tokens(lines, use_exact_op_types)
 
 
-def generate_tokens(lines, use_exact_op_types=False):
+def generate_tokens(lines, use_exact_op_types=True):
     """
     A heavily modified Python standard library tokenizer.
```
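With both entry points flipped, every caller that does not pass the flag now gets exact operator types. A before/after sketch of the observable difference, assuming the token types follow the stdlib `token` numbering that the "modified standard library tokenizer" docstring suggests:

```python
import token
from parso import tokenize

source = 'a + b\n'

exact = list(tokenize.source_tokens(source))        # new default
generic = list(tokenize.source_tokens(source,       # old default, opt-in
                                      use_exact_op_types=False))

for new, old in zip(exact, generic):
    # Strings and positions are identical; only .type differs for
    # operators: '+' would be PLUS in `new` but the catch-all OP in
    # `old` (names assume stdlib numbering).
    print(token.tok_name.get(new.type, new.type),
          token.tok_name.get(old.type, old.type), repr(new.string))
```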