forked from VimPlug/jedi
Removed the line_offset from tokenize; we have better ways to modify positions now.
This commit is contained in:
@@ -391,7 +391,7 @@ class FastParser(use_metaclass(CachedFastParser)):
                 nodes.remove(node)
                 break
         else:
-            tokenizer = FastTokenizer(parser_code, 0)
+            tokenizer = FastTokenizer(parser_code)
         self.number_parsers_used += 1
         #print('CODE', repr(source))
         p = Parser(self._grammar, parser_code, self.module_path, tokenizer=tokenizer)
@@ -410,10 +410,9 @@ class FastTokenizer(object):
     """
     Breaks when certain conditions are met, i.e. a new function or class opens.
     """
-    def __init__(self, source, line_offset=0):
-        # TODO remove the whole line_offset stuff, it's not used anymore.
+    def __init__(self, source):
         self.source = source
-        self._gen = source_tokens(source, line_offset)
+        self._gen = source_tokens(source)
         self._closed = False

         # fast parser options
@@ -139,14 +139,14 @@ ALWAYS_BREAK_TOKEN = (';', 'import', 'from', 'class', 'def', 'try', 'except',
                       'finally', 'while', 'return')


-def source_tokens(source, line_offset=0):
+def source_tokens(source):
     """Generate tokens from a the source code (string)."""
     source = source + '\n'  # end with \n, because the parser needs it
     readline = StringIO(source).readline
-    return generate_tokens(readline, line_offset)
+    return generate_tokens(readline)


-def generate_tokens(readline, line_offset=0):
+def generate_tokens(readline):
     """
     A heavily modified Python standard library tokenizer.

@@ -156,7 +156,7 @@ def generate_tokens(readline):
     """
     paren_level = 0  # count parentheses
     indents = [0]
-    lnum = line_offset
+    lnum = 0
     numchars = '0123456789'
     contstr = ''
     contline = None
Reference in New Issue
Block a user