forked from VimPlug/jedi
start and end don't exist anymore in parser.token.Token; they are now start_pos/end_pos, as everywhere else
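For callers the rename is mechanical; here is a minimal sketch of an affected call site (the helper `span` and the `tok` argument are hypothetical, standing in for any token object from this module):

def span(tok):
    # Old API (before this commit):
    #   return tok.start, tok.end
    # New API (this commit): the same (line, column) tuples, under the
    # names used everywhere else in the code base.
    return tok.start_pos, tok.end_pos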
@@ -45,7 +45,7 @@ class Parser(object):
         self._gen = PushBackTokenizer(tokenizer)

         # initialize global Scope
-        start_pos = next(self._gen).start
+        start_pos = next(self._gen).start_pos
         self._gen.push_last_back()
         self.module = pr.SubModule(module_path, start_pos, top_module)
         self._scope = self.module
@@ -62,7 +62,7 @@ class Parser(object):
             pass
         s = self._scope
         while s is not None:
-            s.end_pos = self._gen.current.end
+            s.end_pos = self._gen.current.end_pos
             s = s.parent

         # clean up unused decorators
@@ -71,12 +71,12 @@ class Parser(object):
             # because of `self.module.used_names`.
             d.parent = self.module

-        self.module.end_pos = self._gen.current.end
+        self.module.end_pos = self._gen.current.end_pos
         if self._gen.current.type in (tokenize.NEWLINE,):
             # This case is only relevant with the FastTokenizer, because
             # otherwise there's always an EndMarker.
             # we added a newline before, so we need to "remove" it again.
-            self.module.end_pos = self._gen.previous.end
+            self.module.end_pos = self._gen.previous.end_pos

         del self._gen

@@ -115,17 +115,17 @@ class Parser(object):
             # token maybe a name or star
             return None, tok

-        first_pos = tok.start
+        first_pos = tok.start_pos
         append((tok.string, first_pos))
         while True:
-            end_pos = tok.end
+            end_pos = tok.end_pos
             tok = next(self._gen)
             if tok.string != '.':
                 break
             tok = next(self._gen)
             if tok.type != tokenize.NAME:
                 break
-            append((tok.string, tok.start))
+            append((tok.string, tok.start_pos))

         n = pr.Name(self.module, names, first_pos, end_pos) if names else None
         return n, tok
@@ -208,13 +208,13 @@ class Parser(object):
         :return: Return a Scope representation of the tokens.
         :rtype: Function
         """
-        first_pos = self._gen.current.start
+        first_pos = self._gen.current.start_pos
         tok = next(self._gen)
         if tok.type != tokenize.NAME:
             return None

-        fname = pr.Name(self.module, [(tok.string, tok.start)], tok.start,
-                        tok.end)
+        fname = pr.Name(self.module, [(tok.string, tok.start_pos)], tok.start_pos,
+                        tok.end_pos)

         tok = next(self._gen)
         if tok.string != '(':
@@ -246,15 +246,15 @@ class Parser(object):
         :return: Return a Scope representation of the tokens.
         :rtype: Class
         """
-        first_pos = self._gen.current.start
+        first_pos = self._gen.current.start_pos
         cname = next(self._gen)
         if cname.type != tokenize.NAME:
             debug.warning("class: syntax err, token is not a name@%s (%s: %s)",
-                          cname.start[0], tokenize.tok_name[cname.type], cname.string)
+                          cname.start_pos[0], tokenize.tok_name[cname.type], cname.string)
             return None

-        cname = pr.Name(self.module, [(cname.string, cname.start)],
-                        cname.start, cname.end)
+        cname = pr.Name(self.module, [(cname.string, cname.start_pos)],
+                        cname.start_pos, cname.end_pos)

         super = []
         _next = next(self._gen)
@@ -263,7 +263,7 @@ class Parser(object):
             _next = next(self._gen)

         if _next.string != ':':
-            debug.warning("class syntax: %s@%s", cname, _next.start[0])
+            debug.warning("class syntax: %s@%s", cname, _next.start_pos[0])
             return None

         return pr.Class(self.module, cname, super, first_pos)
@@ -296,7 +296,7 @@ class Parser(object):
         next(self._gen)
         tok = next(self._gen)

-        first_pos = tok.start
+        first_pos = tok.start_pos
         opening_brackets = ['{', '(', '[']
         closing_brackets = ['}', ')', ']']

@@ -375,7 +375,7 @@ class Parser(object):
             )
             return None, tok

-        stmt = stmt_class(self.module, tok_list, first_pos, tok.end,
+        stmt = stmt_class(self.module, tok_list, first_pos, tok.end_pos,
                           as_names=as_names,
                           names_are_set_vars=names_are_set_vars)

@@ -405,7 +405,7 @@ class Parser(object):
         for tok in self._gen:
             token_type = tok.type
             tok_str = tok.string
-            first_pos = tok.start
+            first_pos = tok.start_pos
             self.module.temp_used_names = []
             # debug.dbg('main: tok=[%s] type=[%s] indent=[%s]', \
             #     tok, tokenize.tok_name[token_type], start_position[0])
@@ -451,14 +451,14 @@ class Parser(object):
                     e = (alias or m).end_pos
                 else:
                     # TODO cleanup like e = (alias or name or self._gen.current).end_pos
-                    e = self._gen.current.end
-                end_pos = self._gen.current.end if count + 1 == len(imports) else e
+                    e = self._gen.current.end_pos
+                end_pos = self._gen.current.end_pos if count + 1 == len(imports) else e
                 i = pr.Import(self.module, first_pos, end_pos, m,
                               alias, defunct=defunct)
                 self._check_user_stmt(i)
                 self._scope.add_import(i)
             if not imports:
-                i = pr.Import(self.module, first_pos, self._gen.current.end,
+                i = pr.Import(self.module, first_pos, self._gen.current.end_pos,
                               None, defunct=True)
                 self._check_user_stmt(i)
             self.freshscope = False
@@ -479,7 +479,7 @@ class Parser(object):
                 tok_str = 'import'
                 mod = None
             if not mod and not relative_count or tok_str != "import":
-                debug.warning("from: syntax error@%s", tok.start[0])
+                debug.warning("from: syntax error@%s", tok.start_pos[0])
                 defunct = True
                 if tok_str != 'import':
                     self._gen.push_last_back()
@@ -492,8 +492,8 @@ class Parser(object):
                     e = (alias or name).end_pos
                 else:
                     # TODO cleanup like e = (alias or name or self._gen.current).end_pos
-                    e = self._gen.current.end
-                end_pos = self._gen.current.end if count + 1 == len(names) else e
+                    e = self._gen.current.end_pos
+                end_pos = self._gen.current.end_pos if count + 1 == len(names) else e
                 i = pr.Import(self.module, first_pos, end_pos, name,
                               alias, mod, star, relative_count,
                               defunct=defunct or defunct2)
@@ -505,7 +505,7 @@ class Parser(object):
                 set_stmt, tok = self._parse_statement(added_breaks=['in'],
                                                       names_are_set_vars=True)
                 if tok.string != 'in':
-                    debug.warning('syntax err, for flow incomplete @%s', tok.start[0])
+                    debug.warning('syntax err, for flow incomplete @%s', tok.start_pos[0])

             try:
                 statement, tok = self._parse_statement()
@@ -552,10 +552,10 @@ class Parser(object):
                 s = self._scope.add_statement(f)
                 self._scope = s
                 if tok.string != ':':
-                    debug.warning('syntax err, flow started @%s', tok.start[0])
+                    debug.warning('syntax err, flow started @%s', tok.start_pos[0])
             # returns
             elif tok_str in ['return', 'yield']:
-                s = tok.start
+                s = tok.start_pos
                 self.freshscope = False
                 # add returns to the scope
                 func = self._scope.get_parent_until(pr.Function)
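Several of the parser hunks above lean on the tokenizer wrapper's one-token lookahead: push_last_back() re-queues the token just returned, so the next call to next(self._gen) yields it again, while current and previous expose recent tokens for end-position bookkeeping. A rough sketch of how such a wrapper can work, assuming a plain token iterator (illustrative only, not jedi's actual PushBackTokenizer code; the previous/current bookkeeping is simplified):

class PushBackTokenizer(object):
    """Illustrative push-back wrapper around a token iterator."""

    def __init__(self, tokenizer):
        self._tokenizer = tokenizer
        self._pushed = None   # token queued for re-delivery
        self.current = None   # token most recently returned
        self.previous = None  # token returned before `current`

    def __iter__(self):
        return self

    def __next__(self):
        if self._pushed is not None:
            tok, self._pushed = self._pushed, None
        else:
            tok = next(self._tokenizer)
        self.previous, self.current = self.current, tok
        return tok

    next = __next__  # Python 2 spelling, matching the era of this code

    def push_last_back(self):
        # Re-queue `current`; the next call to next() returns it again.
        self._pushed = self.current

The remaining hunks below adjust the token class itself, where the renamed accessors live.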
@@ -82,7 +82,7 @@ class TokenInfo(object):
         elif key == 2:
             return (self._start_pos_line, self._start_pos_col)
         elif key == 3:
-            return self.end
+            return self.end_pos
         else:
             raise IndexError("list index out of range")

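The __getitem__ shim above keeps TokenInfo usable like the stdlib tokenize 5-tuple (type, string, start, end, line), so positional access keeps working alongside the renamed properties. A hedged usage sketch, assuming indices 0 and 1 map to type and string as in the stdlib (tok stands for any TokenInfo instance):

tok_type, tok_string = tok[0], tok[1]
assert tok[2] == tok.start_pos  # (line, column) where the token begins
assert tok[3] == tok.end_pos    # (line, column) where it ends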
@@ -91,11 +91,7 @@ class TokenInfo(object):
         return (self._start_pos_line, self._start_pos_col)

     @property
-    def start(self):
-        return (self._start_pos_line, self._start_pos_col)
-
-    @property
-    def end(self):
+    def end_pos(self):
         """Returns end position respecting multiline tokens."""
         end_pos_line = self._start_pos_line
         lines = self.string.split('\n')
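The hunk cuts off inside the renamed end_pos property, but the docstring and the split('\n') line indicate the intent: for a token spanning lines (e.g. a triple-quoted string), the end line advances by the number of embedded newlines and the column restarts on the last line. A sketch of how such a property can be completed under those assumptions (not necessarily the exact original code; the __init__ here is a minimal stand-in):

class TokenInfo(object):
    def __init__(self, string, start_pos):
        self.string = string
        self._start_pos_line, self._start_pos_col = start_pos

    @property
    def end_pos(self):
        """Returns end position respecting multiline tokens."""
        lines = self.string.split('\n')
        if len(lines) > 1:
            # Multiline token: the end column restarts from zero on the last line.
            return (self._start_pos_line + len(lines) - 1, len(lines[-1]))
        # Single-line token: end column is the start column plus the token length.
        return (self._start_pos_line, self._start_pos_col + len(self.string))

For example, TokenInfo('"""a\nbc"""', (1, 0)).end_pos evaluates to (2, 5): one embedded newline advances the line, and the column is the length of the last physical line.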