mirror of https://github.com/davidhalter/jedi.git, synced 2025-12-19 20:11:12 +08:00
remove start_pos definition from statement parser
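
The change is mechanical: where the statement parser used to copy the token's position into a local start_pos and then overwrite tok with its string, it now binds the string to tok_str, keeps the token object around, and reads tok.start_pos directly at the points that need it. A minimal sketch of the two styles, using a stand-in Token namedtuple rather than jedi's real token classes:

# Minimal sketch of the pattern change; Token here is a stand-in namedtuple,
# not jedi's real token class.
from collections import namedtuple

Token = namedtuple('Token', ['string', 'start_pos', 'end_pos'])

def before(tok):
    # old style: cache the position, then overwrite tok with its string,
    # losing direct access to the token object
    start_pos = tok.start_pos
    tok = tok.string
    return tok, start_pos

def after(tok):
    # new style: bind the string to its own name and keep the token object,
    # so tok.start_pos can be read exactly where it is needed
    tok_str = tok.string
    return tok_str, tok.start_pos

tok = Token('foo', (1, 0), (1, 3))
assert before(tok) == after(tok) == ('foo', (1, 0))

Dropping the local removes one piece of state that has to stay in sync with tok across the long if/elif chain in the hunks below.
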
@@ -1102,8 +1102,8 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
         for i, tok in token_iterator:
             if isinstance(tok, Base):
                 # the token is a Name, which has already been parsed
+                tok_str = tok
                 token_type = None
-                start_pos = tok.start_pos
                 end_pos = tok.end_pos

                 if is_assignment(tok):
@@ -1115,57 +1115,55 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
                     continue
             else:
                 token_type = tok.type
-                start_pos = tok.start_pos
                 end_pos = tok.end_pos
-                tok = tok.string
-                if tok == 'as': # just ignore as, because it sets values
+                tok_str = tok.string
+                if tok_str == 'as': # just ignore as, because it sets values
                     next(token_iterator, None)
                     continue

-            if tok == 'lambda':
-                lambd, tok = parse_lambda(token_iterator)
+            if tok_str == 'lambda':
+                lambd, tok_str = parse_lambda(token_iterator)
                 if lambd is not None:
                     result.append(lambd)
-                if tok not in (')', ','):
+                if tok_str not in (')', ','):
                     continue

             is_literal = token_type in [tokenize.STRING, tokenize.NUMBER]
-            if isinstance(tok, Name) or is_literal:
+            if isinstance(tok_str, Name) or is_literal:
                 cls = Literal if is_literal else Call

-                call = cls(self._sub_module, tok, start_pos, end_pos, self)
+                call = cls(self._sub_module, tok_str, tok.start_pos, end_pos, self)
                 if is_chain:
                     result[-1].set_next(call)
                 else:
                     result.append(call)
                 is_chain = False
-            elif tok in brackets.keys():
+            elif tok_str in brackets.keys():
                 arr, is_ass = parse_array(
-                    token_iterator, brackets[tok.string], start_pos
+                    token_iterator, brackets[tok.string], tok.start_pos
                 )
                 if result and isinstance(result[-1], StatementElement):
                     result[-1].set_execution(arr)
                 else:
                     arr.parent = self
                     result.append(arr)
-            elif tok == '.':
+            elif tok_str == '.':
                 if result and isinstance(result[-1], StatementElement):
                     is_chain = True
-            elif tok == ',': # implies a tuple
+            elif tok_str == ',': # implies a tuple
                 # expression is now an array not a statement anymore
                 t = result[0]
-                start_pos = t[2] if isinstance(t, tuple) else t.start_pos
+                start_pos = t.start_pos

                 # get the correct index
-                i, tok = next(token_iterator, (len(self.token_list), None))
-                if tok is not None:
-                    token_iterator.push_back((i, tok))
+                i, tok_str = next(token_iterator, (len(self.token_list), None))
+                if tok_str is not None:
+                    token_iterator.push_back((i, tok_str))
                 t = self.token_list[i - 1]
                 try:
                     e = t.end_pos
                 except AttributeError:
-                    e = (t[2][0], t[2][1] + len(t[1])) \
-                        if isinstance(t, tuple) else t.start_pos
+                    e = t.start_pos

                 stmt = Statement(
                     self._sub_module,
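
The ',' (tuple) branch above peeks at the next (index, token) pair with next(token_iterator, default) and, when it actually got a token, hands it back through token_iterator.push_back(...). A small push-back iterator sketch, assuming the helper behaves roughly like this (jedi ships its own implementation; this is not its code):

# Hypothetical push-back iterator: items handed back via push_back() are
# returned again on the next call to next().
class PushBackIterator:
    def __init__(self, iterable):
        self._it = iter(iterable)
        self._pushed = []

    def __iter__(self):
        return self

    def __next__(self):
        if self._pushed:
            return self._pushed.pop()
        return next(self._it)

    def push_back(self, item):
        self._pushed.append(item)

it = PushBackIterator(enumerate(['a', 'b', 'c']))
i, tok = next(it, (3, None))   # peek at the next (index, token) pair
if tok is not None:
    it.push_back((i, tok))     # hand it back; normal iteration is undisturbed
assert list(it) == [(0, 'a'), (1, 'b'), (2, 'c')]
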
@@ -1184,8 +1182,8 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
                 result = []
                 is_chain = False
             else:
-                if tok != '\n' and token_type != tokenize.COMMENT:
-                    result.append(tok)
+                if tok_str != '\n' and token_type != tokenize.COMMENT:
+                    result.append(tok_str)
         return result


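
The last hunk only renames tok to tok_str in the fallback branch; the filter itself is unchanged: bare newlines and comment tokens are not appended to result. A standalone illustration of that filter using the stdlib tokenize module (jedi's tokenizer differs in its details, so this is just a sketch):

# Illustration only: drop bare newline and comment tokens, keep the rest.
import io
import tokenize

source = "x = 1  # a comment\n"
kept = []
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    if tok.string != '\n' and tok.type != tokenize.COMMENT:
        kept.append(tok.string)

print(kept)  # ['x', '=', '1', ''] : the comment and the newline are filtered out
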