
Fixed list comprehension name lookups.

Dave Halter
2014-12-01 00:08:27 +01:00
parent 3928f466cf
commit 68bd9160e2
4 changed files with 10 additions and 11 deletions

View File

@@ -215,6 +215,8 @@ class Evaluator(object):
             # This is the first global lookup.
             stmt = atom.get_definition()
             scope = stmt.get_parent_until(pr.IsScope, include_current=True)
+            if isinstance(stmt, pr.CompFor):
+                stmt = stmt.get_parent_until((pr.ClassOrFunc, pr.ExprStmt))
             return self.find_types(scope, atom, stmt.start_pos, search_global=True)
         elif isinstance(atom, pr.Literal):
             return [compiled.create(self, atom.eval())]
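The added CompFor branch changes which statement's start position bounds the first global name lookup when a name is defined by a comprehension's for clause. A minimal, hypothetical repro of the behaviour this targets (not part of the commit; it assumes the jedi.Script(source, line, column, path) and completions() API of this era):

import jedi

# Completing after `x.` inside the comprehension should offer str methods,
# because `x` is bound by the `for x in strs` clause of the same expression.
source = "strs = ['a', 'b']\n[x. for x in strs]"
script = jedi.Script(source, 2, 3, 'example.py')
print([c.name for c in script.completions()])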

View File

@@ -151,6 +151,9 @@ class Comprehension(IterableWrapper):
         return helpers.deep_ast_copy(comprehension.children[0], {comprehension: last_comp})

+    def get_exact_index_types(self, index):
+        return [self._evaluator.eval_element(self.eval_node())[index]]
+
     def __repr__(self):
         return "<e%s of %s>" % (type(self).__name__, self._atom)

View File

@@ -88,14 +88,8 @@ class _RecursionNode(object):
         if not other:
             return None

-        # List Comprehensions start on the same line as its statement.
-        # Therefore we have the unfortunate situation of the same start_pos for
-        # two statements.
-        is_list_comp = lambda x: isinstance(x, pr.ListComprehension)
         return self.script == other.script \
             and self.position == other.position \
-            and not is_list_comp(self.stmt.parent) \
-            and not is_list_comp(other.parent) \
             and not self.is_ignored and not other.is_ignored
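With the list-comprehension special case gone, recursion detection compares only the script, the position, and the ignored flags. A standalone sketch of the remaining equality check (a hypothetical stand-in class, not jedi's _RecursionNode):

class RecursionNode:
    def __init__(self, script, position, is_ignored=False):
        self.script = script
        self.position = position
        self.is_ignored = is_ignored

    def __eq__(self, other):
        if not other:
            return None
        return (self.script == other.script
                and self.position == other.position
                and not self.is_ignored and not other.is_ignored)

# Two nodes at the same position in the same script compare equal,
# which is what flags a recursive evaluation.
print(RecursionNode('a.py', (3, 0)) == RecursionNode('a.py', (3, 0)))  # True
print(RecursionNode('a.py', (3, 0)) == RecursionNode('a.py', (4, 0)))  # False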

View File

@@ -81,14 +81,14 @@ class UserContext(object):
                     return u(''), start_cursor
                 is_first = False

-            # print 'tok', token_type, tok_str, force_point
+            #print('tok', tok_type, tok_str, force_point)
             if last_type == tok_type == tokenize.NAME:
                 string += ' '

-            if level > 0:
+            if level:
                 if tok_str in close_brackets:
                     level += 1
-                if tok_str in open_brackets:
+                elif tok_str in open_brackets:
                     level -= 1
             elif tok_str == '.':
                 force_point = False
@@ -127,10 +127,10 @@ class UserContext(object):
             l = first_line if x == start_pos[0] else l
             start_cursor = x, len(l) - end[1]
             string += tok_str
-            if keyword.iskeyword(tok_str[::-1]):
+            #if keyword.iskeyword(tok_str[::-1]):
                 # Keywords should always stop. The path will always end with
                 # them.
-                break
+            # break
             last_type = tok_type

         # string can still contain spaces at the end
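Both hunks above sit in _calc_path_until_cursor, which appears to tokenize the source in reverse; that is why a closing bracket raises the nesting level, an opening bracket lowers it, and the keyword test reverses the token string first. A tiny standalone illustration of that reversal (not part of the commit):

import keyword

# Tokens arrive reversed when scanning backwards, so 'if' is seen as 'fi';
# reversing the string restores it for iskeyword().
tok_str = 'fi'
print(keyword.iskeyword(tok_str[::-1]))  # True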