
Merge pull request #360 from davidhalter/evaluator

Refactoring: Use an Evaluator class.
Dave Halter
2013-12-26 17:49:00 -08:00
40 changed files with 1261 additions and 1279 deletions
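In short, this refactoring turns the module-level evaluation functions and their global caches into methods and state on an Evaluator object that every Script creates and hands to the API result classes. A minimal sketch of that pattern follows; it is not code from the commit, and the class bodies are simplified stand-ins:

class Evaluator(object):
    """Per-run evaluation state; replaces module-level caches and functions."""
    def __init__(self):
        self.memoize_cache = {}  # formerly global memoize caches

    def follow_statement(self, stmt, seek_name=None):
        # stand-in for the real evaluation entry point
        return set()


class Completion(object):
    """API result classes now receive the evaluator explicitly."""
    def __init__(self, evaluator, name):
        self._evaluator = evaluator
        self.name = name


class Script(object):
    def __init__(self, source):
        self.source = source
        self._evaluator = Evaluator()  # one evaluator per Script, no shared global state

    def completions(self):
        # hypothetical fixed names, only to show how the evaluator is threaded through
        return [Completion(self._evaluator, n) for n in ('date', 'datetime')]


print([c.name for c in Script('import datetime').completions()])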


@@ -36,14 +36,6 @@ good text editor, while still having very good IDE features for Python.
__version__ = 0, 7, 1, 'alpha1'
import sys
# python imports are hell sometimes. Especially the combination of relative
# imports and circular imports... Just avoid it:
sys.path.insert(0, __path__[0])
from .api import Script, Interpreter, NotFoundError, set_debug_function
from .api import preload_module, defined_names
from . import settings
sys.path.pop(0)
from jedi.api import Script, Interpreter, NotFoundError, set_debug_function
from jedi.api import preload_module, defined_names
from jedi import settings


@@ -13,6 +13,7 @@ except ImportError:
is_py3k = sys.hexversion >= 0x03000000
is_py33 = sys.hexversion >= 0x03030000
is_py26 = sys.hexversion < 0x02070000
def find_module_py33(string, path=None):


@@ -25,13 +25,13 @@ from jedi import cache
from jedi import modules
from jedi import interpret
from jedi._compatibility import next, unicode, builtins
import keywords
import evaluate
import api_classes
import evaluate_representation as er
import dynamic
import imports
import builtin
from jedi.evaluate import Evaluator, filter_private_variable
from jedi.evaluate import representation as er
from jedi import keywords
from jedi.evaluate import builtin
from jedi.evaluate import imports
from jedi.evaluate import dynamic
from jedi import api_classes
class NotFoundError(Exception):
@@ -82,7 +82,7 @@ class Script(object):
if not (0 <= self._column <= line_len):
raise ValueError('`column` parameter is not in a valid range.')
api_classes._clear_caches()
api_classes.clear_caches()
debug.reset_time()
self.source = modules.source_to_unicode(source, encoding)
self._pos = self._line, self._column
@@ -90,6 +90,7 @@ class Script(object):
path, source=self.source, position=self._pos)
self._source_path = path
self.path = None if path is None else os.path.abspath(path)
self._evaluator = Evaluator()
debug.speed('init')
@property
@@ -110,7 +111,6 @@ class Script(object):
""" lazy parser."""
return self._module.parser
@api_classes._clear_caches_after_call
def completions(self):
"""
Return :class:`api_classes.Completion` objects. Those objects contain
@@ -159,9 +159,9 @@ class Script(object):
if settings.case_insensitive_completion \
and n.lower().startswith(like.lower()) \
or n.startswith(like):
if not evaluate.filter_private_variable(s,
if not filter_private_variable(s,
user_stmt or self._parser.user_scope, n):
new = api_classes.Completion(c, needs_dot, len(like), s)
new = api_classes.Completion(self._evaluator, c, needs_dot, len(like), s)
k = (new.name, new.complete) # key
if k in comp_dct and settings.no_completion_duplicates:
comp_dct[k]._same_name_completions.append(new)
@@ -180,7 +180,7 @@ class Script(object):
scopes = list(self._prepare_goto(path, True))
except NotFoundError:
scopes = []
scope_generator = evaluate.get_names_of_scope(
scope_generator = self._evaluator.get_names_of_scope(
self._parser.user_scope, self._pos)
completions = []
for scope, name_list in scope_generator:
@@ -242,7 +242,7 @@ class Script(object):
else:
# just parse one statement, take it and evaluate it
stmt = self._get_under_cursor_stmt(goto_path)
scopes = evaluate.follow_statement(stmt)
scopes = self._evaluator.follow_statement(stmt)
return scopes
def _get_under_cursor_stmt(self, cursor_txt):
@@ -318,7 +318,6 @@ class Script(object):
sig = self.call_signatures()
return sig[0] if sig else None
@api_classes._clear_caches_after_call
def goto_definitions(self):
"""
Return the definitions of the path under the cursor. Goto function!
@@ -378,11 +377,10 @@ class Script(object):
# add keywords
scopes |= keywords.keywords(string=goto_path, pos=self._pos)
d = set([api_classes.Definition(s) for s in scopes
d = set([api_classes.Definition(self._evaluator, s) for s in scopes
if s is not imports.ImportPath.GlobalNamespace])
return self._sorted_defs(d)
@api_classes._clear_caches_after_call
def goto_assignments(self):
"""
Return the first definition found. Imports and statements aren't
@@ -393,7 +391,7 @@ class Script(object):
:rtype: list of :class:`api_classes.Definition`
"""
results, _ = self._goto()
d = [api_classes.Definition(d) for d in set(results)
d = [api_classes.Definition(self._evaluator, d) for d in set(results)
if d is not imports.ImportPath.GlobalNamespace]
return self._sorted_defs(d)
@@ -412,7 +410,7 @@ class Script(object):
for d in defs:
if isinstance(d.parent, pr.Import) \
and d.start_pos == (0, 0):
i = imports.ImportPath(d.parent).follow(is_goto=True)
i = imports.ImportPath(self._evaluator, d.parent).follow(is_goto=True)
definitions.remove(d)
definitions |= follow_inexistent_imports(i)
return definitions
@@ -440,7 +438,7 @@ class Script(object):
definitions.append(import_name[0])
else:
stmt = self._get_under_cursor_stmt(goto_path)
defs, search_name = evaluate.goto(stmt)
defs, search_name = self._evaluator.goto(stmt)
definitions = follow_inexistent_imports(defs)
if isinstance(user_stmt, pr.Statement):
c = user_stmt.get_commands()
@@ -452,7 +450,6 @@ class Script(object):
definitions = [user_stmt]
return definitions, search_name
@api_classes._clear_caches_after_call
def usages(self, additional_module_paths=()):
"""
Return :class:`api_classes.Usage` objects, which contain all
@@ -476,27 +473,25 @@ class Script(object):
if unicode(v.names[-1]) == search_name]
if not isinstance(user_stmt, pr.Import):
# import case is looked at with add_import_name option
definitions = dynamic.usages_add_import_modules(definitions,
search_name)
definitions = usages_add_import_modules(self._evaluator, definitions, search_name)
module = set([d.get_parent_until() for d in definitions])
module.add(self._parser.module)
names = dynamic.usages(definitions, search_name, module)
names = usages(self._evaluator, definitions, search_name, module)
for d in set(definitions):
if isinstance(d, pr.Module):
names.append(api_classes.Usage(d, d))
names.append(api_classes.Usage(self._evaluator, d, d))
elif isinstance(d, er.Instance):
# Instances can be ignored, because they are being created by
# ``__getattr__``.
pass
else:
names.append(api_classes.Usage(d.names[-1], d))
names.append(api_classes.Usage(self._evaluator, d.names[-1], d))
settings.dynamic_flow_information = temp
return self._sorted_defs(set(names))
@api_classes._clear_caches_after_call
def call_signatures(self):
"""
Return the function object of the call you're currently in.
@@ -520,7 +515,7 @@ class Script(object):
user_stmt = self._user_stmt()
with common.scale_speed_settings(settings.scale_call_signatures):
_callable = lambda: evaluate.follow_call(call)
_callable = lambda: self._evaluator.follow_call(call)
origins = cache.cache_call_signatures(_callable, user_stmt)
debug.speed('func_call followed')
@@ -555,7 +550,7 @@ class Script(object):
context = self._module.get_context()
just_from = next(context) == 'from'
i = imports.ImportPath(user_stmt, is_like_search,
i = imports.ImportPath(self._evaluator, user_stmt, is_like_search,
kill_count=kill_count, direct_resolve=True,
is_just_from=just_from)
return i, cur_name_part
@@ -673,7 +668,7 @@ def defined_names(source, path=None, encoding='utf-8'):
modules.source_to_unicode(source, encoding),
module_path=path,
)
return api_classes._defined_names(parser.module)
return api_classes._defined_names(Evaluator(), parser.module)
def preload_module(*modules):
@@ -699,3 +694,81 @@ def set_debug_function(func_cb=debug.print_to_stdout, warnings=True,
debug.enable_warning = warnings
debug.enable_notice = notices
debug.enable_speed = speed
# TODO move to a better place.
def usages(evaluator, definitions, search_name, mods):
def compare_array(definitions):
""" `definitions` are being compared by module/start_pos, because
sometimes the ids of the objects change (e.g. executions).
"""
result = []
for d in definitions:
module = d.get_parent_until()
result.append((module, d.start_pos))
return result
def check_call(call):
result = []
follow = [] # There might be multiple search_name's in one call_path
call_path = list(call.generate_call_path())
for i, name in enumerate(call_path):
# name is `pr.NamePart`.
if name == search_name:
follow.append(call_path[:i + 1])
for f in follow:
follow_res, search = evaluator.goto(call.parent, f)
follow_res = usages_add_import_modules(evaluator, follow_res, search)
compare_follow_res = compare_array(follow_res)
# compare to see if they match
if any(r in compare_definitions for r in compare_follow_res):
scope = call.parent
result.append(api_classes.Usage(evaluator, search, scope))
return result
if not definitions:
return set()
compare_definitions = compare_array(definitions)
mods |= set([d.get_parent_until() for d in definitions])
names = []
for m in dynamic.get_directory_modules_for_name(mods, search_name):
try:
stmts = m.used_names[search_name]
except KeyError:
continue
for stmt in stmts:
if isinstance(stmt, pr.Import):
count = 0
imps = []
for i in stmt.get_all_import_names():
for name_part in i.names:
count += 1
if name_part == search_name:
imps.append((count, name_part))
for used_count, name_part in imps:
i = imports.ImportPath(evaluator, stmt, kill_count=count - used_count,
direct_resolve=True)
f = i.follow(is_goto=True)
if set(f) & set(definitions):
names.append(api_classes.Usage(evaluator, name_part, stmt))
else:
for call in dynamic._scan_statement(stmt, search_name,
assignment_details=True):
names += check_call(call)
return names
def usages_add_import_modules(evaluator, definitions, search_name):
""" Adds the modules of the imports """
new = set()
for d in definitions:
if isinstance(d.parent, pr.Import):
s = imports.ImportPath(evaluator, d.parent, direct_resolve=True)
with common.ignored(IndexError):
new.add(s.follow(is_goto=True)[0])
return set(definitions) | new
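The public entry points are unchanged by the refactoring, since the Evaluator is created inside Script. A small usage example against the 0.7-era API shown in this file (source text and file name are illustrative):

import jedi

source = "import datetime\ndatetime.date.toda"
script = jedi.Script(source, 2, len("datetime.date.toda"), 'example.py')
for completion in script.completions():
    print(completion.name)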


@@ -3,8 +3,6 @@ The :mod:`api_classes` module contains the return classes of the API. These
classes are the much bigger part of the whole API, because they contain the
interesting information about completion and goto operations.
"""
from __future__ import with_statement
import warnings
import functools
@@ -13,26 +11,17 @@ from jedi import settings
from jedi import common
from jedi.parser import representation as pr
from jedi import cache
import keywords
import recursion
import dynamic
import evaluate
import imports
import evaluate_representation as er
from jedi.evaluate import representation as er
from jedi.evaluate import imports
from jedi import keywords
def _clear_caches():
def clear_caches():
"""
Clear all caches of this and related modules. The only cache that will not
be deleted is the module cache.
"""
cache.clear_caches()
dynamic.search_param_cache.clear()
recursion.ExecutionRecursionDecorator.reset()
evaluate.follow_statement.reset()
imports.imports_processed = 0
def _clear_caches_after_call(func):
@@ -42,7 +31,7 @@ def _clear_caches_after_call(func):
@functools.wraps(func)
def wrapper(*args, **kwds):
result = func(*args, **kwds)
_clear_caches()
clear_caches()
return result
return wrapper
@@ -69,7 +58,8 @@ class BaseDefinition(object):
'_sre.SRE_Pattern': 're.RegexObject',
}.items())
def __init__(self, definition, start_pos):
def __init__(self, evaluator, definition, start_pos):
self._evaluator = evaluator
self._start_pos = start_pos
self._definition = definition
"""
@@ -307,8 +297,8 @@ class Completion(BaseDefinition):
`Completion` objects are returned from :meth:`api.Script.completions`. They
provide additional information about a completion.
"""
def __init__(self, name, needs_dot, like_name_length, base):
super(Completion, self).__init__(name.parent, name.start_pos)
def __init__(self, evaluator, name, needs_dot, like_name_length, base):
super(Completion, self).__init__(evaluator, name.parent, name.start_pos)
self._name = name
self._needs_dot = needs_dot
@@ -411,15 +401,15 @@ class Completion(BaseDefinition):
"""
if self._followed_definitions is None:
if self._definition.isinstance(pr.Statement):
defs = evaluate.follow_statement(self._definition)
defs = self._evaluator.follow_statement(self._definition)
elif self._definition.isinstance(pr.Import):
defs = imports.strip_imports([self._definition])
defs = imports.strip_imports(self._evaluator, [self._definition])
else:
return [self]
self._followed_definitions = \
[BaseDefinition(d, d.start_pos) for d in defs]
_clear_caches()
[BaseDefinition(self._evaluator, d, d.start_pos) for d in defs]
clear_caches()
return self._followed_definitions
@@ -432,8 +422,8 @@ class Definition(BaseDefinition):
*Definition* objects are returned from :meth:`api.Script.goto_assignments`
or :meth:`api.Script.goto_definitions`.
"""
def __init__(self, definition):
super(Definition, self).__init__(definition, definition.start_pos)
def __init__(self, evaluator, definition):
super(Definition, self).__init__(evaluator, definition, definition.start_pos)
@property
def name(self):
@@ -550,26 +540,26 @@ class Definition(BaseDefinition):
d = d.var
if isinstance(d, pr.Name):
d = d.parent
return _defined_names(d)
return _defined_names(self._evaluator, d)
def _defined_names(scope):
def _defined_names(evaluator, scope):
"""
List sub-definitions (e.g., methods in class).
:type scope: Scope
:rtype: list of Definition
"""
pair = next(evaluate.get_names_of_scope(
pair = next(evaluator.get_names_of_scope(
scope, star_search=False, include_builtin=False), None)
names = pair[1] if pair else []
return [Definition(d) for d in sorted(names, key=lambda s: s.start_pos)]
return [Definition(evaluator, d) for d in sorted(names, key=lambda s: s.start_pos)]
class Usage(BaseDefinition):
"""TODO: document this"""
def __init__(self, name_part, scope):
super(Usage, self).__init__(scope, name_part.start_pos)
def __init__(self, evaluator, name_part, scope):
super(Usage, self).__init__(evaluator, scope, name_part.start_pos)
self.text = unicode(name_part)
self.end_pos = name_part.end_pos


@@ -5,9 +5,6 @@ available:
- module caching (`load_module` and `save_module`), which uses pickle and is
really important to assure low load times of modules like ``numpy``.
- the popular ``memoize_default`` works like a typical memoize and returns the
default otherwise.
- ``CachedMetaClass`` uses ``memoize_default`` to do the same with classes.
- ``time_cache`` can be used to cache something for just a limited time span,
which can be useful if there's user interaction and the user cannot react
faster than a certain time.
@@ -34,12 +31,9 @@ from jedi import settings
from jedi import common
from jedi import debug
# memoize caches will be deleted after every action
memoize_caches = []
_time_caches = []
time_caches = []
star_import_cache = {}
_star_import_cache = {}
# for fast_parser, should not be deleted
parser_cache = {}
@@ -60,20 +54,15 @@ def clear_caches(delete_all=False):
:param delete_all: Deletes also the cache that is normally not deleted,
like parser cache, which is important for faster parsing.
"""
global memoize_caches, time_caches
# memoize_caches must never be deleted, because the dicts will get lost in
# the wrappers.
for m in memoize_caches:
m.clear()
global _time_caches
if delete_all:
time_caches = []
star_import_cache.clear()
_time_caches = []
_star_import_cache.clear()
parser_cache.clear()
else:
# normally just kill the expired entries, not all
for tc in time_caches:
for tc in _time_caches:
# check time_cache for expired entries
for key, (t, value) in list(tc.items()):
if t < time.time():
@@ -81,41 +70,6 @@ def clear_caches(delete_all=False):
del tc[key]
def memoize_default(default=None, cache=memoize_caches):
""" This is a typical memoization decorator, BUT there is one difference:
To prevent recursion it sets defaults.
Preventing recursion is, in this case, far more important than speed. I
don't think there is a big speed difference, but there are many cases
where recursion could happen (think about a = b; b = a).
"""
def func(function):
memo = {}
cache.append(memo)
def wrapper(*args, **kwargs):
key = (args, frozenset(kwargs.items()))
if key in memo:
return memo[key]
else:
memo[key] = default
rv = function(*args, **kwargs)
memo[key] = rv
return rv
return wrapper
return func
class CachedMetaClass(type):
""" This is basically almost the same than the decorator above, it just
caches class initializations. I haven't found any other way, so I do it
with meta classes.
"""
@memoize_default()
def __call__(self, *args, **kwargs):
return super(CachedMetaClass, self).__call__(*args, **kwargs)
def time_cache(time_add_setting):
""" This decorator works as follows: Call it with a setting and after that
use the function with a callable that returns the key.
@@ -124,7 +78,7 @@ def time_cache(time_add_setting):
"""
def _temp(key_func):
dct = {}
time_caches.append(dct)
_time_caches.append(dct)
def wrapper(optional_callable, *args, **kwargs):
key = key_func(*args, **kwargs)
@@ -149,15 +103,15 @@ def cache_call_signatures(stmt):
def cache_star_import(func):
def wrapper(scope, *args, **kwargs):
def wrapper(evaluator, scope, *args, **kwargs):
with common.ignored(KeyError):
mods = star_import_cache[scope]
mods = _star_import_cache[scope]
if mods[0] + settings.star_import_cache_validity > time.time():
return mods[1]
# cache is too old and therefore invalid or not available
invalidate_star_import_cache(scope)
mods = func(scope, *args, **kwargs)
star_import_cache[scope] = time.time(), mods
mods = func(evaluator, scope, *args, **kwargs)
_star_import_cache[scope] = time.time(), mods
return mods
return wrapper
@@ -166,9 +120,9 @@ def cache_star_import(func):
def invalidate_star_import_cache(module, only_main=False):
""" Important if some new modules are being reparsed """
with common.ignored(KeyError):
t, mods = star_import_cache[module]
t, mods = _star_import_cache[module]
del star_import_cache[module]
del _star_import_cache[module]
for m in mods:
invalidate_star_import_cache(m, only_main=True)
@@ -176,7 +130,7 @@ def invalidate_star_import_cache(module, only_main=False):
if not only_main:
# We need a list here because otherwise the list is being changed
# during the iteration in py3k: iteritems -> items.
for key, (t, mods) in list(star_import_cache.items()):
for key, (t, mods) in list(_star_import_cache.items()):
if module in mods:
invalidate_star_import_cache(key)
@@ -220,7 +174,7 @@ def save_module(path, name, parser, pickling=True):
class _ModulePickling(object):
version = 5
version = 6
"""
Version number (integer) for file system cache.
@@ -292,7 +246,7 @@ class _ModulePickling(object):
else:
# 0 means version is not defined (= always delete cache):
if data.get('version', 0) != self.version:
self.delete_cache()
self.clear_cache()
self.__index = {}
else:
self.__index = data['index']
@@ -311,7 +265,7 @@ class _ModulePickling(object):
json.dump(data, f)
self.__index = None
def delete_cache(self):
def clear_cache(self):
shutil.rmtree(self._cache_directory())
def _get_hashed_path(self, path):
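The memoize_default decorator and CachedMetaClass removed above are not gone: the rest of this commit imports memoize_default from jedi.evaluate.cache and calls it with evaluator_is_first_arg=True, and the new Evaluator carries a memoize_cache dict. A hedged sketch of how such an evaluator-scoped decorator could look (jedi/evaluate/cache.py itself is not part of this excerpt, so the details here are assumptions):

def memoize_default(default=None, evaluator_is_first_arg=False):
    """Sketch: memoize on the evaluator's dict instead of a module-level cache."""
    def func(function):
        def wrapper(obj, *args, **kwargs):
            if evaluator_is_first_arg:
                cache = obj.memoize_cache              # obj is the Evaluator itself
            else:
                cache = obj._evaluator.memoize_cache   # assumed attribute on other objects
            memo = cache.setdefault(function, {})
            key = (obj, args, frozenset(kwargs.items()))
            if key in memo:
                return memo[key]
            memo[key] = default                        # break recursion with the default
            memo[key] = rv = function(obj, *args, **kwargs)
            return rv
        return wrapper
    return func

Clearing evaluation state then amounts to dropping the Evaluator instance rather than iterating over global cache lists.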


@@ -2,8 +2,8 @@
import sys
import contextlib
import functools
import tokenizer as tokenize
from jedi.parser import tokenizer as tokenize
from jedi._compatibility import next, reraise
from jedi import settings


@@ -1,4 +1,4 @@
from _compatibility import u, encoding, is_py3k
from jedi._compatibility import u, encoding, is_py3k
import inspect
import time


@@ -16,10 +16,8 @@ annotations.
import re
from jedi import cache
from jedi.evaluate.cache import memoize_default
from jedi.parser import Parser
import evaluate
import evaluate_representation as er
DOCSTRING_PARAM_PATTERNS = [
r'\s*:type\s+%s:\s*([^\n]+)', # Sphinx
@@ -34,8 +32,8 @@ DOCSTRING_RETURN_PATTERNS = [
REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
@cache.memoize_default()
def follow_param(param):
@memoize_default(None, evaluator_is_first_arg=True)
def follow_param(evaluator, param):
func = param.parent_function
# print func, param, param.parent_function
param_str = _search_param_in_docstr(func.docstr, str(param.get_name()))
@@ -54,7 +52,7 @@ def follow_param(param):
p = Parser(param_str, None, user_position, no_docstr=True)
if p.user_stmt is None:
return []
return evaluate.follow_statement(p.user_stmt)
return evaluator.follow_statement(p.user_stmt)
return []
@@ -106,13 +104,15 @@ def _strip_rest_role(type_str):
return type_str
def find_return_types(func):
def find_return_types(evaluator, func):
def search_return_in_docstr(code):
for p in DOCSTRING_RETURN_PATTERNS:
match = p.search(code)
if match:
return match.group(1)
from jedi.evaluate import representation as er
if isinstance(func, er.InstanceElement):
func = func.var
@@ -127,4 +127,4 @@ def find_return_types(func):
if p.user_stmt is None:
return []
p.user_stmt.parent = func
return list(evaluate.follow_statement(p.user_stmt))
return list(evaluator.follow_statement(p.user_stmt))
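For context, the DOCSTRING_PARAM_PATTERNS above match Sphinx-style type annotations, and follow_param parses and evaluates the matched type string. A tiny illustrative target (not from the diff) that the Sphinx pattern would match:

def join_names(first, last):
    """Concatenate two name parts.

    :type first: str
    :param last: the family name
    :type last: str
    """
    # with the :type annotations above, jedi can infer that
    # first and last are str inside this function body
    return first + ' ' + last

print(join_names('Ada', 'Lovelace'))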


@@ -1,826 +0,0 @@
"""
Evaluation of Python code in |jedi| is based on three assumptions:
* Code is recursive (to weaken this assumption, the :mod:`dynamic` module
exists).
* No magic is being used:
- metaclasses
- ``setattr()`` / ``__import__()``
- writing to ``globals()``, ``locals()``, ``object.__dict__``
* The programmer is not a total dick, e.g. like `this
<https://github.com/davidhalter/jedi/issues/24>`_ :-)
That said, there's mainly one entry point in this script: ``follow_statement``.
This is where autocompletion starts. Everything you want to complete is either
a ``Statement`` or some special name like ``class``, which is easy to complete.
Therefore you need to understand what follows after ``follow_statement``. Let's
make an example::
import datetime
datetime.date.toda# <-- cursor here
First of all, this module doesn't care about completion. It really just cares
about ``datetime.date``. At the end of the procedure ``follow_statement`` will
return the ``datetime`` class.
To *visualize* this (simplified):
- ``follow_statement`` - ``<Statement: datetime.date>``
- Unpacking of the statement into ``[[<Call: datetime.date>]]``
- ``follow_call_list``, calls ``follow_call`` with ``<Call: datetime.date>``
- ``follow_call`` - searches the ``datetime`` name within the module.
This is exactly where it starts to get complicated. Now recursions start to
kick in. The statement has not been resolved fully, but now we need to resolve
the datetime import. So it continues
- follow import, which happens in the :mod:`imports` module.
- now the same ``follow_call`` as above calls ``follow_paths`` to follow the
second part of the statement ``date``.
- After ``follow_paths`` returns with the desired ``datetime.date`` class, the
result is being returned and the recursion finishes.
Now what would happen if we wanted ``datetime.date.foo.bar``? Just two more
calls to ``follow_paths`` (which calls itself with a recursion). What if the
import would contain another Statement like this::
from foo import bar
Date = bar.baz
Well... You get it. Just another ``follow_statement`` recursion. It's really
easy. Just that Python is not that easy sometimes. To understand tuple
assignments and different class scopes, a lot more code had to be written. Yet
we're still not talking about Descriptors and Nested List Comprehensions, just
the simple stuff.
So if you want to change something, write a test and then just change what you
want. This module has been tested by about 600 tests. Don't be afraid to break
something. The tests are good enough.
I need to mention now that this recursive approach is really good because it
only *evaluates* what needs to be *evaluated*. All the statements and modules
that are not used are just being ignored. It's a little bit similar to the
backtracking algorithm.
.. todo:: nonlocal statement, needed or can be ignored? (py3k)
"""
from __future__ import with_statement
import sys
import itertools
from jedi._compatibility import next, hasattr, is_py3k, unicode, reraise, u
from jedi import common
from jedi import cache
from jedi.parser import representation as pr
from jedi import debug
import evaluate_representation as er
import recursion
import docstrings
import builtin
import imports
import dynamic
def get_defined_names_for_position(scope, position=None, start_scope=None):
"""
Return filtered version of ``scope.get_defined_names()``.
This function basically does what :meth:`scope.get_defined_names
<parsing_representation.Scope.get_defined_names>` does.
- If `position` is given, delete all names defined after `position`.
- For special objects like instances, `position` is ignored and all
names are returned.
:type scope: :class:`parsing_representation.IsScope`
:param scope: Scope in which names are searched.
:param position: The position as a line/column tuple, default is infinity.
"""
names = scope.get_defined_names()
# Instances have special rules, always return all the possible completions,
# because class variables are always valid and the `self.` variables, too.
if (not position or isinstance(scope, (er.Array, er.Instance))
or start_scope != scope
and isinstance(start_scope, (pr.Function, er.Execution))):
return names
names_new = []
for n in names:
if n.start_pos[0] is not None and n.start_pos < position:
names_new.append(n)
return names_new
def get_names_of_scope(scope, position=None, star_search=True,
include_builtin=True):
"""
Get all completions (names) possible for the current scope.
The star search option is only here to provide an optimization. Otherwise
the whole thing would probably start a little recursive madness.
This function is used to include names from outer scopes. For example,
when the current scope is function:
>>> from jedi.parser import Parser
>>> parser = Parser('''
... x = ['a', 'b', 'c']
... def func():
... y = None
... ''')
>>> scope = parser.module.subscopes[0]
>>> scope
<Function: func@3-4>
`get_names_of_scope` is a generator. First it yields names from
the innermost scope.
>>> pairs = list(get_names_of_scope(scope))
>>> pairs[0]
(<Function: func@3-4>, [<Name: y@4,4>])
Then it yields the names from the next outer scope. For this
example, this is the outermost scope.
>>> pairs[1]
(<SubModule: None@1-4>, [<Name: x@2,0>, <Name: func@3,4>])
Finally, it yields names from builtin, if `include_builtin` is
true (default).
>>> pairs[2] #doctest: +ELLIPSIS
(<Module: ...builtin...>, [<Name: ...>, ...])
:rtype: [(pr.Scope, [pr.Name])]
:return: Return a generator that yields a pair of scope and names.
"""
in_func_scope = scope
non_flow = scope.get_parent_until(pr.Flow, reverse=True)
while scope:
if isinstance(scope, pr.SubModule) and scope.parent:
# we don't want submodules to report if we have modules.
scope = scope.parent
continue
# `pr.Class` is used, because the parent is never `Class`.
# Ignore the Flows, because the classes and functions care for that.
# InstanceElement of Class is ignored, if it is not the start scope.
if not (scope != non_flow and scope.isinstance(pr.Class)
or scope.isinstance(pr.Flow)
or scope.isinstance(er.Instance)
and non_flow.isinstance(er.Function)):
try:
if isinstance(scope, er.Instance):
for g in scope.scope_generator():
yield g
else:
yield scope, get_defined_names_for_position(scope,
position, in_func_scope)
except StopIteration:
reraise(common.MultiLevelStopIteration, sys.exc_info()[2])
if scope.isinstance(pr.ForFlow) and scope.is_list_comp:
# is a list comprehension
yield scope, scope.get_set_vars(is_internal_call=True)
scope = scope.parent
# This is used, because subscopes (Flow scopes) would distort the
# results.
if scope and scope.isinstance(er.Function, pr.Function, er.Execution):
in_func_scope = scope
# Add star imports.
if star_search:
for s in imports.remove_star_imports(non_flow.get_parent_until()):
for g in get_names_of_scope(s, star_search=False):
yield g
# Add builtins to the global scope.
if include_builtin:
builtin_scope = builtin.Builtin.scope
yield builtin_scope, builtin_scope.get_defined_names()
def find_name(scope, name_str, position=None, search_global=False,
is_goto=False, resolve_decorator=True):
"""
This is the search function. The most important part to debug.
`remove_statements` and `filter_statements` really are the core part of
this completion.
:param position: Position of the last statement -> tuple of line, column
:return: List of Names. Their parents are the scopes, they are defined in.
:rtype: list
"""
def remove_statements(result):
"""
This is the part where statements are being stripped.
Due to lazy evaluation, statements like a = func; b = a; b() have to be
evaluated.
"""
res_new = []
for r in result:
add = []
if r.isinstance(pr.Statement):
check_instance = None
if isinstance(r, er.InstanceElement) and r.is_class_var:
check_instance = r.instance
r = r.var
# Global variables handling.
if r.is_global():
for token_name in r.token_list[1:]:
if isinstance(token_name, pr.Name):
add = find_name(r.parent, str(token_name))
else:
# generated objects are used within executions, but these
# objects are in functions, and we have to dynamically
# execute first.
if isinstance(r, pr.Param):
func = r.parent
# Instances are typically faked, if the instance is not
# called from outside. Here we check it for __init__
# functions and return.
if isinstance(func, er.InstanceElement) \
and func.instance.is_generated \
and hasattr(func, 'name') \
and str(func.name) == '__init__' \
and r.position_nr > 0: # 0 would be self
r = func.var.params[r.position_nr]
# add docstring knowledge
doc_params = docstrings.follow_param(r)
if doc_params:
res_new += doc_params
continue
if not r.is_generated:
res_new += dynamic.search_params(r)
if not res_new:
c = r.get_commands()[0]
if c in ('*', '**'):
t = 'tuple' if c == '*' else 'dict'
res_new = [er.Instance(
find_name(builtin.Builtin.scope, t)[0])
]
if not r.assignment_details:
# this means that there are no default params,
# so just ignore it.
continue
# Remove the statement docstr stuff for now, that has to be
# implemented with the evaluator class.
#if r.docstr:
#res_new.append(r)
scopes = follow_statement(r, seek_name=name_str)
add += remove_statements(scopes)
if check_instance is not None:
# class renames
add = [er.InstanceElement(check_instance, a, True)
if isinstance(a, (er.Function, pr.Function))
else a for a in add]
res_new += add
else:
if isinstance(r, pr.Class):
r = er.Class(r)
elif isinstance(r, pr.Function):
r = er.Function(r)
if r.isinstance(er.Function) and resolve_decorator:
r = r.get_decorated_func()
res_new.append(r)
debug.dbg('sfn remove, new: %s, old: %s' % (res_new, result))
return res_new
def filter_name(scope_generator):
"""
Filters all variables of a scope (which are defined in the
`scope_generator`), until the name fits.
"""
def handle_for_loops(loop):
# Take the first statement (for has always only
# one, remember `in`). And follow it.
if not loop.inputs:
return []
result = get_iterator_types(follow_statement(loop.inputs[0]))
if len(loop.set_vars) > 1:
commands = loop.set_stmt.get_commands()
# loops with loop.set_vars > 0 only have one command
result = assign_tuples(commands[0], result, name_str)
return result
def process(name):
"""
Returns the parent of a name, which means the element which stands
behind a name.
"""
result = []
no_break_scope = False
par = name.parent
exc = pr.Class, pr.Function
until = lambda: par.parent.parent.get_parent_until(exc)
is_array_assignment = False
if par is None:
pass
elif par.isinstance(pr.Flow):
if par.command == 'for':
result += handle_for_loops(par)
else:
debug.warning('Flow: Why are you here? %s' % par.command)
elif par.isinstance(pr.Param) \
and par.parent is not None \
and isinstance(until(), pr.Class) \
and par.position_nr == 0:
# This is where self gets added - this happens at another
# place, if the var_args are clear. But sometimes the class is
# not known. Therefore add a new instance for self. Otherwise
# take the existing.
if isinstance(scope, er.InstanceElement):
inst = scope.instance
else:
inst = er.Instance(er.Class(until()))
inst.is_generated = True
result.append(inst)
elif par.isinstance(pr.Statement):
def is_execution(calls):
for c in calls:
if isinstance(c, (unicode, str)):
continue
if c.isinstance(pr.Array):
if is_execution(c):
return True
elif c.isinstance(pr.Call):
# Compare start_pos, because names may be different
# because of executions.
if c.name.start_pos == name.start_pos \
and c.execution:
return True
return False
is_exe = False
for assignee, op in par.assignment_details:
is_exe |= is_execution(assignee)
if is_exe:
# filter array[3] = ...
# TODO check executions for dict contents
is_array_assignment = True
else:
details = par.assignment_details
if details and details[0][1] != '=':
no_break_scope = True
# TODO this makes self variables non-breakable. wanted?
if isinstance(name, er.InstanceElement) \
and not name.is_class_var:
no_break_scope = True
result.append(par)
else:
# TODO multi-level import non-breakable
if isinstance(par, pr.Import) and len(par.namespace) > 1:
no_break_scope = True
result.append(par)
return result, no_break_scope, is_array_assignment
flow_scope = scope
result = []
# compare func uses the tuple of line/indent = line/column
comparison_func = lambda name: (name.start_pos)
for nscope, name_list in scope_generator:
break_scopes = []
# here is the position stuff happening (sorting of variables)
for name in sorted(name_list, key=comparison_func, reverse=True):
p = name.parent.parent if name.parent else None
if isinstance(p, er.InstanceElement) \
and isinstance(p.var, pr.Class):
p = p.var
if name_str == name.get_code() and p not in break_scopes:
r, no_break_scope, is_array_assignment = process(name)
if is_goto:
if not is_array_assignment: # shouldn't goto arr[1] =
result.append(name)
else:
result += r
# for comparison we need the raw class
s = nscope.base if isinstance(nscope, er.Class) else nscope
# this means that a definition was found and is not e.g.
# in if/else.
if result and not no_break_scope:
if not name.parent or p == s:
break
break_scopes.append(p)
while flow_scope:
# TODO check if result is in scope -> no evaluation necessary
n = dynamic.check_flow_information(flow_scope, name_str,
position)
if n:
result = n
break
if result:
break
if flow_scope == nscope:
break
flow_scope = flow_scope.parent
flow_scope = nscope
if result:
break
if not result and isinstance(nscope, er.Instance):
# __getattr__ / __getattribute__
result += check_getattr(nscope, name_str)
debug.dbg('sfn filter "%s" in (%s-%s): %s@%s'
% (name_str, scope, nscope, u(result), position))
return result
def descriptor_check(result):
"""Processes descriptors"""
res_new = []
for r in result:
if isinstance(scope, (er.Instance, er.Class)) \
and hasattr(r, 'get_descriptor_return'):
# handle descriptors
with common.ignored(KeyError):
res_new += r.get_descriptor_return(scope)
continue
res_new.append(r)
return res_new
if search_global:
scope_generator = get_names_of_scope(scope, position=position)
else:
if isinstance(scope, er.Instance):
scope_generator = scope.scope_generator()
else:
if isinstance(scope, (er.Class, pr.Module)):
# classes are only available directly via chaining?
# strange stuff...
names = scope.get_defined_names()
else:
names = get_defined_names_for_position(scope, position)
scope_generator = iter([(scope, names)])
if is_goto:
return filter_name(scope_generator)
return descriptor_check(remove_statements(filter_name(scope_generator)))
def check_getattr(inst, name_str):
"""Checks for both __getattr__ and __getattribute__ methods"""
result = []
# str is important to lose the NamePart!
module = builtin.Builtin.scope
name = pr.String(module, "'%s'" % name_str, (0, 0), (0, 0), inst)
with common.ignored(KeyError):
result = inst.execute_subscope_by_name('__getattr__', [name])
if not result:
# This is a little bit special. `__getattr__` is executed
# before anything else. But: I know of no use case where this
# could be practical and jedi would return wrong types. If
# you ever have one, let me know!
with common.ignored(KeyError):
result = inst.execute_subscope_by_name('__getattribute__', [name])
return result
def get_iterator_types(inputs):
"""Returns the types of any iterator (arrays, yields, __iter__, etc)."""
iterators = []
# Take the first statement (for has always only
# one, remember `in`). And follow it.
for it in inputs:
if isinstance(it, (er.Generator, er.Array, dynamic.ArrayInstance)):
iterators.append(it)
else:
if not hasattr(it, 'execute_subscope_by_name'):
debug.warning('iterator/for loop input wrong', it)
continue
try:
iterators += it.execute_subscope_by_name('__iter__')
except KeyError:
debug.warning('iterators: No __iter__ method found.')
result = []
for gen in iterators:
if isinstance(gen, er.Array):
# Array is a little bit special, since this is an internal
# array, but there's also the list builtin, which is
# another thing.
result += gen.get_index_types()
elif isinstance(gen, er.Instance):
# __iter__ returned an instance.
name = '__next__' if is_py3k else 'next'
try:
result += gen.execute_subscope_by_name(name)
except KeyError:
debug.warning('Instance has no __next__ function', gen)
else:
# is a generator
result += gen.iter_content()
return result
def assign_tuples(tup, results, seek_name):
"""
This is a normal assignment checker. In Python, functions and other things
can return tuples:
>>> a, b = 1, ""
>>> a, (b, c) = 1, ("", 1.0)
Here, if `seek_name` is "a", the number type will be returned.
The first part (before `=`) is the param tuples, the second one result.
:type tup: pr.Array
"""
def eval_results(index):
types = []
for r in results:
try:
func = r.get_exact_index_types
except AttributeError:
debug.warning("invalid tuple lookup %s of result %s in %s"
% (tup, results, seek_name))
else:
with common.ignored(IndexError):
types += func(index)
return types
result = []
for i, stmt in enumerate(tup):
# Used in assignments. There is just one call and no other things,
# therefore we can just assume that the first part is important.
command = stmt.get_commands()[0]
if tup.type == pr.Array.NOARRAY:
# unnecessary braces -> just remove.
r = results
else:
r = eval_results(i)
# LHS of tuples can be nested, so resolve it recursively
result += find_assignments(command, r, seek_name)
return result
def find_assignments(lhs, results, seek_name):
"""
Check if `seek_name` is in the left hand side `lhs` of assignment.
`lhs` can simply be a variable (`pr.Call`) or a tuple/list (`pr.Array`)
representing the following cases::
a = 1 # lhs is pr.Call
(a, b) = 2 # lhs is pr.Array
:type lhs: pr.Call
:type results: list
:type seek_name: str
"""
if isinstance(lhs, pr.Array):
return assign_tuples(lhs, results, seek_name)
elif lhs.name.names[-1] == seek_name:
return results
else:
return []
@recursion.RecursionDecorator
@cache.memoize_default(default=())
def follow_statement(stmt, seek_name=None):
"""
The starting point of the completion. A statement always owns a call list,
which are the calls that a statement makes.
In case multiple names are defined in the statement, `seek_name` returns
the result for this name.
:param stmt: A `pr.Statement`.
:param seek_name: A string.
"""
debug.dbg('follow_stmt %s (%s)' % (stmt, seek_name))
commands = stmt.get_commands()
debug.dbg('calls: %s' % commands)
result = follow_call_list(commands)
# Assignment checking is only important if the statement defines multiple
# variables.
if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
new_result = []
for ass_commands, op in stmt.assignment_details:
new_result += find_assignments(ass_commands[0], result, seek_name)
result = new_result
return set(result)
@common.rethrow_uncaught
def follow_call_list(call_list, follow_array=False):
"""
`call_list` can be either `pr.Array` or `list of list`.
It is used to evaluate a two dimensional object, that has calls, arrays and
operators in it.
"""
def evaluate_list_comprehension(lc, parent=None):
input = lc.input
nested_lc = lc.input.token_list[0]
if isinstance(nested_lc, pr.ListComprehension):
# is nested LC
input = nested_lc.stmt
module = input.get_parent_until()
# create a for loop, which does the same as list comprehensions
loop = pr.ForFlow(module, [input], lc.stmt.start_pos, lc.middle, True)
loop.parent = parent or lc.get_parent_until(pr.IsScope)
if isinstance(nested_lc, pr.ListComprehension):
loop = evaluate_list_comprehension(nested_lc, loop)
return loop
result = []
calls_iterator = iter(call_list)
for call in calls_iterator:
if pr.Array.is_type(call, pr.Array.NOARRAY):
r = list(itertools.chain.from_iterable(follow_statement(s)
for s in call))
call_path = call.generate_call_path()
next(call_path, None) # the first one has been used already
result += follow_paths(call_path, r, call.parent,
position=call.start_pos)
elif isinstance(call, pr.ListComprehension):
loop = evaluate_list_comprehension(call)
# Caveat: parents are being changed, but this doesn't matter,
# because nothing else uses it.
call.stmt.parent = loop
result += follow_statement(call.stmt)
else:
if isinstance(call, pr.Lambda):
result.append(er.Function(call))
# With things like params, these can also be functions...
elif isinstance(call, pr.Base) and call.isinstance(er.Function,
er.Class, er.Instance, dynamic.ArrayInstance):
result.append(call)
# The string tokens are just operations (+, -, etc.)
elif not isinstance(call, (str, unicode)):
if isinstance(call, pr.Call) and str(call.name) == 'if':
# Ternary operators.
while True:
try:
call = next(calls_iterator)
except StopIteration:
break
with common.ignored(AttributeError):
if str(call.name) == 'else':
break
continue
result += follow_call(call)
elif call == '*':
if [r for r in result if isinstance(r, er.Array)
or isinstance(r, er.Instance)
and str(r.name) == 'str']:
# if it is an iterable, ignore * operations
next(calls_iterator)
return set(result)
def follow_call(call):
"""Follow a call is following a function, variable, string, etc."""
path = call.generate_call_path()
# find the statement of the Scope
s = call
while not s.parent.isinstance(pr.IsScope):
s = s.parent
return follow_call_path(path, s.parent, s.start_pos)
def follow_call_path(path, scope, position):
"""Follows a path generated by `pr.StatementElement.generate_call_path()`"""
current = next(path)
if isinstance(current, pr.Array):
result = [er.Array(current)]
else:
if isinstance(current, pr.NamePart):
# This is the first global lookup.
scopes = find_name(scope, current, position=position,
search_global=True)
else:
# for pr.Literal
scopes = find_name(builtin.Builtin.scope, current.type_as_string())
# Make instances of those number/string objects.
scopes = [er.Instance(s, (current.value,)) for s in scopes]
result = imports.strip_imports(scopes)
return follow_paths(path, result, scope, position=position)
def follow_paths(path, results, call_scope, position=None):
"""
In each result, `path` must be followed. Copies the path iterator.
"""
results_new = []
if results:
if len(results) > 1:
iter_paths = itertools.tee(path, len(results))
else:
iter_paths = [path]
for i, r in enumerate(results):
fp = follow_path(iter_paths[i], r, call_scope, position=position)
if fp is not None:
results_new += fp
else:
# This means stop iteration.
return results
return results_new
def follow_path(path, scope, call_scope, position=None):
"""
Uses a generator and tries to complete the path, e.g.::
foo.bar.baz
`follow_path` is only responsible for completing `.bar.baz`, the rest is
done in the `follow_call` function.
"""
# current is either an Array or a Scope.
try:
current = next(path)
except StopIteration:
return None
debug.dbg('follow %s in scope %s' % (current, scope))
result = []
if isinstance(current, pr.Array):
# This must be an execution, either () or [].
if current.type == pr.Array.LIST:
if hasattr(scope, 'get_index_types'):
result = scope.get_index_types(current)
elif current.type not in [pr.Array.DICT]:
# Scope must be a class or func - make an instance or execution.
debug.dbg('exe', scope)
result = er.Execution(scope, current).get_return_types()
else:
# Curly braces are not allowed, because they make no sense.
debug.warning('strange function call with {}', current, scope)
else:
# The function must not be decorated with something else.
if scope.isinstance(er.Function):
scope = scope.get_magic_method_scope()
else:
# This is the typical lookup while chaining things.
if filter_private_variable(scope, call_scope, current):
return []
result = imports.strip_imports(find_name(scope, current,
position=position))
return follow_paths(path, set(result), call_scope, position=position)
def filter_private_variable(scope, call_scope, var_name):
"""private variables begin with a double underline `__`"""
if isinstance(var_name, (str, unicode)) and isinstance(scope, er.Instance)\
and var_name.startswith('__') and not var_name.endswith('__'):
s = call_scope.get_parent_until((pr.Class, er.Instance))
if s != scope and s != scope.base.base:
return True
return False
def goto(stmt, call_path=None):
if call_path is None:
commands = stmt.get_commands()
if len(commands) == 0:
return [], ''
# Only the first command is important, the rest should basically not
# happen except in broken code (e.g. docstrings that aren't code).
call = commands[0]
if isinstance(call, (str, unicode)):
call_path = [call]
else:
call_path = list(call.generate_call_path())
scope = stmt.get_parent_until(pr.IsScope)
pos = stmt.start_pos
call_path, search = call_path[:-1], call_path[-1]
pos = pos[0], pos[1] + 1
if call_path:
scopes = follow_call_path(iter(call_path), scope, pos)
search_global = False
pos = None
else:
scopes = [scope]
search_global = True
follow_res = []
for s in scopes:
follow_res += find_name(s, search, pos,
search_global=search_global, is_goto=True)
return follow_res, search

jedi/evaluate/__init__.py (new file)

@@ -0,0 +1,827 @@
"""
Evaluation of Python code in |jedi| is based on three assumptions:
* Code is recursive (to weaken this assumption, the :mod:`dynamic` module
exists).
* No magic is being used:
- metaclasses
- ``setattr()`` / ``__import__()``
- writing to ``globals()``, ``locals()``, ``object.__dict__``
* The programmer is not a total dick, e.g. like `this
<https://github.com/davidhalter/jedi/issues/24>`_ :-)
That said, there's mainly one entry point in this script: ``follow_statement``.
This is where autocompletion starts. Everything you want to complete is either
a ``Statement`` or some special name like ``class``, which is easy to complete.
Therefore you need to understand what follows after ``follow_statement``. Let's
make an example::
import datetime
datetime.date.toda# <-- cursor here
First of all, this module doesn't care about completion. It really just cares
about ``datetime.date``. At the end of the procedure ``follow_statement`` will
return the ``datetime`` class.
To *visualize* this (simplified):
- ``follow_statement`` - ``<Statement: datetime.date>``
- Unpacking of the statement into ``[[<Call: datetime.date>]]``
- ``follow_call_list``, calls ``follow_call`` with ``<Call: datetime.date>``
- ``follow_call`` - searches the ``datetime`` name within the module.
This is exactly where it starts to get complicated. Now recursions start to
kick in. The statement has not been resolved fully, but now we need to resolve
the datetime import. So it continues
- follow import, which happens in the :mod:`imports` module.
- now the same ``follow_call`` as above calls ``follow_paths`` to follow the
second part of the statement ``date``.
- After ``follow_paths`` returns with the desired ``datetime.date`` class, the
result is being returned and the recursion finishes.
Now what would happen if we wanted ``datetime.date.foo.bar``? Just two more
calls to ``follow_paths`` (which calls itself with a recursion). What if the
import would contain another Statement like this::
from foo import bar
Date = bar.baz
Well... You get it. Just another ``follow_statement`` recursion. It's really
easy. Just that Python is not that easy sometimes. To understand tuple
assignments and different class scopes, a lot more code had to be written. Yet
we're still not talking about Descriptors and Nested List Comprehensions, just
the simple stuff.
So if you want to change something, write a test and then just change what you
want. This module has been tested by about 600 tests. Don't be afraid to break
something. The tests are good enough.
I need to mention now that this recursive approach is really good because it
only *evaluates* what needs to be *evaluated*. All the statements and modules
that are not used are just being ignored. It's a little bit similar to the
backtracking algorithm.
.. todo:: nonlocal statement, needed or can be ignored? (py3k)
"""
from __future__ import with_statement
import sys
import itertools
from jedi._compatibility import next, hasattr, is_py3k, unicode, reraise, u
from jedi import common
from jedi.parser import representation as pr
from jedi import debug
from jedi.evaluate import representation as er
from jedi.evaluate import builtin
from jedi.evaluate import imports
from jedi.evaluate import recursion
from jedi.evaluate.cache import memoize_default
from jedi import docstrings
from jedi.evaluate import dynamic
def get_defined_names_for_position(scope, position=None, start_scope=None):
"""
Return filtered version of ``scope.get_defined_names()``.
This function basically does what :meth:`scope.get_defined_names
<parsing_representation.Scope.get_defined_names>` does.
- If `position` is given, delete all names defined after `position`.
- For special objects like instances, `position` is ignored and all
names are returned.
:type scope: :class:`parsing_representation.IsScope`
:param scope: Scope in which names are searched.
:param position: The position as a line/column tuple, default is infinity.
"""
names = scope.get_defined_names()
# Instances have special rules, always return all the possible completions,
# because class variables are always valid and the `self.` variables, too.
if (not position or isinstance(scope, (er.Array, er.Instance))
or start_scope != scope
and isinstance(start_scope, (pr.Function, er.Execution))):
return names
names_new = []
for n in names:
if n.start_pos[0] is not None and n.start_pos < position:
names_new.append(n)
return names_new
class Evaluator(object):
def __init__(self):
self.memoize_cache = {} # for memoize decorators
self.recursion_detector = recursion.RecursionDetector()
self.execution_recursion_detector = recursion.ExecutionRecursionDetector()
def get_names_of_scope(self, scope, position=None, star_search=True,
include_builtin=True):
"""
Get all completions (names) possible for the current scope.
The star search option is only here to provide an optimization. Otherwise
the whole thing would probably start a little recursive madness.
This function is used to include names from outer scopes. For example,
when the current scope is function:
>>> from jedi.parser import Parser
>>> parser = Parser('''
... x = ['a', 'b', 'c']
... def func():
... y = None
... ''')
>>> scope = parser.module.subscopes[0]
>>> scope
<Function: func@3-4>
`get_names_of_scope` is a generator. First it yields names from
the innermost scope.
>>> pairs = list(Evaluator().get_names_of_scope(scope))
>>> pairs[0]
(<Function: func@3-4>, [<Name: y@4,4>])
Then it yields the names from the next outer scope. For this
example, this is the outermost scope.
>>> pairs[1]
(<SubModule: None@1-4>, [<Name: x@2,0>, <Name: func@3,4>])
Finally, it yields names from builtin, if `include_builtin` is
true (default).
>>> pairs[2] #doctest: +ELLIPSIS
(<Module: ...builtin...>, [<Name: ...>, ...])
:rtype: [(pr.Scope, [pr.Name])]
:return: Return a generator that yields a pair of scope and names.
"""
in_func_scope = scope
non_flow = scope.get_parent_until(pr.Flow, reverse=True)
while scope:
if isinstance(scope, pr.SubModule) and scope.parent:
# we don't want submodules to report if we have modules.
scope = scope.parent
continue
# `pr.Class` is used, because the parent is never `Class`.
# Ignore the Flows, because the classes and functions care for that.
# InstanceElement of Class is ignored, if it is not the start scope.
if not (scope != non_flow and scope.isinstance(pr.Class)
or scope.isinstance(pr.Flow)
or scope.isinstance(er.Instance)
and non_flow.isinstance(er.Function)):
try:
if isinstance(scope, er.Instance):
for g in scope.scope_generator():
yield g
else:
yield scope, get_defined_names_for_position(scope,
position, in_func_scope)
except StopIteration:
reraise(common.MultiLevelStopIteration, sys.exc_info()[2])
if scope.isinstance(pr.ForFlow) and scope.is_list_comp:
# is a list comprehension
yield scope, scope.get_set_vars(is_internal_call=True)
scope = scope.parent
# This is used, because subscopes (Flow scopes) would distort the
# results.
if scope and scope.isinstance(er.Function, pr.Function, er.Execution):
in_func_scope = scope
# Add star imports.
if star_search:
for s in imports.remove_star_imports(self, non_flow.get_parent_until()):
for g in self.get_names_of_scope(s, star_search=False):
yield g
# Add builtins to the global scope.
if include_builtin:
builtin_scope = builtin.Builtin.scope
yield builtin_scope, builtin_scope.get_defined_names()
def find_name(self, scope, name_str, position=None, search_global=False,
is_goto=False, resolve_decorator=True):
"""
This is the search function. The most important part to debug.
`remove_statements` and `filter_statements` really are the core part of
this completion.
:param position: Position of the last statement -> tuple of line, column
:return: List of Names. Their parents are the scopes, they are defined in.
:rtype: list
"""
def remove_statements(result):
"""
This is the part where statements are being stripped.
Due to lazy evaluation, statements like a = func; b = a; b() have to be
evaluated.
"""
res_new = []
for r in result:
add = []
if r.isinstance(pr.Statement):
check_instance = None
if isinstance(r, er.InstanceElement) and r.is_class_var:
check_instance = r.instance
r = r.var
# Global variables handling.
if r.is_global():
for token_name in r.token_list[1:]:
if isinstance(token_name, pr.Name):
add = self.find_name(r.parent, str(token_name))
else:
# generated objects are used within executions, but these
# objects are in functions, and we have to dynamically
# execute first.
if isinstance(r, pr.Param):
func = r.parent
# Instances are typically faked, if the instance is not
# called from outside. Here we check it for __init__
# functions and return.
if isinstance(func, er.InstanceElement) \
and func.instance.is_generated \
and hasattr(func, 'name') \
and str(func.name) == '__init__' \
and r.position_nr > 0: # 0 would be self
r = func.var.params[r.position_nr]
# add docstring knowledge
doc_params = docstrings.follow_param(self, r)
if doc_params:
res_new += doc_params
continue
if not r.is_generated:
res_new += dynamic.search_params(self, r)
if not res_new:
c = r.get_commands()[0]
if c in ('*', '**'):
t = 'tuple' if c == '*' else 'dict'
res_new = [er.Instance(
self, self.find_name(builtin.Builtin.scope, t)[0])
]
if not r.assignment_details:
# this means that there are no default params,
# so just ignore it.
continue
# Remove the statement docstr stuff for now, that has to be
# implemented with the evaluator class.
#if r.docstr:
#res_new.append(r)
scopes = self.follow_statement(r, seek_name=name_str)
add += remove_statements(scopes)
if check_instance is not None:
# class renames
add = [er.InstanceElement(self, check_instance, a, True)
if isinstance(a, (er.Function, pr.Function))
else a for a in add]
res_new += add
else:
if isinstance(r, pr.Class):
r = er.Class(self, r)
elif isinstance(r, pr.Function):
r = er.Function(self, r)
if r.isinstance(er.Function) and resolve_decorator:
r = r.get_decorated_func()
res_new.append(r)
debug.dbg('sfn remove, new: %s, old: %s' % (res_new, result))
return res_new
def filter_name(scope_generator):
"""
Filters all variables of a scope (which are defined in the
`scope_generator`), until the name fits.
"""
def handle_for_loops(loop):
# Take the first statement (for has always only
# one, remember `in`). And follow it.
if not loop.inputs:
return []
result = get_iterator_types(self.follow_statement(loop.inputs[0]))
if len(loop.set_vars) > 1:
commands = loop.set_stmt.get_commands()
# loops with loop.set_vars > 0 only have one command
result = assign_tuples(commands[0], result, name_str)
return result
def process(name):
"""
Returns the parent of a name, which means the element which stands
behind a name.
"""
result = []
no_break_scope = False
par = name.parent
exc = pr.Class, pr.Function
until = lambda: par.parent.parent.get_parent_until(exc)
is_array_assignment = False
if par is None:
pass
elif par.isinstance(pr.Flow):
if par.command == 'for':
result += handle_for_loops(par)
else:
debug.warning('Flow: Why are you here? %s' % par.command)
elif par.isinstance(pr.Param) \
and par.parent is not None \
and isinstance(until(), pr.Class) \
and par.position_nr == 0:
# This is where `self` gets added. Normally this happens elsewhere,
# when the var_args are known. But sometimes the class is not known,
# therefore create a new instance for `self`; otherwise take the
# existing one.
if isinstance(scope, er.InstanceElement):
inst = scope.instance
else:
inst = er.Instance(self, er.Class(self, until()))
inst.is_generated = True
result.append(inst)
elif par.isinstance(pr.Statement):
def is_execution(calls):
for c in calls:
if isinstance(c, (unicode, str)):
continue
if c.isinstance(pr.Array):
if is_execution(c):
return True
elif c.isinstance(pr.Call):
# Compare start_pos, because names may be different
# because of executions.
if c.name.start_pos == name.start_pos \
and c.execution:
return True
return False
is_exe = False
for assignee, op in par.assignment_details:
is_exe |= is_execution(assignee)
if is_exe:
# filter array[3] = ...
# TODO check executions for dict contents
is_array_assignment = True
else:
details = par.assignment_details
if details and details[0][1] != '=':
no_break_scope = True
# TODO this makes self variables non-breakable. wanted?
if isinstance(name, er.InstanceElement) \
and not name.is_class_var:
no_break_scope = True
result.append(par)
else:
# TODO multi-level import non-breakable
if isinstance(par, pr.Import) and len(par.namespace) > 1:
no_break_scope = True
result.append(par)
return result, no_break_scope, is_array_assignment
flow_scope = scope
result = []
# the comparison function uses the (line, column) tuple of start_pos
comparison_func = lambda name: (name.start_pos)
for nscope, name_list in scope_generator:
break_scopes = []
# this is where the position handling happens (sorting of names)
for name in sorted(name_list, key=comparison_func, reverse=True):
p = name.parent.parent if name.parent else None
if isinstance(p, er.InstanceElement) \
and isinstance(p.var, pr.Class):
p = p.var
if name_str == name.get_code() and p not in break_scopes:
r, no_break_scope, is_array_assignment = process(name)
if is_goto:
if not is_array_assignment: # shouldn't goto arr[1] =
result.append(name)
else:
result += r
# for comparison we need the raw class
s = nscope.base if isinstance(nscope, er.Class) else nscope
# this means that a definition was found and is not e.g.
# in if/else.
if result and not no_break_scope:
if not name.parent or p == s:
break
break_scopes.append(p)
while flow_scope:
# TODO check if result is in scope -> no evaluation necessary
n = dynamic.check_flow_information(self, flow_scope,
name_str, position)
if n:
result = n
break
if result:
break
if flow_scope == nscope:
break
flow_scope = flow_scope.parent
flow_scope = nscope
if result:
break
if not result and isinstance(nscope, er.Instance):
# __getattr__ / __getattribute__
result += check_getattr(nscope, name_str)
debug.dbg('sfn filter "%s" in (%s-%s): %s@%s'
% (name_str, scope, nscope, u(result), position))
return result
def descriptor_check(result):
"""Processes descriptors"""
res_new = []
for r in result:
if isinstance(scope, (er.Instance, er.Class)) \
and hasattr(r, 'get_descriptor_return'):
# handle descriptors
with common.ignored(KeyError):
res_new += r.get_descriptor_return(scope)
continue
res_new.append(r)
return res_new
if search_global:
scope_generator = self.get_names_of_scope(scope, position=position)
else:
if isinstance(scope, er.Instance):
scope_generator = scope.scope_generator()
else:
if isinstance(scope, (er.Class, pr.Module)):
# classes are only available directly via chaining?
# strange stuff...
names = scope.get_defined_names()
else:
names = get_defined_names_for_position(scope, position)
scope_generator = iter([(scope, names)])
if is_goto:
return filter_name(scope_generator)
return descriptor_check(remove_statements(filter_name(scope_generator)))
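For reference, a minimal, self-contained Python example of the descriptor behaviour that descriptor_check has to account for (illustrative only, not jedi's API): attribute access on a class or instance is routed through the descriptor's __get__, so the name resolves to __get__'s return value rather than to the descriptor object itself.
class Typed(object):
    def __get__(self, obj, owner):
        return 42                     # the value behind the attribute

class Config(object):
    port = Typed()

print(Config().port)   # 42 - instance access goes through __get__
print(Config.port)     # 42 - class access as well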
@memoize_default(default=(), evaluator_is_first_arg=True)
@recursion.recursion_decorator
def follow_statement(self, stmt, seek_name=None):
"""
The starting point of the completion. A statement always owns a call list,
which is the list of calls that the statement makes.
In case multiple names are defined in the statement, `seek_name` restricts
the result to that name.
:param stmt: A `pr.Statement`.
:param seek_name: A string.
"""
debug.dbg('follow_stmt %s (%s)' % (stmt, seek_name))
commands = stmt.get_commands()
debug.dbg('calls: %s' % commands)
result = self.follow_call_list(commands)
# Assignment checking is only important if the statement defines multiple
# variables.
if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
new_result = []
for ass_commands, op in stmt.assignment_details:
new_result += find_assignments(ass_commands[0], result, seek_name)
result = new_result
return set(result)
@common.rethrow_uncaught
def follow_call_list(self, call_list, follow_array=False):
"""
`call_list` can be either `pr.Array` or `list of list`.
It is used to evaluate a two-dimensional object that contains calls, arrays
and operators.
"""
def evaluate_list_comprehension(lc, parent=None):
input = lc.input
nested_lc = lc.input.token_list[0]
if isinstance(nested_lc, pr.ListComprehension):
# is nested LC
input = nested_lc.stmt
module = input.get_parent_until()
# create a for loop, which does the same as list comprehensions
loop = pr.ForFlow(module, [input], lc.stmt.start_pos, lc.middle, True)
loop.parent = parent or lc.get_parent_until(pr.IsScope)
if isinstance(nested_lc, pr.ListComprehension):
loop = evaluate_list_comprehension(nested_lc, loop)
return loop
result = []
calls_iterator = iter(call_list)
for call in calls_iterator:
if pr.Array.is_type(call, pr.Array.NOARRAY):
r = list(itertools.chain.from_iterable(self.follow_statement(s)
for s in call))
call_path = call.generate_call_path()
next(call_path, None) # the first one has been used already
result += self.follow_paths(call_path, r, call.parent,
position=call.start_pos)
elif isinstance(call, pr.ListComprehension):
loop = evaluate_list_comprehension(call)
# Caveat: parents are being changed, but this doesn't matter,
# because nothing else uses it.
call.stmt.parent = loop
result += self.follow_statement(call.stmt)
else:
if isinstance(call, pr.Lambda):
result.append(er.Function(self, call))
# With things like params, these can also be functions...
elif isinstance(call, pr.Base) and call.isinstance(
er.Function, er.Class, er.Instance, dynamic.ArrayInstance):
result.append(call)
# The string tokens are just operations (+, -, etc.)
elif not isinstance(call, (str, unicode)):
if isinstance(call, pr.Call) and str(call.name) == 'if':
# Ternary operators.
while True:
try:
call = next(calls_iterator)
except StopIteration:
break
with common.ignored(AttributeError):
if str(call.name) == 'else':
break
continue
result += self.follow_call(call)
elif call == '*':
if [r for r in result if isinstance(r, er.Array)
or isinstance(r, er.Instance)
and str(r.name) == 'str']:
# if it is an iterable, ignore * operations
next(calls_iterator)
return set(result)
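The evaluate_list_comprehension helper above rewrites a list comprehension into an equivalent ForFlow before evaluation. A standalone sketch of why that rewrite is sound (plain Python, no jedi objects):
comprehension = [x * 2 for x in (1, 2, 3)]

desugared = []
for x in (1, 2, 3):          # the generated for loop
    desugared.append(x * 2)  # the comprehension's statement

print(comprehension == desugared)   # True - same element types either way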
def follow_call(self, call):
"""Follow a call is following a function, variable, string, etc."""
path = call.generate_call_path()
# find the statement of the Scope
s = call
while not s.parent.isinstance(pr.IsScope):
s = s.parent
return self.follow_call_path(path, s.parent, s.start_pos)
def follow_call_path(self, path, scope, position):
"""Follows a path generated by `pr.StatementElement.generate_call_path()`"""
current = next(path)
if isinstance(current, pr.Array):
result = [er.Array(self, current)]
else:
if isinstance(current, pr.NamePart):
# This is the first global lookup.
scopes = self.find_name(scope, current, position=position,
search_global=True)
else:
# for pr.Literal
scopes = self.find_name(builtin.Builtin.scope, current.type_as_string())
# Make instances of those number/string objects.
scopes = [er.Instance(self, s, (current.value,)) for s in scopes]
result = imports.strip_imports(self, scopes)
return self.follow_paths(path, result, scope, position=position)
def follow_paths(self, path, results, call_scope, position=None):
"""
In each result, `path` must be followed. Copies the path iterator.
"""
results_new = []
if results:
if len(results) > 1:
iter_paths = itertools.tee(path, len(results))
else:
iter_paths = [path]
for i, r in enumerate(results):
fp = self.follow_path(iter_paths[i], r, call_scope, position=position)
if fp is not None:
results_new += fp
else:
# This means stop iteration.
return results
return results_new
def follow_path(self, path, scope, call_scope, position=None):
"""
Uses a generator and tries to complete the path, e.g.::
foo.bar.baz
`follow_path` is only responsible for completing `.bar.baz`, the rest is
done in the `follow_call` function.
"""
# current is either an Array or a Scope.
try:
current = next(path)
except StopIteration:
return None
debug.dbg('follow %s in scope %s' % (current, scope))
result = []
if isinstance(current, pr.Array):
# This must be an execution, either () or [].
if current.type == pr.Array.LIST:
if hasattr(scope, 'get_index_types'):
result = scope.get_index_types(current)
elif current.type not in [pr.Array.DICT]:
# Scope must be a class or func - make an instance or execution.
debug.dbg('exe', scope)
result = self.execute(scope, current)
else:
# Curly braces are not allowed, because they make no sense.
debug.warning('strange function call with {}', current, scope)
else:
# The function must not be decorated with something else.
if scope.isinstance(er.Function):
scope = scope.get_magic_method_scope()
else:
# This is the typical lookup while chaining things.
if filter_private_variable(scope, call_scope, current):
return []
result = imports.strip_imports(self, self.find_name(scope, current,
position=position))
return self.follow_paths(path, set(result), call_scope, position=position)
def execute(self, scope, params, evaluate_generator=False):
return er.Execution(self, scope, params).get_return_types(evaluate_generator)
def goto(self, stmt, call_path=None):
if call_path is None:
commands = stmt.get_commands()
if len(commands) == 0:
return [], ''
# Only the first command is important, the rest should basically not
# happen except in broken code (e.g. docstrings that aren't code).
call = commands[0]
if isinstance(call, (str, unicode)):
call_path = [call]
else:
call_path = list(call.generate_call_path())
scope = stmt.get_parent_until(pr.IsScope)
pos = stmt.start_pos
call_path, search = call_path[:-1], call_path[-1]
pos = pos[0], pos[1] + 1
if call_path:
scopes = self.follow_call_path(iter(call_path), scope, pos)
search_global = False
pos = None
else:
scopes = [scope]
search_global = True
follow_res = []
for s in scopes:
follow_res += self.find_name(s, search, pos,
search_global=search_global, is_goto=True)
return follow_res, search
def filter_private_variable(scope, call_scope, var_name):
"""private variables begin with a double underline `__`"""
if isinstance(var_name, (str, unicode)) and isinstance(scope, er.Instance)\
and var_name.startswith('__') and not var_name.endswith('__'):
s = call_scope.get_parent_until((pr.Class, er.Instance))
if s != scope and s != scope.base.base:
return True
return False
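A self-contained illustration of the rule that filter_private_variable applies (the predicate below is a simplification; the real check also compares the calling scope):
def is_private(name):
    return name.startswith('__') and not name.endswith('__')

print(is_private('__secret'))   # True  - hidden when completed from outside the class
print(is_private('__init__'))   # False - dunder names stay visible
print(is_private('_internal'))  # False - a single underscore is only a convention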
def check_getattr(inst, name_str):
"""Checks for both __getattr__ and __getattribute__ methods"""
result = []
# str is important to lose the NamePart!
module = builtin.Builtin.scope
name = pr.String(module, "'%s'" % name_str, (0, 0), (0, 0), inst)
with common.ignored(KeyError):
result = inst.execute_subscope_by_name('__getattr__', [name])
if not result:
# This is a little bit special. `__getattribute__` is executed
# before anything else. But I know of no use case where this
# would be practical and Jedi would return wrong types. If
# you ever find one, let me know!
with common.ignored(KeyError):
result = inst.execute_subscope_by_name('__getattribute__', [name])
return result
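For reference, plain Python only falls back to __getattr__ when normal attribute lookup fails, while __getattribute__ runs first for every access; a minimal, self-contained example (not jedi code):
class Proxy(object):
    def __init__(self, wrapped):
        self._wrapped = wrapped
    def __getattr__(self, name):       # only reached when normal lookup fails
        return getattr(self._wrapped, name)

p = Proxy("text")
print(p.upper())     # 'TEXT' - resolved through __getattr__
print(p._wrapped)    # 'text' - found directly, __getattr__ is never called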
def get_iterator_types(inputs):
"""Returns the types of any iterator (arrays, yields, __iter__, etc)."""
iterators = []
# Take the first statement (a for loop always has exactly
# one, remember `in`) and follow it.
for it in inputs:
if isinstance(it, (er.Generator, er.Array, dynamic.ArrayInstance)):
iterators.append(it)
else:
if not hasattr(it, 'execute_subscope_by_name'):
debug.warning('iterator/for loop input wrong', it)
continue
try:
iterators += it.execute_subscope_by_name('__iter__')
except KeyError:
debug.warning('iterators: No __iter__ method found.')
result = []
for gen in iterators:
if isinstance(gen, er.Array):
# Array is a little bit special, since this is an internal
# array, but there's also the list builtin, which is
# another thing.
result += gen.get_index_types()
elif isinstance(gen, er.Instance):
# __iter__ returned an instance.
name = '__next__' if is_py3k else 'next'
try:
result += gen.execute_subscope_by_name(name)
except KeyError:
debug.warning('Instance has no __next__ function', gen)
else:
# is a generator
result += gen.iter_content()
return result
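A self-contained sketch of the protocol that get_iterator_types walks: __iter__ is followed first, then next()/__next__() on the returned instance determines the element types (plain Python, runs on 2 and 3):
class Squares(object):
    def __init__(self, limit):
        self.i, self.limit = 0, limit
    def __iter__(self):
        return self
    def __next__(self):                # 'next' on Python 2
        if self.i >= self.limit:
            raise StopIteration
        self.i += 1
        return self.i * self.i
    next = __next__

print(list(Squares(3)))   # [1, 4, 9] - element type inferred from the return of __next__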
def assign_tuples(tup, results, seek_name):
"""
This is a normal assignment checker. In Python, functions and other things
can return tuples:
>>> a, b = 1, ""
>>> a, (b, c) = 1, ("", 1.0)
Here, if `seek_name` is "a", the number type will be returned.
The first part (before `=`) is the tuple of targets, the second one the result.
:type tup: pr.Array
"""
def eval_results(index):
types = []
for r in results:
try:
func = r.get_exact_index_types
except AttributeError:
debug.warning("invalid tuple lookup %s of result %s in %s"
% (tup, results, seek_name))
else:
with common.ignored(IndexError):
types += func(index)
return types
result = []
for i, stmt in enumerate(tup):
# Used in assignments. There is just one call and nothing else,
# therefore we can just assume that the first part is the important one.
command = stmt.get_commands()[0]
if tup.type == pr.Array.NOARRAY:
# unnecessary braces -> just remove.
r = results
else:
r = eval_results(i)
# LHS of tuples can be nested, so resolve it recursively
result += find_assignments(command, r, seek_name)
return result
def find_assignments(lhs, results, seek_name):
"""
Check if `seek_name` is in the left hand side `lhs` of assignment.
`lhs` can simply be a variable (`pr.Call`) or a tuple/list (`pr.Array`)
representing the following cases::
a = 1 # lhs is pr.Call
(a, b) = 2 # lhs is pr.Array
:type lhs: pr.Call
:type results: list
:type seek_name: str
"""
if isinstance(lhs, pr.Array):
return assign_tuples(lhs, results, seek_name)
elif lhs.name.names[-1] == seek_name:
return results
else:
return []
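To make the nested tuple resolution above concrete, here is a minimal, self-contained sketch; it operates on plain tuples and strings instead of pr.Array/pr.Call, so the names are illustrative only:
def resolve_assignment(lhs, value, seek_name):
    if isinstance(lhs, tuple):                        # mirrors the pr.Array branch
        for i, part in enumerate(lhs):
            found = resolve_assignment(part, value[i], seek_name)
            if found is not None:
                return found
        return None
    return value if lhs == seek_name else None        # mirrors the pr.Call branch

lhs = ('a', ('b', 'c'))       # a, (b, c) = 1, ("", 1.0)
rhs = (1, ("", 1.0))
print(resolve_assignment(lhs, rhs, 'c'))   # 1.0 - the value behind the name `c`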

View File

@@ -36,7 +36,6 @@ from jedi import common
from jedi import debug
from jedi.parser import Parser
from jedi import modules
import evaluate
class BuiltinModule(modules.CachedModule):
@@ -167,7 +166,7 @@ class BuiltinModule(modules.CachedModule):
if name == '__builtin__' and not is_py3k:
name = 'builtins'
path = os.path.dirname(os.path.abspath(__file__))
with open(os.path.sep.join([path, 'mixin', name]) + '.pym') as f:
with open(os.path.join(path, 'mixin', name) + '.pym') as f:
s = f.read()
except IOError:
return {}
@@ -429,8 +428,7 @@ class Builtin(object):
def scope(self):
return self.builtin.parser.module
@property
def magic_function_scope(self):
def magic_function_scope(self, evaluator):
try:
return self._magic_function_scope
except AttributeError:
@@ -441,7 +439,7 @@ class Builtin(object):
parser = Parser(source, None)
module = parser.module
module.parent = self.scope
typ = evaluate.follow_path(iter(['FunctionType']), module, module)
typ = evaluator.follow_path(iter(['FunctionType']), module, module)
s = self._magic_function_scope = typ.pop()
return s

51
jedi/evaluate/cache.py Normal file
View File

@@ -0,0 +1,51 @@
"""
- the popular ``memoize_default`` works like a typical memoize and returns the
default otherwise.
- ``CachedMetaClass`` uses ``memoize_default`` to do the same with classes.
"""
def memoize_default(default, evaluator_is_first_arg=False, second_arg_is_evaluator=False):
""" This is a typical memoization decorator, BUT there is one difference:
To prevent recursion it sets defaults.
In this case preventing recursion matters much more than speed. I don't
think there is a big speed difference, but there are many cases where
recursion could happen (think about a = b; b = a).
"""
def func(function):
def wrapper(obj, *args, **kwargs):
if evaluator_is_first_arg:
cache = obj.memoize_cache
elif second_arg_is_evaluator: # needed for meta classes
cache = args[0].memoize_cache
else:
cache = obj._evaluator.memoize_cache
try:
memo = cache[function]
except KeyError:
memo = {}
cache[function] = memo
key = (obj, args, frozenset(kwargs.items()))
if key in memo:
return memo[key]
else:
memo[key] = default
rv = function(obj, *args, **kwargs)
memo[key] = rv
return rv
return wrapper
return func
class CachedMetaClass(type):
"""
This is basically the same as the decorator above; it just caches class
instantiations. I haven't found any other way, so I'm doing it with
metaclasses.
"""
@memoize_default(None, second_arg_is_evaluator=True)
def __call__(self, *args, **kwargs):
return super(CachedMetaClass, self).__call__(*args, **kwargs)
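A standalone sketch of the recursion-breaking idea behind memoize_default, assuming a plain per-function dict keyed by the positional arguments and no evaluator object: the cache is pre-seeded with the default, so a re-entrant call with the same arguments returns immediately instead of recursing forever.
def memoize_with_default(default):
    def decorator(func):
        memo = {}
        def wrapper(*args):
            if args in memo:
                return memo[args]      # a finished value or the pre-seeded default
            memo[args] = default       # a recursive call with the same args stops here
            memo[args] = func(*args)
            return memo[args]
        return wrapper
    return decorator

CYCLIC = {'a': 'b', 'b': 'a'}          # models a = b; b = a

@memoize_with_default(default=())
def follow(name):
    target = CYCLIC.get(name)
    return (name,) if target is None else follow(target)

print(follow('a'))   # () - the cycle is cut off instead of recursing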

View File

@@ -51,26 +51,19 @@ would check whether a flow has the form of ``if isinstance(a, type_or_tuple)``.
Unfortunately everything else is ignored (e.g. a == '' would be easy to
check for -> a is a string). There's big potential in these checks.
"""
from __future__ import with_statement
import os
from jedi import cache
from jedi.parser import representation as pr
from jedi import modules
from jedi import settings
from jedi import common
from jedi import debug
from jedi.parser import fast as fast_parser
import api_classes
import evaluate
import imports
import evaluate_representation as er
from jedi.evaluate.cache import memoize_default
# This is something like the sys.path, but only for searching params. It means
# that this is the order in which Jedi searches params.
search_param_modules = ['.']
search_param_cache = {}
def get_directory_modules_for_name(mods, name):
@@ -116,22 +109,6 @@ def get_directory_modules_for_name(mods, name):
yield c
def search_param_memoize(func):
"""
Is only good for search params memoize, respectively the closure,
because it just caches the input, not the func, like normal memoize does.
"""
def wrapper(*args, **kwargs):
key = (args, frozenset(kwargs.items()))
if key in search_param_cache:
return search_param_cache[key]
else:
rv = func(*args, **kwargs)
search_param_cache[key] = rv
return rv
return wrapper
class ParamListener(object):
"""
This listener is used to get the params for a function.
@@ -143,8 +120,8 @@ class ParamListener(object):
self.param_possibilities.append(params)
@cache.memoize_default([])
def search_params(param):
@memoize_default([], evaluator_is_first_arg=True)
def search_params(evaluator, param):
"""
This is a dynamic search for params. If you try to complete a type:
@@ -164,8 +141,8 @@ def search_params(param):
"""
Returns the values of a param, or an empty array.
"""
@search_param_memoize
def get_posibilities(module, func_name):
@memoize_default([], evaluator_is_first_arg=True)
def get_posibilities(evaluator, module, func_name):
try:
possible_stmts = module.used_names[func_name]
except KeyError:
@@ -196,12 +173,13 @@ def search_params(param):
continue
scopes = [scope]
if first:
scopes = evaluate.follow_call_path(iter(first), scope, pos)
scopes = evaluator.follow_call_path(iter(first), scope, pos)
pos = None
from jedi.evaluate import representation as er
for scope in scopes:
s = evaluate.find_name(scope, func_name, position=pos,
search_global=not first,
resolve_decorator=False)
s = evaluator.find_name(scope, func_name, position=pos,
search_global=not first,
resolve_decorator=False)
c = [getattr(escope, 'base_func', None) or escope.base
for escope in s
@@ -209,15 +187,15 @@ def search_params(param):
if compare in c:
# only if we have the correct function we execute
# it, otherwise just ignore it.
evaluate.follow_paths(iter(last), s, scope)
evaluator.follow_paths(iter(last), s, scope)
return listener.param_possibilities
result = []
for params in get_posibilities(module, func_name):
for params in get_posibilities(evaluator, module, func_name):
for p in params:
if str(p) == param_name:
result += evaluate.follow_statement(p.parent)
result += evaluator.follow_statement(p.parent)
return result
func = param.get_parent_until(pr.Function)
@@ -254,7 +232,7 @@ def search_params(param):
return result
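The dynamic param search above infers a parameter's types from other call sites of the same function. A hedged, standalone sketch of that idea using only the stdlib ast module (Python 3.8+ for ast.Constant), completely separate from jedi's parser and listener machinery:
import ast

SOURCE = '''
def greet(name):
    return name.upper()

greet("world")
greet("again")
'''

def call_argument_types(source, func_name):
    types = set()
    for node in ast.walk(ast.parse(source)):
        if (isinstance(node, ast.Call) and isinstance(node.func, ast.Name)
                and node.func.id == func_name):
            for arg in node.args:
                if isinstance(arg, ast.Constant):
                    types.add(type(arg.value).__name__)
    return types

print(call_argument_types(SOURCE, 'greet'))   # {'str'} - `name` is probably a str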
def check_array_additions(array):
def check_array_additions(evaluator, array):
""" Just a mapper function for the internal _check_array_additions """
if not pr.Array.is_type(array._array, pr.Array.LIST, pr.Array.SET):
# TODO also check for dict updates
@@ -262,7 +240,7 @@ def check_array_additions(array):
is_list = array._array.type == 'list'
current_module = array._array.get_parent_until()
res = _check_array_additions(array, current_module, is_list)
res = _check_array_additions(evaluator, array, current_module, is_list)
return res
@@ -302,8 +280,8 @@ def _scan_statement(stmt, search_name, assignment_details=False):
return result
@cache.memoize_default([])
def _check_array_additions(compare_array, module, is_list):
@memoize_default([], evaluator_is_first_arg=True)
def _check_array_additions(evaluator, compare_array, module, is_list):
"""
Checks if a `pr.Array` has "add" statements:
>>> a = [""]
@@ -330,7 +308,7 @@ def _check_array_additions(compare_array, module, is_list):
position = c.start_pos
scope = c.get_parent_until(pr.IsScope)
found = evaluate.follow_call_path(backtrack_path, scope, position)
found = evaluator.follow_call_path(backtrack_path, scope, position)
if not compare_array in found:
continue
@@ -339,20 +317,23 @@ def _check_array_additions(compare_array, module, is_list):
continue # no params: just ignore it
if add_name in ['append', 'add']:
for param in params:
result += evaluate.follow_statement(param)
result += evaluator.follow_statement(param)
elif add_name in ['insert']:
try:
second_param = params[1]
except IndexError:
continue
else:
result += evaluate.follow_statement(second_param)
result += evaluator.follow_statement(second_param)
elif add_name in ['extend', 'update']:
for param in params:
iterators = evaluate.follow_statement(param)
iterators = evaluator.follow_statement(param)
result += evaluate.get_iterator_types(iterators)
return result
from jedi.evaluate import representation as er
from jedi import evaluate
def get_execution_parent(element, *stop_classes):
""" Used to get an Instance/Execution parent """
if isinstance(element, er.Array):
@@ -393,21 +374,21 @@ def _check_array_additions(compare_array, module, is_list):
if isinstance(comp_arr_parent, er.InstanceElement):
stmt = er.InstanceElement(comp_arr_parent.instance, stmt)
if evaluate.follow_statement.push_stmt(stmt):
if evaluator.recursion_detector.push_stmt(stmt):
# check recursion
continue
res += check_calls(_scan_statement(stmt, n), n)
evaluate.follow_statement.pop_stmt()
evaluator.recursion_detector.pop_stmt()
# reset settings
settings.dynamic_params_for_other_modules = temp_param_add
return res
def check_array_instances(instance):
def check_array_instances(evaluator, instance):
"""Used for set() and list() instances."""
if not settings.dynamic_arrays_instances:
return instance.var_args
ai = ArrayInstance(instance)
ai = ArrayInstance(evaluator, instance)
return [ai]
@@ -417,7 +398,8 @@ class ArrayInstance(pr.Base):
This is definitely a hack, but a good one :-)
It makes it possible to use set/list conversions.
"""
def __init__(self, instance):
def __init__(self, evaluator, instance):
self._evaluator = evaluator
self.instance = instance
self.var_args = instance.var_args
@@ -427,9 +409,10 @@ class ArrayInstance(pr.Base):
lists/sets are too complicated to handle that.
"""
items = []
from jedi import evaluate
for stmt in self.var_args:
for typ in evaluate.follow_statement(stmt):
if isinstance(typ, er.Instance) and len(typ.var_args):
for typ in self._evaluator.follow_statement(stmt):
if isinstance(typ, evaluate.er.Instance) and len(typ.var_args):
array = typ.var_args[0]
if isinstance(array, ArrayInstance):
# prevent recursions
@@ -449,88 +432,11 @@ class ArrayInstance(pr.Base):
module = self.var_args.get_parent_until()
is_list = str(self.instance.name) == 'list'
items += _check_array_additions(self.instance, module, is_list)
items += _check_array_additions(self._evaluator, self.instance, module, is_list)
return items
def usages(definitions, search_name, mods):
def compare_array(definitions):
""" `definitions` are being compared by module/start_pos, because
sometimes the id's of the objects change (e.g. executions).
"""
result = []
for d in definitions:
module = d.get_parent_until()
result.append((module, d.start_pos))
return result
def check_call(call):
result = []
follow = [] # There might be multiple search_name's in one call_path
call_path = list(call.generate_call_path())
for i, name in enumerate(call_path):
# name is `pr.NamePart`.
if name == search_name:
follow.append(call_path[:i + 1])
for f in follow:
follow_res, search = evaluate.goto(call.parent, f)
follow_res = usages_add_import_modules(follow_res, search)
compare_follow_res = compare_array(follow_res)
# compare to see if they match
if any(r in compare_definitions for r in compare_follow_res):
scope = call.parent
result.append(api_classes.Usage(search, scope))
return result
if not definitions:
return set()
compare_definitions = compare_array(definitions)
mods |= set([d.get_parent_until() for d in definitions])
names = []
for m in get_directory_modules_for_name(mods, search_name):
try:
stmts = m.used_names[search_name]
except KeyError:
continue
for stmt in stmts:
if isinstance(stmt, pr.Import):
count = 0
imps = []
for i in stmt.get_all_import_names():
for name_part in i.names:
count += 1
if name_part == search_name:
imps.append((count, name_part))
for used_count, name_part in imps:
i = imports.ImportPath(stmt, kill_count=count - used_count,
direct_resolve=True)
f = i.follow(is_goto=True)
if set(f) & set(definitions):
names.append(api_classes.Usage(name_part, stmt))
else:
for call in _scan_statement(stmt, search_name,
assignment_details=True):
names += check_call(call)
return names
def usages_add_import_modules(definitions, search_name):
""" Adds the modules of the imports """
new = set()
for d in definitions:
if isinstance(d.parent, pr.Import):
s = imports.ImportPath(d.parent, direct_resolve=True)
with common.ignored(IndexError):
new.add(s.follow(is_goto=True)[0])
return set(definitions) | new
def check_flow_information(flow, search_name, pos):
def check_flow_information(evaluator, flow, search_name, pos):
""" Try to find out the type of a variable just with the information that
is given by the flows. It is also responsible for assert checks, e.g.::
@@ -546,17 +452,18 @@ def check_flow_information(flow, search_name, pos):
for ass in reversed(flow.asserts):
if pos is None or ass.start_pos > pos:
continue
result = _check_isinstance_type(ass, search_name)
result = _check_isinstance_type(evaluator, ass, search_name)
if result:
break
if isinstance(flow, pr.Flow) and not result:
if flow.command in ['if', 'while'] and len(flow.inputs) == 1:
result = _check_isinstance_type(flow.inputs[0], search_name)
result = _check_isinstance_type(evaluator, flow.inputs[0], search_name)
return result
def _check_isinstance_type(stmt, search_name):
def _check_isinstance_type(evaluator, stmt, search_name):
from jedi.evaluate import representation as er
try:
commands = stmt.get_commands()
# this might be removed if we analyze and, etc
@@ -579,11 +486,11 @@ def _check_isinstance_type(stmt, search_name):
return []
result = []
for c in evaluate.follow_call(classes[0]):
for c in evaluator.follow_call(classes[0]):
if isinstance(c, er.Array):
result += c.get_index_types()
else:
result.append(c)
for i, c in enumerate(result):
result[i] = er.Instance(c)
result[i] = er.Instance(evaluator, c)
return result
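A hedged, self-contained sketch of the flow check described above, using the stdlib ast module instead of jedi's parser (the helper name is made up): it extracts the class names that a variable is guarded against with isinstance, which is what _check_isinstance_type returns instances for.
import ast

def isinstance_targets(source, search_name):
    types = []
    for node in ast.walk(ast.parse(source)):
        if (isinstance(node, ast.Call)
                and isinstance(node.func, ast.Name)
                and node.func.id == 'isinstance'
                and len(node.args) == 2
                and isinstance(node.args[0], ast.Name)
                and node.args[0].id == search_name):
            cls = node.args[1]
            if isinstance(cls, ast.Name):
                types.append(cls.id)
            elif isinstance(cls, ast.Tuple):          # isinstance(a, (int, str))
                types += [e.id for e in cls.elts if isinstance(e, ast.Name)]
    return types

print(isinstance_targets("if isinstance(a, (int, str)):\n    pass\n", 'a'))   # ['int', 'str']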

View File

@@ -24,11 +24,7 @@ from jedi import common
from jedi import debug
from jedi.parser import representation as pr
from jedi import cache
import builtin
import evaluate
# for debugging purposes only
imports_processed = 0
from jedi.evaluate import builtin
class ModuleNotFound(Exception):
@@ -45,8 +41,9 @@ class ImportPath(pr.Base):
GlobalNamespace = GlobalNamespace()
def __init__(self, import_stmt, is_like_search=False, kill_count=0,
def __init__(self, evaluator, import_stmt, is_like_search=False, kill_count=0,
direct_resolve=False, is_just_from=False):
self._evaluator = evaluator
self.import_stmt = import_stmt
self.is_like_search = is_like_search
self.direct_resolve = direct_resolve
@@ -131,9 +128,9 @@ class ImportPath(pr.Base):
# ``sys.modules`` modification.
p = (0, 0)
names.append(pr.Name(self.GlobalNamespace, [('path', p)],
p, p, self.import_stmt))
p, p, self.import_stmt))
continue
for s, scope_names in evaluate.get_names_of_scope(scope,
for s, scope_names in self._evaluator.get_names_of_scope(scope,
include_builtin=False):
for n in scope_names:
if self.import_stmt.from_ns is None \
@@ -187,7 +184,7 @@ class ImportPath(pr.Base):
"""
Returns the imported modules.
"""
if evaluate.follow_statement.push_stmt(self.import_stmt):
if self._evaluator.recursion_detector.push_stmt(self.import_stmt):
# check recursion
return []
@@ -196,11 +193,11 @@ class ImportPath(pr.Base):
scope, rest = self._follow_file_system()
except ModuleNotFound:
debug.warning('Module not found: ' + str(self.import_stmt))
evaluate.follow_statement.pop_stmt()
self._evaluator.recursion_detector.pop_stmt()
return []
scopes = [scope]
scopes += remove_star_imports(scope)
scopes += remove_star_imports(self._evaluator, scope)
# follow the rest of the import (not FS -> classes, functions)
if len(rest) > 1 or rest and self.is_like_search:
@@ -211,15 +208,15 @@ class ImportPath(pr.Base):
# ``os.path``, because it's a very important one in Python
# that is being achieved by messing with ``sys.modules`` in
# ``os``.
scopes = evaluate.follow_path(iter(rest), scope, scope)
scopes = self._evaluator.follow_path(iter(rest), scope, scope)
elif rest:
if is_goto:
scopes = itertools.chain.from_iterable(
evaluate.find_name(s, rest[0], is_goto=True)
self._evaluator.find_name(s, rest[0], is_goto=True)
for s in scopes)
else:
scopes = itertools.chain.from_iterable(
evaluate.follow_path(iter(rest), s, s)
self._evaluator.follow_path(iter(rest), s, s)
for s in scopes)
scopes = list(scopes)
@@ -229,7 +226,7 @@ class ImportPath(pr.Base):
scopes = [ImportPath.GlobalNamespace]
debug.dbg('after import', scopes)
evaluate.follow_statement.pop_stmt()
self._evaluator.recursion_detector.pop_stmt()
return scopes
def _is_relative_import(self):
@@ -304,8 +301,6 @@ class ImportPath(pr.Base):
elif self._is_relative_import():
path = self._get_relative_path()
global imports_processed
imports_processed += 1
if path is not None:
importing = find_module(string, [path])
else:
@@ -374,7 +369,7 @@ class ImportPath(pr.Base):
return f.parser.module, rest
def strip_imports(scopes):
def strip_imports(evaluator, scopes):
"""
Here we strip the imports - they don't necessarily get resolved.
Is this really used anymore? Merge with remove_star_imports?
@@ -382,25 +377,25 @@ def strip_imports(scopes):
result = []
for s in scopes:
if isinstance(s, pr.Import):
result += ImportPath(s).follow()
result += ImportPath(evaluator, s).follow()
else:
result.append(s)
return result
@cache.cache_star_import
def remove_star_imports(scope, ignored_modules=()):
def remove_star_imports(evaluator, scope, ignored_modules=()):
"""
Check a module for star imports:
>>> from module import *
and follow these modules.
"""
modules = strip_imports(i for i in scope.get_imports() if i.star)
modules = strip_imports(evaluator, (i for i in scope.get_imports() if i.star))
new = []
for m in modules:
if m not in ignored_modules:
new += remove_star_imports(m, modules)
new += remove_star_imports(evaluator, m, modules)
modules += new
# Filter duplicate modules.

View File

@@ -0,0 +1,3 @@
class Iterable():
"""Parent class of Generator and Array, exists due to import restrictions."""
pass

View File

@@ -10,30 +10,34 @@ calls.
from jedi.parser import representation as pr
from jedi import debug
from jedi import settings
import evaluate_representation as er
import builtin
from jedi.evaluate import builtin
from jedi.evaluate import interfaces
class RecursionDecorator(object):
def recursion_decorator(func):
def run(evaluator, stmt, *args, **kwargs):
rec_detect = evaluator.recursion_detector
# print stmt, len(self.node_statements())
if rec_detect.push_stmt(stmt):
return []
else:
result = func(evaluator, stmt, *args, **kwargs)
rec_detect.pop_stmt()
return result
return run
class RecursionDetector(object):
"""
Detects recursions in statements. In a recursion, a statement at the
same place in the same module may not be executed twice.
"""
def __init__(self, func):
self.func = func
self.reset()
def __call__(self, stmt, *args, **kwargs):
# print stmt, len(self.node_statements())
if self.push_stmt(stmt):
return []
else:
result = self.func(stmt, *args, **kwargs)
self.pop_stmt()
return result
def __init__(self):
self.top = None
self.current = None
def push_stmt(self, stmt):
self.current = RecursionNode(stmt, self.current)
self.current = _RecursionNode(stmt, self.current)
check = self._check_recursion()
if check: # TODO remove False!!!!
debug.warning('caught stmt recursion: %s against %s @%s'
@@ -57,10 +61,6 @@ class RecursionDecorator(object):
if not test:
return False
def reset(self):
self.top = None
self.current = None
def node_statements(self):
result = []
n = self.current
@@ -70,7 +70,7 @@ class RecursionDecorator(object):
return result
class RecursionNode(object):
class _RecursionNode(object):
""" A node of the RecursionDecorator. """
def __init__(self, stmt, parent):
self.script = stmt.get_parent_until()
@@ -96,14 +96,29 @@ class RecursionNode(object):
and not self.is_ignored and not other.is_ignored
class ExecutionRecursionDecorator(object):
def execution_recursion_decorator(func):
def run(execution, evaluate_generator=False):
detector = execution._evaluator.execution_recursion_detector
if detector.push_execution(execution, evaluate_generator):
result = []
else:
result = func(execution, evaluate_generator)
detector.pop_execution()
return result
return run
class ExecutionRecursionDetector(object):
"""
Catches recursions of executions.
It is designed like a Singleton; only one instance should exist.
"""
def __init__(self, func):
self.func = func
self.reset()
def __init__(self):
self.recursion_level = 0
self.parent_execution_funcs = []
self.execution_funcs = set()
self.execution_count = 0
def __call__(self, execution, evaluate_generator=False):
debug.dbg('Execution recursions: %s' % execution, self.recursion_level,
@@ -112,16 +127,14 @@ class ExecutionRecursionDecorator(object):
result = []
else:
result = self.func(execution, evaluate_generator)
self.cleanup()
self.pop_execution()
return result
@classmethod
def cleanup(cls):
def pop_execution(cls):
cls.parent_execution_funcs.pop()
cls.recursion_level -= 1
@classmethod
def check_recursion(cls, execution, evaluate_generator):
def push_execution(cls, execution, evaluate_generator):
in_par_execution_funcs = execution.base in cls.parent_execution_funcs
in_execution_funcs = execution.base in cls.execution_funcs
cls.recursion_level += 1
@@ -132,7 +145,7 @@ class ExecutionRecursionDecorator(object):
if cls.execution_count > settings.max_executions:
return True
if isinstance(execution.base, (er.Generator, er.Array)):
if isinstance(execution.base, interfaces.Iterable):
return False
module = execution.get_parent_until()
if evaluate_generator or module == builtin.Builtin.scope:
@@ -147,10 +160,3 @@ class ExecutionRecursionDecorator(object):
if cls.execution_count > settings.max_executions_without_builtins:
return True
return False
@classmethod
def reset(cls):
cls.recursion_level = 0
cls.parent_execution_funcs = []
cls.execution_funcs = set()
cls.execution_count = 0
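A toy sketch of the push/pop bookkeeping the detector performs around each execution (counters only, made-up limits; the real detector also special-cases builtins, generators and executions without builtins):
class ExecutionGuard(object):
    def __init__(self, max_depth=10, max_executions=50):
        self.depth = 0
        self.count = 0
        self.max_depth = max_depth
        self.max_executions = max_executions
    def push(self):
        """Returns True if the execution should be skipped."""
        self.depth += 1
        self.count += 1
        return self.depth > self.max_depth or self.count > self.max_executions
    def pop(self):
        self.depth -= 1

guard = ExecutionGuard(max_depth=2)
print(guard.push())   # False - first level is fine
print(guard.push())   # False
print(guard.push())   # True  - too deep, this execution is skipped
guard.pop(); guard.pop(); guard.pop()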

View File

@@ -16,16 +16,16 @@ import itertools
from jedi._compatibility import use_metaclass, next, hasattr, unicode
from jedi.parser import representation as pr
from jedi import cache
from jedi import helpers
from jedi import debug
from jedi import common
import recursion
import docstrings
import imports
import evaluate
import builtin
import dynamic
from jedi.evaluate import imports
from jedi.evaluate import builtin
from jedi.evaluate import recursion
from jedi.evaluate.cache import memoize_default, CachedMetaClass
from jedi.evaluate.interfaces import Iterable
from jedi import docstrings
from jedi.evaluate import dynamic
class Executable(pr.IsScope):
@@ -33,7 +33,8 @@ class Executable(pr.IsScope):
An instance is also an executable - because __init__ is called
:param var_args: The param input array, consist of `pr.Array` or list.
"""
def __init__(self, base, var_args=()):
def __init__(self, evaluator, base, var_args=()):
self._evaluator = evaluator
self.base = base
self.var_args = var_args
@@ -52,16 +53,16 @@ class Executable(pr.IsScope):
return self.base
class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
class Instance(use_metaclass(CachedMetaClass, Executable)):
"""
This class is used to evaluate instances.
"""
def __init__(self, base, var_args=()):
super(Instance, self).__init__(base, var_args)
def __init__(self, evaluator, base, var_args=()):
super(Instance, self).__init__(evaluator, base, var_args)
if str(base.name) in ['list', 'set'] \
and builtin.Builtin.scope == base.get_parent_until():
# compare the module path with the builtin name.
self.var_args = dynamic.check_array_instances(self)
self.var_args = dynamic.check_array_instances(evaluator, self)
else:
# need to execute the __init__ function, because the dynamic param
# searching needs it.
@@ -71,10 +72,10 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
# (No var_args) used.
self.is_generated = False
@cache.memoize_default()
@memoize_default(None)
def _get_method_execution(self, func):
func = InstanceElement(self, func, True)
return Execution(func, self.var_args)
func = InstanceElement(self._evaluator, self, func, True)
return Execution(self._evaluator, func, self.var_args)
def _get_func_self_name(self, func):
"""
@@ -86,7 +87,7 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
except IndexError:
return None
@cache.memoize_default([])
@memoize_default([])
def _get_self_attributes(self):
def add_self_dot_name(name):
"""
@@ -95,7 +96,7 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
"""
n = copy.copy(name)
n.names = n.names[1:]
names.append(InstanceElement(self, n))
names.append(InstanceElement(self._evaluator, self, n))
names = []
# This loop adds the names of the self object, copies them and removes
@@ -124,17 +125,17 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
add_self_dot_name(n)
for s in self.base.get_super_classes():
names += Instance(s)._get_self_attributes()
names += Instance(self._evaluator, s)._get_self_attributes()
return names
def get_subscope_by_name(self, name):
sub = self.base.get_subscope_by_name(name)
return InstanceElement(self, sub, True)
return InstanceElement(self._evaluator, self, sub, True)
def execute_subscope_by_name(self, name, args=()):
method = self.get_subscope_by_name(name)
return Execution(method, args).get_return_types()
return Execution(self._evaluator, method, args).get_return_types()
def get_descriptor_return(self, obj):
""" Throws a KeyError if there's no method. """
@@ -143,7 +144,7 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
args = [obj, obj.base] if isinstance(obj, Instance) else [None, obj]
return self.execute_subscope_by_name('__get__', args)
@cache.memoize_default([])
@memoize_default([])
def get_defined_names(self):
"""
Get the instance vars of a class. This includes the vars of all
@@ -153,7 +154,7 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
class_names = self.base.instance_names()
for var in class_names:
names.append(InstanceElement(self, var, True))
names.append(InstanceElement(self._evaluator, self, var, True))
return names
def scope_generator(self):
@@ -166,7 +167,7 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
names = []
class_names = self.base.instance_names()
for var in class_names:
names.append(InstanceElement(self, var, True))
names.append(InstanceElement(self._evaluator, self, var, True))
yield self, names
def get_index_types(self, index=None):
@@ -189,22 +190,23 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
(type(self).__name__, self.base, len(self.var_args or []))
class InstanceElement(use_metaclass(cache.CachedMetaClass, pr.Base)):
class InstanceElement(use_metaclass(CachedMetaClass, pr.Base)):
"""
InstanceElement is a wrapper for any object that is used as an instance
variable (e.g. self.variable or class methods).
"""
def __init__(self, instance, var, is_class_var=False):
def __init__(self, evaluator, instance, var, is_class_var=False):
self._evaluator = evaluator
if isinstance(var, pr.Function):
var = Function(var)
var = Function(evaluator, var)
elif isinstance(var, pr.Class):
var = Class(var)
var = Class(evaluator, var)
self.instance = instance
self.var = var
self.is_class_var = is_class_var
@property
@cache.memoize_default()
@memoize_default(None)
def parent(self):
par = self.var.parent
if isinstance(par, Class) and par == self.instance.base \
@@ -212,7 +214,7 @@ class InstanceElement(use_metaclass(cache.CachedMetaClass, pr.Base)):
and par == self.instance.base.base:
par = self.instance
elif not isinstance(par, pr.Module):
par = InstanceElement(self.instance, par, self.is_class_var)
par = InstanceElement(self.instance._evaluator, self.instance, par, self.is_class_var)
return par
def get_parent_until(self, *args, **kwargs):
@@ -227,13 +229,13 @@ class InstanceElement(use_metaclass(cache.CachedMetaClass, pr.Base)):
def get_commands(self):
# Copy and modify the array.
return [InstanceElement(self.instance, command, self.is_class_var)
return [InstanceElement(self.instance._evaluator, self.instance, command, self.is_class_var)
if not isinstance(command, unicode) else command
for command in self.var.get_commands()]
def __iter__(self):
for el in self.var.__iter__():
yield InstanceElement(self.instance, el, self.is_class_var)
yield InstanceElement(self.instance._evaluator, self.instance, el, self.is_class_var)
def __getattr__(self, name):
return getattr(self.var, name)
@@ -245,31 +247,32 @@ class InstanceElement(use_metaclass(cache.CachedMetaClass, pr.Base)):
return "<%s of %s>" % (type(self).__name__, self.var)
class Class(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
class Class(use_metaclass(CachedMetaClass, pr.IsScope)):
"""
This class is not only important to extend `pr.Class`, it is also
important for descriptors (whether the descriptor methods are evaluated or not).
"""
def __init__(self, base):
def __init__(self, evaluator, base):
self._evaluator = evaluator
self.base = base
@cache.memoize_default(default=())
@memoize_default(default=())
def get_super_classes(self):
supers = []
# TODO care for mro stuff (multiple super classes).
for s in self.base.supers:
# Super classes are statements.
for cls in evaluate.follow_statement(s):
for cls in self._evaluator.follow_statement(s):
if not isinstance(cls, Class):
debug.warning('Received non class, as a super class')
continue # Just ignore other stuff (user input error).
supers.append(cls)
if not supers and self.base.parent != builtin.Builtin.scope:
# add `object` to classes
supers += evaluate.find_name(builtin.Builtin.scope, 'object')
supers += self._evaluator.find_name(builtin.Builtin.scope, 'object')
return supers
@cache.memoize_default(default=())
@memoize_default(default=())
def instance_names(self):
def in_iterable(name, iterable):
""" checks if the name is in the variable 'iterable'. """
@@ -291,10 +294,10 @@ class Class(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
result += super_result
return result
@cache.memoize_default(default=())
@memoize_default(default=())
def get_defined_names(self):
result = self.instance_names()
type_cls = evaluate.find_name(builtin.Builtin.scope, 'type')[0]
type_cls = self._evaluator.find_name(builtin.Builtin.scope, 'type')[0]
return result + type_cls.base.get_defined_names()
def get_subscope_by_name(self, name):
@@ -318,16 +321,17 @@ class Class(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
return "<e%s of %s>" % (type(self).__name__, self.base)
class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
class Function(use_metaclass(CachedMetaClass, pr.IsScope)):
"""
Needed because of decorators. Decorators are evaluated here.
"""
def __init__(self, func, is_decorated=False):
def __init__(self, evaluator, func, is_decorated=False):
""" This should not be called directly """
self._evaluator = evaluator
self.base_func = func
self.is_decorated = is_decorated
@cache.memoize_default()
@memoize_default(None)
def _decorated_func(self, instance=None):
"""
Returns the function that is to be executed in the end.
@@ -339,7 +343,7 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
if not self.is_decorated:
for dec in reversed(self.base_func.decorators):
debug.dbg('decorator:', dec, f)
dec_results = set(evaluate.follow_statement(dec))
dec_results = set(self._evaluator.follow_statement(dec))
if not len(dec_results):
debug.warning('decorator not found: %s on %s' %
(dec, self.base_func))
@@ -349,12 +353,12 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
debug.warning('multiple decorators found', self.base_func,
dec_results)
# Create param array.
old_func = Function(f, is_decorated=True)
old_func = Function(self._evaluator, f, is_decorated=True)
if instance is not None and decorator.isinstance(Function):
old_func = InstanceElement(instance, old_func)
old_func = InstanceElement(self._evaluator, instance, old_func)
instance = None
wrappers = Execution(decorator, (old_func,)).get_return_types()
wrappers = Execution(self._evaluator, decorator, (old_func,)).get_return_types()
if not len(wrappers):
debug.warning('no wrappers found', self.base_func)
return None
@@ -366,7 +370,7 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
debug.dbg('decorator end', f)
if f != self.base_func and isinstance(f, pr.Function):
f = Function(f)
f = Function(self._evaluator, f)
return f
def get_decorated_func(self, instance=None):
@@ -377,14 +381,14 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
# If the decorator func is not found, just ignore the decorator
# function, because sometimes decorators are just really
# complicated.
return Function(self.base_func, True)
return Function(self._evaluator, self.base_func, True)
return decorated_func
def get_magic_method_names(self):
return builtin.Builtin.magic_function_scope.get_defined_names()
return builtin.Builtin.magic_function_scope(self._evaluator).get_defined_names()
def get_magic_method_scope(self):
return builtin.Builtin.magic_function_scope
return builtin.Builtin.magic_function_scope(self._evaluator)
def __getattr__(self, name):
return getattr(self.base_func, name)
@@ -412,12 +416,12 @@ class Execution(Executable):
return []
else:
if isinstance(stmt, pr.Statement):
return evaluate.follow_statement(stmt)
return self._evaluator.follow_statement(stmt)
else:
return [stmt] # just some arbitrary object
@property
@cache.memoize_default()
@memoize_default(None)
def _decorated(self):
"""Get the decorated version of the input"""
base = self.base
@@ -425,8 +429,8 @@ class Execution(Executable):
base = base.get_decorated_func()
return base
@cache.memoize_default(default=())
@recursion.ExecutionRecursionDecorator
@memoize_default(default=())
@recursion.execution_recursion_decorator
def get_return_types(self, evaluate_generator=False):
""" Get the return types of a function. """
base = self._decorated
@@ -452,7 +456,7 @@ class Execution(Executable):
if len(arr_name.var_args) != 1:
debug.warning('jedi getattr is too simple')
key = arr_name.var_args[0]
stmts += evaluate.follow_path(iter([key]), obj, base)
stmts += self._evaluator.follow_path(iter([key]), obj, base)
return stmts
elif func_name == 'type':
# otherwise it would be a metaclass
@@ -467,15 +471,15 @@ class Execution(Executable):
cls = func.get_parent_until(accept + (pr.Class,),
include_current=False)
if isinstance(cls, pr.Class):
cls = Class(cls)
cls = Class(self._evaluator, cls)
su = cls.get_super_classes()
if su:
return [Instance(su[0])]
return [Instance(self._evaluator, su[0])]
return []
if base.isinstance(Class):
# There maybe executions of executions.
return [Instance(base, self.var_args)]
return [Instance(self._evaluator, base, self.var_args)]
elif isinstance(base, Generator):
return base.iter_content()
else:
@@ -495,7 +499,7 @@ class Execution(Executable):
debug.dbg('exec result: %s in %s' % (stmts, self))
return imports.strip_imports(stmts)
return imports.strip_imports(self._evaluator, stmts)
def _get_function_returns(self, func, evaluate_generator):
""" A normal Function execution """
@@ -503,15 +507,15 @@ class Execution(Executable):
for listener in func.listeners:
listener.execute(self._get_params())
if func.is_generator and not evaluate_generator:
return [Generator(func, self.var_args)]
return [Generator(self._evaluator, func, self.var_args)]
else:
stmts = docstrings.find_return_types(func)
stmts = docstrings.find_return_types(self._evaluator, func)
for r in self.returns:
if r is not None:
stmts += evaluate.follow_statement(r)
stmts += self._evaluator.follow_statement(r)
return stmts
@cache.memoize_default(default=())
@memoize_default(default=())
def _get_params(self):
"""
This returns the params for an Execution/Instance and is injected as a
@@ -663,7 +667,7 @@ class Execution(Executable):
if not len(commands):
continue
if commands[0] == '*':
arrays = evaluate.follow_call_list(commands[1:])
arrays = self._evaluator.follow_call_list(commands[1:])
# *args must be some sort of an array, otherwise -> ignore
for array in arrays:
@@ -675,7 +679,7 @@ class Execution(Executable):
yield None, helpers.FakeStatement(field_stmt)
# **kwargs
elif commands[0] == '**':
arrays = evaluate.follow_call_list(commands[1:])
arrays = self._evaluator.follow_call_list(commands[1:])
for array in arrays:
if isinstance(array, Array):
for key_stmt, value_stmt in array.items():
@@ -724,7 +728,7 @@ class Execution(Executable):
copied = helpers.fast_parent_copy(element)
copied.parent = self._scope_copy(copied.parent)
if isinstance(copied, pr.Function):
copied = Function(copied)
copied = Function(self._evaluator, copied)
objects.append(copied)
return objects
@@ -733,7 +737,7 @@ class Execution(Executable):
raise AttributeError('Tried to access %s: %s. Why?' % (name, self))
return getattr(self._decorated, name)
@cache.memoize_default()
@memoize_default(None)
@common.rethrow_uncaught
def _scope_copy(self, scope):
""" Copies a scope (e.g. if) in an execution """
@@ -749,22 +753,22 @@ class Execution(Executable):
return copied
@property
@cache.memoize_default()
@memoize_default([])
def returns(self):
return self._copy_properties('returns')
@property
@cache.memoize_default()
@memoize_default([])
def asserts(self):
return self._copy_properties('asserts')
@property
@cache.memoize_default()
@memoize_default([])
def statements(self):
return self._copy_properties('statements')
@property
@cache.memoize_default()
@memoize_default([])
def subscopes(self):
return self._copy_properties('subscopes')
@@ -776,10 +780,11 @@ class Execution(Executable):
(type(self).__name__, self._decorated)
class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
class Generator(use_metaclass(CachedMetaClass, pr.Base, Iterable)):
""" Cares for `yield` statements. """
def __init__(self, func, var_args):
def __init__(self, evaluator, func, var_args):
super(Generator, self).__init__()
self._evaluator = evaluator
self.func = func
self.var_args = var_args
@@ -804,7 +809,7 @@ class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
def iter_content(self):
""" returns the content of __iter__ """
return Execution(self.func, self.var_args).get_return_types(True)
return Execution(self._evaluator, self.func, self.var_args).get_return_types(True)
def get_index_types(self, index=None):
debug.warning('Tried to get array access on a generator', self)
@@ -822,12 +827,13 @@ class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
return "<%s of %s>" % (type(self).__name__, self.func)
class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
class Array(use_metaclass(CachedMetaClass, pr.Base, Iterable)):
"""
Used as a mirror to pr.Array, if needed. It defines some getter
methods which are important in this module.
"""
def __init__(self, array):
def __init__(self, evaluator, array):
self._evaluator = evaluator
self._array = array
def get_index_types(self, index_arr=None):
@@ -852,7 +858,7 @@ class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
return self.get_exact_index_types(index.var_args[0])
result = list(self._follow_values(self._array.values))
result += dynamic.check_array_additions(self)
result += dynamic.check_array_additions(self._evaluator, self)
return set(result)
def get_exact_index_types(self, mixed_index):
@@ -883,7 +889,7 @@ class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
def _follow_values(self, values):
""" helper function for the index getters """
return list(itertools.chain.from_iterable(evaluate.follow_statement(v)
return list(itertools.chain.from_iterable(self._evaluator.follow_statement(v)
for v in values))
def get_defined_names(self):
@@ -892,8 +898,8 @@ class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
It returns e.g. for a list: append, pop, ...
"""
# `array.type` is a string with the type, e.g. 'list'.
scope = evaluate.find_name(builtin.Builtin.scope, self._array.type)[0]
scope = Instance(scope)
scope = self._evaluator.find_name(builtin.Builtin.scope, self._array.type)[0]
scope = Instance(self._evaluator, scope)
names = scope.get_defined_names()
return [ArrayMethod(n) for n in names]

View File

@@ -3,9 +3,9 @@ Module to handle interpreted Python objects.
"""
import itertools
import tokenize
from jedi.parser import representation as pr
from jedi.parser import tokenizer as tokenize
from jedi.parser import token

View File

@@ -6,7 +6,7 @@ import keyword
from jedi._compatibility import is_py3k
from jedi.parser import representation as pr
from jedi import common
import builtin
from jedi.evaluate import builtin
try:
from pydoc_data import topics as pydoc_topics

View File

@@ -16,7 +16,6 @@ Apart from those classes there's a ``sys.path`` fetching function, as well as
from __future__ import with_statement
import re
import tokenizer as tokenize
import sys
import os
from ast import literal_eval
@@ -24,6 +23,7 @@ from ast import literal_eval
from jedi._compatibility import exec_function, unicode
from jedi import cache
from jedi.parser import representation as pr
from jedi.parser import tokenizer as tokenize
from jedi.parser import fast
from jedi import debug
from jedi import common
@@ -291,7 +291,7 @@ def get_sys_path():
return [p for p in sys.path if p != ""]
@cache.memoize_default([])
#@cache.memoize_default([]) TODO add some sort of cache again.
def sys_path_with_modifications(module):
def execute_code(code):
c = "import os; from os.path import *; result=%s"

View File

@@ -17,7 +17,6 @@ complexity of the ``Parser`` (there's another parser sitting inside
"""
from __future__ import with_statement
import tokenizer as tokenize
import keyword
from jedi._compatibility import next, StringIO
@@ -25,6 +24,7 @@ from jedi import debug
from jedi import common
from jedi.parser import representation as pr
from jedi.parser import token as token_pr
from jedi.parser import tokenizer as tokenize
class Parser(object):

View File

@@ -226,7 +226,6 @@ class FastParser(use_metaclass(CachedFastParser)):
self.user_position = user_position
self.reset_caches()
try:
self._parse(code)
except:
@@ -372,8 +371,8 @@ class FastParser(use_metaclass(CachedFastParser)):
else:
self.current_node = self.current_node.add_node(node)
if self.current_node.parent and (isinstance(p.user_scope,
pr.SubModule) or p.user_scope is None) \
if self.current_node.parent and (isinstance(p.user_scope, pr.SubModule)
or p.user_scope is None) \
and self.user_position \
and p.start_pos <= self.user_position < p.end_pos:
p.user_scope = self.current_node.parent.content_scope
@@ -407,9 +406,9 @@ class FastParser(use_metaclass(CachedFastParser)):
raise ValueError()
except ValueError:
p = Parser(parser_code, self.module_path,
self.user_position, offset=(line_offset, 0),
is_fast_parser=True, top_module=self.module,
no_docstr=no_docstr)
self.user_position, offset=(line_offset, 0),
is_fast_parser=True, top_module=self.module,
no_docstr=no_docstr)
p.module.parent = self.module
else:
if nodes[index] != self.current_node:

View File

@@ -37,13 +37,13 @@ from __future__ import with_statement
import os
import re
import tokenizer as tokenize
from inspect import cleandoc
from ast import literal_eval
from jedi._compatibility import next, Python3Method, encoding, unicode, is_py3k
from jedi import common
from jedi import debug
from jedi.parser import tokenizer as tokenize
class Base(object):

View File

@@ -8,6 +8,7 @@ Basically this is a stripped down version of the standard library module, so
you can read the documentation there.
"""
from __future__ import absolute_import
import string
import re
from token import *

View File

@@ -26,8 +26,8 @@ setup(name='jedi',
license='MIT',
keywords='python completion refactoring vim',
long_description=readme,
packages=['jedi', 'jedi.parser'],
package_data={'jedi': ['mixin/*.pym']},
packages=['jedi', 'jedi.parser', 'jedi.evaluate'],
package_data={'jedi': ['evaluate/mixin/*.pym']},
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
@@ -45,4 +45,4 @@ setup(name='jedi',
'Topic :: Text Editors :: Integrated Development Environments (IDE)',
'Topic :: Utilities',
],
)
)
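
The setup.py hunk registers jedi.evaluate as a real subpackage and moves the mixin stubs under it, so the *.pym files still ship with the distribution. As a small illustration of what the new layout means for imports (only paths that appear in the hunks of this diff are used; running it requires a jedi checkout at this revision):

from jedi.evaluate import Evaluator
from jedi.evaluate import builtin

evaluator = Evaluator()           # per-use evaluation state
builtin_scope = builtin.Builtin   # builtins now live in the evaluate package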

View File

@@ -12,11 +12,11 @@ sys.path.append(sys.path[1] + '/thirdparty')
# syntax err
sys.path.append('a' +* '/thirdparty')
#? ['evaluate', 'evaluate_representation']
#? ['evaluate']
import evaluate
#? ['goto']
evaluate.goto
#? ['Evaluator']
evaluate.Evaluator
#? ['jedi_']
import jedi_

View File

@@ -11,9 +11,9 @@ import jedi
def test_is_keyword():
results = Script('import ', 1, 1, None).goto_definitions()
assert len(results) == 1 and results[0].is_keyword == True
assert len(results) == 1 and results[0].is_keyword is True
results = Script('str', 1, 1, None).goto_definitions()
assert len(results) == 1 and results[0].is_keyword == False
assert len(results) == 1 and results[0].is_keyword is False
def make_definitions():
"""
@@ -71,6 +71,7 @@ def test_function_call_signature_in_doc():
doc = defs[0].doc
assert "f(x, y = 1, z = 'a')" in doc
def test_class_call_signature():
defs = Script("""
class Foo:

View File

@@ -1,4 +1,4 @@
from jedi import builtin
from jedi.evaluate import builtin
def test_parse_function_doc_illegal_docstr():

View File

@@ -66,7 +66,7 @@ def test_star_import_cache_duration():
old, jedi.settings.star_import_cache_validity = \
jedi.settings.star_import_cache_validity, new
cache.star_import_cache = {} # first empty...
cache._star_import_cache = {} # first empty...
# path needs to be not-None (otherwise caching effects are not visible)
jedi.Script('', 1, 0, '').completions()
time.sleep(2 * new)
@@ -74,6 +74,6 @@ def test_star_import_cache_duration():
# reset values
jedi.settings.star_import_cache_validity = old
length = len(cache.star_import_cache)
cache.star_import_cache = {}
length = len(cache._star_import_cache)
cache._star_import_cache = {}
assert length == 1

View File

@@ -17,6 +17,7 @@ import textwrap
import jedi
from jedi import api_classes
from jedi.evaluate import Evaluator
from .helpers import TestCase
@@ -82,5 +83,5 @@ def test_keyword_full_name_should_be_none():
# Using `from jedi.keywords import Keyword` here does NOT work
# in Python 3. This is due to the import hack jedi is using.
Keyword = api_classes.keywords.Keyword
d = api_classes.Definition(Keyword('(', (0, 0)))
d = api_classes.Definition(Evaluator(), Keyword('(', (0, 0)))
assert d.full_name is None
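
The test change above captures the user-visible side of the refactoring: api_classes.Definition now takes the Evaluator as its first constructor argument. Constructing one by hand therefore looks like the sketch below, which simply restates the test (it needs a jedi checkout at this revision to run):

from jedi import api_classes
from jedi.evaluate import Evaluator

# Keyword is fetched through api_classes because of jedi's import hack,
# as noted in the test's comment.
Keyword = api_classes.keywords.Keyword
definition = api_classes.Definition(Evaluator(), Keyword('(', (0, 0)))
assert definition.full_name is None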

View File

@@ -7,6 +7,7 @@ import itertools
from jedi import Script
from .helpers import cwd_at
from jedi._compatibility import is_py26
def test_goto_definition_on_import():
@@ -21,7 +22,8 @@ def test_complete_on_empty_import():
assert 10 < len(Script("from . import", 1, 5, '').completions()) < 30
assert 10 < len(Script("from . import classes", 1, 5, '').completions()) < 30
assert len(Script("import").completions()) == 0
assert len(Script("import import", path='').completions()) > 0
if not is_py26:
assert len(Script("import import", path='').completions()) > 0
# 111
assert Script("from datetime import").completions()[0].name == 'import'

View File

@@ -75,13 +75,14 @@ class TestRegression(TestCase):
@cwd_at('jedi')
def test_add_dynamic_mods(self):
api.settings.additional_dynamic_modules = ['dynamic.py']
fname = '__main__.py'
api.settings.additional_dynamic_modules = [fname]
# Fictional module that defines a function.
src1 = "def ret(a): return a"
src1 = "def r(a): return a"
# Other fictional modules in another place in the fs.
src2 = 'from .. import setup; setup.ret(1)'
src2 = 'from .. import setup; setup.r(1)'
# .parser to load the module
api.modules.Module(os.path.abspath('dynamic.py'), src2).parser
api.modules.Module(os.path.abspath(fname), src2).parser
result = Script(src1, path='../setup.py').goto_definitions()
assert len(result) == 1
assert result[0].description == 'class int'