forked from VimPlug/jedi

Merge branch 'dev' into get_code_fidelity

Conflicts:
	jedi/evaluate/docstrings.py
	jedi/parser/representation.py

Resolving merge problems:
* Introducing docstring for compiled.fake
* Partly fixing poor decision in TokenDocstring __init__
Jean-Louis Fuchs
2014-01-28 00:37:06 +01:00
74 changed files with 4318 additions and 4322 deletions

jedi/evaluate/__init__.py Normal file

@@ -0,0 +1,376 @@
"""
Evaluation of Python code in |jedi| is based on three assumptions:
* Code is recursive (to weaken this assumption, the :mod:`dynamic` module
exists).
* No magic is being used:
- metaclasses
- ``setattr()`` / ``__import__()``
- writing to ``globals()``, ``locals()``, ``object.__dict__``
* The programmer is not a total dick, e.g. like `this
<https://github.com/davidhalter/jedi/issues/24>`_ :-)
That said, there's mainly one entry point in this script: ``eval_statement``.
This is where autocompletion starts. Everything you want to complete is either
a ``Statement`` or some special name like ``class``, which is easy to complete.
Therefore you need to understand what follows after ``eval_statement``. Let's
make an example::
import datetime
datetime.date.toda# <-- cursor here
First of all, this module doesn't care about completion. It really just cares
about ``datetime.date``. At the end of the procedure ``eval_statement`` will
return the ``date`` class.
To *visualize* this (simplified):
- ``eval_statement`` - ``<Statement: datetime.date>``
- Unpacking of the statement into ``[[<Call: datetime.date>]]``
- ``eval_expression_list``, calls ``eval_call`` with ``<Call: datetime.date>``
- ``eval_call`` - searches the ``datetime`` name within the module.
This is exactly where it starts to get complicated. Now recursions start to
kick in. The statement has not been resolved fully, but now we need to resolve
the datetime import. So it continues:
- follow import, which happens in the :mod:`imports` module.
- now the same ``eval_call`` as above calls ``follow_path`` to follow the
second part of the statement ``date``.
- After ``follow_path`` returns with the desired ``datetime.date`` class, the
result is being returned and the recursion finishes.
Now what would happen if we wanted ``datetime.date.foo.bar``? Just two more
calls to ``follow_path`` (which calls itself recursively). What if the
import contained another statement like this::
from foo import bar
Date = bar.baz
Well... You get it. Just another ``eval_statement`` recursion. It's really
easy. Just that Python is not that easy sometimes. To understand tuple
assignments and different class scopes, a lot more code had to be written. Yet
we're still not talking about Descriptors and Nested List Comprehensions, just
the simple stuff.
So if you want to change something, write a test and then just change what you
want. This module has been tested by about 600 tests. Don't be afraid to break
something. The tests are good enough.
I need to mention now that this recursive approach is really good because it
only *evaluates* what needs to be *evaluated*. All the statements and modules
that are not used are just being ignored. It's a little bit similar to the
backtracking algorithm.
.. todo:: nonlocal statement, needed or can be ignored? (py3k)
"""
import itertools
from jedi._compatibility import next, hasattr, unicode
from jedi import common
from jedi.parser import representation as pr
from jedi import debug
from jedi.evaluate import representation as er
from jedi.evaluate import imports
from jedi.evaluate import recursion
from jedi.evaluate import iterable
from jedi.evaluate.cache import memoize_default
from jedi.evaluate import stdlib
from jedi.evaluate import finder
from jedi.evaluate import compiled
class Evaluator(object):
def __init__(self):
self.memoize_cache = {} # for memoize decorators
self.recursion_detector = recursion.RecursionDetector()
self.execution_recursion_detector = recursion.ExecutionRecursionDetector()
def find_types(self, scope, name_str, position=None, search_global=False,
is_goto=False, resolve_decorator=True):
"""
This is the search function. The most important part to debug.
`remove_statements` and `filter_statements` really are the core part of
this completion.
:param position: Position of the last statement -> tuple of line, column
:return: List of Names. Their parents are the types.
"""
f = finder.NameFinder(self, scope, name_str, position)
scopes = f.scopes(search_global)
if is_goto:
return f.filter_name(scopes)
return f.find(scopes, resolve_decorator)
@memoize_default(default=(), evaluator_is_first_arg=True)
@recursion.recursion_decorator
@debug.increase_indent
def eval_statement(self, stmt, seek_name=None):
"""
The starting point of the completion. A statement always owns a call
list, which contains the calls that the statement makes. If multiple
names are defined in the statement, `seek_name` limits the result to
that name.
:param stmt: A `pr.Statement`.
"""
debug.dbg('eval_statement %s (%s)', stmt, seek_name)
expression_list = stmt.expression_list()
result = self.eval_expression_list(expression_list)
# Assignment checking is only important if the statement defines multiple
# variables.
if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
new_result = []
for ass_expression_list, op in stmt.assignment_details:
new_result += finder.find_assignments(ass_expression_list[0], result, seek_name)
result = new_result
return set(result)
def eval_expression_list(self, expression_list, follow_array=False):
"""
`expression_list` can be either `pr.Array` or `list of list`.
It is used to evaluate a two-dimensional object that contains calls, arrays
and operators.
"""
def evaluate_list_comprehension(lc, parent=None):
input = lc.input
nested_lc = lc.input.token_list[0]
if isinstance(nested_lc, pr.ListComprehension):
# is nested LC
input = nested_lc.stmt
module = input.get_parent_until()
# create a for loop, which does the same as list comprehensions
loop = pr.ForFlow(module, [input], lc.stmt.start_pos, lc.middle, True)
loop.parent = parent or lc.get_parent_until(pr.IsScope)
if isinstance(nested_lc, pr.ListComprehension):
loop = evaluate_list_comprehension(nested_lc, loop)
return loop
debug.dbg('eval_expression_list: %s', expression_list)
result = []
calls_iterator = iter(expression_list)
for call in calls_iterator:
if pr.Array.is_type(call, pr.Array.NOARRAY):
r = list(itertools.chain.from_iterable(self.eval_statement(s)
for s in call))
call_path = call.generate_call_path()
next(call_path, None) # the first one has been used already
result += self.follow_path(call_path, r, call.parent,
position=call.start_pos)
elif isinstance(call, pr.ListComprehension):
loop = evaluate_list_comprehension(call)
# Caveat: parents are being changed, but this doesn't matter,
# because nothing else uses it.
call.stmt.parent = loop
result += self.eval_statement(call.stmt)
else:
if isinstance(call, pr.Lambda):
result.append(er.Function(self, call))
# With things like params, these can also be functions...
elif isinstance(call, pr.Base) and call.isinstance(
er.Function, er.Class, er.Instance, iterable.ArrayInstance):
result.append(call)
# The string tokens are just operations (+, -, etc.)
elif isinstance(call, compiled.CompiledObject):
result.append(call)
elif not isinstance(call, (str, unicode)):
if isinstance(call, pr.Call) and str(call.name) == 'if':
# Ternary operators.
while True:
try:
call = next(calls_iterator)
except StopIteration:
break
with common.ignored(AttributeError):
if str(call.name) == 'else':
break
continue
result += self.eval_call(call)
elif call == '*':
if [r for r in result if isinstance(r, iterable.Array)
or isinstance(r, compiled.CompiledObject)
and isinstance(r.obj, (str, unicode))]:
# if it is an iterable, ignore * operations
next(calls_iterator)
return set(result)
def eval_call(self, call):
"""Follow a call is following a function, variable, string, etc."""
path = call.generate_call_path()
# find the statement of the Scope
s = call
while not s.parent.isinstance(pr.IsScope):
s = s.parent
return self.eval_call_path(path, s.parent, s.start_pos)
def eval_call_path(self, path, scope, position):
"""
Follows a path generated by `pr.StatementElement.generate_call_path()`.
"""
current = next(path)
if isinstance(current, pr.Array):
types = [iterable.Array(self, current)]
else:
if isinstance(current, pr.NamePart):
# This is the first global lookup.
types = self.find_types(scope, current, position=position,
search_global=True)
else:
# for pr.Literal
types = [compiled.create(current.value)]
types = imports.strip_imports(self, types)
return self.follow_path(path, types, scope, position=position)
def follow_path(self, path, types, call_scope, position=None):
"""
Follows a path like::
self.follow_path(iter(['Foo', 'bar']), [a_type], from_somewhere)
to follow a call like ``module.a_type.Foo.bar`` (in ``from_somewhere``).
"""
results_new = []
iter_paths = itertools.tee(path, len(types))
for i, typ in enumerate(types):
fp = self._follow_path(iter_paths[i], typ, call_scope, position=position)
if fp is not None:
results_new += fp
else:
# This means stop iteration.
return types
return results_new
def _follow_path(self, path, typ, scope, position=None):
"""
Uses a generator and tries to complete the path, e.g.::
foo.bar.baz
`_follow_path` is only responsible for completing `.bar.baz`, the rest
is done in the `follow_call` function.
"""
# current is either an Array or a Scope.
try:
current = next(path)
except StopIteration:
return None
debug.dbg('_follow_path: %s in scope %s', current, typ)
result = []
if isinstance(current, pr.Array):
# This must be an execution, either () or [].
if current.type == pr.Array.LIST:
if hasattr(typ, 'get_index_types'):
result = typ.get_index_types(current)
elif current.type not in [pr.Array.DICT]:
# Scope must be a class or func - make an instance or execution.
result = self.execute(typ, current)
else:
# Curly braces are not allowed, because they make no sense.
debug.warning('strange function call with {} %s %s', current, typ)
else:
# The function must not be decorated with something else.
if typ.isinstance(er.Function):
typ = typ.get_magic_function_scope()
else:
# This is the typical lookup while chaining things.
if filter_private_variable(typ, scope, current):
return []
types = self.find_types(typ, current, position=position)
result = imports.strip_imports(self, types)
return self.follow_path(path, set(result), scope, position=position)
@debug.increase_indent
def execute(self, obj, params=(), evaluate_generator=False):
if obj.isinstance(er.Function):
obj = obj.get_decorated_func()
debug.dbg('execute: %s %s', obj, params)
try:
return stdlib.execute(self, obj, params)
except stdlib.NotInStdLib:
pass
if isinstance(obj, iterable.GeneratorMethod):
return obj.execute()
elif obj.isinstance(compiled.CompiledObject):
if obj.is_executable_class():
return [er.Instance(self, obj, params)]
else:
return list(obj.execute_function(self, params))
elif obj.isinstance(er.Class):
# There may be executions of executions.
return [er.Instance(self, obj, params)]
else:
stmts = []
if obj.isinstance(er.Function):
stmts = er.FunctionExecution(self, obj, params).get_return_types(evaluate_generator)
else:
if hasattr(obj, 'execute_subscope_by_name'):
try:
stmts = obj.execute_subscope_by_name('__call__', params)
except KeyError:
debug.warning("no __call__ func available %s", obj)
else:
debug.warning("no execution possible %s", obj)
debug.dbg('execute result: %s in %s', stmts, obj)
return imports.strip_imports(self, stmts)
def goto(self, stmt, call_path=None):
if call_path is None:
expression_list = stmt.expression_list()
if len(expression_list) == 0:
return [], ''
# Only the first command is important, the rest should basically not
# happen except in broken code (e.g. docstrings that aren't code).
call = expression_list[0]
if isinstance(call, (str, unicode)):
call_path = [call]
else:
call_path = list(call.generate_call_path())
scope = stmt.get_parent_until(pr.IsScope)
pos = stmt.start_pos
call_path, search = call_path[:-1], call_path[-1]
pos = pos[0], pos[1] + 1
if call_path:
scopes = self.eval_call_path(iter(call_path), scope, pos)
search_global = False
pos = None
else:
scopes = [scope]
search_global = True
follow_res = []
for s in scopes:
follow_res += self.find_types(s, search, pos,
search_global=search_global, is_goto=True)
return follow_res, search
def filter_private_variable(scope, call_scope, var_name):
"""private variables begin with a double underline `__`"""
if isinstance(var_name, (str, unicode)) and isinstance(scope, er.Instance)\
and var_name.startswith('__') and not var_name.endswith('__'):
s = call_scope.get_parent_until((pr.Class, er.Instance, compiled.CompiledObject))
if s != scope:
if isinstance(scope.base, compiled.CompiledObject):
if s != scope.base:
return True
else:
if s != scope.base.base:
return True
return False

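To make the walkthrough in the module docstring above more concrete, here is a minimal, self-contained sketch of the follow-the-call-path idea. It is not jedi's API: namespaces are plain dicts and an ``Alias`` stands in for an unresolved statement such as ``Date = bar.baz``, so it only demonstrates how resolving one name can recursively trigger the evaluation of another:

# Toy illustration of the recursive path following described above.
class Alias:
    def __init__(self, path):
        self.path = path  # e.g. "datetime.date"

def eval_path(scope, dotted, modules):
    """Resolve ``dotted`` (e.g. 'datetime.date') inside ``scope``."""
    first, *rest = dotted.split('.')
    value = scope.get(first, modules.get(first))
    return follow_path(rest, value, modules)

def follow_path(parts, value, modules):
    # Recursion kicks in whenever the current value is itself an alias.
    if isinstance(value, Alias):
        value = eval_path({}, value.path, modules)
    if not parts:
        return value
    return follow_path(parts[1:], value[parts[0]], modules)

date_cls = {'today': '<classmethod today>'}
modules = {'datetime': {'date': date_cls},
           'mymod': {'Date': Alias('datetime.date')}}
# ``datetime.date`` resolves to the date class ...
assert eval_path({}, 'datetime.date', modules) is date_cls
# ... and so does the alias, via one more recursion.
assert eval_path({}, 'mymod.Date', modules) is date_cls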
jedi/evaluate/cache.py Normal file

@@ -0,0 +1,51 @@
"""
- the popular ``memoize_default`` works like a typical memoize, but returns the
given default while the value is still being computed (to break recursion).
- ``CachedMetaClass`` uses ``memoize_default`` to do the same with classes.
"""
def memoize_default(default, evaluator_is_first_arg=False, second_arg_is_evaluator=False):
""" This is a typical memoization decorator, BUT there is one difference:
To prevent recursion it sets defaults.
Preventing recursion is in this case a much bigger benefit than speed. I
don't think that there is a big speed difference, but there are many cases
where recursion could happen (think about a = b; b = a).
"""
def func(function):
def wrapper(obj, *args, **kwargs):
if evaluator_is_first_arg:
cache = obj.memoize_cache
elif second_arg_is_evaluator: # needed for meta classes
cache = args[0].memoize_cache
else:
cache = obj._evaluator.memoize_cache
try:
memo = cache[function]
except KeyError:
memo = {}
cache[function] = memo
key = (obj, args, frozenset(kwargs.items()))
if key in memo:
return memo[key]
else:
memo[key] = default
rv = function(obj, *args, **kwargs)
memo[key] = rv
return rv
return wrapper
return func
class CachedMetaClass(type):
"""
This is basically the same as the decorator above; it just caches
class instantiations. I haven't found any other way, so I'm doing it with
metaclasses.
"""
@memoize_default(None, second_arg_is_evaluator=True)
def __call__(self, *args, **kwargs):
return super(CachedMetaClass, self).__call__(*args, **kwargs)

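The recursion-breaking trick of ``memoize_default`` is that the default is written into the memo *before* the wrapped function runs, so a re-entrant call sees the default instead of looping forever. A small standalone sketch of that idea (independent of jedi's evaluator caches), applied to a toy ``a = b; b = a`` cycle:

def memoize_default(default):
    """Memoize, but pre-seed the cache with ``default`` to cut cycles."""
    def decorator(function):
        memo = {}
        def wrapper(*args):
            if args in memo:
                return memo[args]
            memo[args] = default          # re-entrant call -> gets the default
            memo[args] = function(*args)  # overwrite with the real result
            return memo[args]
        return wrapper
    return decorator

# ``a`` and ``b`` reference each other; without the pre-seeded default
# the resolver below would recurse forever.
assignments = {'a': 'b', 'b': 'a', 'c': 42}

@memoize_default(default=None)
def resolve(name):
    value = assignments[name]
    return resolve(value) if isinstance(value, str) else value

assert resolve('c') == 42
assert resolve('a') is None   # the cycle collapses to the default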

@@ -0,0 +1,308 @@
"""
Imitate the parser representation.
"""
import inspect
import re
import sys
import os
from jedi._compatibility import builtins as _builtins
from jedi import debug
from jedi.parser.representation import Base
from jedi.cache import underscore_memoization
from jedi.evaluate.sys_path import get_sys_path
from . import fake
class CompiledObject(Base):
# comply with the parser
start_pos = 0, 0
asserts = []
path = None # modules have this attribute - set it to None.
def __init__(self, obj, parent=None):
self.obj = obj
self.parent = parent
self.doc = inspect.getdoc(obj)
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, self.obj)
def get_parent_until(self, *args, **kwargs):
# compiled modules only use functions and classes/methods (2 levels)
return getattr(self.parent, 'parent', self.parent) or self.parent or self
@underscore_memoization
def _parse_function_doc(self):
if self.doc is None:
return '', ''
return _parse_function_doc(self.doc)
def type(self):
cls = self._cls().obj
if inspect.isclass(cls):
return 'class'
elif inspect.ismodule(cls):
return 'module'
elif inspect.isbuiltin(cls) or inspect.ismethod(cls) \
or inspect.ismethoddescriptor(cls):
return 'def'
def is_executable_class(self):
return inspect.isclass(self.obj)
@underscore_memoization
def _cls(self):
# Ensures that a CompiledObject is returned that is not an instance (like list)
if fake.is_class_instance(self.obj):
try:
c = self.obj.__class__
except AttributeError:
# happens with numpy.core.umath._UFUNC_API (you get it
# automatically by doing `import numpy`).
c = type(None)
return CompiledObject(c, self.parent)
return self
def get_defined_names(self):
cls = self._cls()
for name in dir(cls.obj):
yield CompiledName(cls, name)
def instance_names(self):
return self.get_defined_names()
def get_subscope_by_name(self, name):
if name in dir(self._cls().obj):
return CompiledName(self._cls(), name).parent
else:
raise KeyError("CompiledObject doesn't have an attribute '%s'." % name)
@property
def name(self):
# might not exist sometimes (raises AttributeError)
return self._cls().obj.__name__
def execute_function(self, evaluator, params):
if self.type() != 'def':
return
for name in self._parse_function_doc()[1].split():
try:
bltn_obj = _create_from_name(builtin, builtin, name)
except AttributeError:
continue
else:
if isinstance(bltn_obj, CompiledObject):
# We want everything except None.
if bltn_obj.obj is not None:
yield bltn_obj
else:
for result in evaluator.execute(bltn_obj, params):
yield result
@property
@underscore_memoization
def subscopes(self):
"""
Returns only the faked scopes - the other ones are not important for
internal analysis.
"""
module = self.get_parent_until()
faked_subscopes = []
for name in dir(self._cls().obj):
f = fake.get_faked(module.obj, self.obj, name)
if f:
f.parent = self
faked_subscopes.append(f)
return faked_subscopes
def get_self_attributes(self):
return [] # Instance compatibility
def get_imports(self):
return [] # Builtins don't have imports
class CompiledName(object):
def __init__(self, obj, name):
self._obj = obj
self.name = name
self.start_pos = 0, 0 # an illegal start_pos, to make sorting easy.
def get_parent_until(self):
return self.parent.get_parent_until()
def __str__(self):
return self.name
def __repr__(self):
return '<%s: (%s).%s>' % (type(self).__name__, self._obj.name, self.name)
@property
@underscore_memoization
def parent(self):
module = self._obj.get_parent_until()
return _create_from_name(module, self._obj, self.name)
@property
def names(self):
return [self.name] # compatibility with parser.representation.Name
def get_code(self):
return self.name
def load_module(path, name):
if not name:
name = os.path.basename(path)
name = name.rpartition('.')[0] # cut file type (normally .so)
# sometimes there are endings like `_sqlite3.cpython-32mu`
name = re.sub(r'\..*', '', name)
dot_path = []
if path:
p = path
# if path is not in sys.path, we need to make a well defined import
# like `from numpy.core import umath`.
while p and p not in sys.path:
p, sep, mod = p.rpartition(os.path.sep)
dot_path.insert(0, mod.partition('.')[0])
if p:
name = ".".join(dot_path)
path = p
else:
path = os.path.dirname(path)
sys_path = get_sys_path()
if path:
sys_path.insert(0, path)
temp, sys.path = sys.path, sys_path
try:
module = __import__(name, {}, {}, dot_path[:-1])
except AttributeError:
# use sys.modules, because you cannot access some modules
# directly. -> github issue #59
module = sys.modules[name]
sys.path = temp
return CompiledObject(module)
docstr_defaults = {
'floating point number': 'float',
'character': 'str',
'integer': 'int',
'dictionary': 'dict',
'string': 'str',
}
def _parse_function_doc(doc):
"""
Takes a function's docstring and returns the params and return value as a tuple.
This is nothing more than a docstring parser.
TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
TODO docstrings like 'tuple of integers'
"""
# parse round parentheses: def func(a, (b,c))
try:
count = 0
start = doc.index('(')
for i, s in enumerate(doc[start:]):
if s == '(':
count += 1
elif s == ')':
count -= 1
if count == 0:
end = start + i
break
param_str = doc[start + 1:end]
except (ValueError, UnboundLocalError):
# ValueError for doc.index
# UnboundLocalError for undefined end in last line
debug.dbg('no brackets found - no param')
end = 0
param_str = ''
else:
# remove square brackets, which mark an optional param (-> append `=None`)
def change_options(m):
args = m.group(1).split(',')
for i, a in enumerate(args):
if a and '=' not in a:
args[i] += '=None'
return ','.join(args)
while True:
param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
change_options, param_str)
if changes == 0:
break
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
# parse return value
r = re.search('-[>-]* ', doc[end:end + 7])
if r is None:
ret = ''
else:
index = end + r.end()
# get result type, which can contain newlines
pattern = re.compile(r'(,\n|[^\n-])+')
ret_str = pattern.match(doc, index).group(0).strip()
# New object -> object()
ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)
ret = docstr_defaults.get(ret_str, ret_str)
return param_str, ret
class Builtin(CompiledObject):
def get_defined_names(self):
# Filter None, because it's really just a keyword, nobody wants to
# access it.
return [d for d in super(Builtin, self).get_defined_names() if d.name != 'None']
def _a_generator(foo):
"""Used to have an object to return for generators."""
yield 42
yield foo
builtin = Builtin(_builtins)
magic_function_class = CompiledObject(type(load_module), parent=builtin)
generator_obj = CompiledObject(_a_generator(1.0))
def _create_from_name(module, parent, name):
faked = fake.get_faked(module.obj, parent.obj, name)
# only functions are necessary.
if faked is not None:
faked.parent = parent
return faked
try:
obj = getattr(parent.obj, name)
except AttributeError:
# happens e.g. in properties of
# PyQt4.QtGui.QStyleOptionComboBox.currentText
# -> just set it to None
obj = None
return CompiledObject(obj, parent)
def create(obj, parent=builtin, module=None):
"""
A very weird interface to this module. The more options are provided, the
more accurately compiled objects can be loaded.
"""
if not inspect.ismodule(obj):
faked = fake.get_faked(module and module.obj, obj)
if faked is not None:
faked.parent = parent
return faked
return CompiledObject(obj, parent)

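As a quick illustration of the bracket handling in ``_parse_function_doc`` above, the following standalone snippet applies the same ``re.subn`` rewrite to two invented docstring signatures of the kind found on C builtins:

import re

# Optional params written as ``[, x]`` become ``x=None`` style defaults.
def rewrite_optionals(param_str):
    def change_options(m):
        args = m.group(1).split(',')
        for i, a in enumerate(args):
            if a and '=' not in a:
                args[i] += '=None'
        return ','.join(args)
    while True:
        param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
                                     change_options, param_str)
        if changes == 0:
            return param_str

print(rewrite_optionals('string[, pos[, endpos]]'))
# -> string, pos=None, endpos=None
print(rewrite_optionals('repl, string[, count = 0]'))
# -> repl, string, count = 0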

@@ -0,0 +1,117 @@
"""
Loads functions that are mixed in to the standard library. E.g. builtins are
written in C (binaries), but my autocompletion only understands Python code. By
mixing in Python code, the autocompletion should work much better for builtins.
"""
import os
import inspect
from jedi._compatibility import is_py3, builtins
from jedi.parser import Parser
from jedi.parser import token as token_pr
from jedi.parser.representation import Class
from jedi.evaluate.helpers import FakeName
modules = {}
def _load_faked_module(module):
module_name = module.__name__
if module_name == '__builtin__' and not is_py3:
module_name = 'builtins'
try:
return modules[module_name]
except KeyError:
path = os.path.dirname(os.path.abspath(__file__))
try:
with open(os.path.join(path, 'fake', module_name) + '.pym') as f:
source = f.read()
except IOError:
modules[module_name] = None
return
module = Parser(source, module_name).module
modules[module_name] = module
if module_name == 'builtins' and not is_py3:
# There are two implementations of `open` for either python 2/3.
# -> Rename the python2 version (see `fake/builtins.pym`).
open_func = search_scope(module, 'open')
open_func.name = FakeName('open_python3')
open_func = search_scope(module, 'open_python2')
open_func.name = FakeName('open')
return module
def search_scope(scope, obj_name):
for s in scope.subscopes:
if str(s.name) == obj_name:
return s
def get_module(obj):
if inspect.ismodule(obj):
return obj
try:
obj = obj.__objclass__
except AttributeError:
pass
try:
imp_plz = obj.__module__
except AttributeError:
# Unfortunately in some cases like `int` there's no __module__
return builtins
else:
return __import__(imp_plz)
def _faked(module, obj, name):
# Crazy underscore actions to try to escape all the internal madness.
if module is None:
module = get_module(obj)
faked_mod = _load_faked_module(module)
if faked_mod is None:
return
# Having the module as a `parser.representation.module`, we need to scan
# for methods.
if name is None:
if inspect.isbuiltin(obj):
return search_scope(faked_mod, obj.__name__)
elif not inspect.isclass(obj):
# object is a method or descriptor
cls = search_scope(faked_mod, obj.__objclass__.__name__)
if cls is None:
return
return search_scope(cls, obj.__name__)
else:
if obj == module:
return search_scope(faked_mod, name)
else:
cls = search_scope(faked_mod, obj.__name__)
if cls is None:
return
return search_scope(cls, name)
def get_faked(module, obj, name=None):
obj = obj.__class__ if is_class_instance(obj) else obj
result = _faked(module, obj, name)
if not isinstance(result, Class) and result is not None:
# Set the docstr which was previously not set (faked modules don't
# contain it).
result.docstr = None
if obj.__doc__:
result.docstr = token_pr.TokenDocstring.fake_docstring(obj.__doc__)
return result
def is_class_instance(obj):
"""Like inspect.* methods."""
return not (inspect.isclass(obj) or inspect.ismodule(obj)
or inspect.isbuiltin(obj) or inspect.ismethod(obj)
or inspect.ismethoddescriptor(obj) or inspect.iscode(obj)
or inspect.isgenerator(obj))

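The mechanism above boils down to: parse a Python stub that mirrors a C module, then look up class and function definitions in it by name. A rough standalone analogue using the stdlib ``ast`` module instead of jedi's own parser (the stub source is an invented miniature of ``fake/builtins.pym``):

import ast

FAKE_BUILTINS = '''
class list():
    def pop(self):
        return self.__iterable[-1]

def next(iterator, default=None):
    return iterator.__next__()
'''

def search_scope(node, obj_name):
    """Return the class/function definition called ``obj_name``, if any."""
    for child in node.body:
        if isinstance(child, (ast.ClassDef, ast.FunctionDef)) \
                and child.name == obj_name:
            return child

module = ast.parse(FAKE_BUILTINS)
faked_list = search_scope(module, 'list')
print(search_scope(faked_list, 'pop').name)   # 'pop' -> the faked list.pop
print(search_scope(module, 'next').name)      # 'next'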

@@ -0,0 +1,9 @@
class partial():
def __init__(self, func, *args, **keywords):
self.__func = func
self.__args = args
self.__keywords = keywords
def __call__(self, *args, **kwargs):
# I know this doesn't work in Python, but Jedi can handle this ;-)
return self.__func(*self.__args, *args, **self.__keywords, **kwargs)


@@ -0,0 +1,26 @@
def connect(database, timeout=None, isolation_level=None, detect_types=None, factory=None):
return Connection()
class Connection():
def cursor(self):
return Cursor()
class Cursor():
def cursor(self):
return Cursor()
def fetchone(self):
return Row()
def fetchmany(self, size=cursor.arraysize):
return [self.fetchone()]
def fetchall(self):
return [self.fetchone()]
class Row():
def keys(self):
return ['']


@@ -0,0 +1,99 @@
def compile():
class SRE_Match():
endpos = 1
lastgroup = 0
lastindex = 1
pos = 0
string = 'a'
regs = ((0, 1),)
def __init__(self, pattern):
self.re = pattern
def start(self):
return 0
def end(self):
return 1
def span(self):
return 0, 1
def expand(self):
return ''
def group(self, nr):
return ''
def groupdict(self):
return {'a': 'a'}
def groups(self):
return ('a',)
class SRE_Pattern():
flags = 0
groupindex = {}
groups = 0
pattern = 'a'
def findall(self, string, pos=None, endpos=None):
"""
findall(string[, pos[, endpos]]) --> list.
Return a list of all non-overlapping matches of pattern in string.
"""
return ['a']
def finditer(self, string, pos=None, endpos=None):
"""
finditer(string[, pos[, endpos]]) --> iterator.
Return an iterator over all non-overlapping matches for the
RE pattern in string. For each match, the iterator returns a
match object.
"""
yield SRE_Match(self)
def match(self, string, pos=None, endpos=None):
"""
match(string[, pos[, endpos]]) --> match object or None.
Matches zero or more characters at the beginning of the string
pattern
"""
return SRE_Match(self)
def scanner(self, string, pos=None, endpos=None):
pass
def search(self, string, pos=None, endpos=None):
"""
search(string[, pos[, endpos]]) --> match object or None.
Scan through string looking for a match, and return a corresponding
MatchObject instance. Return None if no position in the string matches.
"""
return SRE_Match(self)
def split(self, string, maxsplit=0):
"""
split(string[, maxsplit = 0]) --> list.
Split string by the occurrences of pattern.
"""
return ['a']
def sub(self, repl, string, count=0):
"""
sub(repl, string[, count = 0]) --> newstring
Return the string obtained by replacing the leftmost non-overlapping
occurrences of pattern in string by the replacement repl.
"""
return ''
def subn(self, repl, string, count=0):
"""
subn(repl, string[, count = 0]) --> (newstring, number of subs)
Return the tuple (new_string, number_of_subs_made) found by replacing
the leftmost non-overlapping occurrences of pattern with the
replacement repl.
"""
return ('', 1)
return SRE_Pattern()


@@ -0,0 +1,8 @@
def proxy(object, callback=None):
return object
class weakref():
def __init__(self, object, callback=None):
self.__object = object
def __call__(self):
return self.__object


@@ -0,0 +1,237 @@
"""
Pure Python implementation of some builtins.
This code is not going to be executed anywhere.
These implementations are not always correct, but should work as well as
possible for the auto completion.
"""
def next(iterator, default=None):
if hasattr("next"):
return iterator.next()
else:
return iterator.__next__()
return default
def iter(collection, sentinel=None):
if sentinel:
yield collection()
else:
for c in collection:
yield c
def range(start, stop=None, step=1):
return [0]
class file():
def __iter__(self):
yield ''
def next(self):
return ''
class xrange():
# Attention: this class doesn't exist in Py3k (there it is called range).
def __iter__(self):
yield 1
def count(self):
return 1
def index(self):
return 1
def open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True):
import io
return io.TextIOWrapper(file, mode, buffering, encoding, errors, newline, closefd)
def open_python2(name, mode=None, buffering=None):
return file(name, mode, buffering)
#--------------------------------------------------------
# descriptors
#--------------------------------------------------------
class property():
def __init__(self, fget, fset=None, fdel=None, doc=None):
self.fget = fget
self.fset = fset
self.fdel = fdel
self.__doc__ = doc
def __get__(self, obj, cls):
return self.fget(obj)
def __set__(self, obj, value):
self.fset(obj, value)
def __delete__(self, obj):
self.fdel(obj)
def setter(self, func):
self.fset = func
return self
def getter(self, func):
self.fget = func
return self
def deleter(self, func):
self.fdel = func
return self
class staticmethod():
def __init__(self, func):
self.__func = func
def __get__(self, obj, cls):
return self.__func
class classmethod():
def __init__(self, func):
self.__func = func
def __get__(self, obj, cls):
def _method(*args, **kwargs):
return self.__func(cls, *args, **kwargs)
return _method
#--------------------------------------------------------
# array stuff
#--------------------------------------------------------
class list():
def __init__(self, iterable=[]):
self.__iterable = []
for i in iterable:
self.__iterable += [i]
def __iter__(self):
for i in self.__iterable:
yield i
def __getitem__(self, y):
return self.__iterable[y]
def pop(self):
return self.__iterable[-1]
class tuple():
def __init__(self, iterable=[]):
self.__iterable = []
for i in iterable:
self.__iterable += [i]
def __iter__(self):
for i in self.__iterable:
yield i
def __getitem__(self, y):
return self.__iterable[y]
def index(self):
return 1
def count(self):
return 1
class set():
def __init__(self, iterable=[]):
self.__iterable = iterable
def __iter__(self):
for i in self.__iterable:
yield i
def pop(self):
return self.__iterable.pop()
def copy(self):
return self
def difference(self, other):
return self - other
def intersection(self, other):
return self & other
def symmetric_difference(self, other):
return self ^ other
def union(self, other):
return self | other
class frozenset():
def __init__(self, iterable=[]):
self.__iterable = iterable
def __iter__(self):
for i in self.__iterable:
yield i
def copy(self):
return self
class dict():
def __init__(self, **elements):
self.__elements = elements
def clear(self):
# has a strange docstr
pass
def get(self, k, d=None):
# TODO implement
try:
#return self.__elements[k]
pass
except KeyError:
return d
class reversed():
def __init__(self, sequence):
self.__sequence = sequence
def __iter__(self):
for i in self.__sequence:
yield i
def __next__(self):
return next(self.__iter__())
def next(self):
return next(self.__iter__())
def sorted(iterable, cmp=None, key=None, reverse=False):
return iterable
#--------------------------------------------------------
# basic types
#--------------------------------------------------------
class int():
def __init__(self, x, base=None):
pass
class str():
def __init__(self, obj):
pass
class type():
def mro():
return [object]


@@ -0,0 +1,4 @@
class datetime():
@staticmethod
def now():
return datetime()


@@ -0,0 +1,3 @@
class TextIOWrapper():
def __next__(self):
return 'hacked io return'


@@ -0,0 +1,5 @@
def getcwd():
return ''
def getcwdu():
return ''

jedi/evaluate/docstrings.py Normal file

@@ -0,0 +1,131 @@
"""
Docstrings are another source of information for functions and classes.
:mod:`dynamic` tries to find all executions of functions, while the docstring
parsing is much easier. There are two different types of docstrings that |jedi|
understands:
- `Sphinx <http://sphinx-doc.org/markup/desc.html#info-field-lists>`_
- `Epydoc <http://epydoc.sourceforge.net/manual-fields.html>`_
For example, the sphinx annotation ``:type foo: str`` clearly states that the
type of ``foo`` is ``str``.
In addition to parameter searching, this module also provides return
annotations.
"""
import re
from jedi.evaluate.cache import memoize_default
from jedi.parser import Parser
DOCSTRING_PARAM_PATTERNS = [
r'\s*:type\s+%s:\s*([^\n]+)', # Sphinx
r'\s*@type\s+%s:\s*([^\n]+)', # Epydoc
]
DOCSTRING_RETURN_PATTERNS = [
re.compile(r'\s*:rtype:\s*([^\n]+)', re.M), # Sphinx
re.compile(r'\s*@rtype:\s*([^\n]+)', re.M), # Epydoc
]
REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
@memoize_default(None, evaluator_is_first_arg=True)
def follow_param(evaluator, param):
func = param.parent_function
# print func, param, param.parent_function
if not func.docstr:
return []
param_str = _search_param_in_docstr(
func.docstr.as_string(),
str(param.get_name())
)
position = (1, 0)
if param_str is not None:
# Try to import module part in dotted name.
# (e.g., 'threading' in 'threading.Thread').
if '.' in param_str:
param_str = 'import %s\n%s' % (
param_str.rsplit('.', 1)[0],
param_str)
position = (2, 0)
p = Parser(param_str, no_docstr=True)
stmt = p.module.get_statement_for_position(position)
if stmt is None:
return []
return evaluator.eval_statement(stmt)
return []
def _search_param_in_docstr(docstr, param_str):
"""
Search `docstr` for a type of `param_str`.
>>> _search_param_in_docstr(':type param: int', 'param')
'int'
>>> _search_param_in_docstr('@type param: int', 'param')
'int'
>>> _search_param_in_docstr(
... ':type param: :class:`threading.Thread`', 'param')
'threading.Thread'
>>> _search_param_in_docstr('no document', 'param') is None
True
"""
# look at #40 to see definitions of those params
patterns = [re.compile(p % re.escape(param_str))
for p in DOCSTRING_PARAM_PATTERNS]
for pattern in patterns:
match = pattern.search(docstr)
if match:
return _strip_rest_role(match.group(1))
return None
def _strip_rest_role(type_str):
"""
Strip off the part that looks like a ReST role in `type_str`.
>>> _strip_rest_role(':class:`ClassName`') # strip off :class:
'ClassName'
>>> _strip_rest_role(':py:obj:`module.Object`') # works with domain
'module.Object'
>>> _strip_rest_role('ClassName') # do nothing when not ReST role
'ClassName'
See also:
http://sphinx-doc.org/domains.html#cross-referencing-python-objects
"""
match = REST_ROLE_PATTERN.match(type_str)
if match:
return match.group(1)
else:
return type_str
def find_return_types(evaluator, func):
def search_return_in_docstr(code):
for p in DOCSTRING_RETURN_PATTERNS:
match = p.search(code)
if match:
return match.group(1)
if not func.docstr:
return []
type_str = search_return_in_docstr(func.docstr.as_string())
if not type_str:
return []
p = Parser(type_str, None, no_docstr=True)
stmt = p.module.get_statement_for_position((1, 0))
if stmt is None:
return []
stmt.parent = func
return list(evaluator.eval_statement(stmt))

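To illustrate the parsing described above, here is a standalone re-implementation of the parameter lookup on an assumed example docstring; it reuses the same Sphinx/Epydoc patterns and the ReST-role stripping, but returns the type as a plain string instead of feeding it to jedi's ``Parser``:

import re

DOCSTRING_PARAM_PATTERNS = [
    r'\s*:type\s+%s:\s*([^\n]+)',   # Sphinx
    r'\s*@type\s+%s:\s*([^\n]+)',   # Epydoc
]

doc = """Connect to a host.

:type host: str
:type thread: :class:`threading.Thread`
:rtype: bool
"""

def param_type(docstr, name):
    for pattern in DOCSTRING_PARAM_PATTERNS:
        match = re.search(pattern % re.escape(name), docstr)
        if match:
            # strip a ReST role such as :class:`...`
            return re.sub(r':[^`]+:`([^`]+)`', r'\1', match.group(1))

print(param_type(doc, 'host'))    # str
print(param_type(doc, 'thread'))  # threading.Thread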
jedi/evaluate/dynamic.py Normal file

@@ -0,0 +1,185 @@
"""
To understand Python on a deeper level, |jedi| needs to understand some of the
dynamic features of Python; however, this is probably the most complicated part:
- Array modifications (e.g. ``list.append``)
- Parameter completion in functions
- Flow checks (e.g. ``if isinstance(a, str)`` -> a is a str)
Array modifications
*******************
If the content of an array (``set``/``list``) is wanted somewhere, the current
module will be checked for appearances of ``arr.append``, ``arr.insert``, etc.
If the ``arr`` name points to an actual array, the content will be added.
This can be really CPU intensive, as you can imagine, because |jedi| has to
follow **every** ``append`` call. However, this works pretty well, because in
*slow* cases the recursion detector and other settings will stop this process.
It is important to note that:
1. Array modifications work only in the current module.
2. Only array additions are checked; ``list.pop``, etc. are ignored.
Parameter completion
********************
One of the really important features of |jedi| is to have an option to
understand code like this::
def foo(bar):
bar. # completion here
foo(1)
There's no doubt that ``bar`` is an ``int`` here, but if there's also a call
like ``foo('str')``, what would happen? Well, we'll just show both, because
that's what a human would expect.
It works as follows:
- A param is being encountered
- search for function calls named ``foo``
- execute these calls and check the injected params. This works with a
``ParamListener``.
Flow checks
***********
Flow checks are not really mature. There's only a check for ``isinstance``. It
checks whether a flow has the form of ``if isinstance(a, type_or_tuple)``.
Unfortunately everything else is ignored (e.g. ``a == ''`` would be easy to
check for -> a is a string). There's big potential in these checks.
"""
from jedi.parser import representation as pr
from jedi import settings
from jedi.evaluate import helpers
from jedi.evaluate.cache import memoize_default
from jedi.evaluate import imports
# This is something like the sys.path, but only for searching params. It means
# that this is the order in which Jedi searches params.
search_param_modules = ['.']
class ParamListener(object):
"""
This listener is used to get the params for a function.
"""
def __init__(self):
self.param_possibilities = []
def execute(self, params):
self.param_possibilities.append(params)
@memoize_default([], evaluator_is_first_arg=True)
def search_params(evaluator, param):
"""
This is a dynamic search for params. If you try to complete a type:
>>> def func(foo):
... foo
>>> func(1)
>>> func("")
It is not known what the type is, because it cannot be guessed with
recursive madness. Therefore one has to analyse the statements that call
the function, as well as the incoming params.
"""
if not settings.dynamic_params:
return []
def get_params_for_module(module):
"""
Returns the values of a param, or an empty array.
"""
@memoize_default([], evaluator_is_first_arg=True)
def get_posibilities(evaluator, module, func_name):
try:
possible_stmts = module.used_names[func_name]
except KeyError:
return []
for stmt in possible_stmts:
if isinstance(stmt, pr.Import):
continue
calls = helpers.scan_statement_for_calls(stmt, func_name)
for c in calls:
# no execution means that params cannot be set
call_path = list(c.generate_call_path())
pos = c.start_pos
scope = stmt.parent
# this whole stuff is just to not execute certain parts
# (speed improvement), basically we could just call
# ``eval_call_path`` on the call_path and it would
# also work.
def listRightIndex(lst, value):
return len(lst) - lst[-1::-1].index(value) - 1
# Need to take right index, because there could be a
# func usage before.
i = listRightIndex(call_path, func_name)
first, last = call_path[:i], call_path[i + 1:]
if not last and not call_path.index(func_name) != i:
continue
scopes = [scope]
if first:
scopes = evaluator.eval_call_path(iter(first), scope, pos)
pos = None
from jedi.evaluate import representation as er
for scope in scopes:
s = evaluator.find_types(scope, func_name, position=pos,
search_global=not first,
resolve_decorator=False)
c = [getattr(escope, 'base_func', None) or escope.base
for escope in s
if escope.isinstance(er.Function, er.Class)]
if compare in c:
# only if we have the correct function we execute
# it, otherwise just ignore it.
evaluator.follow_path(iter(last), s, scope)
return listener.param_possibilities
result = []
for params in get_posibilities(evaluator, module, func_name):
for p in params:
if str(p) == param_name:
result += evaluator.eval_statement(p.parent)
return result
func = param.get_parent_until(pr.Function)
current_module = param.get_parent_until()
func_name = str(func.name)
compare = func
if func_name == '__init__' and isinstance(func.parent, pr.Class):
func_name = str(func.parent.name)
compare = func.parent
# get the param name
if param.assignment_details:
# first assignment details, others would be a syntax error
expression_list, op = param.assignment_details[0]
else:
expression_list = param.expression_list()
offset = 1 if expression_list[0] in ['*', '**'] else 0
param_name = str(expression_list[offset].name)
# add the listener
listener = ParamListener()
func.listeners.add(listener)
result = []
# This is like backtracking: Get the first possible result.
for mod in imports.get_modules_containing_name([current_module], func_name):
result = get_params_for_module(mod)
if result:
break
# cleanup: remove the listener; important: should not stick.
func.listeners.remove(listener)
return result

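A much simplified, standalone take on the idea behind ``search_params``: scan the module's source for calls of the function and collect the types of the values passed for one parameter. jedi does this on its own parse tree with a ``ParamListener``; the sketch below only uses the stdlib ``ast`` module and literal arguments, so it illustrates the principle rather than jedi's implementation:

import ast

SOURCE = '''
def foo(bar):
    bar

foo(1)
foo("")
'''

def param_types(source, func_name, param_index=0):
    """Collect the types of literal arguments passed at ``param_index``."""
    types = set()
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Call) and isinstance(node.func, ast.Name) \
                and node.func.id == func_name \
                and len(node.args) > param_index:
            arg = node.args[param_index]
            if isinstance(arg, ast.Constant):
                types.add(type(arg.value).__name__)
    return types

print(param_types(SOURCE, 'foo'))   # {'int', 'str'} -> show both, like jedi would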
jedi/evaluate/finder.py Normal file

@@ -0,0 +1,515 @@
import copy
import sys
from jedi._compatibility import hasattr, unicode, u, reraise
from jedi.parser import representation as pr
from jedi import debug
from jedi import common
from jedi import settings
from jedi.evaluate import representation as er
from jedi.evaluate import dynamic
from jedi.evaluate import compiled
from jedi.evaluate import docstrings
from jedi.evaluate import iterable
from jedi.evaluate import imports
class NameFinder(object):
def __init__(self, evaluator, scope, name_str, position=None):
self._evaluator = evaluator
self.scope = scope
self.name_str = name_str
self.position = position
def find(self, scopes, resolve_decorator=True):
names = self.filter_name(scopes)
types = self._names_to_types(names, resolve_decorator)
debug.dbg('_names_to_types: %s, old: %s', names, types)
return self._resolve_descriptors(types)
def scopes(self, search_global=False):
if search_global:
return get_names_of_scope(self._evaluator, self.scope, self.position)
else:
if isinstance(self.scope, er.Instance):
return self.scope.scope_generator()
else:
if isinstance(self.scope, (er.Class, pr.Module)):
# classes are only available directly via chaining?
# strange stuff...
names = self.scope.get_defined_names()
else:
names = _get_defined_names_for_position(self.scope, self.position)
return iter([(self.scope, names)])
def filter_name(self, scope_generator):
"""
Filters all variables of a scope (which are defined in the
`scope_generator`), until the name fits.
"""
result = []
for nscope, name_list in scope_generator:
break_scopes = []
# here is the position stuff happening (sorting of variables)
for name in sorted(name_list, key=lambda n: n.start_pos, reverse=True):
p = name.parent.parent if name.parent else None
if isinstance(p, er.InstanceElement) \
and isinstance(p.var, pr.Class):
p = p.var
if self.name_str == name.get_code() and p not in break_scopes:
if not self._name_is_array_assignment(name):
result.append(name) # `arr[1] =` is not the definition
# for comparison we need the raw class
s = nscope.base if isinstance(nscope, er.Class) else nscope
# this means that a definition was found and is not e.g.
# in if/else.
if result and not self._name_is_no_break_scope(name):
if not name.parent or p == s:
break
break_scopes.append(p)
if result:
break
if not result and isinstance(self.scope, er.Instance):
# __getattr__ / __getattribute__
for r in self._check_getattr(self.scope):
if not isinstance(r, compiled.CompiledObject):
new_name = copy.copy(r.name)
new_name.parent = r
result.append(new_name)
debug.dbg('sfn filter "%s" in (%s-%s): %s@%s', self.name_str,
self.scope, nscope, u(result), self.position)
return result
def _check_getattr(self, inst):
"""Checks for both __getattr__ and __getattribute__ methods"""
result = []
# str is important to lose the NamePart!
name = compiled.create(str(self.name_str))
with common.ignored(KeyError):
result = inst.execute_subscope_by_name('__getattr__', [name])
if not result:
# this is a little bit special. `__getattribute__` is executed
# before anything else. But I know of no use case where this
# could be practical and jedi would return wrong types. If
# you ever have something, let me know!
with common.ignored(KeyError):
result = inst.execute_subscope_by_name('__getattribute__', [name])
return result
def _name_is_no_break_scope(self, name):
"""
Returns True if finding this name should not stop the name search
(e.g. augmented assignments like ``+=`` or multi-level imports).
"""
par = name.parent
if par.isinstance(pr.Statement):
details = par.assignment_details
if details and details[0][1] != '=':
return True
if isinstance(name, er.InstanceElement) \
and not name.is_class_var:
return True
elif isinstance(par, pr.Import) and len(par.namespace) > 1:
# TODO multi-level import non-breakable
return True
return False
def _name_is_array_assignment(self, name):
if name.parent.isinstance(pr.Statement):
def is_execution(calls):
for c in calls:
if isinstance(c, (unicode, str)):
continue
if c.isinstance(pr.Array):
if is_execution(c):
return True
elif c.isinstance(pr.Call):
# Compare start_pos, because names may be different
# because of executions.
if c.name.start_pos == name.start_pos \
and c.execution:
return True
return False
is_exe = False
for assignee, op in name.parent.assignment_details:
is_exe |= is_execution(assignee)
if is_exe:
# filter array[3] = ...
# TODO check executions for dict contents
return True
return False
def _names_to_types(self, names, resolve_decorator):
types = []
# Add isinstance and other if/assert knowledge.
flow_scope = self.scope
while flow_scope:
# TODO check if result is in scope -> no evaluation necessary
n = check_flow_information(self._evaluator, flow_scope,
self.name_str, self.position)
if n:
return n
flow_scope = flow_scope.parent
for name in names:
typ = name.parent
if typ.isinstance(pr.ForFlow):
types += self._handle_for_loops(typ)
elif isinstance(typ, pr.Param):
types += self._eval_param(typ)
elif typ.isinstance(pr.Statement):
types += self._remove_statements(typ)
else:
if isinstance(typ, pr.Class):
typ = er.Class(self._evaluator, typ)
elif isinstance(typ, pr.Function):
typ = er.Function(self._evaluator, typ)
if typ.isinstance(er.Function) and resolve_decorator:
typ = typ.get_decorated_func()
types.append(typ)
return types
def _remove_statements(self, stmt):
"""
This is the part where statements are being stripped.
Due to lazy evaluation, statements like a = func; b = a; b() have to be
evaluated.
"""
evaluator = self._evaluator
types = []
if stmt.is_global():
# global keyword handling.
for token_name in stmt.token_list[1:]:
if isinstance(token_name, pr.Name):
return evaluator.find_types(stmt.parent, str(token_name))
else:
# Remove the statement docstr stuff for now, that has to be
# implemented with the evaluator class.
#if stmt.docstr:
#res_new.append(stmt)
check_instance = None
if isinstance(stmt, er.InstanceElement) and stmt.is_class_var:
check_instance = stmt.instance
stmt = stmt.var
types += evaluator.eval_statement(stmt, seek_name=self.name_str)
if check_instance is not None:
# class renames
types = [er.InstanceElement(evaluator, check_instance, a, True)
if isinstance(a, (er.Function, pr.Function))
else a for a in types]
return types
def _eval_param(self, r):
evaluator = self._evaluator
res_new = []
func = r.parent
cls = func.parent.get_parent_until((pr.Class, pr.Function))
if isinstance(cls, pr.Class) and r.position_nr == 0:
# This is where we add self - if it has never been
# instantiated.
if isinstance(self.scope, er.InstanceElement):
res_new.append(self.scope.instance)
else:
for inst in evaluator.execute(er.Class(evaluator, cls)):
inst.is_generated = True
res_new.append(inst)
return res_new
# Instances are typically faked, if the instance is not called from
# outside. Here we check it for __init__ functions and return.
if isinstance(func, er.InstanceElement) \
and func.instance.is_generated and str(func.name) == '__init__':
r = func.var.params[r.position_nr]
# Add docstring knowledge.
doc_params = docstrings.follow_param(evaluator, r)
if doc_params:
return doc_params
if not r.is_generated:
# Param owns no information itself.
res_new += dynamic.search_params(evaluator, r)
if not res_new:
c = r.expression_list()[0]
if c in ('*', '**'):
t = 'tuple' if c == '*' else 'dict'
typ = evaluator.find_types(compiled.builtin, t)[0]
res_new = evaluator.execute(typ)
if not r.assignment_details:
# this means that there are no default params,
# so just ignore it.
return res_new
return set(res_new) | evaluator.eval_statement(r, seek_name=self.name_str)
def _handle_for_loops(self, loop):
# Take the first statement (a for loop always has only
# one, remember `in`) and follow it.
if not loop.inputs:
return []
result = iterable.get_iterator_types(self._evaluator.eval_statement(loop.inputs[0]))
if len(loop.set_vars) > 1:
expression_list = loop.set_stmt.expression_list()
# loops with more than one set_var only have one command
result = _assign_tuples(expression_list[0], result, self.name_str)
return result
def _resolve_descriptors(self, types):
"""Processes descriptors"""
result = []
for r in types:
if isinstance(self.scope, (er.Instance, er.Class)) \
and hasattr(r, 'get_descriptor_return'):
# handle descriptors
with common.ignored(KeyError):
result += r.get_descriptor_return(self.scope)
continue
result.append(r)
return result
def check_flow_information(evaluator, flow, search_name, pos):
""" Try to find out the type of a variable just with the information that
is given by the flows (e.g. ``if``). It is also responsible for assert checks::
if isinstance(k, str):
k. # <- completion here
ensures that `k` is a string.
"""
if not settings.dynamic_flow_information:
return None
result = []
if isinstance(flow, pr.IsScope) and not result:
for ass in reversed(flow.asserts):
if pos is None or ass.start_pos > pos:
continue
result = _check_isinstance_type(evaluator, ass, search_name)
if result:
break
if isinstance(flow, pr.Flow) and not result:
if flow.command in ['if', 'while'] and len(flow.inputs) == 1:
result = _check_isinstance_type(evaluator, flow.inputs[0], search_name)
return result
def _check_isinstance_type(evaluator, stmt, search_name):
try:
expression_list = stmt.expression_list()
# this might be removed if we analyze and, etc
assert len(expression_list) == 1
call = expression_list[0]
assert isinstance(call, pr.Call) and str(call.name) == 'isinstance'
assert bool(call.execution)
# isinstance check
isinst = call.execution.values
assert len(isinst) == 2 # has two params
obj, classes = [statement.expression_list() for statement in isinst]
assert len(obj) == 1
assert len(classes) == 1
assert isinstance(obj[0], pr.Call)
# names fit?
assert str(obj[0].name) == search_name
assert isinstance(classes[0], pr.StatementElement) # can be type or tuple
except AssertionError:
return []
result = []
for c in evaluator.eval_call(classes[0]):
for typ in (c.get_index_types() if isinstance(c, iterable.Array) else [c]):
result += evaluator.execute(typ)
return result
def _get_defined_names_for_position(scope, position=None, start_scope=None):
"""
Return filtered version of ``scope.get_defined_names()``.
This function basically does what :meth:`scope.get_defined_names
<parsing_representation.Scope.get_defined_names>` does.
- If `position` is given, delete all names defined after `position`.
- For special objects like instances, `position` is ignored and all
names are returned.
:type scope: :class:`parsing_representation.IsScope`
:param scope: Scope in which names are searched.
:param position: The position as a line/column tuple, default is infinity.
"""
names = scope.get_defined_names()
# Instances have special rules, always return all the possible completions,
# because class variables are always valid and the `self.` variables, too.
if (not position or isinstance(scope, (iterable.Array, er.Instance))
or start_scope != scope
and isinstance(start_scope, (pr.Function, er.FunctionExecution))):
return names
names_new = []
for n in names:
if n.start_pos[0] is not None and n.start_pos < position:
names_new.append(n)
return names_new
def get_names_of_scope(evaluator, scope, position=None, star_search=True, include_builtin=True):
"""
Get all completions (names) possible for the current scope. The star search
option is only here to provide an optimization. Otherwise the whole thing
would probably start a little recursive madness.
This function is used to include names from outer scopes. For example, when
the current scope is a function:
>>> from jedi.parser import Parser
>>> parser = Parser('''
... x = ['a', 'b', 'c']
... def func():
... y = None
... ''')
>>> scope = parser.module.subscopes[0]
>>> scope
<Function: func@3-4>
`get_names_of_scope` is a generator. First it yields names from most inner
scope.
>>> from jedi.evaluate import Evaluator
>>> pairs = list(get_names_of_scope(Evaluator(), scope))
>>> pairs[0]
(<Function: func@3-4>, [<Name: y@4,4>])
Then it yields the names from the next outer scope. For this example, this
is the outermost scope.
>>> pairs[1]
(<SubModule: None@1-4>, [<Name: x@2,0>, <Name: func@3,4>])
Finally, it yields names from builtin, if `include_builtin` is
true (default).
>>> pairs[2] #doctest: +ELLIPSIS
(<Builtin: ...builtin...>, [<CompiledName: ...>, ...])
:rtype: [(pr.Scope, [pr.Name])]
:return: Return a generator that yields pairs of scope and names.
"""
in_func_scope = scope
non_flow = scope.get_parent_until(pr.Flow, reverse=True)
while scope:
if isinstance(scope, pr.SubModule) and scope.parent:
# we don't want submodules to report if we have modules.
scope = scope.parent
continue
# `pr.Class` is used, because the parent is never `Class`.
# Ignore the Flows, because the classes and functions care for that.
# InstanceElement of Class is ignored, if it is not the start scope.
if not (scope != non_flow and scope.isinstance(pr.Class)
or scope.isinstance(pr.Flow)
or scope.isinstance(er.Instance)
and non_flow.isinstance(er.Function)
or isinstance(scope, compiled.CompiledObject)
and scope.type() == 'class' and in_func_scope != scope):
try:
if isinstance(scope, er.Instance):
for g in scope.scope_generator():
yield g
else:
yield scope, _get_defined_names_for_position(scope, position, in_func_scope)
except StopIteration:
reraise(common.MultiLevelStopIteration, sys.exc_info()[2])
if scope.isinstance(pr.ForFlow) and scope.is_list_comp:
# is a list comprehension
yield scope, scope.get_set_vars(is_internal_call=True)
scope = scope.parent
# This is used, because subscopes (Flow scopes) would distort the
# results.
if scope and scope.isinstance(er.Function, pr.Function, er.FunctionExecution):
in_func_scope = scope
# Add star imports.
if star_search:
for s in imports.remove_star_imports(evaluator, non_flow.get_parent_until()):
for g in get_names_of_scope(evaluator, s, star_search=False):
yield g
# Add builtins to the global scope.
if include_builtin:
yield compiled.builtin, compiled.builtin.get_defined_names()
def _assign_tuples(tup, results, seek_name):
"""
This is a normal assignment checker. In Python, functions and other things
can return tuples:
>>> a, b = 1, ""
>>> a, (b, c) = 1, ("", 1.0)
Here, if `seek_name` is "a", the number type will be returned.
The first part (before `=`) is the tuple of assignment targets, the second one the result.
:type tup: pr.Array
"""
def eval_results(index):
types = []
for r in results:
try:
func = r.get_exact_index_types
except AttributeError:
debug.warning("invalid tuple lookup %s of result %s in %s",
tup, results, seek_name)
else:
with common.ignored(IndexError):
types += func(index)
return types
result = []
for i, stmt in enumerate(tup):
# Used in assignments. There is just one call and no other things,
# therefore we can just assume that the first part is important.
command = stmt.expression_list()[0]
if tup.type == pr.Array.NOARRAY:
# unnecessary braces -> just remove.
r = results
else:
r = eval_results(i)
# LHS of tuples can be nested, so resolve it recursively
result += find_assignments(command, r, seek_name)
return result
def find_assignments(lhs, results, seek_name):
"""
Check if `seek_name` is in the left hand side `lhs` of assignment.
`lhs` can simply be a variable (`pr.Call`) or a tuple/list (`pr.Array`)
representing the following cases::
a = 1 # lhs is pr.Call
(a, b) = 2 # lhs is pr.Array
:type lhs: pr.Call
:type results: list
:type seek_name: str
"""
if isinstance(lhs, pr.Array):
return _assign_tuples(lhs, results, seek_name)
elif lhs.name.names[-1] == seek_name:
return results
else:
return []

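The tuple-assignment handling of ``_assign_tuples``/``find_assignments`` can be pictured with a small standalone sketch that works on plain nested tuples instead of jedi's parse tree (names on the left, already evaluated values on the right):

def find_assignments(lhs, results, seek_name):
    """Return the values assigned to ``seek_name`` inside a (nested) tuple."""
    if isinstance(lhs, tuple):
        found = []
        for i, part in enumerate(lhs):
            # LHS tuples can be nested, so resolve them recursively.
            found += find_assignments(part, results[i], seek_name)
        return found
    elif lhs == seek_name:
        return [results]
    return []

# a, (b, c) = 1, ("", 1.0)
lhs = ('a', ('b', 'c'))
rhs = (1, ("", 1.0))
print(find_assignments(lhs, rhs, 'a'))  # [1]
print(find_assignments(lhs, rhs, 'c'))  # [1.0]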
jedi/evaluate/helpers.py Normal file

@@ -0,0 +1,175 @@
import copy
from jedi import common
from jedi.parser import representation as pr
def fast_parent_copy(obj):
"""
Much, much faster than copy.deepcopy, but just for certain elements.
"""
new_elements = {}
def recursion(obj):
if isinstance(obj, pr.Statement):
# Need to set _set_vars, otherwise the cache is not working
# correctly, don't know why.
obj.get_set_vars()
new_obj = copy.copy(obj)
new_elements[obj] = new_obj
try:
items = list(new_obj.__dict__.items())
except AttributeError:
# __dict__ not available, because of __slots__
items = []
before = ()
for cls in new_obj.__class__.__mro__:
with common.ignored(AttributeError):
if before == cls.__slots__:
continue
before = cls.__slots__
items += [(n, getattr(new_obj, n)) for n in before]
for key, value in items:
# replace parent (first try _parent and then parent)
if key in ['parent', '_parent'] and value is not None:
if key == 'parent' and '_parent' in items:
# parent can be a property
continue
with common.ignored(KeyError):
setattr(new_obj, key, new_elements[value])
elif key in ['parent_function', 'use_as_parent', '_sub_module']:
continue
elif isinstance(value, list):
setattr(new_obj, key, list_rec(value))
elif isinstance(value, pr.Simple):
setattr(new_obj, key, recursion(value))
return new_obj
def list_rec(list_obj):
copied_list = list_obj[:] # lists, tuples, strings, unicode
for i, el in enumerate(copied_list):
if isinstance(el, pr.Simple):
copied_list[i] = recursion(el)
elif isinstance(el, list):
copied_list[i] = list_rec(el)
return copied_list
return recursion(obj)
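
A rough sketch of the idea behind `fast_parent_copy`, using a hypothetical `Node` class instead of the real parser elements: nodes are shallow-copied and their `parent` pointers are rewired to copies that already exist, which is what makes this much cheaper than `copy.deepcopy`.

import copy

class Node(object):
    # Toy stand-in for a parser element (hypothetical, not pr.Simple).
    def __init__(self, name, parent=None, children=()):
        self.name, self.parent, self.children = name, parent, list(children)

def toy_parent_copy(node, new_elements=None):
    new_elements = {} if new_elements is None else new_elements
    new_node = copy.copy(node)                 # shallow copy only
    new_elements[node] = new_node
    if node.parent in new_elements:            # rewire parent to an existing copy
        new_node.parent = new_elements[node.parent]
    new_node.children = [toy_parent_copy(c, new_elements) for c in node.children]
    return new_node

root = Node('root')
root.children = [Node('child', parent=root)]
copied = toy_parent_copy(root)
assert copied is not root and copied.children[0].parent is copied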
def array_for_pos(stmt, pos, array_types=None):
"""Searches for the array and position of a tuple"""
def search_array(arr, pos):
if arr.type == 'dict':
for stmt in arr.values + arr.keys:
new_arr, index = array_for_pos(stmt, pos, array_types)
if new_arr is not None:
return new_arr, index
else:
for i, stmt in enumerate(arr):
new_arr, index = array_for_pos(stmt, pos, array_types)
if new_arr is not None:
return new_arr, index
if arr.start_pos < pos <= stmt.end_pos:
if not array_types or arr.type in array_types:
return arr, i
if len(arr) == 0 and arr.start_pos < pos < arr.end_pos:
if not array_types or arr.type in array_types:
return arr, 0
return None, 0
def search_call(call, pos):
arr, index = None, 0
if call.next is not None:
if isinstance(call.next, pr.Array):
arr, index = search_array(call.next, pos)
else:
arr, index = search_call(call.next, pos)
if not arr and call.execution is not None:
arr, index = search_array(call.execution, pos)
return arr, index
if stmt.start_pos >= pos >= stmt.end_pos:
return None, 0
for command in stmt.expression_list():
arr = None
if isinstance(command, pr.Array):
arr, index = search_array(command, pos)
elif isinstance(command, pr.StatementElement):
arr, index = search_call(command, pos)
if arr is not None:
return arr, index
return None, 0
def search_call_signatures(stmt, pos):
"""
Returns the function Call that matches the position just before ``pos``.
"""
# some parts of the statement will be removed
stmt = fast_parent_copy(stmt)
arr, index = array_for_pos(stmt, pos, [pr.Array.TUPLE, pr.Array.NOARRAY])
if arr is not None and isinstance(arr.parent, pr.StatementElement):
call = arr.parent
while isinstance(call.parent, pr.StatementElement):
call = call.parent
arr.parent.execution = None
return call if isinstance(call, pr.Call) else None, index, False
return None, 0, False
def scan_statement_for_calls(stmt, search_name, assignment_details=False):
""" Returns the function Calls that match search_name in an Array. """
def scan_array(arr, search_name):
result = []
if arr.type == pr.Array.DICT:
for key_stmt, value_stmt in arr.items():
result += scan_statement_for_calls(key_stmt, search_name)
result += scan_statement_for_calls(value_stmt, search_name)
else:
for stmt in arr:
result += scan_statement_for_calls(stmt, search_name)
return result
check = list(stmt.expression_list())
if assignment_details:
for expression_list, op in stmt.assignment_details:
check += expression_list
result = []
for c in check:
if isinstance(c, pr.Array):
result += scan_array(c, search_name)
elif isinstance(c, pr.Call):
s_new = c
while s_new is not None:
n = s_new.name
if isinstance(n, pr.Name) and search_name in n.names:
result.append(c)
if s_new.execution is not None:
result += scan_array(s_new.execution, search_name)
s_new = s_new.next
return result
class FakeSubModule():
line_offset = 0
class FakeStatement(pr.Statement):
def __init__(self, expression_list, start_pos=(0, 0)):
p = start_pos
super(FakeStatement, self).__init__(FakeSubModule, expression_list, p, p)
self._expression_list = expression_list
class FakeName(pr.Name):
def __init__(self, name, parent=None):
p = 0, 0
super(FakeName, self).__init__(FakeSubModule, [(name, p)], p, p, parent)

460
jedi/evaluate/imports.py Normal file
View File

@@ -0,0 +1,460 @@
"""
:mod:`imports` is here to resolve import statements and return the
modules/classes/functions/whatever they stand for. However, no actual
importing is done. This module is about finding modules in the
filesystem. This can be quite tricky sometimes, because Python imports are not
always that simple.
This module uses imp for python up to 3.2 and importlib for python 3.3 on; the
correct implementation is delegated to _compatibility.
This module also supports import autocompletion, which means to complete
statements like ``from datetim`` (cursor at the end would return ``datetime``).
"""
import os
import pkgutil
import sys
import itertools
from jedi._compatibility import find_module
from jedi import common
from jedi import debug
from jedi import cache
from jedi.parser import fast
from jedi.parser import representation as pr
from jedi.evaluate import sys_path
from jedi import settings
from jedi.common import source_to_unicode
from jedi.evaluate import compiled
class ModuleNotFound(Exception):
pass
class ImportPath(pr.Base):
"""
An ImportPath is the path of a `pr.Import` object.
"""
class GlobalNamespace(object):
def __init__(self):
self.line_offset = 0
GlobalNamespace = GlobalNamespace()
def __init__(self, evaluator, import_stmt, is_like_search=False, kill_count=0,
direct_resolve=False, is_just_from=False):
self._evaluator = evaluator
self.import_stmt = import_stmt
self.is_like_search = is_like_search
self.direct_resolve = direct_resolve
self.is_just_from = is_just_from
self.is_partial_import = bool(max(0, kill_count))
path = import_stmt.get_parent_until().path
self.file_path = os.path.dirname(path) if path is not None else None
# rest is import_path resolution
self.import_path = []
if import_stmt.from_ns:
self.import_path += import_stmt.from_ns.names
if import_stmt.namespace:
if self._is_nested_import() and not direct_resolve:
self.import_path.append(import_stmt.namespace.names[0])
else:
self.import_path += import_stmt.namespace.names
for i in range(kill_count + int(is_like_search)):
self.import_path.pop()
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, self.import_stmt)
def _is_nested_import(self):
"""
This checks for the special case of nested imports, without aliases and
without a ``from`` statement::
import foo.bar
"""
return not self.import_stmt.alias and not self.import_stmt.from_ns \
and len(self.import_stmt.namespace.names) > 1 \
and not self.direct_resolve
def _get_nested_import(self, parent):
"""
See documentation of `self._is_nested_import`.
Generates an Import statement that can be used to fake nested imports.
"""
i = self.import_stmt
# This is not an existing Import statement. Therefore, set position to
# 0 (0 is not a valid line number).
zero = (0, 0)
names = i.namespace.names[1:]
n = pr.Name(i._sub_module, names, zero, zero, self.import_stmt)
new = pr.Import(i._sub_module, zero, zero, n)
new.parent = parent
debug.dbg('Generated a nested import: %s', new)
return new
def get_defined_names(self, on_import_stmt=False):
names = []
for scope in self.follow():
if scope is ImportPath.GlobalNamespace:
if self._is_relative_import() == 0:
names += self._get_module_names()
if self.file_path is not None:
path = os.path.abspath(self.file_path)
for i in range(self.import_stmt.relative_count - 1):
path = os.path.dirname(path)
names += self._get_module_names([path])
if self._is_relative_import():
rel_path = self._get_relative_path() + '/__init__.py'
if os.path.exists(rel_path):
m = load_module(rel_path)
names += m.get_defined_names()
else:
if on_import_stmt and isinstance(scope, pr.Module) \
and scope.path.endswith('__init__.py'):
pkg_path = os.path.dirname(scope.path)
paths = self._namespace_packages(pkg_path, self.import_path)
names += self._get_module_names([pkg_path] + paths)
if self.is_just_from:
# In the case of an import like `from x.` we don't need to
# add all the variables.
if ['os'] == self.import_path and not self._is_relative_import():
# os.path is a hardcoded exception, because it's a
# ``sys.modules`` modification.
p = (0, 0)
names.append(pr.Name(self.GlobalNamespace, [('path', p)],
p, p, self.import_stmt))
continue
from jedi.evaluate import finder
for s, scope_names in finder.get_names_of_scope(self._evaluator,
scope, include_builtin=False):
for n in scope_names:
if self.import_stmt.from_ns is None \
or self.is_partial_import:
# from_ns must be defined to access module
# values; in addition, a partial import means that
# there is something after the import, which
# automatically implies that there must not be
# any non-module scope.
continue
names.append(n)
return names
def _get_module_names(self, search_path=None):
"""
Get the names of all modules in the search_path. This means file names
and not names defined in the files.
"""
def generate_name(name):
return pr.Name(self.GlobalNamespace, [(name, inf_pos)],
inf_pos, inf_pos, self.import_stmt)
names = []
inf_pos = float('inf'), float('inf')
# add builtin module names
if search_path is None:
names += [generate_name(name) for name in sys.builtin_module_names]
if search_path is None:
search_path = self._sys_path_with_modifications()
for module_loader, name, is_pkg in pkgutil.iter_modules(search_path):
names.append(generate_name(name))
return names
def _sys_path_with_modifications(self):
# If you edit e.g. gunicorn, there will be imports like this:
# `from gunicorn import something`. But gunicorn is not in the
# sys.path. Therefore look if gunicorn is a parent directory, #56.
in_path = []
if self.import_path:
parts = self.file_path.split(os.path.sep)
for i, p in enumerate(parts):
if p == self.import_path[0]:
new = os.path.sep.join(parts[:i])
in_path.append(new)
module = self.import_stmt.get_parent_until()
return in_path + sys_path.sys_path_with_modifications(module)
def follow(self, is_goto=False):
"""
Returns the imported modules.
"""
if self._evaluator.recursion_detector.push_stmt(self.import_stmt):
# check recursion
return []
if self.import_path:
try:
scope, rest = self._follow_file_system()
except ModuleNotFound:
debug.warning('Module not found: %s', self.import_stmt)
self._evaluator.recursion_detector.pop_stmt()
return []
scopes = [scope]
scopes += remove_star_imports(self._evaluator, scope)
# follow the rest of the import (not FS -> classes, functions)
if len(rest) > 1 or rest and self.is_like_search:
scopes = []
if ['os', 'path'] == self.import_path[:2] \
and not self._is_relative_import():
# This is a huge exception, we follow a nested import
# ``os.path``, because it's a very important one in Python
# that is being achieved by messing with ``sys.modules`` in
# ``os``.
scopes = self._evaluator.follow_path(iter(rest), [scope], scope)
elif rest:
if is_goto:
scopes = itertools.chain.from_iterable(
self._evaluator.find_types(s, rest[0], is_goto=True)
for s in scopes)
else:
scopes = itertools.chain.from_iterable(
self._evaluator.follow_path(iter(rest), [s], s)
for s in scopes)
scopes = list(scopes)
if self._is_nested_import():
scopes.append(self._get_nested_import(scope))
else:
scopes = [ImportPath.GlobalNamespace]
debug.dbg('after import: %s', scopes)
self._evaluator.recursion_detector.pop_stmt()
return scopes
def _is_relative_import(self):
return bool(self.import_stmt.relative_count)
def _get_relative_path(self):
path = self.file_path
for i in range(self.import_stmt.relative_count - 1):
path = os.path.dirname(path)
return path
def _namespace_packages(self, found_path, import_path):
"""
Returns a list of paths of possible ``pkgutil``/``pkg_resources``
namespaces. If the package is not a namespace package, an empty list is
returned.
"""
def follow_path(directories, paths):
try:
directory = next(directories)
except StopIteration:
return paths
else:
deeper_paths = []
for p in paths:
new = os.path.join(p, directory)
if os.path.isdir(new) and new != found_path:
deeper_paths.append(new)
return follow_path(directories, deeper_paths)
with open(os.path.join(found_path, '__init__.py')) as f:
content = f.read()
# these are strings that need to be used for namespace packages,
# the first one is ``pkgutil``, the second ``pkg_resources``.
options = 'declare_namespace(__name__)', 'extend_path(__path__'
if options[0] in content or options[1] in content:
# It is a namespace, now try to find the rest of the modules.
return follow_path(iter(import_path), sys.path)
return []
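
For context, these are the two ``__init__.py`` idioms the substring check above looks for; a small sketch applying the same test to made-up example sources:

PKGUTIL_STYLE = ("from pkgutil import extend_path\n"
                 "__path__ = extend_path(__path__, __name__)\n")
PKG_RESOURCES_STYLE = "__import__('pkg_resources').declare_namespace(__name__)\n"

def looks_like_namespace_package(init_py_source):
    options = 'declare_namespace(__name__)', 'extend_path(__path__'
    return options[0] in init_py_source or options[1] in init_py_source

assert looks_like_namespace_package(PKGUTIL_STYLE)
assert looks_like_namespace_package(PKG_RESOURCES_STYLE)
assert not looks_like_namespace_package('# a regular package\n')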
def _follow_file_system(self):
if self.file_path:
sys_path_mod = list(self._sys_path_with_modifications())
module = self.import_stmt.get_parent_until()
if not module.has_explicit_absolute_import:
# Unless the module explicitly asks for absolute imports, allow
# implicit relative imports by searching the module's own directory.
sys_path_mod.insert(0, self.file_path)
# First the sys path is searched normally and if that doesn't
# succeed, try to search the parent directories, because sometimes
# Jedi doesn't recognize sys.path modifications (like py.test
# stuff).
old_path, temp_path = self.file_path, os.path.dirname(self.file_path)
while old_path != temp_path:
sys_path_mod.append(temp_path)
old_path, temp_path = temp_path, os.path.dirname(temp_path)
else:
sys_path_mod = list(sys_path.get_sys_path())
return self._follow_sys_path(sys_path_mod)
def _follow_sys_path(self, sys_path):
"""
Find a module by its dotted path (e.g. ``usb.backend.libusb10``).
"""
def follow_str(ns_path, string):
debug.dbg('follow_module %s %s', ns_path, string)
path = None
if ns_path:
path = ns_path
elif self._is_relative_import():
path = self._get_relative_path()
if path is not None:
importing = find_module(string, [path])
else:
debug.dbg('search_module %s %s', string, self.file_path)
# Override sys.path; it only works well that way.
# Injecting the path directly into `find_module` did not work.
sys.path, temp = sys_path, sys.path
try:
importing = find_module(string)
finally:
sys.path = temp
return importing
current_namespace = (None, None, None)
# now execute those paths
rest = []
for i, s in enumerate(self.import_path):
try:
current_namespace = follow_str(current_namespace[1], s)
except ImportError:
_continue = False
if self._is_relative_import() and len(self.import_path) == 1:
# follow `from . import some_variable`
rel_path = self._get_relative_path()
with common.ignored(ImportError):
current_namespace = follow_str(rel_path, '__init__')
elif current_namespace[2]: # is a package
for n in self._namespace_packages(current_namespace[1],
self.import_path[:i]):
try:
current_namespace = follow_str(n, s)
if current_namespace[1]:
_continue = True
break
except ImportError:
pass
if not _continue:
if current_namespace[1]:
rest = self.import_path[i:]
break
else:
raise ModuleNotFound('The module you searched has not been found')
path = current_namespace[1]
is_package_directory = current_namespace[2]
f = None
if is_package_directory or current_namespace[0]:
# is a directory module
if is_package_directory:
path += '/__init__.py'
with open(path) as f:
source = f.read()
else:
source = current_namespace[0].read()
current_namespace[0].close()
return load_module(path, source), rest
else:
return load_module(name=path), rest
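
The tuple juggling above follows the shape of the standard ``imp.find_module`` result (the ``_compatibility`` wrapper may adapt it on newer Pythons, so treat this as an approximation): a file object (``None`` for packages), a path, and a description whose third field tells whether it is a package directory.

import imp  # deprecated in modern Python, but contemporary with this code

module_file, pathname, description = imp.find_module('json')
print(pathname)                              # e.g. /usr/lib/python2.7/json
print(description[2] == imp.PKG_DIRECTORY)   # True: 'json' is a package directory
if module_file is not None:                  # packages return None as the file
    module_file.close()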
def strip_imports(evaluator, scopes):
"""
Here we strip the imports - they don't necessarily get resolved.
Is this really used anymore? Merge with remove_star_imports?
"""
result = []
for s in scopes:
if isinstance(s, pr.Import):
result += ImportPath(evaluator, s).follow()
else:
result.append(s)
return result
@cache.cache_star_import
def remove_star_imports(evaluator, scope, ignored_modules=()):
"""
Check a module for star imports::
from module import *
and follow these modules.
"""
modules = strip_imports(evaluator, (i for i in scope.get_imports() if i.star))
new = []
for m in modules:
if m not in ignored_modules:
new += remove_star_imports(evaluator, m, modules)
modules += new
# Filter duplicate modules.
return set(modules)
def load_module(path=None, source=None, name=None):
def load(source):
if path is not None and path.endswith('.py'):
if source is None:
with open(path) as f:
source = f.read()
else:
return compiled.load_module(path, name)
p = path or name
p = fast.FastParser(common.source_to_unicode(source), p)
cache.save_parser(path, name, p)
return p.module
cached = cache.load_parser(path, name)
return load(source) if cached is None else cached.module
def get_modules_containing_name(mods, name):
"""
Search a name in the directories of modules.
"""
def check_python_file(path):
try:
return cache.parser_cache[path].parser.module
except KeyError:
try:
return check_fs(path)
except IOError:
return None
def check_fs(path):
with open(path) as f:
source = source_to_unicode(f.read())
if name in source:
return load_module(path, source)
# skip non python modules
mods = set(m for m in mods if not isinstance(m, compiled.CompiledObject))
mod_paths = set()
for m in mods:
mod_paths.add(m.path)
yield m
if settings.dynamic_params_for_other_modules:
paths = set(settings.additional_dynamic_modules)
for p in mod_paths:
if p is not None:
d = os.path.dirname(p)
for entry in os.listdir(d):
if entry not in mod_paths:
if entry.endswith('.py'):
paths.add(d + os.path.sep + entry)
for p in sorted(paths):
# make testing easier, sort it - same results on every interpreter
c = check_python_file(p)
if c is not None and c not in mods:
yield c

392
jedi/evaluate/iterable.py Normal file
View File

@@ -0,0 +1,392 @@
import itertools
from jedi import common
from jedi import debug
from jedi import settings
from jedi._compatibility import use_metaclass, is_py3, unicode
from jedi.parser import representation as pr
from jedi.evaluate import compiled
from jedi.evaluate import helpers
from jedi.evaluate.cache import CachedMetaClass, memoize_default
from jedi.cache import underscore_memoization
class Generator(use_metaclass(CachedMetaClass, pr.Base)):
"""Handling of `yield` functions."""
def __init__(self, evaluator, func, var_args):
super(Generator, self).__init__()
self._evaluator = evaluator
self.func = func
self.var_args = var_args
@underscore_memoization
def get_defined_names(self):
"""
Yields the names that define a generator, i.e. the ones that can return
the content of a generator.
"""
executes_generator = '__next__', 'send', 'next'
for name in compiled.generator_obj.get_defined_names():
if name.name in executes_generator:
parent = GeneratorMethod(self, name.parent)
yield helpers.FakeName(name.name, parent)
else:
yield name
def iter_content(self):
""" returns the content of __iter__ """
return self._evaluator.execute(self.func, self.var_args, True)
def get_index_types(self, index=None):
debug.warning('Tried to get array access on a generator: %s', self)
return []
def __getattr__(self, name):
if name not in ['start_pos', 'end_pos', 'parent', 'get_imports',
'asserts', 'doc', 'docstr', 'get_parent_until',
'get_code', 'subscopes']:
raise AttributeError("Accessing %s of %s is not allowed."
% (self, name))
return getattr(self.func, name)
def __repr__(self):
return "<%s of %s>" % (type(self).__name__, self.func)
class GeneratorMethod(object):
"""``__next__`` and ``send`` methods."""
def __init__(self, generator, builtin_func):
self._builtin_func = builtin_func
self._generator = generator
def execute(self):
return self._generator.iter_content()
def __getattr__(self, name):
return getattr(self._builtin_func, name)
class Array(use_metaclass(CachedMetaClass, pr.Base)):
"""
Used as a mirror to pr.Array, if needed. It defines some getter
methods which are important in this module.
"""
def __init__(self, evaluator, array):
self._evaluator = evaluator
self._array = array
def get_index_types(self, index_arr=None):
""" Get the types of a specific index or all, if not given """
if index_arr is not None:
if index_arr and [x for x in index_arr if ':' in x.expression_list()]:
# array slicing
return [self]
index_possibilities = self._follow_values(index_arr)
if len(index_possibilities) == 1:
# This is indexing only one element, with a fixed index number,
# otherwise it just ignores the index (e.g. [1+1]).
index = index_possibilities[0]
if isinstance(index, compiled.CompiledObject) \
and isinstance(index.obj, (int, str, unicode)):
with common.ignored(KeyError, IndexError, TypeError):
return self.get_exact_index_types(index.obj)
result = list(self._follow_values(self._array.values))
result += check_array_additions(self._evaluator, self)
return set(result)
def get_exact_index_types(self, mixed_index):
""" Here the index is an int/str. Raises IndexError/KeyError """
index = mixed_index
if self.type == pr.Array.DICT:
index = None
for i, key_statement in enumerate(self._array.keys):
# Because we only want the key to be a string.
key_expression_list = key_statement.expression_list()
if len(key_expression_list) != 1: # cannot deal with complex strings
continue
key = key_expression_list[0]
if isinstance(key, pr.Literal):
key = key.value
elif isinstance(key, pr.Name):
key = str(key)
else:
continue
if mixed_index == key:
index = i
break
if index is None:
raise KeyError('No key found in dictionary')
# Can raise an IndexError
values = [self._array.values[index]]
return self._follow_values(values)
def _follow_values(self, values):
""" helper function for the index getters """
return list(itertools.chain.from_iterable(self._evaluator.eval_statement(v)
for v in values))
def get_defined_names(self):
"""
This method generates all `ArrayMethod` for one pr.Array.
It returns e.g. for a list: append, pop, ...
"""
# `array.type` is a string with the type, e.g. 'list'.
scope = self._evaluator.find_types(compiled.builtin, self._array.type)[0]
scope = self._evaluator.execute(scope)[0] # builtins only have one class
names = scope.get_defined_names()
return [ArrayMethod(n) for n in names]
@common.safe_property
def parent(self):
return compiled.builtin
def get_parent_until(self):
return compiled.builtin
def __getattr__(self, name):
if name not in ['type', 'start_pos', 'get_only_subelement', 'parent',
'get_parent_until', 'items']:
raise AttributeError('Strange access on %s: %s.' % (self, name))
return getattr(self._array, name)
def __getitem__(self):
return self._array.__getitem__()
def __iter__(self):
return self._array.__iter__()
def __len__(self):
return self._array.__len__()
def __repr__(self):
return "<e%s of %s>" % (type(self).__name__, self._array)
class ArrayMethod(object):
"""
A name, e.g. `list.append`, it is used to access the original array
methods.
"""
def __init__(self, name):
super(ArrayMethod, self).__init__()
self.name = name
def __getattr__(self, name):
# Set access privileges:
if name not in ['parent', 'names', 'start_pos', 'end_pos', 'get_code']:
raise AttributeError('Strange access on %s: %s.' % (self, name))
return getattr(self.name, name)
def get_parent_until(self):
return compiled.builtin
def __repr__(self):
return "<%s of %s>" % (type(self).__name__, self.name)
def get_iterator_types(inputs):
"""Returns the types of any iterator (arrays, yields, __iter__, etc)."""
iterators = []
# Take the first statement (a `for` loop always has exactly
# one, remember `in`) and follow it.
for it in inputs:
if isinstance(it, (Generator, Array, ArrayInstance)):
iterators.append(it)
else:
if not hasattr(it, 'execute_subscope_by_name'):
debug.warning('iterator/for loop input wrong: %s', it)
continue
try:
iterators += it.execute_subscope_by_name('__iter__')
except KeyError:
debug.warning('iterators: No __iter__ method found.')
result = []
from jedi.evaluate.representation import Instance
for gen in iterators:
if isinstance(gen, Array):
# Array is a little bit special, since this is an internal
# array, but there's also the list builtin, which is
# another thing.
result += gen.get_index_types()
elif isinstance(gen, Instance):
# __iter__ returned an instance.
name = '__next__' if is_py3 else 'next'
try:
result += gen.execute_subscope_by_name(name)
except KeyError:
debug.warning('Instance has no __next__ function in %s.', gen)
else:
# is a generator
result += gen.iter_content()
return result
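
The branches above correspond to the ways an object can be iterated at runtime; a compact, plain-Python illustration of the ``__iter__``/``__next__`` contract that the ``Instance`` branch emulates:

class Countdown(object):
    def __init__(self, start):
        self.current = start
    def __iter__(self):
        return self                 # __iter__ returns an instance ...
    def __next__(self):             # ... whose __next__ produces the values
        if self.current <= 0:
            raise StopIteration
        self.current -= 1
        return self.current + 1
    next = __next__                 # Python 2 spelling

assert list(Countdown(3)) == [3, 2, 1]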
def check_array_additions(evaluator, array):
""" Just a mapper function for the internal _check_array_additions """
if not pr.Array.is_type(array._array, pr.Array.LIST, pr.Array.SET):
# TODO also check for dict updates
return []
is_list = array._array.type == 'list'
current_module = array._array.get_parent_until()
res = _check_array_additions(evaluator, array, current_module, is_list)
return res
@memoize_default([], evaluator_is_first_arg=True)
def _check_array_additions(evaluator, compare_array, module, is_list):
"""
Checks if a `pr.Array` has "add" statements:
>>> a = [""]
>>> a.append(1)
"""
if not settings.dynamic_array_additions or isinstance(module, compiled.CompiledObject):
return []
def check_calls(calls, add_name):
"""
Calls are processed here. The part before the call is searched and
compared with the original Array.
"""
result = []
for c in calls:
call_path = list(c.generate_call_path())
separate_index = call_path.index(add_name)
if add_name == call_path[-1] or separate_index == 0:
# this means that there is no execution -> [].append
# or the keyword is at the start -> append()
continue
backtrack_path = iter(call_path[:separate_index])
position = c.start_pos
scope = c.get_parent_until(pr.IsScope)
found = evaluator.eval_call_path(backtrack_path, scope, position)
if compare_array not in found:
continue
params = call_path[separate_index + 1]
if not params.values:
continue # no params: just ignore it
if add_name in ['append', 'add']:
for param in params:
result += evaluator.eval_statement(param)
elif add_name in ['insert']:
try:
second_param = params[1]
except IndexError:
continue
else:
result += evaluator.eval_statement(second_param)
elif add_name in ['extend', 'update']:
for param in params:
iterators = evaluator.eval_statement(param)
result += get_iterator_types(iterators)
return result
from jedi.evaluate import representation as er
def get_execution_parent(element, *stop_classes):
""" Used to get an Instance/FunctionExecution parent """
if isinstance(element, Array):
stmt = element._array.parent
else:
# is an Instance with an ArrayInstance inside
stmt = element.var_args[0].var_args.parent
if isinstance(stmt, er.InstanceElement):
stop_classes = list(stop_classes) + [er.Function]
return stmt.get_parent_until(stop_classes)
temp_param_add = settings.dynamic_params_for_other_modules
settings.dynamic_params_for_other_modules = False
search_names = ['append', 'extend', 'insert'] if is_list else \
['add', 'update']
comp_arr_parent = get_execution_parent(compare_array, er.FunctionExecution)
possible_stmts = []
res = []
for n in search_names:
try:
possible_stmts += module.used_names[n]
except KeyError:
continue
for stmt in possible_stmts:
# Check if the original scope is an execution. If it is, one
# can search for the same statement, that is in the module
# dict. Executions are somewhat special in jedi, since they
# literally copy the contents of a function.
if isinstance(comp_arr_parent, er.FunctionExecution):
stmt = comp_arr_parent. \
get_statement_for_position(stmt.start_pos)
if stmt is None:
continue
# InstanceElements are special, because they don't get copied,
# but have this wrapper around them.
if isinstance(comp_arr_parent, er.InstanceElement):
stmt = er.InstanceElement(comp_arr_parent.instance, stmt)
if evaluator.recursion_detector.push_stmt(stmt):
# check recursion
continue
res += check_calls(helpers.scan_statement_for_calls(stmt, n), n)
evaluator.recursion_detector.pop_stmt()
# reset settings
settings.dynamic_params_for_other_modules = temp_param_add
return res
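
A very rough standard-library analogue of the search above: scan a module's source for ``append``/``insert``/``extend`` calls on a given name. This sketch uses ``ast`` instead of jedi's parser representation and ignores scoping entirely, so it only shows the shape of the idea; the helper name is made up.

import ast

SOURCE = '''
a = [""]
a.append(1)
a.extend([1.5])
'''

def find_add_calls(source, target_name, add_names=('append', 'insert', 'extend')):
    calls = []
    for node in ast.walk(ast.parse(source)):
        if (isinstance(node, ast.Call)
                and isinstance(node.func, ast.Attribute)
                and node.func.attr in add_names
                and isinstance(node.func.value, ast.Name)
                and node.func.value.id == target_name):
            calls.append(node)
    return calls

print(len(find_add_calls(SOURCE, 'a')))  # 2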
def check_array_instances(evaluator, instance):
"""Used for set() and list() instances."""
if not settings.dynamic_arrays_instances:
return instance.var_args
ai = ArrayInstance(evaluator, instance)
return [ai]
class ArrayInstance(pr.Base):
"""
Used for the usage of set() and list().
This is definitely a hack, but a good one :-)
It makes it possible to use set/list conversions.
"""
def __init__(self, evaluator, instance):
self._evaluator = evaluator
self.instance = instance
self.var_args = instance.var_args
def iter_content(self):
"""
The index is simply ignored here; with all the appends etc.,
lists/sets are too complicated to handle it.
"""
items = []
from jedi.evaluate.representation import Instance
for stmt in self.var_args:
for typ in self._evaluator.eval_statement(stmt):
if isinstance(typ, Instance) and len(typ.var_args):
array = typ.var_args[0]
if isinstance(array, ArrayInstance):
# prevent recursions
# TODO compare Modules
if self.var_args.start_pos != array.var_args.start_pos:
items += array.iter_content()
else:
debug.warning('ArrayInstance recursion %s', self.var_args)
continue
items += get_iterator_types([typ])
# TODO check if exclusion of tuple is a problem here.
if isinstance(self.var_args, tuple) or self.var_args.parent is None:
return [] # generated var_args should not be checked for arrays
module = self.var_args.get_parent_until()
is_list = str(self.instance.name) == 'list'
items += _check_array_additions(self._evaluator, self.instance, module, is_list)
return items

175
jedi/evaluate/param.py Normal file
View File

@@ -0,0 +1,175 @@
import copy
from jedi.parser import representation as pr
from jedi.evaluate import iterable
from jedi.evaluate import common
from jedi.evaluate import helpers
def get_params(evaluator, func, var_args):
def gen_param_name_copy(param, keys=(), values=(), array_type=None):
"""
Create a param with the original scope (of varargs) as parent.
"""
if isinstance(var_args, pr.Array):
parent = var_args.parent
start_pos = var_args.start_pos
else:
parent = func
start_pos = 0, 0
new_param = copy.copy(param)
new_param.is_generated = True
if parent is not None:
new_param.parent = parent
# create an Array (-> needed for *args/**kwargs tuples/dicts)
arr = pr.Array(helpers.FakeSubModule, start_pos, array_type, parent)
arr.values = values
key_stmts = []
for key in keys:
key_stmts.append(helpers.FakeStatement([key], start_pos))
arr.keys = key_stmts
arr.type = array_type
new_param._expression_list = [arr]
name = copy.copy(param.get_name())
name.parent = new_param
return name
result = []
start_offset = 0
from jedi.evaluate.representation import InstanceElement
if isinstance(func, InstanceElement):
# Care for self -> just exclude it and add the instance
start_offset = 1
self_name = copy.copy(func.params[0].get_name())
self_name.parent = func.instance
result.append(self_name)
param_dict = {}
for param in func.params:
param_dict[str(param.get_name())] = param
# There may be calls, which don't fit all the params, this just ignores it.
var_arg_iterator = common.PushBackIterator(_var_args_iterator(evaluator, var_args))
non_matching_keys = []
keys_used = set()
keys_only = False
for param in func.params[start_offset:]:
# The key and value can both be None; in that case, the defaults apply.
# args / kwargs will just be empty arrays / dicts, respectively.
# Wrong value count is just ignored. If you try to test cases that are
# not allowed in Python, Jedi will maybe not show any completions.
key, value = next(var_arg_iterator, (None, None))
while key:
keys_only = True
try:
key_param = param_dict[str(key)]
except KeyError:
non_matching_keys.append((key, value))
else:
keys_used.add(str(key))
result.append(gen_param_name_copy(key_param, values=[value]))
key, value = next(var_arg_iterator, (None, None))
expression_list = param.expression_list()
keys = []
values = []
array_type = None
ignore_creation = False
if expression_list[0] == '*':
# *args param
array_type = pr.Array.TUPLE
if value:
values.append(value)
for key, value in var_arg_iterator:
# Iterate until a key argument is found.
if key:
var_arg_iterator.push_back((key, value))
break
values.append(value)
elif expression_list[0] == '**':
# **kwargs param
array_type = pr.Array.DICT
if non_matching_keys:
keys, values = zip(*non_matching_keys)
elif not keys_only:
# normal param
if value is not None:
values = [value]
else:
if param.assignment_details:
# No value: return the default values.
ignore_creation = True
result.append(param.get_name())
param.is_generated = True
else:
# If there is no assignment detail, that means there is no
# assignment, just the result. Therefore nothing has to be
# returned.
values = []
# Just ignore all the params that are without a key, after one keyword
# argument was set.
if not ignore_creation and (not keys_only or expression_list[0] == '**'):
keys_used.add(str(key))
result.append(gen_param_name_copy(param, keys=keys, values=values,
array_type=array_type))
if keys_only:
# sometimes param arguments are not completely written (which would
# create an Exception, but we have to handle that).
for k in set(param_dict) - keys_used:
result.append(gen_param_name_copy(param_dict[k]))
return result
def _var_args_iterator(evaluator, var_args):
"""
Yields key/value pairs; the key is None if it's not a named argument.
"""
# `var_args` is typically an Array, and not a list.
for stmt in var_args:
if not isinstance(stmt, pr.Statement):
if stmt is None:
yield None, None
continue
old = stmt
# generate a statement if it's not already one.
stmt = helpers.FakeStatement([old])
# *args
expression_list = stmt.expression_list()
if not len(expression_list):
continue
if expression_list[0] == '*':
# *args must be some sort of an array, otherwise -> ignore
for array in evaluator.eval_expression_list(expression_list[1:]):
if isinstance(array, iterable.Array):
for field_stmt in array: # yield from plz!
yield None, field_stmt
elif isinstance(array, iterable.Generator):
for field_stmt in array.iter_content():
yield None, helpers.FakeStatement([field_stmt])
# **kwargs
elif expression_list[0] == '**':
for array in evaluator.eval_expression_list(expression_list[1:]):
if isinstance(array, iterable.Array):
for key_stmt, value_stmt in array.items():
# first index, is the key if syntactically correct
call = key_stmt.expression_list()[0]
if isinstance(call, pr.Name):
yield call, value_stmt
elif isinstance(call, pr.Call):
yield call.name, value_stmt
# Normal arguments (including key arguments).
else:
if stmt.assignment_details:
key_arr, op = stmt.assignment_details[0]
# named parameter
if key_arr and isinstance(key_arr[0], pr.Call):
yield key_arr[0].name, stmt
else:
yield None, stmt
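
For comparison, the runtime rules this module re-implements statically are what ``inspect.getcallargs`` computes for an actual call; a small standard-library illustration of how positional, keyword, ``*args`` and ``**kwargs`` values end up attached to parameter names:

import inspect

def func(a, b=2, *args, **kwargs):
    return a, b, args, kwargs

mapping = inspect.getcallargs(func, 1, 2, 3, x=4)
assert mapping == {'a': 1, 'b': 2, 'args': (3,), 'kwargs': {'x': 4}}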

162
jedi/evaluate/recursion.py Normal file
View File

@@ -0,0 +1,162 @@
"""
Recursions are the recipe of |jedi| to conquer Python code. However, someone
must stop recursions from going mad. Some settings are here to make |jedi| stop at
the right time. You can read more about them :ref:`here <settings-recursion>`.
Next to :mod:`cache` this module also makes |jedi| not thread-safe. Why?
``ExecutionRecursionDetector`` uses class variables to count the function
calls.
"""
from jedi.parser import representation as pr
from jedi import debug
from jedi import settings
from jedi.evaluate import compiled
from jedi.evaluate import iterable
def recursion_decorator(func):
def run(evaluator, stmt, *args, **kwargs):
rec_detect = evaluator.recursion_detector
# print stmt, len(self.node_statements())
if rec_detect.push_stmt(stmt):
return []
else:
result = func(evaluator, stmt, *args, **kwargs)
rec_detect.pop_stmt()
return result
return run
class RecursionDetector(object):
"""
Detects recursions in statements: a statement at the same place, in the
same module, may not be executed twice.
"""
def __init__(self):
self.top = None
self.current = None
def push_stmt(self, stmt):
self.current = _RecursionNode(stmt, self.current)
check = self._check_recursion()
if check: # TODO remove False!!!!
debug.warning('caught stmt recursion: %s against %s @%s', stmt,
check.stmt, stmt.start_pos)
self.pop_stmt()
return True
return False
def pop_stmt(self):
if self.current is not None:
# I don't know how current can be None, but sometimes it happens
# with Python3.
self.current = self.current.parent
def _check_recursion(self):
test = self.current
while True:
test = test.parent
if self.current == test:
return test
if not test:
return False
def node_statements(self):
result = []
n = self.current
while n:
result.insert(0, n.stmt)
n = n.parent
return result
class _RecursionNode(object):
""" A node of the RecursionDecorator. """
def __init__(self, stmt, parent):
self.script = stmt.get_parent_until()
self.position = stmt.start_pos
self.parent = parent
self.stmt = stmt
# Don't check param instances, they are not causing recursions
# The same's true for the builtins, because the builtins are really
# simple.
self.is_ignored = isinstance(stmt, pr.Param) \
or (self.script == compiled.builtin)
def __eq__(self, other):
if not other:
return None
is_list_comp = lambda x: isinstance(x, pr.ForFlow) and x.is_list_comp
return self.script == other.script \
and self.position == other.position \
and not is_list_comp(self.stmt.parent) \
and not is_list_comp(other.parent) \
and not self.is_ignored and not other.is_ignored
def execution_recursion_decorator(func):
def run(execution, evaluate_generator=False):
detector = execution._evaluator.execution_recursion_detector
if detector.push_execution(execution, evaluate_generator):
result = []
else:
result = func(execution, evaluate_generator)
detector.pop_execution()
return result
return run
class ExecutionRecursionDetector(object):
"""
Catches recursions of executions.
It is designed like a Singleton. Only one instance should exist.
"""
def __init__(self):
self.recursion_level = 0
self.parent_execution_funcs = []
self.execution_funcs = set()
self.execution_count = 0
def __call__(self, execution, evaluate_generator=False):
debug.dbg('Execution recursions: %s', execution, self.recursion_level,
self.execution_count, len(self.execution_funcs))
if self.check_recursion(execution, evaluate_generator):
result = []
else:
result = self.func(execution, evaluate_generator)
self.pop_execution()
return result
def pop_execution(cls):
cls.parent_execution_funcs.pop()
cls.recursion_level -= 1
def push_execution(cls, execution, evaluate_generator):
in_par_execution_funcs = execution.base in cls.parent_execution_funcs
in_execution_funcs = execution.base in cls.execution_funcs
cls.recursion_level += 1
cls.execution_count += 1
cls.execution_funcs.add(execution.base)
cls.parent_execution_funcs.append(execution.base)
if cls.execution_count > settings.max_executions:
return True
if isinstance(execution.base, (iterable.Array, iterable.Generator)):
return False
module = execution.get_parent_until()
if evaluate_generator or module == compiled.builtin:
return False
if in_par_execution_funcs:
if cls.recursion_level > settings.max_function_recursion_level:
return True
if in_execution_funcs and \
len(cls.execution_funcs) > settings.max_until_execution_unique:
return True
if cls.execution_count > settings.max_executions_without_builtins:
return True
return False

502
jedi/evaluate/representation.py Normal file
View File

@@ -0,0 +1,502 @@
"""
As described in the :mod:`parsing_representation` module, there is a need for
an AST-like module to represent the states of parsed modules.
But now there are also structures in Python that need a little bit more than
that. An ``Instance`` for example is only a ``Class`` before it is
instantiated. This class represents these cases.
So, why is there also a ``Class`` class here? Well, there are decorators and
they change classes in Python 3.
"""
import copy
from jedi._compatibility import use_metaclass, unicode
from jedi.parser import representation as pr
from jedi import debug
from jedi import common
from jedi.evaluate.cache import memoize_default, CachedMetaClass
from jedi.evaluate import compiled
from jedi.evaluate import recursion
from jedi.evaluate import iterable
from jedi.evaluate import docstrings
from jedi.evaluate import helpers
from jedi.evaluate import param
class Executable(pr.IsScope):
"""
An instance is also an executable - because __init__ is called
:param var_args: The param input array, consisting of a `pr.Array` or list.
"""
def __init__(self, evaluator, base, var_args=()):
self._evaluator = evaluator
self.base = base
self.var_args = var_args
def get_parent_until(self, *args, **kwargs):
return self.base.get_parent_until(*args, **kwargs)
@common.safe_property
def parent(self):
return self.base.parent
class Instance(use_metaclass(CachedMetaClass, Executable)):
"""
This class is used to evaluate instances.
"""
def __init__(self, evaluator, base, var_args=()):
super(Instance, self).__init__(evaluator, base, var_args)
if str(base.name) in ['list', 'set'] \
and compiled.builtin == base.get_parent_until():
# compare the module path with the builtin name.
self.var_args = iterable.check_array_instances(evaluator, self)
else:
# need to execute the __init__ function, because the dynamic param
# searching needs it.
with common.ignored(KeyError):
self.execute_subscope_by_name('__init__', self.var_args)
# Generated instances are classes that are just generated by self
# (No var_args) used.
self.is_generated = False
@memoize_default(None)
def _get_method_execution(self, func):
func = InstanceElement(self._evaluator, self, func, True)
return FunctionExecution(self._evaluator, func, self.var_args)
def _get_func_self_name(self, func):
"""
Returns the name of the first param in a class method (which is
normally ``self``).
"""
try:
return str(func.params[0].get_name())
except IndexError:
return None
@memoize_default([])
def get_self_attributes(self):
def add_self_dot_name(name):
"""
Need to copy and rewrite the name, because names are now
``instance_usage.variable`` instead of ``self.variable``.
"""
n = copy.copy(name)
n.names = n.names[1:]
names.append(InstanceElement(self._evaluator, self, n))
names = []
# This loop adds the names of the self object, copies them and removes
# the self.
for sub in self.base.subscopes:
if isinstance(sub, pr.Class):
continue
# Get the self name, if there's one.
self_name = self._get_func_self_name(sub)
if not self_name:
continue
if sub.name.get_code() == '__init__':
# ``__init__`` is special because the params need to be injected
# this way. Therefore an execution is necessary.
if not sub.decorators:
# __init__ decorators should generally just be ignored,
# because to follow them and their self variables is too
# complicated.
sub = self._get_method_execution(sub)
for n in sub.get_set_vars():
# Only names with the selfname are being added.
# It is also important that they have a length of 2,
# because otherwise they refer to something else.
if n.names[0] == self_name and len(n.names) == 2:
add_self_dot_name(n)
if not isinstance(self.base, compiled.CompiledObject):
for s in self.base.get_super_classes():
for inst in self._evaluator.execute(s):
names += inst.get_self_attributes()
return names
def get_subscope_by_name(self, name):
sub = self.base.get_subscope_by_name(name)
return InstanceElement(self._evaluator, self, sub, True)
def execute_subscope_by_name(self, name, args=()):
method = self.get_subscope_by_name(name)
return self._evaluator.execute(method, args)
def get_descriptor_return(self, obj):
""" Throws a KeyError if there's no method. """
# Arguments in __get__ descriptors are obj, class.
# `method` is the new parent of the array, don't know if that's good.
args = [obj, obj.base] if isinstance(obj, Instance) else [None, obj]
return self.execute_subscope_by_name('__get__', args)
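
For reference, the runtime descriptor protocol modelled here: accessing an attribute whose class defines ``__get__`` calls it with the instance and the owner class, which is exactly the argument pair assembled above. A plain illustration:

class Ten(object):
    def __get__(self, obj, objtype=None):
        # obj is the instance (or None for class access), objtype the owning class
        return 10

class Box(object):
    value = Ten()

assert Box().value == 10   # __get__(instance, Box) decides the result
assert Box.value == 10     # class access: __get__(None, Box)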
@memoize_default([])
def get_defined_names(self):
"""
Get the instance vars of a class. This includes the vars of all
super classes as well.
"""
names = self.get_self_attributes()
for var in self.base.instance_names():
names.append(InstanceElement(self._evaluator, self, var, True))
return names
def scope_generator(self):
"""
An Instance has two scopes: The scope with self names and the class
scope. Instance variables have priority over the class scope.
"""
yield self, self.get_self_attributes()
names = []
for var in self.base.instance_names():
names.append(InstanceElement(self._evaluator, self, var, True))
yield self, names
def get_index_types(self, index=None):
args = [] if index is None else [index]
try:
return self.execute_subscope_by_name('__getitem__', args)
except KeyError:
debug.warning('No __getitem__, cannot access the array.')
return []
def __getattr__(self, name):
if name not in ['start_pos', 'end_pos', 'name', 'get_imports',
'doc', 'docstr', 'asserts']:
raise AttributeError("Instance %s: Don't touch this (%s)!"
% (self, name))
return getattr(self.base, name)
def __repr__(self):
return "<e%s of %s (var_args: %s)>" % \
(type(self).__name__, self.base, len(self.var_args or []))
class InstanceElement(use_metaclass(CachedMetaClass, pr.Base)):
"""
InstanceElement is a wrapper for any object that is used as an instance
variable (e.g. self.variable or class methods).
"""
def __init__(self, evaluator, instance, var, is_class_var=False):
self._evaluator = evaluator
if isinstance(var, pr.Function):
var = Function(evaluator, var)
elif isinstance(var, pr.Class):
var = Class(evaluator, var)
self.instance = instance
self.var = var
self.is_class_var = is_class_var
@common.safe_property
@memoize_default(None)
def parent(self):
par = self.var.parent
if isinstance(par, Class) and par == self.instance.base \
or isinstance(par, pr.Class) \
and par == self.instance.base.base:
par = self.instance
elif not isinstance(par, (pr.Module, compiled.CompiledObject)):
par = InstanceElement(self.instance._evaluator, self.instance, par, self.is_class_var)
return par
def get_parent_until(self, *args, **kwargs):
return pr.Simple.get_parent_until(self, *args, **kwargs)
def get_decorated_func(self):
""" Needed because the InstanceElement should not be stripped """
func = self.var.get_decorated_func(self.instance)
if func == self.var:
return self
return func
def expression_list(self):
# Copy and modify the array.
return [InstanceElement(self.instance._evaluator, self.instance, command, self.is_class_var)
if not isinstance(command, unicode) else command
for command in self.var.expression_list()]
def __iter__(self):
for el in self.var.__iter__():
yield InstanceElement(self.instance._evaluator, self.instance, el, self.is_class_var)
def __getattr__(self, name):
return getattr(self.var, name)
def isinstance(self, *cls):
return isinstance(self.var, cls)
def __repr__(self):
return "<%s of %s>" % (type(self).__name__, self.var)
class Class(use_metaclass(CachedMetaClass, pr.IsScope)):
"""
This class is not only important to extend `pr.Class`, it is also
important for descriptors (whether the descriptor methods are evaluated or not).
"""
def __init__(self, evaluator, base):
self._evaluator = evaluator
self.base = base
@memoize_default(default=())
def get_super_classes(self):
supers = []
# TODO care for mro stuff (multiple super classes).
for s in self.base.supers:
# Super classes are statements.
for cls in self._evaluator.eval_statement(s):
if not isinstance(cls, Class):
debug.warning('Received non class, as a super class')
continue # Just ignore other stuff (user input error).
supers.append(cls)
if not supers and self.base.parent != compiled.builtin:
# add `object` to classes
supers += self._evaluator.find_types(compiled.builtin, 'object')
return supers
@memoize_default(default=())
def instance_names(self):
def in_iterable(name, iterable):
""" checks if the name is in the variable 'iterable'. """
for i in iterable:
# Only the last name is important, because these names have a
# maximal length of 2, with the first one being `self`.
if i.names[-1] == name.names[-1]:
return True
return False
result = self.base.get_defined_names()
super_result = []
# TODO mro!
for cls in self.get_super_classes():
# Get the inherited names.
if isinstance(cls, compiled.CompiledObject):
super_result += cls.get_defined_names()
else:
for i in cls.instance_names():
if not in_iterable(i, result):
super_result.append(i)
result += super_result
return result
@memoize_default(default=())
def get_defined_names(self):
result = self.instance_names()
type_cls = self._evaluator.find_types(compiled.builtin, 'type')[0]
return result + list(type_cls.get_defined_names())
def get_subscope_by_name(self, name):
for sub in reversed(self.subscopes):
if sub.name.get_code() == name:
return sub
raise KeyError("Couldn't find subscope.")
@common.safe_property
def name(self):
return self.base.name
def __getattr__(self, name):
if name not in ['start_pos', 'end_pos', 'parent', 'asserts', 'docstr',
'doc', 'get_imports', 'get_parent_until', 'get_code',
'subscopes']:
raise AttributeError("Don't touch this: %s of %s !" % (name, self))
return getattr(self.base, name)
def __repr__(self):
return "<e%s of %s>" % (type(self).__name__, self.base)
class Function(use_metaclass(CachedMetaClass, pr.IsScope)):
"""
Needed because of decorators. Decorators are evaluated here.
"""
def __init__(self, evaluator, func, is_decorated=False):
""" This should not be called directly """
self._evaluator = evaluator
self.base_func = func
self.is_decorated = is_decorated
@memoize_default(None)
def _decorated_func(self, instance=None):
"""
Returns the function that is to be executed in the end.
This is also the place where the decorators are processed.
"""
f = self.base_func
# Only enter it if it has not already been processed.
if not self.is_decorated:
for dec in reversed(self.base_func.decorators):
debug.dbg('decorator: %s %s', dec, f)
dec_results = set(self._evaluator.eval_statement(dec))
if not len(dec_results):
debug.warning('decorator not found: %s on %s', dec, self.base_func)
return None
decorator = dec_results.pop()
if dec_results:
debug.warning('multiple decorators found %s %s',
self.base_func, dec_results)
# Create param array.
old_func = Function(self._evaluator, f, is_decorated=True)
if instance is not None and decorator.isinstance(Function):
old_func = InstanceElement(self._evaluator, instance, old_func)
instance = None
wrappers = self._evaluator.execute(decorator, (old_func,))
if not len(wrappers):
debug.warning('no wrappers found %s', self.base_func)
return None
if len(wrappers) > 1:
# TODO resolve issue with multiple wrappers -> multiple types
debug.warning('multiple wrappers found %s %s',
self.base_func, wrappers)
f = wrappers[0]
debug.dbg('decorator end %s', f)
if f != self.base_func and isinstance(f, pr.Function):
f = Function(self._evaluator, f)
return f
def get_decorated_func(self, instance=None):
decorated_func = self._decorated_func(instance)
if decorated_func == self.base_func:
return self
if decorated_func is None:
# If the decorator func is not found, just ignore the decorator
# function, because sometimes decorators are just really
# complicated.
return Function(self._evaluator, self.base_func, True)
return decorated_func
def get_magic_function_names(self):
return compiled.magic_function_class.get_defined_names()
def get_magic_function_scope(self):
return compiled.magic_function_class
def __getattr__(self, name):
return getattr(self.base_func, name)
def __repr__(self):
dec = ''
if self._decorated_func() != self.base_func:
dec = " is " + repr(self._decorated_func())
return "<e%s of %s%s>" % (type(self).__name__, self.base_func, dec)
class FunctionExecution(Executable):
"""
This class is used to evaluate functions and their returns.
This is the most complicated class, because it contains the logic to
transfer parameters. It is even more complicated, because there may be
multiple calls to functions and recursion has to be avoided. But this is
the responsibility of the decorators.
"""
@memoize_default(default=())
@recursion.execution_recursion_decorator
def get_return_types(self, evaluate_generator=False):
func = self.base
# Feed the listeners, with the params.
for listener in func.listeners:
listener.execute(self._get_params())
if func.is_generator and not evaluate_generator:
return [iterable.Generator(self._evaluator, func, self.var_args)]
else:
stmts = docstrings.find_return_types(self._evaluator, func)
for r in self.returns:
if r is not None:
stmts += self._evaluator.eval_statement(r)
return stmts
@memoize_default(default=())
def _get_params(self):
"""
This returns the params for an TODO and is injected as a
'hack' into the pr.Function class.
This needs to be here, because Instance can have __init__ functions,
which act the same way as normal functions.
"""
return param.get_params(self._evaluator, self.base, self.var_args)
def get_defined_names(self):
"""
Call the default method with the own instance (self implements all
the necessary functions). Also add the params.
"""
return self._get_params() + pr.Scope.get_set_vars(self)
get_set_vars = get_defined_names
def _copy_properties(self, prop):
"""
Literally copies a property of a Function. Copying is very expensive,
because it is something like `copy.deepcopy`. However, these copied
objects can be used for the executions, as if they were in the
execution.
"""
# Copy all these lists into this local function.
attr = getattr(self.base, prop)
objects = []
for element in attr:
if element is None:
copied = element
else:
copied = helpers.fast_parent_copy(element)
copied.parent = self._scope_copy(copied.parent)
if isinstance(copied, pr.Function):
copied = Function(self._evaluator, copied)
objects.append(copied)
return objects
def __getattr__(self, name):
if name not in ['start_pos', 'end_pos', 'imports', '_sub_module']:
raise AttributeError('Tried to access %s: %s. Why?' % (name, self))
return getattr(self.base, name)
@memoize_default(None)
def _scope_copy(self, scope):
""" Copies a scope (e.g. if) in an execution """
# TODO method uses different scopes than the subscopes property.
# just check the start_pos, sometimes it's difficult with closures
# to compare the scopes directly.
if scope.start_pos == self.start_pos:
return self
else:
copied = helpers.fast_parent_copy(scope)
copied.parent = self._scope_copy(copied.parent)
return copied
@common.safe_property
@memoize_default([])
def returns(self):
return self._copy_properties('returns')
@common.safe_property
@memoize_default([])
def asserts(self):
return self._copy_properties('asserts')
@common.safe_property
@memoize_default([])
def statements(self):
return self._copy_properties('statements')
@common.safe_property
@memoize_default([])
def subscopes(self):
return self._copy_properties('subscopes')
def get_statement_for_position(self, pos):
return pr.Scope.get_statement_for_position(self, pos)
def __repr__(self):
return "<%s of %s>" % \
(type(self).__name__, self.base)

91
jedi/evaluate/stdlib.py Normal file
View File

@@ -0,0 +1,91 @@
"""
Implementations of standard library functions, because it's not possible to
understand them with Jedi.
"""
from jedi._compatibility import unicode
from jedi.evaluate import compiled
from jedi.evaluate import representation as er
from jedi.evaluate import iterable
from jedi.parser import representation as pr
from jedi import debug
class NotInStdLib(LookupError):
pass
def execute(evaluator, obj, params):
if not isinstance(obj, (iterable.Generator, iterable.Array)):
obj_name = str(obj.name)
if obj.parent == compiled.builtin:
# for now we just support builtin functions.
try:
return _implemented['builtins'][obj_name](evaluator, obj, params)
except KeyError:
pass
raise NotInStdLib()
def _follow_param(evaluator, params, index):
try:
stmt = params[index]
except IndexError:
return []
else:
if isinstance(stmt, pr.Statement):
return evaluator.eval_statement(stmt)
else:
return [stmt] # just some arbitrary object
def builtins_getattr(evaluator, obj, params):
stmts = []
# follow the first param
objects = _follow_param(evaluator, params, 0)
names = _follow_param(evaluator, params, 1)
for obj in objects:
if not isinstance(obj, (er.Instance, er.Class, pr.Module, compiled.CompiledObject)):
debug.warning('getattr called without instance')
continue
for name in names:
s = unicode, str
if isinstance(name, compiled.CompiledObject) and isinstance(name.obj, s):
stmts += evaluator.follow_path(iter([name.obj]), [obj], obj)
else:
debug.warning('getattr called without str')
continue
return stmts
def builtins_type(evaluator, obj, params):
if len(params) == 1:
# otherwise it would be a metaclass... maybe someday...
objects = _follow_param(evaluator, params, 0)
return [o.base for o in objects if isinstance(o, er.Instance)]
return []
def builtins_super(evaluator, obj, params):
# TODO make this able to detect multiple inheritance super
accept = (pr.Function,)
func = params.get_parent_until(accept)
if func.isinstance(*accept):
cls = func.get_parent_until(accept + (pr.Class,),
include_current=False)
if isinstance(cls, pr.Class):
cls = er.Class(evaluator, cls)
su = cls.get_super_classes()
if su:
return evaluator.execute(su[0])
return []
_implemented = {
'builtins': {
'getattr': builtins_getattr,
'type': builtins_type,
'super': builtins_super,
}
}
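
The runtime behaviour these three hooks approximate, as a quick reminder of why static analysis needs special cases for them:

class Base(object):
    def who(self):
        return 'base'

class Child(Base):
    def who(self):
        return super(Child, self).who()   # super() resolves to the parent class

obj = Child()
assert getattr(obj, 'who')() == 'base'    # attribute picked by a runtime string
assert type(obj) is Child                 # type() of an instance is its class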

120
jedi/evaluate/sys_path.py Normal file
View File

@@ -0,0 +1,120 @@
import os
import sys
from jedi._compatibility import exec_function
from jedi.parser import representation as pr
from jedi import debug
from jedi import common
def get_sys_path():
def check_virtual_env(sys_path):
""" Add virtualenv's site-packages to the `sys.path`."""
venv = os.getenv('VIRTUAL_ENV')
if not venv:
return
venv = os.path.abspath(venv)
p = os.path.join(
venv, 'lib', 'python%d.%d' % sys.version_info[:2], 'site-packages')
sys_path.insert(0, p)
check_virtual_env(sys.path)
return [p for p in sys.path if p != ""]
#@cache.memoize_default([]) TODO add some sort of cache again.
def sys_path_with_modifications(module):
def execute_code(code):
c = "import os; from os.path import *; result=%s"
variables = {'__file__': module.path}
try:
exec_function(c % code, variables)
except Exception:
debug.warning('sys path detected, but failed to evaluate')
return None
try:
res = variables['result']
if isinstance(res, str):
return os.path.abspath(res)
else:
return None
except KeyError:
return None
def check_module(module):
try:
possible_stmts = module.used_names['path']
except KeyError:
return get_sys_path()
sys_path = list(get_sys_path()) # copy
for p in possible_stmts:
if not isinstance(p, pr.Statement):
continue
expression_list = p.expression_list()
# sys.path command is just one thing.
if len(expression_list) != 1 or not isinstance(expression_list[0], pr.Call):
continue
call = expression_list[0]
n = call.name
if not isinstance(n, pr.Name) or len(n.names) != 3:
continue
if n.names[:2] != ('sys', 'path'):
continue
array_cmd = n.names[2]
if call.execution is None:
continue
exe = call.execution
if not (array_cmd == 'insert' and len(exe) == 2
or array_cmd == 'append' and len(exe) == 1):
continue
if array_cmd == 'insert':
exe_type, exe.type = exe.type, pr.Array.NOARRAY
exe_pop = exe.values.pop(0)
res = execute_code(exe.get_code())
if res is not None:
sys_path.insert(0, res)
debug.dbg('sys path inserted: %s', res)
exe.type = exe_type
exe.values.insert(0, exe_pop)
elif array_cmd == 'append':
res = execute_code(exe.get_code())
if res is not None:
sys_path.append(res)
debug.dbg('sys path added: %s', res)
return sys_path
if module.path is None:
# Support for modules without a path is bad, therefore return the
# normal path.
return list(get_sys_path())
curdir = os.path.abspath(os.curdir)
with common.ignored(OSError):
os.chdir(os.path.dirname(module.path))
result = check_module(module)
result += _detect_django_path(module.path)
# cleanup, back to old directory
os.chdir(curdir)
return result
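
A simplified sketch of the detection performed above, using ``ast`` instead of jedi's parser: find ``sys.path.append(...)``/``sys.path.insert(0, ...)`` calls and evaluate the path argument in a tiny sandbox. The helper name and the sample source are made up for illustration.

import ast
import os

SOURCE = "import sys, os\nsys.path.insert(0, os.path.join('/tmp', 'libs'))\n"

def detect_sys_path_additions(source, module_path='/tmp/example.py'):
    additions = []
    for node in ast.walk(ast.parse(source)):
        if not (isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute)):
            continue
        func = node.func
        if (func.attr in ('append', 'insert')
                and isinstance(func.value, ast.Attribute) and func.value.attr == 'path'
                and isinstance(func.value.value, ast.Name) and func.value.value.id == 'sys'):
            arg = node.args[-1]                      # the path expression itself
            code = compile(ast.Expression(arg), '<sys_path>', 'eval')
            additions.append(eval(code, {'os': os, '__file__': module_path}))
    return additions

print(detect_sys_path_additions(SOURCE))             # e.g. ['/tmp/libs']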
def _detect_django_path(module_path):
""" Detects the path of the very well known Django library (if used) """
result = []
while True:
new = os.path.dirname(module_path)
# If the module_path doesn't change anymore, we're finished -> /
if new == module_path:
break
else:
module_path = new
with common.ignored(IOError):
with open(module_path + os.path.sep + 'manage.py'):
debug.dbg('Found django path: %s', module_path)
result.append(module_path)
return result