Merge branch 'dev' into get_code_fidelity
Conflicts:
    jedi/evaluate/docstrings.py
    jedi/parser/representation.py

Resolving merge problems:
* Introducing docstring for compiled.fake
* Partly fixing poor decision in TokenDocstring __init__
@@ -21,5 +21,6 @@ Lubos Trilety <ltrilety@redhat.com>
Akinori Hattori (@hattya)
srusskih (@srusskih)
Steven Silvester (@blink1073)
Colin Duquesnoy (@ColinDuquesnoy) <colin.duquesnoy@gmail.com>

Note: (@user) means a github user name.

@@ -3,6 +3,15 @@
Changelog
---------

0.8.0 (2013-03-01)
++++++++++++++++++

* Memory Consumption for compiled modules (e.g. builtins, sys) has been reduced
  drastically. Loading times are down as well (it takes basically as long as
  an import).
* REPL completion is starting to become usable.
* Various small API changes. Generally this release focused on stability, though.

0.7.0 (2013-08-09)
++++++++++++++++++
* switched from LGPL to MIT license

@@ -10,7 +10,7 @@ My **master** branch is a 100% stable (should be). I only push to it after I am
certain that things are working out. Many people are using Jedi directly from
the github master branch.

**Please use PEP8 to style your code.**
**Try to use the PEP8 style guide.**


Changing Issues to Pull Requests (Github)

@@ -34,7 +34,7 @@ me.
Jedi can be used with the following editors:

- Vim (jedi-vim_, YouCompleteMe_)
- Emacs (Jedi.el_)
- Emacs (Jedi.el_, company-jedi_)
- Sublime Text (SublimeJEDI_ [ST2 + ST3], anaconda_ [only ST3])
- SynWrite_

@@ -103,6 +103,9 @@ It's very easy to create an editor plugin that uses Jedi. See `Plugin API
<https://jedi.jedidjah.ch/en/latest/docs/plugin-api.html>`_ for more
information.

If you have specific questions, please add an issue or ask on `stackoverflow
<https://stackoverflow.com>`_ with the label ``python-jedi``.
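To make the Plugin API reference above concrete, here is a minimal sketch of what an editor plugin does with the API in this commit (the ``Script(source, line, column, path)`` signature and ``completions()`` are taken from this diff; the example source text is made up):

    import jedi

    source = "import json\njson.lo"
    # lines are 1-based, columns are 0-based in this API
    script = jedi.Script(source, 2, 7, 'example.py')
    for completion in script.completions():
        print(completion.name)  # e.g. 'load', 'loads'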

Development
===========
@@ -137,6 +140,7 @@ For more detailed information visit the `testing documentation
.. _jedi-vim: https://github.com/davidhalter/jedi-vim
.. _youcompleteme: http://valloric.github.io/YouCompleteMe/
.. _Jedi.el: https://github.com/tkf/emacs-jedi
.. _company-jedi: https://github.com/proofit404/company-jedi
.. _sublimejedi: https://github.com/srusskih/SublimeJEDI
.. _anaconda: https://github.com/DamnWidget/anaconda
.. _SynWrite: http://uvviewsoft.com/synjedi/

@@ -21,12 +21,20 @@ jedi_cache_directory_orig = None
jedi_cache_directory_temp = None


def pytest_addoption(parser):
    parser.addoption("--jedi-debug", "-D", action='store_true',
                     help="Enables Jedi's debug output.")


def pytest_configure(config):
    global jedi_cache_directory_orig, jedi_cache_directory_temp
    jedi_cache_directory_orig = jedi.settings.cache_directory
    jedi_cache_directory_temp = tempfile.mkdtemp(prefix='jedi-test-')
    jedi.settings.cache_directory = jedi_cache_directory_temp

    if config.option.jedi_debug:
        jedi.set_debug_function()


def pytest_unconfigure(config):
    global jedi_cache_directory_orig, jedi_cache_directory_temp

@@ -19,21 +19,22 @@ Editor Plugins

Vim:

- `jedi-vim <http://github.com/davidhalter/jedi-vim>`_
- `YouCompleteMe <http://valloric.github.io/YouCompleteMe/>`_
- jedi-vim_
- YouCompleteMe_

Emacs:

- `Jedi.el <https://github.com/tkf/emacs-jedi>`_
- Jedi.el_
- company-jedi_

Sublime Text 2/3:

- `SublimeJEDI <https://github.com/srusskih/SublimeJEDI>`_ (ST2 & ST3)
- `anaconda <https://github.com/DamnWidget/anaconda>`_ (only ST3)
- SublimeJEDI_ (ST2 & ST3)
- anaconda_ (only ST3)

SynWrite:

- `SynJedi <http://uvviewsoft.com/synjedi/>`_
- SynJedi_


.. _other-software:
@@ -62,3 +63,12 @@ Using a custom ``$HOME/.pythonrc.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. autofunction:: jedi.utils.setup_readline
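To illustrate the directive above: a ``$HOME/.pythonrc.py`` that turns on Jedi's REPL completion could look roughly like this (a sketch; the ``rlcompleter`` fallback is an assumption and not part of this diff):

    try:
        from jedi.utils import setup_readline
        setup_readline()
    except ImportError:
        # Jedi is not installed: fall back to the standard library completer.
        import readline
        import rlcompleter
        readline.parse_and_bind("tab: complete")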

.. _jedi-vim: https://github.com/davidhalter/jedi-vim
.. _youcompleteme: http://valloric.github.io/YouCompleteMe/
.. _Jedi.el: https://github.com/tkf/emacs-jedi
.. _company-jedi: https://github.com/proofit404/company-jedi
.. _sublimejedi: https://github.com/srusskih/SublimeJEDI
.. _anaconda: https://github.com/DamnWidget/anaconda
.. _SynJedi: http://uvviewsoft.com/synjedi/
.. _wdb: https://github.com/Kozea/wdb

@@ -34,16 +34,8 @@ As you see Jedi is pretty simple and allows you to concentrate on writing a
good text editor, while still having very good IDE features for Python.
"""

__version__ = 0, 7, 1, 'alpha1'
__version__ = '0.8.0-alpha1'

import sys

# python imports are hell sometimes. Especially the combination of relative
# imports and circular imports... Just avoid it:
sys.path.insert(0, __path__[0])

from .api import Script, Interpreter, NotFoundError, set_debug_function
from .api import preload_module, defined_names
from . import settings

sys.path.pop(0)
from jedi.api import Script, Interpreter, NotFoundError, set_debug_function
from jedi.api import preload_module, defined_names
from jedi import settings

@@ -1,7 +1,8 @@
from sys import argv
from os.path import join, dirname, abspath


if len(argv) == 2 and argv[1] == 'repl':
    # don't want to use __main__ only for repl yet, maybe we want to use it for
    # something else. So just use the keyword ``repl`` for now.
    from os import path
    print(path.join(path.dirname(path.abspath(__file__)), 'replstartup.py'))
    print(join(dirname(abspath(__file__)), 'api', 'replstartup.py'))

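For context (an assumption about intended usage, not stated in this diff): the printed path is meant to be assigned to ``PYTHONSTARTUP``, for example ``export PYTHONSTARTUP="$(python -m jedi repl)"`` in a shell profile, so that ``replstartup.py`` runs whenever an interactive interpreter starts.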
@@ -1,7 +1,6 @@
"""
To ensure compatibility from Python ``2.6`` - ``3.3``, a module has been
created. Clearly there is huge need to use conforming syntax. But many changes
(e.g. ``property``, ``hasattr`` in ``2.5``) can be rewritten in pure python.
created. Clearly there is huge need to use conforming syntax.
"""
import sys
import imp
@@ -11,8 +10,9 @@ try:
except ImportError:
    pass

is_py3k = sys.hexversion >= 0x03000000
is_py33 = sys.hexversion >= 0x03030000
is_py3 = sys.version_info[0] >= 3
is_py33 = is_py3 and sys.version_info.minor >= 3
is_py26 = not is_py3 and sys.version_info[1] < 7


def find_module_py33(string, path=None):
@@ -82,7 +82,7 @@ try:
except NameError:
    unicode = str

if is_py3k:
if is_py3:
    utf8 = lambda s: s
else:
    utf8 = lambda s: s.decode('utf-8')
@@ -92,7 +92,7 @@ Decode a raw string into unicode object. Do nothing in Python 3.
"""

# exec function
if is_py3k:
if is_py3:
    def exec_function(source, global_map):
        exec(source, global_map)
else:
@@ -100,7 +100,7 @@ else:
exec source in global_map """, 'blub', 'exec'))

# re-raise function
if is_py3k:
if is_py3:
    def reraise(exception, traceback):
        raise exception.with_traceback(traceback)
else:
@@ -125,7 +125,7 @@ except ImportError:
    from io import StringIO

# hasattr function used because python
if is_py3k:
if is_py3:
    hasattr = hasattr
else:
    def hasattr(obj, name):
@@ -146,16 +146,13 @@ class Python3Method(object):
        else:
            return lambda *args, **kwargs: self.func(obj, *args, **kwargs)


def use_metaclass(meta, *bases):
    """ Create a class with a metaclass. """
    if not bases:
        bases = (object,)
    return meta("HackClass", bases, {})
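A usage illustration for ``use_metaclass`` (the ``CachedMeta`` example is hypothetical, not from this diff): because the metaclass syntax differs between Python 2 (``__metaclass__``) and Python 3 (``metaclass=``), a class that needs a metaclass inherits from the returned throwaway class instead:

    class CachedMeta(type):
        """Hypothetical metaclass, e.g. for memoizing instances."""
        def __call__(cls, *args, **kwargs):
            return super(CachedMeta, cls).__call__(*args, **kwargs)

    class Something(use_metaclass(CachedMeta)):
        # type(Something) is CachedMeta on both Python 2 and Python 3.
        pass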

try:
    from functools import reduce  # Python 3
except ImportError:
    reduce = reduce

try:
    encoding = sys.stdout.encoding
@@ -164,13 +161,14 @@ try:
except AttributeError:
    encoding = 'ascii'


def u(string):
    """Cast to unicode DAMMIT!
    Written because Python2 repr always implicitly casts to a string, so we
    have to cast back to a unicode (and we know that we always deal with valid
    unicode, because we check that in the beginning).
    """
    if is_py3k:
    if is_py3:
        return str(string)
    elif not isinstance(string, unicode):
        return unicode(str(string), 'UTF-8')

@@ -5,31 +5,31 @@ use its methods.
|
||||
Additionally you can add a debug function with :func:`set_debug_function` and
|
||||
catch :exc:`NotFoundError` which is being raised if your completion is not
|
||||
possible.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
.. warning:: Please, note that Jedi is **not thread safe**.
|
||||
"""
|
||||
import re
|
||||
import os
|
||||
import warnings
|
||||
from itertools import chain
|
||||
|
||||
from jedi._compatibility import next, unicode, builtins
|
||||
from jedi.parser import Parser
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.parser.user_context import UserContext, UserContextParser
|
||||
from jedi import debug
|
||||
from jedi import settings
|
||||
from jedi import helpers
|
||||
from jedi import common
|
||||
from jedi import cache
|
||||
from jedi import modules
|
||||
from jedi import interpret
|
||||
from jedi._compatibility import next, unicode, builtins
|
||||
import keywords
|
||||
import evaluate
|
||||
import api_classes
|
||||
import evaluate_representation as er
|
||||
import dynamic
|
||||
import imports
|
||||
import builtin
|
||||
from jedi.api import keywords
|
||||
from jedi.api import classes
|
||||
from jedi.api import interpreter
|
||||
from jedi.evaluate import Evaluator, filter_private_variable
|
||||
from jedi.evaluate import representation as er
|
||||
from jedi.evaluate import compiled
|
||||
from jedi.evaluate import imports
|
||||
from jedi.evaluate import helpers
|
||||
from jedi.evaluate.finder import get_names_of_scope
|
||||
|
||||
|
||||
class NotFoundError(Exception):
|
||||
@@ -62,6 +62,8 @@ class Script(object):
|
||||
if source_path is not None:
|
||||
warnings.warn("Use path instead of source_path.", DeprecationWarning)
|
||||
path = source_path
|
||||
self._source_path = path
|
||||
self.path = None if path is None else os.path.abspath(path)
|
||||
|
||||
if source is None:
|
||||
with open(path) as f:
|
||||
@@ -70,24 +72,22 @@ class Script(object):
|
||||
lines = source.splitlines() or ['']
|
||||
if source and source[-1] == '\n':
|
||||
lines.append('')
|
||||
|
||||
self._line = max(len(lines), 1) if line is None else line
|
||||
if not (0 < self._line <= len(lines)):
|
||||
line = max(len(lines), 1) if line is None else line
|
||||
if not (0 < line <= len(lines)):
|
||||
raise ValueError('`line` parameter is not in a valid range.')
|
||||
|
||||
line_len = len(lines[self._line - 1])
|
||||
self._column = line_len if column is None else column
|
||||
if not (0 <= self._column <= line_len):
|
||||
line_len = len(lines[line - 1])
|
||||
column = line_len if column is None else column
|
||||
if not (0 <= column <= line_len):
|
||||
raise ValueError('`column` parameter is not in a valid range.')
|
||||
self._pos = line, column
|
||||
|
||||
api_classes._clear_caches()
|
||||
classes.clear_caches()
|
||||
debug.reset_time()
|
||||
self.source = modules.source_to_unicode(source, encoding)
|
||||
self._pos = self._line, self._column
|
||||
self._module = modules.ModuleWithCursor(
|
||||
path, source=self.source, position=self._pos)
|
||||
self._source_path = path
|
||||
self.path = None if path is None else os.path.abspath(path)
|
||||
self.source = common.source_to_unicode(source, encoding)
|
||||
self._user_context = UserContext(self.source, self._pos)
|
||||
self._parser = UserContextParser(self.source, path, self._pos, self._user_context)
|
||||
self._evaluator = Evaluator()
|
||||
debug.speed('init')
|
||||
|
||||
@property
|
||||
@@ -103,23 +103,17 @@ class Script(object):
|
||||
def __repr__(self):
|
||||
return '<%s: %s>' % (self.__class__.__name__, repr(self._source_path))
|
||||
|
||||
@property
|
||||
def _parser(self):
|
||||
""" lazy parser."""
|
||||
return self._module.parser
|
||||
|
||||
@api_classes._clear_caches_after_call
|
||||
def completions(self):
|
||||
"""
|
||||
Return :class:`api_classes.Completion` objects. Those objects contain
|
||||
Return :class:`classes.Completion` objects. Those objects contain
|
||||
information about the completions, more than just names.
|
||||
|
||||
:return: Completion objects, sorted by name and __ comes last.
|
||||
:rtype: list of :class:`api_classes.Completion`
|
||||
:rtype: list of :class:`classes.Completion`
|
||||
"""
|
||||
def get_completions(user_stmt, bs):
|
||||
if isinstance(user_stmt, pr.Import):
|
||||
context = self._module.get_context()
|
||||
context = self._user_context.get_context()
|
||||
next(context) # skip the path
|
||||
if next(context) == 'from':
|
||||
# completion is just "import" if before stands from ..
|
||||
@@ -127,26 +121,25 @@ class Script(object):
|
||||
return self._simple_complete(path, like)
|
||||
|
||||
debug.speed('completions start')
|
||||
path = self._module.get_path_until_cursor()
|
||||
path = self._user_context.get_path_until_cursor()
|
||||
if re.search('^\.|\.\.$', path):
|
||||
return []
|
||||
path, dot, like = self._get_completion_parts()
|
||||
|
||||
user_stmt = self._user_stmt(True)
|
||||
bs = builtin.Builtin.scope
|
||||
completions = get_completions(user_stmt, bs)
|
||||
user_stmt = self._parser.user_stmt(True)
|
||||
b = compiled.builtin
|
||||
completions = get_completions(user_stmt, b)
|
||||
|
||||
if not dot:
|
||||
# add named params
|
||||
for call_def in self.call_signatures():
|
||||
if not call_def.module.is_builtin():
|
||||
if not isinstance(call_def.module, compiled.CompiledObject):
|
||||
for p in call_def.params:
|
||||
completions.append((p.get_name(), p))
|
||||
|
||||
if not path and not isinstance(user_stmt, pr.Import):
|
||||
# add keywords
|
||||
completions += ((k, bs) for k in keywords.keyword_names(
|
||||
all=True))
|
||||
completions += ((k, b) for k in keywords.keyword_names(all=True))
|
||||
|
||||
needs_dot = not dot and path
|
||||
|
||||
@@ -157,9 +150,8 @@ class Script(object):
|
||||
if settings.case_insensitive_completion \
|
||||
and n.lower().startswith(like.lower()) \
|
||||
or n.startswith(like):
|
||||
if not evaluate.filter_private_variable(s,
|
||||
user_stmt or self._parser.user_scope, n):
|
||||
new = api_classes.Completion(c, needs_dot, len(like), s)
|
||||
if not filter_private_variable(s, user_stmt or self._parser.user_scope(), n):
|
||||
new = classes.Completion(self._evaluator, c, needs_dot, len(like), s)
|
||||
k = (new.name, new.complete) # key
|
||||
if k in comp_dct and settings.no_completion_duplicates:
|
||||
comp_dct[k]._same_name_completions.append(new)
|
||||
@@ -178,23 +170,24 @@ class Script(object):
|
||||
scopes = list(self._prepare_goto(path, True))
|
||||
except NotFoundError:
|
||||
scopes = []
|
||||
scope_generator = evaluate.get_names_of_scope(
|
||||
self._parser.user_scope, self._pos)
|
||||
scope_generator = get_names_of_scope(self._evaluator,
|
||||
self._parser.user_scope(),
|
||||
self._pos)
|
||||
completions = []
|
||||
for scope, name_list in scope_generator:
|
||||
for c in name_list:
|
||||
completions.append((c, scope))
|
||||
else:
|
||||
completions = []
|
||||
debug.dbg('possible scopes', scopes)
|
||||
debug.dbg('possible completion scopes: %s', scopes)
|
||||
for s in scopes:
|
||||
if s.isinstance(er.Function):
|
||||
names = s.get_magic_method_names()
|
||||
names = s.get_magic_function_names()
|
||||
else:
|
||||
if isinstance(s, imports.ImportPath):
|
||||
under = like + self._module.get_path_after_cursor()
|
||||
under = like + self._user_context.get_path_after_cursor()
|
||||
if under == 'import':
|
||||
current_line = self._module.get_position_line()
|
||||
current_line = self._user_context.get_position_line()
|
||||
if not current_line.endswith('import import'):
|
||||
continue
|
||||
a = s.import_stmt.alias
|
||||
@@ -208,27 +201,14 @@ class Script(object):
|
||||
completions.append((c, s))
|
||||
return completions
|
||||
|
||||
def _user_stmt(self, is_completion=False):
|
||||
user_stmt = self._parser.user_stmt
|
||||
debug.speed('parsed')
|
||||
|
||||
if is_completion and not user_stmt:
|
||||
# for statements like `from x import ` (cursor not in statement)
|
||||
pos = next(self._module.get_context(yield_positions=True))
|
||||
last_stmt = pos and self._parser.module.get_statement_for_position(
|
||||
pos, include_imports=True)
|
||||
if isinstance(last_stmt, pr.Import):
|
||||
user_stmt = last_stmt
|
||||
return user_stmt
|
||||
|
||||
def _prepare_goto(self, goto_path, is_completion=False):
|
||||
"""
|
||||
Base for completions/goto. Basically it returns the resolved scopes
|
||||
under cursor.
|
||||
"""
|
||||
debug.dbg('start: %s in %s' % (goto_path, self._parser.user_scope))
|
||||
debug.dbg('start: %s in %s', goto_path, self._parser.user_scope())
|
||||
|
||||
user_stmt = self._user_stmt(is_completion)
|
||||
user_stmt = self._parser.user_stmt(is_completion)
|
||||
if not user_stmt and len(goto_path.split('\n')) > 1:
|
||||
# If the user_stmt is not defined and the goto_path is multi line,
|
||||
# something's strange. Most probably the backwards tokenizer
|
||||
@@ -240,17 +220,17 @@ class Script(object):
|
||||
else:
|
||||
# just parse one statement, take it and evaluate it
|
||||
stmt = self._get_under_cursor_stmt(goto_path)
|
||||
scopes = evaluate.follow_statement(stmt)
|
||||
scopes = self._evaluator.eval_statement(stmt)
|
||||
return scopes
|
||||
|
||||
def _get_under_cursor_stmt(self, cursor_txt):
|
||||
offset = self._line - 1, self._column
|
||||
offset = self._pos[0] - 1, self._pos[1]
|
||||
r = Parser(cursor_txt, no_docstr=True, offset=offset)
|
||||
try:
|
||||
stmt = r.module.statements[0]
|
||||
except IndexError:
|
||||
raise NotFoundError()
|
||||
stmt.parent = self._parser.user_scope
|
||||
stmt.parent = self._parser.user_scope()
|
||||
return stmt
|
||||
|
||||
def complete(self):
|
||||
@@ -316,7 +296,6 @@ class Script(object):
|
||||
sig = self.call_signatures()
|
||||
return sig[0] if sig else None
|
||||
|
||||
@api_classes._clear_caches_after_call
|
||||
def goto_definitions(self):
|
||||
"""
|
||||
Return the definitions of a the path under the cursor. goto function!
|
||||
@@ -327,7 +306,7 @@ class Script(object):
|
||||
because Python itself is a dynamic language, which means depending on
|
||||
an option you can have two different versions of a function.
|
||||
|
||||
:rtype: list of :class:`api_classes.Definition`
|
||||
:rtype: list of :class:`classes.Definition`
|
||||
"""
|
||||
def resolve_import_paths(scopes):
|
||||
for s in scopes.copy():
|
||||
@@ -336,16 +315,16 @@ class Script(object):
|
||||
scopes.update(resolve_import_paths(set(s.follow())))
|
||||
return scopes
|
||||
|
||||
goto_path = self._module.get_path_under_cursor()
|
||||
goto_path = self._user_context.get_path_under_cursor()
|
||||
|
||||
context = self._module.get_context()
|
||||
context = self._user_context.get_context()
|
||||
scopes = set()
|
||||
lower_priority_operators = ('()', '(', ',')
|
||||
"""Operators that could hide callee."""
|
||||
if next(context) in ('class', 'def'):
|
||||
scopes = set([self._module.parser.user_scope])
|
||||
scopes = set([self._parser.user_scope()])
|
||||
elif not goto_path:
|
||||
op = self._module.get_operator_under_cursor()
|
||||
op = self._user_context.get_operator_under_cursor()
|
||||
if op and op not in lower_priority_operators:
|
||||
scopes = set([keywords.get_operator(op, self._pos)])
|
||||
|
||||
@@ -357,13 +336,10 @@ class Script(object):
|
||||
call = call.next
|
||||
# reset cursor position:
|
||||
(row, col) = call.name.end_pos
|
||||
_pos = (row, max(col - 1, 0))
|
||||
self._module = modules.ModuleWithCursor(
|
||||
self._source_path,
|
||||
source=self.source,
|
||||
position=_pos)
|
||||
pos = (row, max(col - 1, 0))
|
||||
self._user_context = UserContext(self.source, pos)
|
||||
# then try to find the path again
|
||||
goto_path = self._module.get_path_under_cursor()
|
||||
goto_path = self._user_context.get_path_under_cursor()
|
||||
|
||||
if not scopes:
|
||||
if goto_path:
|
||||
@@ -376,11 +352,10 @@ class Script(object):
|
||||
# add keywords
|
||||
scopes |= keywords.keywords(string=goto_path, pos=self._pos)
|
||||
|
||||
d = set([api_classes.Definition(s) for s in scopes
|
||||
d = set([classes.Definition(self._evaluator, s) for s in scopes
|
||||
if s is not imports.ImportPath.GlobalNamespace])
|
||||
return self._sorted_defs(d)
|
||||
|
||||
@api_classes._clear_caches_after_call
|
||||
def goto_assignments(self):
|
||||
"""
|
||||
Return the first definition found. Imports and statements aren't
|
||||
@@ -388,10 +363,10 @@ class Script(object):
|
||||
dynamic language, which means depending on an option you can have two
|
||||
different versions of a function.
|
||||
|
||||
:rtype: list of :class:`api_classes.Definition`
|
||||
:rtype: list of :class:`classes.Definition`
|
||||
"""
|
||||
results, _ = self._goto()
|
||||
d = [api_classes.Definition(d) for d in set(results)
|
||||
d = [classes.Definition(self._evaluator, d) for d in set(results)
|
||||
if d is not imports.ImportPath.GlobalNamespace]
|
||||
return self._sorted_defs(d)
|
||||
|
||||
@@ -410,16 +385,16 @@ class Script(object):
|
||||
for d in defs:
|
||||
if isinstance(d.parent, pr.Import) \
|
||||
and d.start_pos == (0, 0):
|
||||
i = imports.ImportPath(d.parent).follow(is_goto=True)
|
||||
i = imports.ImportPath(self._evaluator, d.parent).follow(is_goto=True)
|
||||
definitions.remove(d)
|
||||
definitions |= follow_inexistent_imports(i)
|
||||
return definitions
|
||||
|
||||
goto_path = self._module.get_path_under_cursor()
|
||||
context = self._module.get_context()
|
||||
user_stmt = self._user_stmt()
|
||||
goto_path = self._user_context.get_path_under_cursor()
|
||||
context = self._user_context.get_context()
|
||||
user_stmt = self._parser.user_stmt()
|
||||
if next(context) in ('class', 'def'):
|
||||
user_scope = self._parser.user_scope
|
||||
user_scope = self._parser.user_scope()
|
||||
definitions = set([user_scope.name])
|
||||
search_name = unicode(user_scope.name)
|
||||
elif isinstance(user_stmt, pr.Import):
|
||||
@@ -438,10 +413,10 @@ class Script(object):
|
||||
definitions.append(import_name[0])
|
||||
else:
|
||||
stmt = self._get_under_cursor_stmt(goto_path)
|
||||
defs, search_name = evaluate.goto(stmt)
|
||||
defs, search_name = self._evaluator.goto(stmt)
|
||||
definitions = follow_inexistent_imports(defs)
|
||||
if isinstance(user_stmt, pr.Statement):
|
||||
c = user_stmt.get_commands()
|
||||
c = user_stmt.expression_list()
|
||||
if c and not isinstance(c[0], (str, unicode)) \
|
||||
and c[0].start_pos > self._pos \
|
||||
and not re.search(r'\.\w+$', goto_path):
|
||||
@@ -450,51 +425,48 @@ class Script(object):
|
||||
definitions = [user_stmt]
|
||||
return definitions, search_name
|
||||
|
||||
@api_classes._clear_caches_after_call
|
||||
def usages(self, additional_module_paths=()):
|
||||
"""
|
||||
Return :class:`api_classes.Usage` objects, which contain all
|
||||
Return :class:`classes.Usage` objects, which contain all
|
||||
names that point to the definition of the name under the cursor. This
|
||||
is very useful for refactoring (renaming), or to show all usages of a
|
||||
variable.
|
||||
|
||||
.. todo:: Implement additional_module_paths
|
||||
|
||||
:rtype: list of :class:`api_classes.Usage`
|
||||
:rtype: list of :class:`classes.Usage`
|
||||
"""
|
||||
temp, settings.dynamic_flow_information = \
|
||||
settings.dynamic_flow_information, False
|
||||
user_stmt = self._user_stmt()
|
||||
user_stmt = self._parser.user_stmt()
|
||||
definitions, search_name = self._goto(add_import_name=True)
|
||||
if isinstance(user_stmt, pr.Statement):
|
||||
c = user_stmt.get_commands()[0]
|
||||
c = user_stmt.expression_list()[0]
|
||||
if not isinstance(c, unicode) and self._pos < c.start_pos:
|
||||
# the search_name might be before `=`
|
||||
definitions = [v for v in user_stmt.get_set_vars()
|
||||
if unicode(v.names[-1]) == search_name]
|
||||
if not isinstance(user_stmt, pr.Import):
|
||||
# import case is looked at with add_import_name option
|
||||
definitions = dynamic.usages_add_import_modules(definitions,
|
||||
search_name)
|
||||
definitions = usages_add_import_modules(self._evaluator, definitions, search_name)
|
||||
|
||||
module = set([d.get_parent_until() for d in definitions])
|
||||
module.add(self._parser.module)
|
||||
names = dynamic.usages(definitions, search_name, module)
|
||||
module.add(self._parser.module())
|
||||
names = usages(self._evaluator, definitions, search_name, module)
|
||||
|
||||
for d in set(definitions):
|
||||
if isinstance(d, pr.Module):
|
||||
names.append(api_classes.Usage(d, d))
|
||||
names.append(classes.Usage(self._evaluator, d, d))
|
||||
elif isinstance(d, er.Instance):
|
||||
# Instances can be ignored, because they are being created by
|
||||
# ``__getattr__``.
|
||||
pass
|
||||
else:
|
||||
names.append(api_classes.Usage(d.names[-1], d))
|
||||
names.append(classes.Usage(self._evaluator, d.names[-1], d))
|
||||
|
||||
settings.dynamic_flow_information = temp
|
||||
return self._sorted_defs(set(names))
|
||||
|
||||
@api_classes._clear_caches_after_call
|
||||
def call_signatures(self):
|
||||
"""
|
||||
Return the function object of the call you're currently in.
|
||||
@@ -509,27 +481,28 @@ class Script(object):
|
||||
|
||||
This would return ``None``.
|
||||
|
||||
:rtype: list of :class:`api_classes.CallDef`
|
||||
:rtype: list of :class:`classes.CallDef`
|
||||
"""
|
||||
|
||||
call, index = self._func_call_and_param_index()
|
||||
if call is None:
|
||||
return []
|
||||
|
||||
user_stmt = self._user_stmt()
|
||||
user_stmt = self._parser.user_stmt()
|
||||
with common.scale_speed_settings(settings.scale_call_signatures):
|
||||
_callable = lambda: evaluate.follow_call(call)
|
||||
_callable = lambda: self._evaluator.eval_call(call)
|
||||
origins = cache.cache_call_signatures(_callable, user_stmt)
|
||||
debug.speed('func_call followed')
|
||||
|
||||
return [api_classes.CallDef(o, index, call) for o in origins
|
||||
if o.isinstance(er.Function, er.Instance, er.Class)]
|
||||
return [classes.CallDef(o, index, call) for o in origins
|
||||
if o.isinstance(er.Function, er.Instance, er.Class)
|
||||
or isinstance(o, compiled.CompiledObject) and o.type() != 'module']
|
||||
|
||||
def _func_call_and_param_index(self):
|
||||
debug.speed('func_call start')
|
||||
call, index = None, 0
|
||||
if call is None:
|
||||
user_stmt = self._user_stmt()
|
||||
user_stmt = self._parser.user_stmt()
|
||||
if user_stmt is not None and isinstance(user_stmt, pr.Statement):
|
||||
call, index, _ = helpers.search_call_signatures(user_stmt, self._pos)
|
||||
debug.speed('func_call parsed')
|
||||
@@ -550,11 +523,10 @@ class Script(object):
|
||||
cur_name_part = name_part
|
||||
kill_count += 1
|
||||
|
||||
|
||||
context = self._module.get_context()
|
||||
context = self._user_context.get_context()
|
||||
just_from = next(context) == 'from'
|
||||
|
||||
i = imports.ImportPath(user_stmt, is_like_search,
|
||||
i = imports.ImportPath(self._evaluator, user_stmt, is_like_search,
|
||||
kill_count=kill_count, direct_resolve=True,
|
||||
is_just_from=just_from)
|
||||
return i, cur_name_part
|
||||
@@ -564,19 +536,17 @@ class Script(object):
|
||||
Returns the parts for the completion
|
||||
:return: tuple - (path, dot, like)
|
||||
"""
|
||||
path = self._module.get_path_until_cursor()
|
||||
path = self._user_context.get_path_until_cursor()
|
||||
match = re.match(r'^(.*?)(\.|)(\w?[\w\d]*)$', path, flags=re.S)
|
||||
return match.groups()
|
||||
|
||||
@staticmethod
|
||||
def _sorted_defs(d):
|
||||
def _sorted_defs(self, d):
|
||||
# Note: `or ''` below is required because `module_path` could be
|
||||
# None and you can't compare None and str in Python 3.
|
||||
return sorted(d, key=lambda x: (x.module_path or '', x.line, x.column))
|
||||
return sorted(d, key=lambda x: (x.module_path or '', x.line or 0, x.column or 0))
|
||||
|
||||
|
||||
class Interpreter(Script):
|
||||
|
||||
"""
|
||||
Jedi API for Python REPLs.
|
||||
|
||||
@@ -590,7 +560,6 @@ class Interpreter(Script):
|
||||
>>> script = Interpreter('join().up', [namespace])
|
||||
>>> print(script.completions()[0].name)
|
||||
upper
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, source, namespaces=[], **kwds):
|
||||
@@ -611,17 +580,15 @@ class Interpreter(Script):
|
||||
self.namespaces = namespaces
|
||||
|
||||
# Here we add the namespaces to the current parser.
|
||||
importer = interpret.ObjectImporter(self._parser.user_scope)
|
||||
for ns in namespaces:
|
||||
importer.import_raw_namespace(ns)
|
||||
interpreter.create(self._evaluator, namespaces[0], self._parser.module())
|
||||
|
||||
def _simple_complete(self, path, like):
|
||||
user_stmt = self._user_stmt(True)
|
||||
user_stmt = self._parser.user_stmt(True)
|
||||
is_simple_path = not path or re.search('^[\w][\w\d.]*$', path)
|
||||
if isinstance(user_stmt, pr.Import) or not is_simple_path:
|
||||
return super(type(self), self)._simple_complete(path, like)
|
||||
else:
|
||||
class NamespaceModule:
|
||||
class NamespaceModule(object):
|
||||
def __getattr__(_, name):
|
||||
for n in self.namespaces:
|
||||
try:
|
||||
@@ -646,18 +613,15 @@ class Interpreter(Script):
|
||||
pass
|
||||
|
||||
completions = []
|
||||
for n in namespaces:
|
||||
for name in dir(n):
|
||||
for namespace in namespaces:
|
||||
for name in dir(namespace):
|
||||
if name.lower().startswith(like.lower()):
|
||||
scope = self._parser.module
|
||||
n = pr.Name(self._parser.module, [(name, (0, 0))],
|
||||
(0, 0), (0, 0), scope)
|
||||
scope = self._parser.module()
|
||||
n = helpers.FakeName(name, scope)
|
||||
completions.append((n, scope))
|
||||
return completions
|
||||
|
||||
|
||||
|
||||
|
||||
def defined_names(source, path=None, encoding='utf-8'):
|
||||
"""
|
||||
Get all definitions in `source` sorted by its position.
|
||||
@@ -668,13 +632,13 @@ def defined_names(source, path=None, encoding='utf-8'):
|
||||
`defined_names` method which can be used to get sub-definitions
|
||||
(e.g., methods in class).
|
||||
|
||||
:rtype: list of api_classes.Definition
|
||||
:rtype: list of classes.Definition
|
||||
"""
|
||||
parser = Parser(
|
||||
modules.source_to_unicode(source, encoding),
|
||||
common.source_to_unicode(source, encoding),
|
||||
module_path=path,
|
||||
)
|
||||
return api_classes._defined_names(parser.module)
|
||||
return classes.defined_names(Evaluator(), parser.module)
|
||||
|
||||
|
||||
def preload_module(*modules):
|
||||
@@ -700,3 +664,88 @@ def set_debug_function(func_cb=debug.print_to_stdout, warnings=True,
|
||||
debug.enable_warning = warnings
|
||||
debug.enable_notice = notices
|
||||
debug.enable_speed = speed
|
||||
|
||||
|
||||
# TODO move to a better place.
|
||||
def usages(evaluator, definitions, search_name, mods):
|
||||
def compare_array(definitions):
|
||||
""" `definitions` are being compared by module/start_pos, because
|
||||
sometimes the id's of the objects change (e.g. executions).
|
||||
"""
|
||||
result = []
|
||||
for d in definitions:
|
||||
module = d.get_parent_until()
|
||||
result.append((module, d.start_pos))
|
||||
return result
|
||||
|
||||
def check_call(call):
|
||||
result = []
|
||||
follow = [] # There might be multiple search_name's in one call_path
|
||||
call_path = list(call.generate_call_path())
|
||||
for i, name in enumerate(call_path):
|
||||
# name is `pr.NamePart`.
|
||||
if name == search_name:
|
||||
follow.append(call_path[:i + 1])
|
||||
|
||||
for f in follow:
|
||||
follow_res, search = evaluator.goto(call.parent, f)
|
||||
# names can change (getattr stuff), therefore filter names that
|
||||
# don't match `search_name`.
|
||||
|
||||
# TODO add something like that in the future - for now usages are
|
||||
# completely broken anyway.
|
||||
#follow_res = [r for r in follow_res if str(r) == search]
|
||||
#print search.start_pos,search_name.start_pos
|
||||
#print follow_res, search, search_name, [(r, r.start_pos) for r in follow_res]
|
||||
follow_res = usages_add_import_modules(evaluator, follow_res, search)
|
||||
|
||||
compare_follow_res = compare_array(follow_res)
|
||||
# compare to see if they match
|
||||
if any(r in compare_definitions for r in compare_follow_res):
|
||||
scope = call.parent
|
||||
result.append(classes.Usage(evaluator, search, scope))
|
||||
|
||||
return result
|
||||
|
||||
if not definitions:
|
||||
return set()
|
||||
|
||||
compare_definitions = compare_array(definitions)
|
||||
mods |= set([d.get_parent_until() for d in definitions])
|
||||
names = []
|
||||
for m in imports.get_modules_containing_name(mods, search_name):
|
||||
try:
|
||||
stmts = m.used_names[search_name]
|
||||
except KeyError:
|
||||
continue
|
||||
for stmt in stmts:
|
||||
if isinstance(stmt, pr.Import):
|
||||
count = 0
|
||||
imps = []
|
||||
for i in stmt.get_all_import_names():
|
||||
for name_part in i.names:
|
||||
count += 1
|
||||
if name_part == search_name:
|
||||
imps.append((count, name_part))
|
||||
|
||||
for used_count, name_part in imps:
|
||||
i = imports.ImportPath(evaluator, stmt, kill_count=count - used_count,
|
||||
direct_resolve=True)
|
||||
f = i.follow(is_goto=True)
|
||||
if set(f) & set(definitions):
|
||||
names.append(classes.Usage(evaluator, name_part, stmt))
|
||||
else:
|
||||
for call in helpers.scan_statement_for_calls(stmt, search_name, assignment_details=True):
|
||||
names += check_call(call)
|
||||
return names
|
||||
|
||||
|
||||
def usages_add_import_modules(evaluator, definitions, search_name):
|
||||
""" Adds the modules of the imports """
|
||||
new = set()
|
||||
for d in definitions:
|
||||
if isinstance(d.parent, pr.Import):
|
||||
s = imports.ImportPath(evaluator, d.parent, direct_resolve=True)
|
||||
with common.ignored(IndexError):
|
||||
new.add(s.follow(is_goto=True)[0])
|
||||
return set(definitions) | new
|
||||
@@ -3,36 +3,28 @@ The :mod:`api_classes` module contains the return classes of the API. These
|
||||
classes are the much bigger part of the whole API, because they contain the
|
||||
interesting information about completion and goto operations.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import warnings
|
||||
import functools
|
||||
|
||||
from jedi._compatibility import unicode, next
|
||||
from jedi import settings
|
||||
from jedi import common
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import cache
|
||||
import keywords
|
||||
import recursion
|
||||
import dynamic
|
||||
import evaluate
|
||||
import imports
|
||||
import evaluate_representation as er
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.evaluate import representation as er
|
||||
from jedi.evaluate import iterable
|
||||
from jedi.evaluate import imports
|
||||
from jedi.evaluate import compiled
|
||||
from jedi.api import keywords
|
||||
from jedi.evaluate.finder import get_names_of_scope
|
||||
|
||||
|
||||
def _clear_caches():
|
||||
def clear_caches():
|
||||
"""
|
||||
Clear all caches of this and related modules. The only cache that will not
|
||||
be deleted is the module cache.
|
||||
"""
|
||||
cache.clear_caches()
|
||||
dynamic.search_param_cache.clear()
|
||||
recursion.ExecutionRecursionDecorator.reset()
|
||||
|
||||
evaluate.follow_statement.reset()
|
||||
|
||||
imports.imports_processed = 0
|
||||
|
||||
|
||||
def _clear_caches_after_call(func):
|
||||
@@ -42,7 +34,7 @@ def _clear_caches_after_call(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwds):
|
||||
result = func(*args, **kwds)
|
||||
_clear_caches()
|
||||
clear_caches()
|
||||
return result
|
||||
return wrapper
|
||||
|
||||
@@ -69,7 +61,8 @@ class BaseDefinition(object):
|
||||
'_sre.SRE_Pattern': 're.RegexObject',
|
||||
}.items())
|
||||
|
||||
def __init__(self, definition, start_pos):
|
||||
def __init__(self, evaluator, definition, start_pos):
|
||||
self._evaluator = evaluator
|
||||
self._start_pos = start_pos
|
||||
self._definition = definition
|
||||
"""
|
||||
@@ -79,8 +72,11 @@ class BaseDefinition(object):
|
||||
|
||||
# generate a path to the definition
|
||||
self._module = definition.get_parent_until()
|
||||
self.module_path = self._module.path
|
||||
"""Shows the file path of a module. e.g. ``/usr/lib/python2.7/os.py``"""
|
||||
if self.in_builtin_module():
|
||||
self.module_path = None
|
||||
else:
|
||||
self.module_path = self._module.path
|
||||
"""Shows the file path of a module. e.g. ``/usr/lib/python2.7/os.py``"""
|
||||
|
||||
@property
|
||||
def start_pos(self):
|
||||
@@ -143,8 +139,10 @@ class BaseDefinition(object):
|
||||
"""
|
||||
# generate the type
|
||||
stripped = self._definition
|
||||
if isinstance(self._definition, er.InstanceElement):
|
||||
stripped = self._definition.var
|
||||
if isinstance(stripped, compiled.CompiledObject):
|
||||
return stripped.type()
|
||||
if isinstance(stripped, er.InstanceElement):
|
||||
stripped = stripped.var
|
||||
if isinstance(stripped, pr.Name):
|
||||
stripped = stripped.parent
|
||||
return type(stripped).__name__.lower()
|
||||
@@ -176,18 +174,17 @@ class BaseDefinition(object):
|
||||
The module name.
|
||||
|
||||
>>> from jedi import Script
|
||||
>>> source = 'import datetime'
|
||||
>>> script = Script(source, 1, len(source), 'example.py')
|
||||
>>> source = 'import json'
|
||||
>>> script = Script(source, path='example.py')
|
||||
>>> d = script.goto_definitions()[0]
|
||||
>>> print(d.module_name) # doctest: +ELLIPSIS
|
||||
datetime
|
||||
json
|
||||
"""
|
||||
return str(self._module.name)
|
||||
|
||||
def in_builtin_module(self):
|
||||
"""Whether this is a builtin module."""
|
||||
return not (self.module_path is None or
|
||||
self.module_path.endswith('.py'))
|
||||
return isinstance(self._module, compiled.CompiledObject)
|
||||
|
||||
@property
|
||||
def line_nr(self):
|
||||
@@ -241,7 +238,7 @@ class BaseDefinition(object):
|
||||
|
||||
"""
|
||||
try:
|
||||
return self._definition.doc
|
||||
return self._definition.doc or '' # Always a String, never None.
|
||||
except AttributeError:
|
||||
return self.raw_doc
|
||||
|
||||
@@ -309,8 +306,8 @@ class Completion(BaseDefinition):
|
||||
`Completion` objects are returned from :meth:`api.Script.completions`. They
|
||||
provide additional information about a completion.
|
||||
"""
|
||||
def __init__(self, name, needs_dot, like_name_length, base):
|
||||
super(Completion, self).__init__(name.parent, name.start_pos)
|
||||
def __init__(self, evaluator, name, needs_dot, like_name_length, base):
|
||||
super(Completion, self).__init__(evaluator, name.parent, name.start_pos)
|
||||
|
||||
self._name = name
|
||||
self._needs_dot = needs_dot
|
||||
@@ -321,8 +318,6 @@ class Completion(BaseDefinition):
|
||||
# duplicate items in the completion)
|
||||
self._same_name_completions = []
|
||||
|
||||
self._followed_definitions = None
|
||||
|
||||
def _complete(self, like_name):
|
||||
dot = '.' if self._needs_dot else ''
|
||||
append = ''
|
||||
@@ -402,6 +397,7 @@ class Completion(BaseDefinition):
|
||||
line = '' if self.in_builtin_module else '@%s' % self.line
|
||||
return '%s: %s%s' % (t, desc, line)
|
||||
|
||||
@cache.underscore_memoization
|
||||
def follow_definition(self):
|
||||
"""
|
||||
Return the original definitions. I strongly recommend not using it for
|
||||
@@ -411,19 +407,16 @@ class Completion(BaseDefinition):
|
||||
follows all results. This means with 1000 completions (e.g. numpy),
|
||||
it's just PITA-slow.
|
||||
"""
|
||||
if self._followed_definitions is None:
|
||||
if self._definition.isinstance(pr.Statement):
|
||||
defs = evaluate.follow_statement(self._definition)
|
||||
elif self._definition.isinstance(pr.Import):
|
||||
defs = imports.strip_imports([self._definition])
|
||||
else:
|
||||
return [self]
|
||||
if self._definition.isinstance(pr.Statement):
|
||||
defs = self._evaluator.eval_statement(self._definition)
|
||||
elif self._definition.isinstance(pr.Import):
|
||||
defs = imports.strip_imports(self._evaluator, [self._definition])
|
||||
else:
|
||||
return [self]
|
||||
|
||||
self._followed_definitions = \
|
||||
[BaseDefinition(d, d.start_pos) for d in defs]
|
||||
_clear_caches()
|
||||
|
||||
return self._followed_definitions
|
||||
defs = [BaseDefinition(self._evaluator, d, d.start_pos) for d in defs]
|
||||
clear_caches()
|
||||
return defs
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: %s>' % (type(self).__name__, self._name)
|
||||
@@ -434,8 +427,8 @@ class Definition(BaseDefinition):
|
||||
*Definition* objects are returned from :meth:`api.Script.goto_assignments`
|
||||
or :meth:`api.Script.goto_definitions`.
|
||||
"""
|
||||
def __init__(self, definition):
|
||||
super(Definition, self).__init__(definition, definition.start_pos)
|
||||
def __init__(self, evaluator, definition):
|
||||
super(Definition, self).__init__(evaluator, definition, definition.start_pos)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -450,9 +443,11 @@ class Definition(BaseDefinition):
|
||||
if isinstance(d, er.InstanceElement):
|
||||
d = d.var
|
||||
|
||||
if isinstance(d, pr.Name):
|
||||
if isinstance(d, compiled.CompiledObject):
|
||||
return d.name
|
||||
elif isinstance(d, pr.Name):
|
||||
return d.names[-1] if d.names else None
|
||||
elif isinstance(d, er.Array):
|
||||
elif isinstance(d, iterable.Array):
|
||||
return unicode(d.type)
|
||||
elif isinstance(d, (pr.Class, er.Class, er.Instance,
|
||||
er.Function, pr.Function)):
|
||||
@@ -469,7 +464,6 @@ class Definition(BaseDefinition):
|
||||
return d.assignment_details[0][1].values[0][0].name.names[-1]
|
||||
except IndexError:
|
||||
return None
|
||||
return None
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
@@ -505,7 +499,9 @@ class Definition(BaseDefinition):
|
||||
if isinstance(d, pr.Name):
|
||||
d = d.parent
|
||||
|
||||
if isinstance(d, er.Array):
|
||||
if isinstance(d, compiled.CompiledObject):
|
||||
d = d.type() + ' ' + d.name
|
||||
elif isinstance(d, iterable.Array):
|
||||
d = 'class ' + d.type
|
||||
elif isinstance(d, (pr.Class, er.Class, er.Instance)):
|
||||
d = 'class ' + unicode(d.name)
|
||||
@@ -533,12 +529,7 @@ class Definition(BaseDefinition):
|
||||
.. todo:: Add full path. This function is should return a
|
||||
`module.class.function` path.
|
||||
"""
|
||||
if self.module_path.endswith('.py') \
|
||||
and not isinstance(self._definition, pr.Module):
|
||||
position = '@%s' % (self.line)
|
||||
else:
|
||||
# is a builtin or module
|
||||
position = ''
|
||||
position = '' if self.in_builtin_module else '@%s' % (self.line)
|
||||
return "%s:%s%s" % (self.module_name, self.description, position)
|
||||
|
||||
def defined_names(self):
|
||||
@@ -552,26 +543,26 @@ class Definition(BaseDefinition):
|
||||
d = d.var
|
||||
if isinstance(d, pr.Name):
|
||||
d = d.parent
|
||||
return _defined_names(d)
|
||||
return defined_names(self._evaluator, d)
|
||||
|
||||
|
||||
def _defined_names(scope):
|
||||
def defined_names(evaluator, scope):
|
||||
"""
|
||||
List sub-definitions (e.g., methods in class).
|
||||
|
||||
:type scope: Scope
|
||||
:rtype: list of Definition
|
||||
"""
|
||||
pair = next(evaluate.get_names_of_scope(
|
||||
scope, star_search=False, include_builtin=False), None)
|
||||
pair = next(get_names_of_scope(evaluator, scope, star_search=False,
|
||||
include_builtin=False), None)
|
||||
names = pair[1] if pair else []
|
||||
return [Definition(d) for d in sorted(names, key=lambda s: s.start_pos)]
|
||||
return [Definition(evaluator, d) for d in sorted(names, key=lambda s: s.start_pos)]
|
||||
|
||||
|
||||
class Usage(BaseDefinition):
|
||||
"""TODO: document this"""
|
||||
def __init__(self, name_part, scope):
|
||||
super(Usage, self).__init__(scope, name_part.start_pos)
|
||||
def __init__(self, evaluator, name_part, scope):
|
||||
super(Usage, self).__init__(evaluator, scope, name_part.start_pos)
|
||||
self.text = unicode(name_part)
|
||||
self.end_pos = name_part.end_pos
|
||||
|
||||
jedi/api/interpreter.py (new file, 92 lines)
@@ -0,0 +1,92 @@
import inspect
import re

from jedi._compatibility import builtins
from jedi import debug
from jedi.cache import underscore_memoization
from jedi.evaluate import compiled
from jedi.evaluate.compiled.fake import get_module
from jedi.parser import representation as pr
from jedi.parser.fast import FastParser
from jedi.evaluate import helpers


class InterpreterNamespace(pr.Module):
    def __init__(self, evaluator, namespace, parser_module):
        self.namespace = namespace
        self.parser_module = parser_module
        self._evaluator = evaluator

    def get_defined_names(self):
        for name in self.parser_module.get_defined_names():
            yield name
        for key, value in self.namespace.items():
            yield LazyName(self._evaluator, key, value)

    def __getattr__(self, name):
        return getattr(self.parser_module, name)


class LazyName(helpers.FakeName):
    def __init__(self, evaluator, name, value):
        super(LazyName, self).__init__(name)
        self._evaluator = evaluator
        self._value = value
        self._name = name

    @property
    @underscore_memoization
    def parent(self):
        parser_path = []
        obj = self._value
        if inspect.ismodule(obj):
            module = obj
        else:
            try:
                o = obj.__objclass__
                parser_path.append(pr.NamePart(obj.__name__, None, None))
                obj = o
            except AttributeError:
                pass

            try:
                module_name = obj.__module__
                parser_path.insert(0, pr.NamePart(obj.__name__, None, None))
            except AttributeError:
                # Unfortunately in some cases like `int` there's no __module__
                module = builtins
            else:
                module = __import__(module_name)
        raw_module = get_module(self._value)

        try:
            path = module.__file__
        except AttributeError:
            pass
        else:
            path = re.sub('c$', '', path)
            if path.endswith('.py'):
                # cut the `c` from `.pyc`
                with open(path) as f:
                    mod = FastParser(f.read(), path[:-1]).module
                if not parser_path:
                    return mod
                found = self._evaluator.eval_call_path(iter(parser_path), mod, None)
                if found:
                    return found[0]
                debug.warning('Interpreter lookup for Python code failed %s',
                              mod)

        module = compiled.CompiledObject(raw_module)
        return compiled.create(self._value, module, module)

    @parent.setter
    def parent(self, value):
        """Needed because of the ``representation.Simple`` super class."""


def create(evaluator, namespace, parser_module):
    ns = InterpreterNamespace(evaluator, namespace, parser_module)
    for attr_name in pr.SCOPE_CONTENTS:
        for something in getattr(parser_module, attr_name):
            something.parent = ns
@@ -1,12 +1,9 @@
from __future__ import with_statement

import pydoc
import keyword

from jedi._compatibility import is_py3k
from jedi.parser import representation as pr
from jedi._compatibility import is_py3
from jedi import common
import builtin
from jedi.evaluate import compiled

try:
    from pydoc_data import topics as pydoc_topics
@@ -14,7 +11,7 @@ except ImportError:
    # Python 2.6
    import pydoc_topics

if is_py3k:
if is_py3:
    keys = keyword.kwlist
else:
    keys = keyword.kwlist + ['None', 'False', 'True']
@@ -32,8 +29,7 @@ def keyword_names(*args, **kwargs):
    kwds = []
    for k in keywords(*args, **kwargs):
        start = k.start_pos
        end = start[0], start[1] + len(k.name)
        kwds.append(pr.Name(k.parent, [(k.name, start)], start, end, k))
        kwds.append(KeywordName(k, k.name, start))
    return kwds


@@ -41,11 +37,22 @@ def get_operator(string, pos):
    return Keyword(string, pos)


class KeywordName(object):
    def __init__(self, parent, name, start_pos):
        self.parent = parent
        self.names = [name]
        self.start_pos = start_pos

    @property
    def end_pos(self):
        return self.start_pos[0], self.start_pos[1] + len(self.name)


class Keyword(object):
    def __init__(self, name, pos):
        self.name = name
        self.start_pos = pos
        self.parent = builtin.Builtin.scope
        self.parent = compiled.builtin

    def get_parent_until(self):
        return self.parent
jedi/builtin.py (447 lines deleted)
@@ -1,447 +0,0 @@
"""
A big part of the Python standard libraries are unfortunately not only written
in Python. The process works like this:

- ``BuiltinModule`` imports the builtin module (e.g. ``sys``)
- then ``BuiltinModule`` generates code with the docstrings of builtin
  functions.
- The :mod:`parsing` parser processes the generated code.

This is possible, because many builtin functions supply docstrings, for example
the method ``list.index`` has the following attribute ``__doc__``:

    L.index(value, [start, [stop]]) -> integer -- return first index of value.
    Raises ValueError if the value is not present.

`PEP 257 <http://www.python.org/dev/peps/pep-0257/#one-line-docstrings>`_
teaches how docstrings should look like for C functions.

Additionally there's a ``Builtin`` instance in this module, to make it
possible to access functions like ``list`` and ``int`` directly, the same way
|jedi| access other functions.
"""
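A rough sketch of the idea this docstring describes (hypothetical, not the module's actual implementation, which follows below in the removed code): take the PEP-257-style first line of a C function's docstring and turn it into a Python stub that a source parser can understand.

    import inspect
    import re

    def docstring_to_stub(func):
        """Hypothetical helper: build a Python stub from a C function's docstring."""
        doc = inspect.getdoc(func) or ''
        first_line = doc.splitlines()[0] if doc else ''
        # e.g. 'L.index(value, [start, [stop]]) -> integer -- return first index of value.'
        match = re.match(r'\w+\.(\w+)\((.*)\)\s*->', first_line)
        if match is None:
            # Newer CPython versions drop the signature line, so this can fail.
            return None
        name, params = match.groups()
        params = params.replace('[', '').replace(']', '')  # drop optional-argument brackets
        return 'def %s(%s):\n    r"""%s"""\n    return 0\n' % (name, params, doc)

    print(docstring_to_stub(list.index))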
|
||||
from __future__ import with_statement
|
||||
from jedi._compatibility import exec_function, is_py3k
|
||||
|
||||
import re
|
||||
import sys
|
||||
import os
|
||||
if is_py3k:
|
||||
import io
|
||||
import types
|
||||
import inspect
|
||||
|
||||
from jedi import common
|
||||
from jedi import debug
|
||||
from jedi.parser import Parser
|
||||
from jedi import modules
|
||||
import evaluate
|
||||
|
||||
|
||||
class BuiltinModule(modules.CachedModule):
|
||||
"""
|
||||
This module is a parser for all builtin modules, which are programmed in
|
||||
C/C++. It should also work on third party modules.
|
||||
It can be instantiated with either a path or a name of the module. The path
|
||||
is important for third party modules.
|
||||
|
||||
:param name: The name of the module.
|
||||
:param path: The path of the module.
|
||||
:param sys_path: The sys.path, which is can be customizable.
|
||||
"""
|
||||
|
||||
map_types = {
|
||||
'floating point number': '0.0',
|
||||
'string': '""',
|
||||
'str': '""',
|
||||
'character': '"a"',
|
||||
'integer': '0',
|
||||
'int': '0',
|
||||
'dictionary': '{}',
|
||||
'list': '[]',
|
||||
'file object': 'file("")',
|
||||
# TODO things like dbg: ('not working', 'tuple of integers')
|
||||
}
|
||||
|
||||
if is_py3k:
|
||||
map_types['file object'] = 'import io; return io.TextIOWrapper()'
|
||||
|
||||
def __init__(self, path=None, name=None, sys_path=None):
|
||||
if sys_path is None:
|
||||
sys_path = modules.get_sys_path()
|
||||
if not name:
|
||||
name = os.path.basename(path)
|
||||
name = name.rpartition('.')[0] # cut file type (normally .so)
|
||||
super(BuiltinModule, self).__init__(path=path, name=name)
|
||||
|
||||
self.sys_path = list(sys_path)
|
||||
self._module = None
|
||||
|
||||
@property
|
||||
def module(self):
|
||||
def load_module(name, path):
|
||||
if path:
|
||||
self.sys_path.insert(0, path)
|
||||
|
||||
temp, sys.path = sys.path, self.sys_path
|
||||
content = {}
|
||||
try:
|
||||
exec_function('import %s as module' % name, content)
|
||||
self._module = content['module']
|
||||
except AttributeError:
|
||||
# use sys.modules, because you cannot access some modules
|
||||
# directly. -> #59
|
||||
self._module = sys.modules[name]
|
||||
sys.path = temp
|
||||
|
||||
if path:
|
||||
self.sys_path.pop(0)
|
||||
|
||||
# module might already be defined
|
||||
if not self._module:
|
||||
path = self.path
|
||||
name = self.name
|
||||
if self.path:
|
||||
|
||||
dot_path = []
|
||||
p = self.path
|
||||
# search for the builtin with the correct path
|
||||
while p and p not in sys.path:
|
||||
p, sep, mod = p.rpartition(os.path.sep)
|
||||
dot_path.append(mod.partition('.')[0])
|
||||
if p:
|
||||
name = ".".join(reversed(dot_path))
|
||||
path = p
|
||||
else:
|
||||
path = os.path.dirname(self.path)
|
||||
|
||||
load_module(name, path)
|
||||
return self._module
|
||||
|
||||
def _get_source(self):
|
||||
""" Override this abstract method """
|
||||
return _generate_code(self.module, self._load_mixins())
|
||||
|
||||
def _load_mixins(self):
|
||||
"""
|
||||
Load functions that are mixed in to the standard library.
|
||||
E.g. builtins are written in C (binaries), but my autocompletion only
|
||||
understands Python code. By mixing in Python code, the autocompletion
|
||||
should work much better for builtins.
|
||||
"""
|
||||
regex = r'^(def|class)\s+([\w\d]+)'
|
||||
|
||||
def process_code(code, depth=0):
|
||||
funcs = {}
|
||||
matches = list(re.finditer(regex, code, re.MULTILINE))
|
||||
positions = [m.start() for m in matches]
|
||||
for i, pos in enumerate(positions):
|
||||
try:
|
||||
code_block = code[pos:positions[i + 1]]
|
||||
except IndexError:
|
||||
code_block = code[pos:len(code)]
|
||||
structure_name = matches[i].group(1)
|
||||
name = matches[i].group(2)
|
||||
if structure_name == 'def':
|
||||
funcs[name] = code_block
|
||||
elif structure_name == 'class':
|
||||
if depth > 0:
|
||||
raise NotImplementedError()
|
||||
|
||||
# remove class line
|
||||
c = re.sub(r'^[^\n]+', '', code_block)
|
||||
# remove whitespace
|
||||
c = re.compile(r'^[ ]{4}', re.MULTILINE).sub('', c)
|
||||
|
||||
funcs[name] = process_code(c)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
return funcs
|
||||
|
||||
try:
|
||||
name = self.name
|
||||
# sometimes there are stupid endings like `_sqlite3.cpython-32mu`
|
||||
name = re.sub(r'\..*', '', name)
|
||||
|
||||
if name == '__builtin__' and not is_py3k:
|
||||
name = 'builtins'
|
||||
path = os.path.dirname(os.path.abspath(__file__))
|
||||
with open(os.path.sep.join([path, 'mixin', name]) + '.pym') as f:
|
||||
s = f.read()
|
||||
except IOError:
|
||||
return {}
|
||||
else:
|
||||
mixin_dct = process_code(s)
|
||||
if is_py3k and self.name == Builtin.name:
|
||||
# in the case of Py3k xrange is now range
|
||||
mixin_dct['range'] = mixin_dct['xrange']
|
||||
return mixin_dct
|
||||
|
||||
|
||||
def _generate_code(scope, mixin_funcs={}, depth=0):
|
||||
"""
|
||||
Generate a string, which uses python syntax as an input to the Parser.
|
||||
"""
|
||||
def get_doc(obj, indent=False):
|
||||
doc = inspect.getdoc(obj)
|
||||
if doc:
|
||||
doc = ('r"""\n%s\n"""\n' % doc)
|
||||
if indent:
|
||||
doc = common.indent_block(doc)
|
||||
return doc
|
||||
return ''
|
||||
|
||||
def is_in_base_classes(cls, name, comparison):
|
||||
""" Base classes may contain the exact same object """
|
||||
if name in mixin_funcs:
|
||||
return False
|
||||
try:
|
||||
mro = cls.mro()
|
||||
except TypeError:
|
||||
# this happens, if cls == type
|
||||
return False
|
||||
for base in mro[1:]:
|
||||
try:
|
||||
attr = getattr(base, name)
|
||||
except AttributeError:
|
||||
continue
|
||||
if attr == comparison:
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_scope_objects(names):
|
||||
"""
|
||||
Looks for the names defined with dir() in an objects and divides
|
||||
them into different object types.
|
||||
"""
|
||||
classes = {}
|
||||
funcs = {}
|
||||
stmts = {}
|
||||
members = {}
|
||||
for n in names:
|
||||
try:
|
||||
# this has a builtin_function_or_method
|
||||
exe = getattr(scope, n)
|
||||
except AttributeError:
|
||||
# happens e.g. in properties of
|
||||
# PyQt4.QtGui.QStyleOptionComboBox.currentText
|
||||
# -> just set it to None
|
||||
members[n] = None
|
||||
else:
|
||||
if inspect.isclass(scope):
|
||||
if is_in_base_classes(scope, n, exe):
|
||||
continue
|
||||
if inspect.isbuiltin(exe) or inspect.ismethod(exe) \
|
||||
or inspect.ismethoddescriptor(exe):
|
||||
funcs[n] = exe
|
||||
elif inspect.isclass(exe) or inspect.ismodule(exe):
|
||||
classes[n] = exe
|
||||
elif inspect.ismemberdescriptor(exe):
|
||||
members[n] = exe
|
||||
else:
|
||||
stmts[n] = exe
|
||||
return classes, funcs, stmts, members
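Roughly the same classification can be tried on any module with the ``inspect`` predicates used above; a small sketch (``os.path`` is an arbitrary choice, and ``isfunction`` is added here because ``os.path`` is pure Python rather than compiled)::

    import inspect
    import os.path

    funcs, classes, rest = {}, {}, {}
    for n in dir(os.path):
        exe = getattr(os.path, n)
        if inspect.isbuiltin(exe) or inspect.isfunction(exe):
            funcs[n] = exe          # would become generated ``def`` stubs
        elif inspect.isclass(exe) or inspect.ismodule(exe):
            classes[n] = exe        # would become generated ``class`` blocks
        else:
            rest[n] = exe           # would become plain statements
    print('abspath' in funcs, 'os' in classes)   # -> True True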
|
||||
|
||||
code = ''
|
||||
if inspect.ismodule(scope): # generate comment where the code's from.
|
||||
try:
|
||||
path = scope.__file__
|
||||
except AttributeError:
|
||||
path = '?'
|
||||
code += '# Generated module %s from %s\n' % (scope.__name__, path)
|
||||
|
||||
code += get_doc(scope)
|
||||
|
||||
# Remove some magic vars, (TODO why?)
|
||||
names = set(dir(scope)) - set(['__file__', '__name__', '__doc__',
|
||||
'__path__', '__package__'])
|
||||
|
||||
classes, funcs, stmts, members = get_scope_objects(names)
|
||||
|
||||
# classes
|
||||
for name, cl in classes.items():
|
||||
bases = (c.__name__ for c in cl.__bases__) if inspect.isclass(cl) \
|
||||
else []
|
||||
code += 'class %s(%s):\n' % (name, ','.join(bases))
|
||||
if depth == 0:
|
||||
try:
|
||||
mixin = mixin_funcs[name]
|
||||
except KeyError:
|
||||
mixin = {}
|
||||
cl_code = _generate_code(cl, mixin, depth + 1)
|
||||
code += common.indent_block(cl_code)
|
||||
code += '\n'
|
||||
|
||||
# functions
|
||||
for name, func in funcs.items():
|
||||
params, ret = _parse_function_doc(func)
|
||||
if depth > 0:
|
||||
params = 'self, ' + params
|
||||
doc_str = get_doc(func, indent=True)
|
||||
try:
|
||||
mixin = mixin_funcs[name]
|
||||
except KeyError:
|
||||
# normal code generation
|
||||
code += 'def %s(%s):\n' % (name, params)
|
||||
code += doc_str
|
||||
code += common.indent_block('%s\n\n' % ret)
|
||||
else:
|
||||
# generation of code with mixins
|
||||
# the parser only supports basic functions with a newline after
|
||||
# the double dots
|
||||
# find doc_str place
|
||||
try:
|
||||
pos = re.search(r'\):\s*\n', mixin).end()
|
||||
except TypeError:
|
||||
# pypy uses a different reversed builtin
|
||||
if name == 'reversed':
|
||||
mixin = 'def reversed(sequence):\n' \
|
||||
' for i in self.__sequence: yield i'
|
||||
pos = 24
|
||||
else:
|
||||
debug.warning('mixin trouble in pypy: %s', name)
|
||||
raise
|
||||
if pos is None:
|
||||
raise Exception("Builtin function not parsed correctly")
|
||||
code += mixin[:pos] + doc_str + mixin[pos:]
|
||||
|
||||
# class members (functions) properties?
|
||||
for name, func in members.items():
|
||||
# recursion problem in properties TODO remove
|
||||
if name in ['fget', 'fset', 'fdel']:
|
||||
continue
|
||||
ret = 'pass'
|
||||
code += '@property\ndef %s(self):\n' % (name)
|
||||
code += common.indent_block(get_doc(func) + '%s\n\n' % ret)
|
||||
|
||||
# variables
|
||||
for name, value in stmts.items():
|
||||
if is_py3k:
|
||||
file_type = io.TextIOWrapper
|
||||
else:
|
||||
file_type = types.FileType
|
||||
if isinstance(value, file_type):
|
||||
value = 'open()'
|
||||
elif name == 'None':
|
||||
value = ''
|
||||
elif type(value).__name__ in ['int', 'bool', 'float',
|
||||
'dict', 'list', 'tuple']:
|
||||
value = repr(value)
|
||||
else:
|
||||
# get the type, if the type is not simple.
|
||||
mod = type(value).__module__
|
||||
value = type(value).__name__ + '()'
|
||||
if mod != '__builtin__':
|
||||
value = '%s.%s' % (mod, value)
|
||||
code += '%s = %s\n' % (name, value)
|
||||
|
||||
return code
|
||||
|
||||
|
||||
def _parse_function_doc(func):
|
||||
"""
|
||||
Takes a function and returns the params and return value as a tuple.
|
||||
This is nothing more than a docstring parser.
|
||||
"""
|
||||
# TODO: things like utime(path, (atime, mtime)) and a(b [, b]) -> None
|
||||
doc = inspect.getdoc(func)
|
||||
|
||||
if doc is None:
|
||||
return '', 'pass'
|
||||
|
||||
# get full string, parse round parentheses: def func(a, (b,c))
|
||||
try:
|
||||
count = 0
|
||||
debug.dbg(func, func.__name__, doc)
|
||||
start = doc.index('(')
|
||||
for i, s in enumerate(doc[start:]):
|
||||
if s == '(':
|
||||
count += 1
|
||||
elif s == ')':
|
||||
count -= 1
|
||||
if count == 0:
|
||||
end = start + i
|
||||
break
|
||||
param_str = doc[start + 1:end]
|
||||
except (ValueError, UnboundLocalError):
|
||||
# ValueError for doc.index
|
||||
# UnboundLocalError for undefined end in last line
|
||||
debug.dbg('no brackets found - no param')
|
||||
end = 0
|
||||
param_str = ''
|
||||
else:
|
||||
# remove square brackets, that show an optional param ( = None)
|
||||
def change_options(m):
|
||||
args = m.group(1).split(',')
|
||||
for i, a in enumerate(args):
|
||||
if a and '=' not in a:
|
||||
args[i] += '=None'
|
||||
return ','.join(args)
|
||||
|
||||
while True:
|
||||
param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
|
||||
change_options, param_str)
|
||||
if changes == 0:
|
||||
break
|
||||
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
|
||||
|
||||
# parse return value
|
||||
r = re.search('-[>-]* ', doc[end:end + 7])
|
||||
if r is None:
|
||||
ret = ''
|
||||
else:
|
||||
index = end + r.end()
|
||||
# get result type, which can contain newlines
|
||||
pattern = re.compile(r'(,\n|[^\n-])+')
|
||||
ret_str = pattern.match(doc, index).group(0).strip()
|
||||
# New object -> object()
|
||||
ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)
|
||||
|
||||
ret = BuiltinModule.map_types.get(ret_str, ret_str)
|
||||
if ret == ret_str and ret not in ['None', 'object', 'tuple', 'set']:
|
||||
debug.dbg('not working', ret_str)
|
||||
|
||||
ret = ('return ' if 'return' not in ret else '') + ret
|
||||
return param_str, ret
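The square-bracket handling above is the interesting part; in isolation it turns optional parameters of a C-style docstring signature into keyword defaults (the sample signature is just an illustration)::

    import re

    def change_options(m):
        # optional args written as "[, z]" get a "=None" default
        args = m.group(1).split(',')
        for i, a in enumerate(args):
            if a and '=' not in a:
                args[i] += '=None'
        return ','.join(args)

    param_str = 'x, y[, z]'          # e.g. taken from "pow(x, y[, z]) -> number"
    while True:
        param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
                                     change_options, param_str)
        if changes == 0:
            break
    print(param_str)                 # -> 'x, y, z=None'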
|
||||
|
||||
|
||||
class Builtin(object):
|
||||
""" The builtin scope / module """
|
||||
# Python 3 compatibility
|
||||
if is_py3k:
|
||||
name = 'builtins'
|
||||
else:
|
||||
name = '__builtin__'
|
||||
|
||||
_builtin = None
|
||||
|
||||
@property
|
||||
def builtin(self):
|
||||
if self._builtin is None:
|
||||
self._builtin = BuiltinModule(name=self.name)
|
||||
return self._builtin
|
||||
|
||||
@property
|
||||
def scope(self):
|
||||
return self.builtin.parser.module
|
||||
|
||||
@property
|
||||
def magic_function_scope(self):
|
||||
try:
|
||||
return self._magic_function_scope
|
||||
except AttributeError:
|
||||
# depth = 1 because this is not a module
|
||||
class Container(object):
|
||||
FunctionType = types.FunctionType
|
||||
source = _generate_code(Container, depth=0)
|
||||
parser = Parser(source, None)
|
||||
module = parser.module
|
||||
module.parent = self.scope
|
||||
typ = evaluate.follow_path(iter(['FunctionType']), module, module)
|
||||
|
||||
s = self._magic_function_scope = typ.pop()
|
||||
return s
|
||||
|
||||
|
||||
Builtin = Builtin()
|
||||
164  jedi/cache.py
@@ -3,11 +3,8 @@ This caching is very important for speed and memory optimizations. There's
nothing really spectacular, just some decorators. The following cache types are
available:

- module caching (`load_module` and `save_module`), which uses pickle and is
- module caching (`load_parser` and `save_parser`), which uses pickle and is
  really important to assure low load times of modules like ``numpy``.
- the popular ``memoize_default`` works like a typical memoize and returns the
  default otherwise.
- ``CachedMetaClass`` uses ``memoize_default`` to do the same with classes.
- ``time_cache`` can be used to cache something for just a limited time span,
  which can be useful if there's user interaction and the user cannot react
  faster than a certain time.
|
||||
@@ -16,30 +13,26 @@ This module is one of the reasons why |jedi| is not thread-safe. As you can see
|
||||
there are global variables, which are holding the cache information. Some of
|
||||
these variables are being cleaned after every API usage.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import time
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import hashlib
|
||||
import gc
|
||||
import inspect
|
||||
import shutil
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
import shutil
|
||||
|
||||
from jedi import settings
|
||||
from jedi import common
|
||||
from jedi import debug
|
||||
|
||||
# memoize caches will be deleted after every action
|
||||
memoize_caches = []
|
||||
_time_caches = []
|
||||
|
||||
time_caches = []
|
||||
|
||||
star_import_cache = {}
|
||||
_star_import_cache = {}
|
||||
|
||||
# for fast_parser, should not be deleted
|
||||
parser_cache = {}
|
||||
@@ -60,20 +53,15 @@ def clear_caches(delete_all=False):
|
||||
:param delete_all: Deletes also the cache that is normally not deleted,
|
||||
like parser cache, which is important for faster parsing.
|
||||
"""
|
||||
global memoize_caches, time_caches
|
||||
|
||||
# memorize_caches must never be deleted, because the dicts will get lost in
|
||||
# the wrappers.
|
||||
for m in memoize_caches:
|
||||
m.clear()
|
||||
global _time_caches
|
||||
|
||||
if delete_all:
|
||||
time_caches = []
|
||||
star_import_cache.clear()
|
||||
_time_caches = []
|
||||
_star_import_cache.clear()
|
||||
parser_cache.clear()
|
||||
else:
|
||||
# normally just kill the expired entries, not all
|
||||
for tc in time_caches:
|
||||
for tc in _time_caches:
|
||||
# check time_cache for expired entries
|
||||
for key, (t, value) in list(tc.items()):
|
||||
if t < time.time():
|
||||
@@ -81,41 +69,6 @@ def clear_caches(delete_all=False):
|
||||
del tc[key]
|
||||
|
||||
|
||||
def memoize_default(default=None, cache=memoize_caches):
    """ This is a typical memoization decorator, BUT there is one difference:
    To prevent recursion it sets defaults.

    Preventing recursion is in this case a much bigger benefit than speed. I
    don't think that there is a big speed difference, but there are many cases
    where recursion could happen (think about a = b; b = a).
    """
|
||||
def func(function):
|
||||
memo = {}
|
||||
cache.append(memo)
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
key = (args, frozenset(kwargs.items()))
|
||||
if key in memo:
|
||||
return memo[key]
|
||||
else:
|
||||
memo[key] = default
|
||||
rv = function(*args, **kwargs)
|
||||
memo[key] = rv
|
||||
return rv
|
||||
return wrapper
|
||||
return func
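To see why the default is written into ``memo`` *before* the call, here is a standalone sketch with a deliberately self-referencing lookup (the ``follow`` function and its data are made up for illustration)::

    def memoize_default(default=None, cache=[]):
        # same shape as the decorator above, restated so the sketch runs on its own
        def func(function):
            memo = {}
            cache.append(memo)

            def wrapper(*args, **kwargs):
                key = (args, frozenset(kwargs.items()))
                if key in memo:
                    return memo[key]
                memo[key] = default          # recursion hits this instead of looping
                rv = function(*args, **kwargs)
                memo[key] = rv
                return rv
            return wrapper
        return func

    @memoize_default(default=[])
    def follow(name):
        aliases = {'a': 'b', 'b': 'a'}       # 'a' and 'b' point at each other
        return follow(aliases[name]) + [name]

    print(follow('a'))   # -> ['b', 'a']; the innermost call returned the default []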
|
||||
|
||||
|
||||
class CachedMetaClass(type):
|
||||
""" This is basically almost the same than the decorator above, it just
|
||||
caches class initializations. I haven't found any other way, so I do it
|
||||
with meta classes.
|
||||
"""
|
||||
@memoize_default()
|
||||
def __call__(self, *args, **kwargs):
|
||||
return super(CachedMetaClass, self).__call__(*args, **kwargs)
|
||||
|
||||
|
||||
def time_cache(time_add_setting):
|
||||
""" This decorator works as follows: Call it with a setting and after that
|
||||
use the function with a callable that returns the key.
|
||||
@@ -124,7 +77,7 @@ def time_cache(time_add_setting):
|
||||
"""
|
||||
def _temp(key_func):
|
||||
dct = {}
|
||||
time_caches.append(dct)
|
||||
_time_caches.append(dct)
|
||||
|
||||
def wrapper(optional_callable, *args, **kwargs):
|
||||
key = key_func(*args, **kwargs)
|
||||
@@ -148,63 +101,108 @@ def cache_call_signatures(stmt):
|
||||
return None if module_path is None else (module_path, stmt.start_pos)
|
||||
|
||||
|
||||
def underscore_memoization(func):
|
||||
"""
|
||||
Decorator for methods::
|
||||
|
||||
class A(object):
|
||||
def x(self):
|
||||
if self._x:
|
||||
self._x = 10
|
||||
return self._x
|
||||
|
||||
Becomes::
|
||||
|
||||
class A(object):
|
||||
@underscore_memoization
|
||||
def x(self):
|
||||
return 10
|
||||
|
||||
A now has an attribute ``_x`` written by this decorator.
|
||||
"""
|
||||
name = '_' + func.__name__
|
||||
|
||||
def wrapper(self):
|
||||
try:
|
||||
return getattr(self, name)
|
||||
except AttributeError:
|
||||
result = func(self)
|
||||
if inspect.isgenerator(result):
|
||||
result = list(result)
|
||||
setattr(self, name, result)
|
||||
return result
|
||||
|
||||
return wrapper
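A short usage sketch, mirroring the docstring above (assuming the decorator defined here is importable; the class and value are illustrative)::

    class A(object):
        @underscore_memoization
        def x(self):
            return 10

    a = A()
    print(a.x())   # first call computes the value and stores it ...
    print(a._x)    # ... as the ``_x`` attribute -> 10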
|
||||
|
||||
|
||||
def cache_star_import(func):
|
||||
def wrapper(scope, *args, **kwargs):
|
||||
def wrapper(evaluator, scope, *args, **kwargs):
|
||||
with common.ignored(KeyError):
|
||||
mods = star_import_cache[scope]
|
||||
mods = _star_import_cache[scope]
|
||||
if mods[0] + settings.star_import_cache_validity > time.time():
|
||||
return mods[1]
|
||||
# cache is too old and therefore invalid or not available
|
||||
invalidate_star_import_cache(scope)
|
||||
mods = func(scope, *args, **kwargs)
|
||||
star_import_cache[scope] = time.time(), mods
|
||||
_invalidate_star_import_cache_module(scope)
|
||||
mods = func(evaluator, scope, *args, **kwargs)
|
||||
_star_import_cache[scope] = time.time(), mods
|
||||
|
||||
return mods
|
||||
return wrapper
|
||||
|
||||
|
||||
def invalidate_star_import_cache(module, only_main=False):
|
||||
def _invalidate_star_import_cache_module(module, only_main=False):
|
||||
""" Important if some new modules are being reparsed """
|
||||
with common.ignored(KeyError):
|
||||
t, mods = star_import_cache[module]
|
||||
t, mods = _star_import_cache[module]
|
||||
|
||||
del star_import_cache[module]
|
||||
del _star_import_cache[module]
|
||||
|
||||
for m in mods:
|
||||
invalidate_star_import_cache(m, only_main=True)
|
||||
_invalidate_star_import_cache_module(m, only_main=True)
|
||||
|
||||
if not only_main:
|
||||
# We need a list here because otherwise the list is being changed
|
||||
# during the iteration in py3k: iteritems -> items.
|
||||
for key, (t, mods) in list(star_import_cache.items()):
|
||||
for key, (t, mods) in list(_star_import_cache.items()):
|
||||
if module in mods:
|
||||
invalidate_star_import_cache(key)
|
||||
_invalidate_star_import_cache_module(key)
|
||||
|
||||
|
||||
def load_module(path, name):
|
||||
def invalidate_star_import_cache(path):
|
||||
"""On success returns True."""
|
||||
try:
|
||||
parser_cache_item = parser_cache[path]
|
||||
except KeyError:
|
||||
return False
|
||||
else:
|
||||
_invalidate_star_import_cache_module(parser_cache_item.parser.module)
|
||||
return True
|
||||
|
||||
|
||||
def load_parser(path, name):
|
||||
"""
|
||||
Returns the module or None, if it fails.
|
||||
"""
|
||||
if path is None and name is None:
|
||||
return None
|
||||
|
||||
tim = os.path.getmtime(path) if path else None
|
||||
p_time = os.path.getmtime(path) if path else None
|
||||
n = name if path is None else path
|
||||
try:
|
||||
parser_cache_item = parser_cache[n]
|
||||
if not path or tim <= parser_cache_item.change_time:
|
||||
if not path or p_time <= parser_cache_item.change_time:
|
||||
return parser_cache_item.parser
|
||||
else:
|
||||
# In case there is already a module cached and this module
|
||||
# has to be reparsed, we also need to invalidate the import
|
||||
# caches.
|
||||
invalidate_star_import_cache(parser_cache_item.parser.module)
|
||||
_invalidate_star_import_cache_module(parser_cache_item.parser.module)
|
||||
except KeyError:
|
||||
if settings.use_filesystem_cache:
|
||||
return ModulePickling.load_module(n, tim)
|
||||
return ParserPickling.load_parser(n, p_time)
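The reparse decision above boils down to comparing the file's current mtime with the time remembered when the parser was cached; a rough sketch (the path is just whatever file happens to be handy)::

    import os
    import time

    cached_at = time.time()                  # what ParserCacheItem.change_time stores
    path = __file__                          # any existing file works for the sketch
    needs_reparse = os.path.getmtime(path) > cached_at
    print(needs_reparse)                     # False unless the file was just modified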
|
||||
|
||||
|
||||
def save_module(path, name, parser, pickling=True):
|
||||
def save_parser(path, name, parser, pickling=True):
|
||||
try:
|
||||
p_time = None if not path else os.path.getmtime(path)
|
||||
except OSError:
|
||||
@@ -215,12 +213,12 @@ def save_module(path, name, parser, pickling=True):
|
||||
item = ParserCacheItem(parser, p_time)
|
||||
parser_cache[n] = item
|
||||
if settings.use_filesystem_cache and pickling:
|
||||
ModulePickling.save_module(n, item)
|
||||
ParserPickling.save_parser(n, item)
|
||||
|
||||
|
||||
class _ModulePickling(object):
|
||||
class ParserPickling(object):
|
||||
|
||||
version = 5
|
||||
version = 9
|
||||
"""
|
||||
Version number (integer) for file system cache.
|
||||
|
||||
@@ -246,7 +244,7 @@ class _ModulePickling(object):
|
||||
.. todo:: Detect interpreter (e.g., PyPy).
|
||||
"""
|
||||
|
||||
def load_module(self, path, original_changed_time):
|
||||
def load_parser(self, path, original_changed_time):
|
||||
try:
|
||||
pickle_changed_time = self._index[path]
|
||||
except KeyError:
|
||||
@@ -263,11 +261,11 @@ class _ModulePickling(object):
|
||||
finally:
|
||||
gc.enable()
|
||||
|
||||
debug.dbg('pickle loaded', path)
|
||||
debug.dbg('pickle loaded: %s', path)
|
||||
parser_cache[path] = parser_cache_item
|
||||
return parser_cache_item.parser
|
||||
|
||||
def save_module(self, path, parser_cache_item):
|
||||
def save_parser(self, path, parser_cache_item):
|
||||
self.__index = None
|
||||
try:
|
||||
files = self._index
|
||||
@@ -292,7 +290,7 @@ class _ModulePickling(object):
|
||||
else:
|
||||
# 0 means version is not defined (= always delete cache):
|
||||
if data.get('version', 0) != self.version:
|
||||
self.delete_cache()
|
||||
self.clear_cache()
|
||||
self.__index = {}
|
||||
else:
|
||||
self.__index = data['index']
|
||||
@@ -311,7 +309,7 @@ class _ModulePickling(object):
|
||||
json.dump(data, f)
|
||||
self.__index = None
|
||||
|
||||
def delete_cache(self):
|
||||
def clear_cache(self):
|
||||
shutil.rmtree(self._cache_directory())
|
||||
|
||||
def _get_hashed_path(self, path):
|
||||
@@ -328,4 +326,4 @@ class _ModulePickling(object):
|
||||
|
||||
|
||||
# is a singleton
|
||||
ModulePickling = _ModulePickling()
|
||||
ParserPickling = ParserPickling()
|
||||
|
||||
152  jedi/common.py
@@ -2,19 +2,17 @@
|
||||
import sys
|
||||
import contextlib
|
||||
import functools
|
||||
import tokenizer as tokenize
|
||||
import re
|
||||
from ast import literal_eval
|
||||
|
||||
from jedi._compatibility import next, reraise
|
||||
from jedi._compatibility import unicode, next, reraise
|
||||
from jedi import settings
|
||||
|
||||
FLOWS = ['if', 'else', 'elif', 'while', 'with', 'try', 'except', 'finally']
|
||||
|
||||
|
||||
class MultiLevelStopIteration(Exception):
|
||||
"""
|
||||
    StopIterations get caught pretty easily by for loops; this exception lets errors propagate.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class UncaughtAttributeError(Exception):
|
||||
@@ -30,7 +28,11 @@ class UncaughtAttributeError(Exception):
|
||||
"""
|
||||
|
||||
|
||||
def rethrow_uncaught(func):
|
||||
def safe_property(func):
|
||||
return property(reraise_uncaught(func))
|
||||
|
||||
|
||||
def reraise_uncaught(func):
|
||||
"""
|
||||
Re-throw uncaught `AttributeError`.
|
||||
|
||||
@@ -84,107 +86,6 @@ class PushBackIterator(object):
|
||||
return self.current
|
||||
|
||||
|
||||
class NoErrorTokenizer(object):
|
||||
def __init__(self, readline, offset=(0, 0), is_fast_parser=False):
|
||||
self.readline = readline
|
||||
self.gen = tokenize.generate_tokens(readline)
|
||||
self.offset = offset
|
||||
self.closed = False
|
||||
self.is_first = True
|
||||
self.push_backs = []
|
||||
|
||||
# fast parser options
|
||||
self.is_fast_parser = is_fast_parser
|
||||
self.current = self.previous = [None, None, (0, 0), (0, 0), '']
|
||||
self.in_flow = False
|
||||
self.new_indent = False
|
||||
self.parser_indent = self.old_parser_indent = 0
|
||||
self.is_decorator = False
|
||||
self.first_stmt = True
|
||||
|
||||
def push_last_back(self):
|
||||
self.push_backs.append(self.current)
|
||||
|
||||
def next(self):
|
||||
""" Python 2 Compatibility """
|
||||
return self.__next__()
|
||||
|
||||
def __next__(self):
|
||||
if self.closed:
|
||||
raise MultiLevelStopIteration()
|
||||
if self.push_backs:
|
||||
return self.push_backs.pop(0)
|
||||
|
||||
self.last_previous = self.previous
|
||||
self.previous = self.current
|
||||
self.current = next(self.gen)
|
||||
c = list(self.current)
|
||||
|
||||
if c[0] == tokenize.ENDMARKER:
|
||||
self.current = self.previous
|
||||
self.previous = self.last_previous
|
||||
raise MultiLevelStopIteration()
|
||||
|
||||
# this is exactly the same check as in fast_parser, but this time with
|
||||
# tokenize and therefore precise.
|
||||
breaks = ['def', 'class', '@']
|
||||
|
||||
if self.is_first:
|
||||
c[2] = self.offset[0] + c[2][0], self.offset[1] + c[2][1]
|
||||
c[3] = self.offset[0] + c[3][0], self.offset[1] + c[3][1]
|
||||
self.is_first = False
|
||||
else:
|
||||
c[2] = self.offset[0] + c[2][0], c[2][1]
|
||||
c[3] = self.offset[0] + c[3][0], c[3][1]
|
||||
self.current = c
|
||||
|
||||
def close():
|
||||
if not self.first_stmt:
|
||||
self.closed = True
|
||||
raise MultiLevelStopIteration()
|
||||
# ignore indents/comments
|
||||
if self.is_fast_parser \
|
||||
and self.previous[0] in (tokenize.INDENT, tokenize.NL, None,
|
||||
tokenize.NEWLINE, tokenize.DEDENT) \
|
||||
and c[0] not in (
|
||||
tokenize.COMMENT,
|
||||
tokenize.INDENT,
|
||||
tokenize.NL,
|
||||
tokenize.NEWLINE,
|
||||
tokenize.DEDENT
|
||||
):
|
||||
# print c, tokenize.tok_name[c[0]]
|
||||
|
||||
tok = c[1]
|
||||
indent = c[2][1]
|
||||
if indent < self.parser_indent: # -> dedent
|
||||
self.parser_indent = indent
|
||||
self.new_indent = False
|
||||
if not self.in_flow or indent < self.old_parser_indent:
|
||||
close()
|
||||
self.in_flow = False
|
||||
elif self.new_indent:
|
||||
self.parser_indent = indent
|
||||
self.new_indent = False
|
||||
|
||||
if not self.in_flow:
|
||||
if tok in FLOWS or tok in breaks:
|
||||
self.in_flow = tok in FLOWS
|
||||
if not self.is_decorator and not self.in_flow:
|
||||
close()
|
||||
self.is_decorator = '@' == tok
|
||||
if not self.is_decorator:
|
||||
self.old_parser_indent = self.parser_indent
|
||||
self.parser_indent += 1 # new scope: must be higher
|
||||
self.new_indent = True
|
||||
|
||||
if tok != '@':
|
||||
if self.first_stmt and not self.new_indent:
|
||||
self.parser_indent = indent
|
||||
self.first_stmt = False
|
||||
return c
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def scale_speed_settings(factor):
|
||||
a = settings.max_executions
|
||||
@@ -197,7 +98,7 @@ def scale_speed_settings(factor):
|
||||
|
||||
|
||||
def indent_block(text, indention=' '):
|
||||
""" This function indents a text block with a default of four spaces """
|
||||
"""This function indents a text block with a default of four spaces."""
|
||||
temp = ''
|
||||
while text and text[-1] == '\n':
|
||||
temp += text[-1]
|
||||
@@ -208,9 +109,40 @@ def indent_block(text, indention=' '):
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ignored(*exceptions):
|
||||
"""Context manager that ignores all of the specified exceptions. This will
|
||||
be in the standard library starting with Python 3.4."""
|
||||
"""
|
||||
Context manager that ignores all of the specified exceptions. This will
|
||||
be in the standard library starting with Python 3.4.
|
||||
"""
|
||||
try:
|
||||
yield
|
||||
except exceptions:
|
||||
pass
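Usage is the same as ``contextlib.suppress`` in Python 3.4+; a tiny sketch, assuming the ``ignored`` helper above is importable::

    d = {}
    with ignored(KeyError):
        value = d['missing']     # raises KeyError, which is swallowed here
    print('still running')       # -> still running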
|
||||
|
||||
|
||||
def source_to_unicode(source, encoding=None):
|
||||
def detect_encoding():
|
||||
"""
|
||||
For the implementation of encoding definitions in Python, look at:
|
||||
- http://www.python.org/dev/peps/pep-0263/
|
||||
- http://docs.python.org/2/reference/lexical_analysis.html#encoding-declarations
|
||||
"""
|
||||
byte_mark = literal_eval(r"b'\xef\xbb\xbf'")
|
||||
if source.startswith(byte_mark):
|
||||
# UTF-8 byte-order mark
|
||||
return 'utf-8'
|
||||
|
||||
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', str(source)).group(0)
|
||||
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
|
||||
first_two_lines)
|
||||
if possible_encoding:
|
||||
return possible_encoding.group(1)
|
||||
else:
|
||||
# the default if nothing else has been set -> PEP 263
|
||||
return encoding if encoding is not None else 'iso-8859-1'
|
||||
|
||||
if isinstance(source, unicode):
|
||||
# only cast str/bytes
|
||||
return source
|
||||
|
||||
# cast to unicode by default
|
||||
return unicode(source, detect_encoding(), 'replace')
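The two regular expressions above in isolation, applied to a made-up source header with a PEP 263 declaration::

    import re

    source = "# -*- coding: latin-1 -*-\nx = 1\n"
    first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source).group(0)
    declared = re.search(r"coding[=:]\s*([-\w.]+)", first_two_lines)
    print(declared.group(1))     # -> 'latin-1'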
|
||||
|
||||
@@ -1,11 +1,20 @@
|
||||
from _compatibility import u, encoding, is_py3k
|
||||
from jedi._compatibility import encoding, is_py3
|
||||
import inspect
|
||||
import os
|
||||
import time
|
||||
|
||||
try:
|
||||
# Use colorama for nicer console output.
|
||||
from colorama import Fore, init
|
||||
init()
|
||||
if os.name == 'nt':
|
||||
# does not work on Windows, as pyreadline and colorama interfere
|
||||
raise ImportError
|
||||
else:
|
||||
# Use colorama for nicer console output.
|
||||
from colorama import Fore, init
|
||||
from colorama import initialise
|
||||
# pytest resets the stream at the end - causes troubles. Since after
|
||||
# every output the stream is reset automatically we don't need this.
|
||||
initialise.atexit_done = True
|
||||
init()
|
||||
except ImportError:
|
||||
class Fore(object):
|
||||
RED = ''
|
||||
@@ -23,32 +32,48 @@ enable_notice = False
|
||||
|
||||
# callback, interface: level, str
|
||||
debug_function = None
|
||||
ignored_modules = ['parsing', 'builtin', 'jedi.builtin', 'jedi.parsing']
|
||||
ignored_modules = ['jedi.evaluate.builtin', 'jedi.parser']
|
||||
debug_indent = -1
|
||||
|
||||
|
||||
def reset_time():
|
||||
global start_time
|
||||
global start_time, debug_indent
|
||||
start_time = time.time()
|
||||
debug_indent = -1
|
||||
|
||||
|
||||
def dbg(*args):
|
||||
def increase_indent(func):
|
||||
"""Decorator for makin """
|
||||
def wrapper(*args, **kwargs):
|
||||
global debug_indent
|
||||
debug_indent += 1
|
||||
result = func(*args, **kwargs)
|
||||
debug_indent -= 1
|
||||
return result
|
||||
return wrapper
|
||||
|
||||
|
||||
def dbg(message, *args):
|
||||
""" Looks at the stack, to see if a debug message should be printed. """
|
||||
if debug_function and enable_notice:
|
||||
frm = inspect.stack()[1]
|
||||
mod = inspect.getmodule(frm[0])
|
||||
if not (mod.__name__ in ignored_modules):
|
||||
debug_function(NOTICE, 'dbg: ' + ', '.join(u(a) for a in args))
|
||||
i = ' ' * debug_indent
|
||||
debug_function(NOTICE, i + 'dbg: ' + message % args)
|
||||
|
||||
|
||||
def warning(*args):
|
||||
def warning(message, *args):
|
||||
if debug_function and enable_warning:
|
||||
debug_function(WARNING, 'warning: ' + ', '.join(u(a) for a in args))
|
||||
i = ' ' * debug_indent
|
||||
debug_function(WARNING, i + 'warning: ' + message % args)
|
||||
|
||||
|
||||
def speed(name):
|
||||
if debug_function and enable_speed:
|
||||
now = time.time()
|
||||
debug_function(SPEED, 'speed: ' + '%s %s' % (name, now - start_time))
|
||||
i = ' ' * debug_indent
|
||||
debug_function(SPEED, i + 'speed: ' + '%s %s' % (name, now - start_time))
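The new calling convention passes a format string plus arguments, so the ``%`` interpolation only happens once a debug function is installed; simplified sketch (``print`` stands in for the function set via ``jedi.set_debug_function()``, and the level argument is dropped)::

    debug_function = print                     # stand-in; normally None until enabled
    path = '/tmp/example.py'                   # illustrative value

    def dbg(message, *args):
        if debug_function:
            debug_function('dbg: ' + message % args)

    dbg('pickle loaded: %s', path)             # -> dbg: pickle loaded: /tmp/example.py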
|
||||
|
||||
|
||||
def print_to_stdout(level, str_out):
|
||||
@@ -59,7 +84,7 @@ def print_to_stdout(level, str_out):
|
||||
col = Fore.RED
|
||||
else:
|
||||
col = Fore.YELLOW
|
||||
if not is_py3k:
|
||||
if not is_py3:
|
||||
str_out = str_out.encode(encoding, 'replace')
|
||||
print(col + str_out + Fore.RESET)
|
||||
|
||||
|
||||
590  jedi/dynamic.py
@@ -1,590 +0,0 @@
|
||||
"""
|
||||
To understand Python on a deeper level, |jedi| needs to understand some of the
|
||||
dynamic features of Python. However, this is probably the most complicated part:
|
||||
|
||||
- Array modifications (e.g. ``list.append``)
|
||||
- Parameter completion in functions
|
||||
- Flow checks (e.g. ``if isinstance(a, str)`` -> a is a str)
|
||||
|
||||
Array modifications
|
||||
*******************
|
||||
|
||||
If the content of an array (``set``/``list``) is wanted somewhere, the current
|
||||
module will be checked for appearances of ``arr.append``, ``arr.insert``, etc.
|
||||
If the ``arr`` name points to an actual array, the content will be added
|
||||
|
||||
This can be really CPU intensive, as you can imagine, because |jedi| has to
follow **every** ``append``. However, this works pretty well, because in *slow*
|
||||
cases, the recursion detector and other settings will stop this process.
|
||||
|
||||
It is important to note that:
|
||||
|
||||
1. Array modifications work only in the current module.
|
||||
2. Only Array additions are being checked; ``list.pop``, etc. are being ignored.
|
||||
|
||||
Parameter completion
|
||||
********************
|
||||
|
||||
One of the really important features of |jedi| is to have an option to
|
||||
understand code like this::
|
||||
|
||||
def foo(bar):
|
||||
bar. # completion here
|
||||
foo(1)
|
||||
|
||||
There's no doubt whether bar is an ``int`` or not, but if there's also a call
|
||||
like ``foo('str')``, what would happen? Well, we'll just show both. Because
|
||||
that's what a human would expect.
|
||||
|
||||
It works as follows:
|
||||
|
||||
- A param is being encountered
|
||||
- search for function calls named ``foo``
|
||||
- execute these calls and check the injected params. This work with a
|
||||
``ParamListener``.
|
||||
|
||||
Flow checks
|
||||
***********
|
||||
|
||||
Flow checks are not really mature. There's only a check for ``isinstance``. It
|
||||
would check whether a flow has the form of ``if isinstance(a, type_or_tuple)``.
|
||||
Unfortunately every other thing is being ignored (e.g. a == '' would be easy to
|
||||
check for -> a is a string). There's big potential in these checks.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import os
|
||||
|
||||
from jedi import cache
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import modules
|
||||
from jedi import settings
|
||||
from jedi import common
|
||||
from jedi import debug
|
||||
from jedi.parser import fast as fast_parser
|
||||
import api_classes
|
||||
import evaluate
|
||||
import imports
|
||||
import evaluate_representation as er
|
||||
|
||||
# This is something like the sys.path, but only for searching params. It means
|
||||
# that this is the order in which Jedi searches params.
|
||||
search_param_modules = ['.']
|
||||
search_param_cache = {}
|
||||
|
||||
|
||||
def get_directory_modules_for_name(mods, name):
|
||||
"""
|
||||
Search a name in the directories of modules.
|
||||
"""
|
||||
def check_python_file(path):
|
||||
try:
|
||||
return cache.parser_cache[path].parser.module
|
||||
except KeyError:
|
||||
try:
|
||||
return check_fs(path)
|
||||
except IOError:
|
||||
return None
|
||||
|
||||
def check_fs(path):
|
||||
with open(path) as f:
|
||||
source = modules.source_to_unicode(f.read())
|
||||
if name in source:
|
||||
return modules.Module(path, source).parser.module
|
||||
|
||||
# skip non python modules
|
||||
mods = set(m for m in mods if m.path is None or m.path.endswith('.py'))
|
||||
mod_paths = set()
|
||||
for m in mods:
|
||||
mod_paths.add(m.path)
|
||||
yield m
|
||||
|
||||
if settings.dynamic_params_for_other_modules:
|
||||
paths = set(settings.additional_dynamic_modules)
|
||||
for p in mod_paths:
|
||||
if p is not None:
|
||||
d = os.path.dirname(p)
|
||||
for entry in os.listdir(d):
|
||||
if entry not in mod_paths:
|
||||
if entry.endswith('.py'):
|
||||
paths.add(d + os.path.sep + entry)
|
||||
|
||||
for p in sorted(paths):
|
||||
# make testing easier, sort it - same results on every interpreter
|
||||
c = check_python_file(p)
|
||||
if c is not None and c not in mods:
|
||||
yield c
|
||||
|
||||
|
||||
def search_param_memoize(func):
|
||||
"""
|
||||
    This memoize is only good for the search-params case (or rather its closure),
    because it caches just the input, not the function, like a normal memoize does.
|
||||
"""
|
||||
def wrapper(*args, **kwargs):
|
||||
key = (args, frozenset(kwargs.items()))
|
||||
if key in search_param_cache:
|
||||
return search_param_cache[key]
|
||||
else:
|
||||
rv = func(*args, **kwargs)
|
||||
search_param_cache[key] = rv
|
||||
return rv
|
||||
return wrapper
|
||||
|
||||
|
||||
class ParamListener(object):
|
||||
"""
|
||||
This listener is used to get the params for a function.
|
||||
"""
|
||||
def __init__(self):
|
||||
self.param_possibilities = []
|
||||
|
||||
def execute(self, params):
|
||||
self.param_possibilities.append(params)
|
||||
|
||||
|
||||
@cache.memoize_default([])
|
||||
def search_params(param):
|
||||
"""
|
||||
This is a dynamic search for params. If you try to complete a type:
|
||||
>>> def func(foo):
|
||||
>>> # here is the completion
|
||||
>>> foo
|
||||
>>> func(1)
|
||||
>>> func("")
|
||||
|
||||
It is not known what the type is, because it cannot be guessed with
|
||||
recursive madness. Therefore one has to analyse the statements that are
|
||||
calling the function, as well as analyzing the incoming params.
|
||||
"""
|
||||
if not settings.dynamic_params:
|
||||
return []
|
||||
|
||||
def get_params_for_module(module):
|
||||
"""
|
||||
Returns the values of a param, or an empty array.
|
||||
"""
|
||||
@search_param_memoize
|
||||
def get_posibilities(module, func_name):
|
||||
try:
|
||||
possible_stmts = module.used_names[func_name]
|
||||
except KeyError:
|
||||
return []
|
||||
|
||||
for stmt in possible_stmts:
|
||||
if isinstance(stmt, pr.Import):
|
||||
continue
|
||||
calls = _scan_statement(stmt, func_name)
|
||||
for c in calls:
|
||||
# no execution means that params cannot be set
|
||||
call_path = list(c.generate_call_path())
|
||||
pos = c.start_pos
|
||||
scope = stmt.parent
|
||||
|
||||
# this whole stuff is just to not execute certain parts
|
||||
# (speed improvement), basically we could just call
|
||||
# ``follow_call_path`` on the call_path and it would
|
||||
# also work.
|
||||
def listRightIndex(lst, value):
|
||||
return len(lst) - lst[-1::-1].index(value) -1
|
||||
|
||||
# Need to take right index, because there could be a
|
||||
# func usage before.
|
||||
i = listRightIndex(call_path, func_name)
|
||||
first, last = call_path[:i], call_path[i+1:]
|
||||
if not last and not call_path.index(func_name) != i:
|
||||
continue
|
||||
scopes = [scope]
|
||||
if first:
|
||||
scopes = evaluate.follow_call_path(iter(first), scope, pos)
|
||||
pos = None
|
||||
for scope in scopes:
|
||||
s = evaluate.find_name(scope, func_name, position=pos,
|
||||
search_global=not first,
|
||||
resolve_decorator=False)
|
||||
|
||||
c = [getattr(escope, 'base_func', None) or escope.base
|
||||
for escope in s
|
||||
if escope.isinstance(er.Function, er.Class)
|
||||
]
|
||||
if compare in c:
|
||||
# only if we have the correct function we execute
|
||||
# it, otherwise just ignore it.
|
||||
evaluate.follow_paths(iter(last), s, scope)
|
||||
|
||||
return listener.param_possibilities
|
||||
|
||||
result = []
|
||||
for params in get_posibilities(module, func_name):
|
||||
for p in params:
|
||||
if str(p) == param_name:
|
||||
result += evaluate.follow_statement(p.parent)
|
||||
return result
|
||||
|
||||
func = param.get_parent_until(pr.Function)
|
||||
current_module = param.get_parent_until()
|
||||
func_name = str(func.name)
|
||||
compare = func
|
||||
if func_name == '__init__' and isinstance(func.parent, pr.Class):
|
||||
func_name = str(func.parent.name)
|
||||
compare = func.parent
|
||||
|
||||
# get the param name
|
||||
if param.assignment_details:
|
||||
# first assignment details, others would be a syntax error
|
||||
commands, op = param.assignment_details[0]
|
||||
else:
|
||||
commands = param.get_commands()
|
||||
offset = 1 if commands[0] in ['*', '**'] else 0
|
||||
param_name = str(commands[offset].name)
|
||||
|
||||
# add the listener
|
||||
listener = ParamListener()
|
||||
func.listeners.add(listener)
|
||||
|
||||
result = []
|
||||
# This is like backtracking: Get the first possible result.
|
||||
for mod in get_directory_modules_for_name([current_module], func_name):
|
||||
result = get_params_for_module(mod)
|
||||
if result:
|
||||
break
|
||||
|
||||
# cleanup: remove the listener; important: should not stick.
|
||||
func.listeners.remove(listener)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def check_array_additions(array):
|
||||
""" Just a mapper function for the internal _check_array_additions """
|
||||
if not pr.Array.is_type(array._array, pr.Array.LIST, pr.Array.SET):
|
||||
# TODO also check for dict updates
|
||||
return []
|
||||
|
||||
is_list = array._array.type == 'list'
|
||||
current_module = array._array.get_parent_until()
|
||||
res = _check_array_additions(array, current_module, is_list)
|
||||
return res
|
||||
|
||||
|
||||
def _scan_statement(stmt, search_name, assignment_details=False):
|
||||
""" Returns the function Call that match search_name in an Array. """
|
||||
def scan_array(arr, search_name):
|
||||
result = []
|
||||
if arr.type == pr.Array.DICT:
|
||||
for key_stmt, value_stmt in arr.items():
|
||||
result += _scan_statement(key_stmt, search_name)
|
||||
result += _scan_statement(value_stmt, search_name)
|
||||
else:
|
||||
for stmt in arr:
|
||||
result += _scan_statement(stmt, search_name)
|
||||
return result
|
||||
|
||||
check = list(stmt.get_commands())
|
||||
if assignment_details:
|
||||
for commands, op in stmt.assignment_details:
|
||||
check += commands
|
||||
|
||||
result = []
|
||||
for c in check:
|
||||
if isinstance(c, pr.Array):
|
||||
result += scan_array(c, search_name)
|
||||
elif isinstance(c, pr.Call):
|
||||
s_new = c
|
||||
while s_new is not None:
|
||||
n = s_new.name
|
||||
if isinstance(n, pr.Name) and search_name in n.names:
|
||||
result.append(c)
|
||||
|
||||
if s_new.execution is not None:
|
||||
result += scan_array(s_new.execution, search_name)
|
||||
s_new = s_new.next
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@cache.memoize_default([])
|
||||
def _check_array_additions(compare_array, module, is_list):
|
||||
"""
|
||||
Checks if a `pr.Array` has "add" statements:
|
||||
>>> a = [""]
|
||||
>>> a.append(1)
|
||||
"""
|
||||
if not settings.dynamic_array_additions or module.is_builtin():
|
||||
return []
|
||||
|
||||
def check_calls(calls, add_name):
|
||||
"""
|
||||
Calls are processed here. The part before the call is searched and
|
||||
compared with the original Array.
|
||||
"""
|
||||
result = []
|
||||
for c in calls:
|
||||
call_path = list(c.generate_call_path())
|
||||
separate_index = call_path.index(add_name)
|
||||
if add_name == call_path[-1] or separate_index == 0:
|
||||
# this means that there is no execution -> [].append
|
||||
# or the keyword is at the start -> append()
|
||||
continue
|
||||
backtrack_path = iter(call_path[:separate_index])
|
||||
|
||||
position = c.start_pos
|
||||
scope = c.get_parent_until(pr.IsScope)
|
||||
|
||||
found = evaluate.follow_call_path(backtrack_path, scope, position)
|
||||
if not compare_array in found:
|
||||
continue
|
||||
|
||||
params = call_path[separate_index + 1]
|
||||
if not params.values:
|
||||
continue # no params: just ignore it
|
||||
if add_name in ['append', 'add']:
|
||||
for param in params:
|
||||
result += evaluate.follow_statement(param)
|
||||
elif add_name in ['insert']:
|
||||
try:
|
||||
second_param = params[1]
|
||||
except IndexError:
|
||||
continue
|
||||
else:
|
||||
result += evaluate.follow_statement(second_param)
|
||||
elif add_name in ['extend', 'update']:
|
||||
for param in params:
|
||||
iterators = evaluate.follow_statement(param)
|
||||
result += evaluate.get_iterator_types(iterators)
|
||||
return result
|
||||
|
||||
def get_execution_parent(element, *stop_classes):
|
||||
""" Used to get an Instance/Execution parent """
|
||||
if isinstance(element, er.Array):
|
||||
stmt = element._array.parent
|
||||
else:
|
||||
# is an Instance with an ArrayInstance inside
|
||||
stmt = element.var_args[0].var_args.parent
|
||||
if isinstance(stmt, er.InstanceElement):
|
||||
stop_classes = list(stop_classes) + [er.Function]
|
||||
return stmt.get_parent_until(stop_classes)
|
||||
|
||||
temp_param_add = settings.dynamic_params_for_other_modules
|
||||
settings.dynamic_params_for_other_modules = False
|
||||
|
||||
search_names = ['append', 'extend', 'insert'] if is_list else \
|
||||
['add', 'update']
|
||||
comp_arr_parent = get_execution_parent(compare_array, er.Execution)
|
||||
|
||||
possible_stmts = []
|
||||
res = []
|
||||
for n in search_names:
|
||||
try:
|
||||
possible_stmts += module.used_names[n]
|
||||
except KeyError:
|
||||
continue
|
||||
for stmt in possible_stmts:
|
||||
# Check if the original scope is an execution. If it is, one
|
||||
# can search for the same statement, that is in the module
|
||||
# dict. Executions are somewhat special in jedi, since they
|
||||
# literally copy the contents of a function.
|
||||
if isinstance(comp_arr_parent, er.Execution):
|
||||
stmt = comp_arr_parent. \
|
||||
get_statement_for_position(stmt.start_pos)
|
||||
if stmt is None:
|
||||
continue
|
||||
# InstanceElements are special, because they don't get copied,
|
||||
# but have this wrapper around them.
|
||||
if isinstance(comp_arr_parent, er.InstanceElement):
|
||||
stmt = er.InstanceElement(comp_arr_parent.instance, stmt)
|
||||
|
||||
if evaluate.follow_statement.push_stmt(stmt):
|
||||
# check recursion
|
||||
continue
|
||||
res += check_calls(_scan_statement(stmt, n), n)
|
||||
evaluate.follow_statement.pop_stmt()
|
||||
# reset settings
|
||||
settings.dynamic_params_for_other_modules = temp_param_add
|
||||
return res
|
||||
|
||||
|
||||
def check_array_instances(instance):
|
||||
"""Used for set() and list() instances."""
|
||||
if not settings.dynamic_arrays_instances:
|
||||
return instance.var_args
|
||||
ai = ArrayInstance(instance)
|
||||
return [ai]
|
||||
|
||||
|
||||
class ArrayInstance(pr.Base):
|
||||
"""
|
||||
Used for the usage of set() and list().
|
||||
This is definitely a hack, but a good one :-)
|
||||
It makes it possible to use set/list conversions.
|
||||
"""
|
||||
def __init__(self, instance):
|
||||
self.instance = instance
|
||||
self.var_args = instance.var_args
|
||||
|
||||
def iter_content(self):
|
||||
"""
|
||||
The index is here just ignored, because of all the appends, etc.
|
||||
        lists/sets are too complicated to handle that.
|
||||
"""
|
||||
items = []
|
||||
for stmt in self.var_args:
|
||||
for typ in evaluate.follow_statement(stmt):
|
||||
if isinstance(typ, er.Instance) and len(typ.var_args):
|
||||
array = typ.var_args[0]
|
||||
if isinstance(array, ArrayInstance):
|
||||
# prevent recursions
|
||||
# TODO compare Modules
|
||||
if self.var_args.start_pos != array.var_args.start_pos:
|
||||
items += array.iter_content()
|
||||
else:
|
||||
debug.warning(
|
||||
'ArrayInstance recursion',
|
||||
self.var_args)
|
||||
continue
|
||||
items += evaluate.get_iterator_types([typ])
|
||||
|
||||
# TODO check if exclusion of tuple is a problem here.
|
||||
if isinstance(self.var_args, tuple) or self.var_args.parent is None:
|
||||
return [] # generated var_args should not be checked for arrays
|
||||
|
||||
module = self.var_args.get_parent_until()
|
||||
is_list = str(self.instance.name) == 'list'
|
||||
items += _check_array_additions(self.instance, module, is_list)
|
||||
return items
|
||||
|
||||
|
||||
def usages(definitions, search_name, mods):
|
||||
def compare_array(definitions):
|
||||
""" `definitions` are being compared by module/start_pos, because
|
||||
sometimes the id's of the objects change (e.g. executions).
|
||||
"""
|
||||
result = []
|
||||
for d in definitions:
|
||||
module = d.get_parent_until()
|
||||
result.append((module, d.start_pos))
|
||||
return result
|
||||
|
||||
def check_call(call):
|
||||
result = []
|
||||
follow = [] # There might be multiple search_name's in one call_path
|
||||
call_path = list(call.generate_call_path())
|
||||
for i, name in enumerate(call_path):
|
||||
# name is `pr.NamePart`.
|
||||
if name == search_name:
|
||||
follow.append(call_path[:i + 1])
|
||||
|
||||
for f in follow:
|
||||
follow_res, search = evaluate.goto(call.parent, f)
|
||||
follow_res = usages_add_import_modules(follow_res, search)
|
||||
|
||||
compare_follow_res = compare_array(follow_res)
|
||||
# compare to see if they match
|
||||
if any(r in compare_definitions for r in compare_follow_res):
|
||||
scope = call.parent
|
||||
result.append(api_classes.Usage(search, scope))
|
||||
|
||||
return result
|
||||
|
||||
if not definitions:
|
||||
return set()
|
||||
|
||||
compare_definitions = compare_array(definitions)
|
||||
mods |= set([d.get_parent_until() for d in definitions])
|
||||
names = []
|
||||
for m in get_directory_modules_for_name(mods, search_name):
|
||||
try:
|
||||
stmts = m.used_names[search_name]
|
||||
except KeyError:
|
||||
continue
|
||||
for stmt in stmts:
|
||||
if isinstance(stmt, pr.Import):
|
||||
count = 0
|
||||
imps = []
|
||||
for i in stmt.get_all_import_names():
|
||||
for name_part in i.names:
|
||||
count += 1
|
||||
if name_part == search_name:
|
||||
imps.append((count, name_part))
|
||||
|
||||
for used_count, name_part in imps:
|
||||
i = imports.ImportPath(stmt, kill_count=count - used_count,
|
||||
direct_resolve=True)
|
||||
f = i.follow(is_goto=True)
|
||||
if set(f) & set(definitions):
|
||||
names.append(api_classes.Usage(name_part, stmt))
|
||||
else:
|
||||
for call in _scan_statement(stmt, search_name,
|
||||
assignment_details=True):
|
||||
names += check_call(call)
|
||||
return names
|
||||
|
||||
|
||||
def usages_add_import_modules(definitions, search_name):
|
||||
""" Adds the modules of the imports """
|
||||
new = set()
|
||||
for d in definitions:
|
||||
if isinstance(d.parent, pr.Import):
|
||||
s = imports.ImportPath(d.parent, direct_resolve=True)
|
||||
with common.ignored(IndexError):
|
||||
new.add(s.follow(is_goto=True)[0])
|
||||
return set(definitions) | new
|
||||
|
||||
|
||||
def check_flow_information(flow, search_name, pos):
|
||||
""" Try to find out the type of a variable just with the information that
|
||||
is given by the flows: e.g. It is also responsible for assert checks.::
|
||||
|
||||
if isinstance(k, str):
|
||||
k. # <- completion here
|
||||
|
||||
ensures that `k` is a string.
|
||||
"""
|
||||
if not settings.dynamic_flow_information:
|
||||
return None
|
||||
result = []
|
||||
if isinstance(flow, (pr.Scope, fast_parser.Module)) and not result:
|
||||
for ass in reversed(flow.asserts):
|
||||
if pos is None or ass.start_pos > pos:
|
||||
continue
|
||||
result = _check_isinstance_type(ass, search_name)
|
||||
if result:
|
||||
break
|
||||
|
||||
if isinstance(flow, pr.Flow) and not result:
|
||||
if flow.command in ['if', 'while'] and len(flow.inputs) == 1:
|
||||
result = _check_isinstance_type(flow.inputs[0], search_name)
|
||||
return result
|
||||
|
||||
|
||||
def _check_isinstance_type(stmt, search_name):
|
||||
try:
|
||||
commands = stmt.get_commands()
|
||||
# this might be removed if we analyze and, etc
|
||||
assert len(commands) == 1
|
||||
call = commands[0]
|
||||
assert isinstance(call, pr.Call) and str(call.name) == 'isinstance'
|
||||
assert bool(call.execution)
|
||||
|
||||
# isinstance check
|
||||
isinst = call.execution.values
|
||||
assert len(isinst) == 2 # has two params
|
||||
obj, classes = [statement.get_commands() for statement in isinst]
|
||||
assert len(obj) == 1
|
||||
assert len(classes) == 1
|
||||
assert isinstance(obj[0], pr.Call)
|
||||
# names fit?
|
||||
assert str(obj[0].name) == search_name
|
||||
assert isinstance(classes[0], pr.StatementElement) # can be type or tuple
|
||||
except AssertionError:
|
||||
return []
|
||||
|
||||
result = []
|
||||
for c in evaluate.follow_call(classes[0]):
|
||||
if isinstance(c, er.Array):
|
||||
result += c.get_index_types()
|
||||
else:
|
||||
result.append(c)
|
||||
for i, c in enumerate(result):
|
||||
result[i] = er.Instance(c)
|
||||
return result
|
||||
826  jedi/evaluate.py
@@ -1,826 +0,0 @@
|
||||
"""
|
||||
Evaluation of Python code in |jedi| is based on three assumptions:
|
||||
|
||||
* Code is recursive (to weaken this assumption, the :mod:`dynamic` module
|
||||
exists).
|
||||
* No magic is being used:
|
||||
|
||||
- metaclasses
|
||||
- ``setattr()`` / ``__import__()``
|
||||
- writing to ``globals()``, ``locals()``, ``object.__dict__``
|
||||
* The programmer is not a total dick, e.g. like `this
|
||||
<https://github.com/davidhalter/jedi/issues/24>`_ :-)
|
||||
|
||||
That said, there's mainly one entry point in this script: ``follow_statement``.
|
||||
This is where autocompletion starts. Everything you want to complete is either
|
||||
a ``Statement`` or some special name like ``class``, which is easy to complete.
|
||||
|
||||
Therefore you need to understand what follows after ``follow_statement``. Let's
|
||||
make an example::
|
||||
|
||||
import datetime
|
||||
datetime.date.toda# <-- cursor here
|
||||
|
||||
First of all, this module doesn't care about completion. It really just cares
|
||||
about ``datetime.date``. At the end of the procedure ``follow_statement`` will
|
||||
return the ``datetime`` class.
|
||||
|
||||
To *visualize* this (simplified):
|
||||
|
||||
- ``follow_statement`` - ``<Statement: datetime.date>``
|
||||
|
||||
- Unpacking of the statement into ``[[<Call: datetime.date>]]``
|
||||
- ``follow_call_list``, calls ``follow_call`` with ``<Call: datetime.date>``
|
||||
- ``follow_call`` - searches the ``datetime`` name within the module.
|
||||
|
||||
This is exactly where it starts to get complicated. Now recursions start to
|
||||
kick in. The statement has not been resolved fully, but now we need to resolve
|
||||
the datetime import. So it continues
|
||||
|
||||
- follow import, which happens in the :mod:`imports` module.
|
||||
- now the same ``follow_call`` as above calls ``follow_paths`` to follow the
|
||||
second part of the statement ``date``.
|
||||
- After ``follow_paths`` returns with the desired ``datetime.date`` class, the
|
||||
result is being returned and the recursion finishes.
|
||||
|
||||
Now what would happen if we wanted ``datetime.date.foo.bar``? Just two more
|
||||
calls to ``follow_paths`` (which calls itself with a recursion). What if the
|
||||
import would contain another Statement like this::
|
||||
|
||||
from foo import bar
|
||||
Date = bar.baz
|
||||
|
||||
Well... You get it. Just another ``follow_statement`` recursion. It's really
|
||||
easy. Just that Python is not that easy sometimes. To understand tuple
|
||||
assignments and different class scopes, a lot more code had to be written. Yet
|
||||
we're still not talking about Descriptors and Nested List Comprehensions, just
|
||||
the simple stuff.
|
||||
|
||||
So if you want to change something, write a test and then just change what you
|
||||
want. This module has been tested by about 600 tests. Don't be afraid to break
|
||||
something. The tests are good enough.
|
||||
|
||||
I need to mention now that this recursive approach is really good because it
|
||||
only *evaluates* what needs to be *evaluated*. All the statements and modules
|
||||
that are not used are just being ignored. It's a little bit similar to the
|
||||
backtracking algorithm.
|
||||
|
||||
|
||||
.. todo:: nonlocal statement, needed or can be ignored? (py3k)
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import sys
|
||||
import itertools
|
||||
|
||||
from jedi._compatibility import next, hasattr, is_py3k, unicode, reraise, u
|
||||
from jedi import common
|
||||
from jedi import cache
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import debug
|
||||
import evaluate_representation as er
|
||||
import recursion
|
||||
import docstrings
|
||||
import builtin
|
||||
import imports
|
||||
import dynamic
|
||||
|
||||
|
||||
def get_defined_names_for_position(scope, position=None, start_scope=None):
|
||||
"""
|
||||
Return filtered version of ``scope.get_defined_names()``.
|
||||
|
||||
This function basically does what :meth:`scope.get_defined_names
|
||||
<parsing_representation.Scope.get_defined_names>` does.
|
||||
|
||||
- If `position` is given, delete all names defined after `position`.
|
||||
- For special objects like instances, `position` is ignored and all
|
||||
names are returned.
|
||||
|
||||
:type scope: :class:`parsing_representation.IsScope`
|
||||
:param scope: Scope in which names are searched.
|
||||
:param position: The position as a line/column tuple, default is infinity.
|
||||
"""
|
||||
names = scope.get_defined_names()
|
||||
# Instances have special rules, always return all the possible completions,
|
||||
# because class variables are always valid and the `self.` variables, too.
|
||||
if (not position or isinstance(scope, (er.Array, er.Instance))
|
||||
or start_scope != scope
|
||||
and isinstance(start_scope, (pr.Function, er.Execution))):
|
||||
return names
|
||||
names_new = []
|
||||
for n in names:
|
||||
if n.start_pos[0] is not None and n.start_pos < position:
|
||||
names_new.append(n)
|
||||
return names_new
|
||||
|
||||
|
||||
def get_names_of_scope(scope, position=None, star_search=True,
|
||||
include_builtin=True):
|
||||
"""
|
||||
Get all completions (names) possible for the current scope.
|
||||
The star search option is only here to provide an optimization. Otherwise
|
||||
the whole thing would probably start a little recursive madness.
|
||||
|
||||
This function is used to include names from outer scopes. For example,
|
||||
when the current scope is function:
|
||||
|
||||
>>> from jedi.parser import Parser
|
||||
>>> parser = Parser('''
|
||||
... x = ['a', 'b', 'c']
|
||||
... def func():
|
||||
... y = None
|
||||
... ''')
|
||||
>>> scope = parser.module.subscopes[0]
|
||||
>>> scope
|
||||
<Function: func@3-4>
|
||||
|
||||
`get_names_of_scope` is a generator. First it yields names from
|
||||
most inner scope.
|
||||
|
||||
>>> pairs = list(get_names_of_scope(scope))
|
||||
>>> pairs[0]
|
||||
(<Function: func@3-4>, [<Name: y@4,4>])
|
||||
|
||||
    Then it yields the names from the next outer scope. For this
|
||||
example, this is the most outer scope.
|
||||
|
||||
>>> pairs[1]
|
||||
(<SubModule: None@1-4>, [<Name: x@2,0>, <Name: func@3,4>])
|
||||
|
||||
Finally, it yields names from builtin, if `include_builtin` is
|
||||
true (default).
|
||||
|
||||
>>> pairs[2] #doctest: +ELLIPSIS
|
||||
(<Module: ...builtin...>, [<Name: ...>, ...])
|
||||
|
||||
:rtype: [(pr.Scope, [pr.Name])]
|
||||
    :return: Return a generator that yields a pair of scope and names.
|
||||
"""
|
||||
in_func_scope = scope
|
||||
non_flow = scope.get_parent_until(pr.Flow, reverse=True)
|
||||
while scope:
|
||||
if isinstance(scope, pr.SubModule) and scope.parent:
|
||||
# we don't want submodules to report if we have modules.
|
||||
scope = scope.parent
|
||||
continue
|
||||
# `pr.Class` is used, because the parent is never `Class`.
|
||||
# Ignore the Flows, because the classes and functions care for that.
|
||||
# InstanceElement of Class is ignored, if it is not the start scope.
|
||||
if not (scope != non_flow and scope.isinstance(pr.Class)
|
||||
or scope.isinstance(pr.Flow)
|
||||
or scope.isinstance(er.Instance)
|
||||
and non_flow.isinstance(er.Function)):
|
||||
try:
|
||||
if isinstance(scope, er.Instance):
|
||||
for g in scope.scope_generator():
|
||||
yield g
|
||||
else:
|
||||
yield scope, get_defined_names_for_position(scope,
|
||||
position, in_func_scope)
|
||||
except StopIteration:
|
||||
reraise(common.MultiLevelStopIteration, sys.exc_info()[2])
|
||||
if scope.isinstance(pr.ForFlow) and scope.is_list_comp:
|
||||
# is a list comprehension
|
||||
yield scope, scope.get_set_vars(is_internal_call=True)
|
||||
|
||||
scope = scope.parent
|
||||
# This is used, because subscopes (Flow scopes) would distort the
|
||||
# results.
|
||||
if scope and scope.isinstance(er.Function, pr.Function, er.Execution):
|
||||
in_func_scope = scope
|
||||
|
||||
# Add star imports.
|
||||
if star_search:
|
||||
for s in imports.remove_star_imports(non_flow.get_parent_until()):
|
||||
for g in get_names_of_scope(s, star_search=False):
|
||||
yield g
|
||||
|
||||
# Add builtins to the global scope.
|
||||
if include_builtin:
|
||||
builtin_scope = builtin.Builtin.scope
|
||||
yield builtin_scope, builtin_scope.get_defined_names()
|
||||
|
||||
|
||||
def find_name(scope, name_str, position=None, search_global=False,
|
||||
is_goto=False, resolve_decorator=True):
|
||||
"""
|
||||
This is the search function. The most important part to debug.
|
||||
`remove_statements` and `filter_statements` really are the core part of
|
||||
this completion.
|
||||
|
||||
:param position: Position of the last statement -> tuple of line, column
|
||||
:return: List of Names. Their parents are the scopes, they are defined in.
|
||||
:rtype: list
|
||||
"""
|
||||
def remove_statements(result):
|
||||
"""
|
||||
This is the part where statements are being stripped.
|
||||
|
||||
Due to lazy evaluation, statements like a = func; b = a; b() have to be
|
||||
evaluated.
|
||||
"""
|
||||
res_new = []
|
||||
for r in result:
|
||||
add = []
|
||||
if r.isinstance(pr.Statement):
|
||||
check_instance = None
|
||||
if isinstance(r, er.InstanceElement) and r.is_class_var:
|
||||
check_instance = r.instance
|
||||
r = r.var
|
||||
|
||||
# Global variables handling.
|
||||
if r.is_global():
|
||||
for token_name in r.token_list[1:]:
|
||||
if isinstance(token_name, pr.Name):
|
||||
add = find_name(r.parent, str(token_name))
|
||||
else:
|
||||
# generated objects are used within executions, but these
|
||||
# objects are in functions, and we have to dynamically
|
||||
# execute first.
|
||||
if isinstance(r, pr.Param):
|
||||
func = r.parent
|
||||
# Instances are typically faked, if the instance is not
|
||||
# called from outside. Here we check it for __init__
|
||||
# functions and return.
|
||||
if isinstance(func, er.InstanceElement) \
|
||||
and func.instance.is_generated \
|
||||
and hasattr(func, 'name') \
|
||||
and str(func.name) == '__init__' \
|
||||
and r.position_nr > 0: # 0 would be self
|
||||
r = func.var.params[r.position_nr]
|
||||
|
||||
# add docstring knowledge
|
||||
doc_params = docstrings.follow_param(r)
|
||||
if doc_params:
|
||||
res_new += doc_params
|
||||
continue
|
||||
|
||||
if not r.is_generated:
|
||||
res_new += dynamic.search_params(r)
|
||||
if not res_new:
|
||||
c = r.get_commands()[0]
|
||||
if c in ('*', '**'):
|
||||
t = 'tuple' if c == '*' else 'dict'
|
||||
res_new = [er.Instance(
|
||||
find_name(builtin.Builtin.scope, t)[0])
|
||||
]
|
||||
if not r.assignment_details:
|
||||
# this means that there are no default params,
|
||||
# so just ignore it.
|
||||
continue
|
||||
|
||||
# Remove the statement docstr stuff for now, that has to be
|
||||
# implemented with the evaluator class.
|
||||
#if r.docstr:
|
||||
#res_new.append(r)
|
||||
|
||||
scopes = follow_statement(r, seek_name=name_str)
|
||||
add += remove_statements(scopes)
|
||||
|
||||
if check_instance is not None:
|
||||
# class renames
|
||||
add = [er.InstanceElement(check_instance, a, True)
|
||||
if isinstance(a, (er.Function, pr.Function))
|
||||
else a for a in add]
|
||||
res_new += add
|
||||
else:
|
||||
if isinstance(r, pr.Class):
|
||||
r = er.Class(r)
|
||||
elif isinstance(r, pr.Function):
|
||||
r = er.Function(r)
|
||||
if r.isinstance(er.Function) and resolve_decorator:
|
||||
r = r.get_decorated_func()
|
||||
res_new.append(r)
|
||||
debug.dbg('sfn remove, new: %s, old: %s' % (res_new, result))
|
||||
return res_new
|
||||
|
||||
def filter_name(scope_generator):
|
||||
"""
|
||||
Filters all variables of a scope (which are defined in the
|
||||
`scope_generator`), until the name fits.
|
||||
"""
|
||||
def handle_for_loops(loop):
|
||||
# Take the first statement (for has always only
|
||||
# one, remember `in`). And follow it.
|
||||
if not loop.inputs:
|
||||
return []
|
||||
result = get_iterator_types(follow_statement(loop.inputs[0]))
|
||||
if len(loop.set_vars) > 1:
|
||||
commands = loop.set_stmt.get_commands()
|
||||
# loops with loop.set_vars > 0 only have one command
|
||||
result = assign_tuples(commands[0], result, name_str)
|
||||
return result
|
||||
|
||||
def process(name):
|
||||
"""
|
||||
Returns the parent of a name, which means the element which stands
|
||||
behind a name.
|
||||
"""
|
||||
result = []
|
||||
no_break_scope = False
|
||||
par = name.parent
|
||||
exc = pr.Class, pr.Function
|
||||
until = lambda: par.parent.parent.get_parent_until(exc)
|
||||
is_array_assignment = False
|
||||
|
||||
if par is None:
|
||||
pass
|
||||
elif par.isinstance(pr.Flow):
|
||||
if par.command == 'for':
|
||||
result += handle_for_loops(par)
|
||||
else:
|
||||
debug.warning('Flow: Why are you here? %s' % par.command)
|
||||
elif par.isinstance(pr.Param) \
|
||||
and par.parent is not None \
|
||||
and isinstance(until(), pr.Class) \
|
||||
and par.position_nr == 0:
|
||||
# This is where self gets added - this happens at another
|
||||
# place, if the var_args are clear. But sometimes the class is
|
||||
# not known. Therefore add a new instance for self. Otherwise
|
||||
# take the existing.
|
||||
if isinstance(scope, er.InstanceElement):
|
||||
inst = scope.instance
|
||||
else:
|
||||
inst = er.Instance(er.Class(until()))
|
||||
inst.is_generated = True
|
||||
result.append(inst)
|
||||
elif par.isinstance(pr.Statement):
|
||||
def is_execution(calls):
|
||||
for c in calls:
|
||||
if isinstance(c, (unicode, str)):
|
||||
continue
|
||||
if c.isinstance(pr.Array):
|
||||
if is_execution(c):
|
||||
return True
|
||||
elif c.isinstance(pr.Call):
|
||||
# Compare start_pos, because names may be different
|
||||
# because of executions.
|
||||
if c.name.start_pos == name.start_pos \
|
||||
and c.execution:
|
||||
return True
|
||||
return False
|
||||
|
||||
is_exe = False
|
||||
for assignee, op in par.assignment_details:
|
||||
is_exe |= is_execution(assignee)
|
||||
|
||||
if is_exe:
|
||||
# filter array[3] = ...
|
||||
# TODO check executions for dict contents
|
||||
is_array_assignment = True
|
||||
else:
|
||||
details = par.assignment_details
|
||||
if details and details[0][1] != '=':
|
||||
no_break_scope = True
|
||||
|
||||
# TODO this makes self variables non-breakable. wanted?
|
||||
if isinstance(name, er.InstanceElement) \
|
||||
and not name.is_class_var:
|
||||
no_break_scope = True
|
||||
|
||||
result.append(par)
|
||||
else:
|
||||
# TODO multi-level import non-breakable
|
||||
if isinstance(par, pr.Import) and len(par.namespace) > 1:
|
||||
no_break_scope = True
|
||||
result.append(par)
|
||||
return result, no_break_scope, is_array_assignment
|
||||
|
||||
flow_scope = scope
|
||||
result = []
|
||||
# compare func uses the tuple of line/indent = line/column
|
||||
comparison_func = lambda name: (name.start_pos)
|
||||
|
||||
for nscope, name_list in scope_generator:
|
||||
break_scopes = []
|
||||
# here is the position stuff happening (sorting of variables)
|
||||
for name in sorted(name_list, key=comparison_func, reverse=True):
|
||||
p = name.parent.parent if name.parent else None
|
||||
if isinstance(p, er.InstanceElement) \
|
||||
and isinstance(p.var, pr.Class):
|
||||
p = p.var
|
||||
if name_str == name.get_code() and p not in break_scopes:
|
||||
r, no_break_scope, is_array_assignment = process(name)
|
||||
if is_goto:
|
||||
if not is_array_assignment: # shouldn't goto arr[1] =
|
||||
result.append(name)
|
||||
else:
|
||||
result += r
|
||||
# for comparison we need the raw class
|
||||
s = nscope.base if isinstance(nscope, er.Class) else nscope
|
||||
# this means that a definition was found and is not e.g.
|
||||
# in if/else.
|
||||
if result and not no_break_scope:
|
||||
if not name.parent or p == s:
|
||||
break
|
||||
break_scopes.append(p)
|
||||
|
||||
while flow_scope:
|
||||
# TODO check if result is in scope -> no evaluation necessary
|
||||
n = dynamic.check_flow_information(flow_scope, name_str,
|
||||
position)
|
||||
if n:
|
||||
result = n
|
||||
break
|
||||
|
||||
if result:
|
||||
break
|
||||
if flow_scope == nscope:
|
||||
break
|
||||
flow_scope = flow_scope.parent
|
||||
flow_scope = nscope
|
||||
if result:
|
||||
break
|
||||
|
||||
if not result and isinstance(nscope, er.Instance):
|
||||
# __getattr__ / __getattribute__
|
||||
result += check_getattr(nscope, name_str)
|
||||
debug.dbg('sfn filter "%s" in (%s-%s): %s@%s' % (name_str, scope,
|
||||
nscope, u(result), position))
|
||||
return result
|
||||
|
||||
def descriptor_check(result):
|
||||
"""Processes descriptors"""
|
||||
res_new = []
|
||||
for r in result:
|
||||
if isinstance(scope, (er.Instance, er.Class)) \
|
||||
and hasattr(r, 'get_descriptor_return'):
|
||||
# handle descriptors
|
||||
with common.ignored(KeyError):
|
||||
res_new += r.get_descriptor_return(scope)
|
||||
continue
|
||||
res_new.append(r)
|
||||
return res_new
|
||||
|
||||
if search_global:
|
||||
scope_generator = get_names_of_scope(scope, position=position)
|
||||
else:
|
||||
if isinstance(scope, er.Instance):
|
||||
scope_generator = scope.scope_generator()
|
||||
else:
|
||||
if isinstance(scope, (er.Class, pr.Module)):
|
||||
# classes are only available directly via chaining?
|
||||
# strange stuff...
|
||||
names = scope.get_defined_names()
|
||||
else:
|
||||
names = get_defined_names_for_position(scope, position)
|
||||
scope_generator = iter([(scope, names)])
|
||||
|
||||
if is_goto:
|
||||
return filter_name(scope_generator)
|
||||
return descriptor_check(remove_statements(filter_name(scope_generator)))
|
||||
|
||||
|
||||
def check_getattr(inst, name_str):
|
||||
"""Checks for both __getattr__ and __getattribute__ methods"""
|
||||
result = []
|
||||
# str is important to lose the NamePart!
|
||||
module = builtin.Builtin.scope
|
||||
name = pr.String(module, "'%s'" % name_str, (0, 0), (0, 0), inst)
|
||||
with common.ignored(KeyError):
|
||||
result = inst.execute_subscope_by_name('__getattr__', [name])
|
||||
if not result:
|
||||
# this is a little bit special. `__getattribute__` is executed
|
||||
# before anything else. But: I know no use case, where this
|
||||
# could be practical and the jedi would return wrong types. If
|
||||
# you ever have something, let me know!
|
||||
with common.ignored(KeyError):
|
||||
result = inst.execute_subscope_by_name('__getattribute__', [name])
|
||||
return result
|
||||
|
||||
|
||||
def get_iterator_types(inputs):
|
||||
"""Returns the types of any iterator (arrays, yields, __iter__, etc)."""
|
||||
iterators = []
|
||||
# Take the first statement (for has always only
|
||||
# one, remember `in`). And follow it.
|
||||
for it in inputs:
|
||||
if isinstance(it, (er.Generator, er.Array, dynamic.ArrayInstance)):
|
||||
iterators.append(it)
|
||||
else:
|
||||
if not hasattr(it, 'execute_subscope_by_name'):
|
||||
debug.warning('iterator/for loop input wrong', it)
|
||||
continue
|
||||
try:
|
||||
iterators += it.execute_subscope_by_name('__iter__')
|
||||
except KeyError:
|
||||
debug.warning('iterators: No __iter__ method found.')
|
||||
|
||||
result = []
|
||||
for gen in iterators:
|
||||
if isinstance(gen, er.Array):
|
||||
# Array is a little bit special, since this is an internal
|
||||
# array, but there's also the list builtin, which is
|
||||
# another thing.
|
||||
result += gen.get_index_types()
|
||||
elif isinstance(gen, er.Instance):
|
||||
# __iter__ returned an instance.
|
||||
name = '__next__' if is_py3k else 'next'
|
||||
try:
|
||||
result += gen.execute_subscope_by_name(name)
|
||||
except KeyError:
|
||||
debug.warning('Instance has no __next__ function', gen)
|
||||
else:
|
||||
# is a generator
|
||||
result += gen.iter_content()
|
||||
return result
|
||||
|
||||
|
||||
def assign_tuples(tup, results, seek_name):
|
||||
"""
|
||||
This is a normal assignment checker. In python functions and other things
|
||||
can return tuples:
|
||||
>>> a, b = 1, ""
|
||||
>>> a, (b, c) = 1, ("", 1.0)
|
||||
|
||||
Here, if `seek_name` is "a", the number type will be returned.
|
||||
The first part (before `=`) is the param tuples, the second one result.
|
||||
|
||||
:type tup: pr.Array
|
||||
"""
|
||||
def eval_results(index):
|
||||
types = []
|
||||
for r in results:
|
||||
try:
|
||||
func = r.get_exact_index_types
|
||||
except AttributeError:
|
||||
debug.warning("invalid tuple lookup %s of result %s in %s"
|
||||
% (tup, results, seek_name))
|
||||
else:
|
||||
with common.ignored(IndexError):
|
||||
types += func(index)
|
||||
return types
|
||||
|
||||
result = []
|
||||
for i, stmt in enumerate(tup):
|
||||
# Used in assignments. There is just one call and no other things,
|
||||
# therefore we can just assume, that the first part is important.
|
||||
command = stmt.get_commands()[0]
|
||||
|
||||
if tup.type == pr.Array.NOARRAY:
|
||||
|
||||
# unnessecary braces -> just remove.
|
||||
r = results
|
||||
else:
|
||||
r = eval_results(i)
|
||||
|
||||
# LHS of tuples can be nested, so resolve it recursively
|
||||
result += find_assignments(command, r, seek_name)
|
||||
return result
|
||||
|
||||
|
||||
def find_assignments(lhs, results, seek_name):
|
||||
"""
|
||||
Check if `seek_name` is in the left hand side `lhs` of assignment.
|
||||
|
||||
`lhs` can simply be a variable (`pr.Call`) or a tuple/list (`pr.Array`)
|
||||
representing the following cases::
|
||||
|
||||
a = 1 # lhs is pr.Call
|
||||
(a, b) = 2 # lhs is pr.Array
|
||||
|
||||
:type lhs: pr.Call
|
||||
:type results: list
|
||||
:type seek_name: str
|
||||
"""
|
||||
if isinstance(lhs, pr.Array):
|
||||
return assign_tuples(lhs, results, seek_name)
|
||||
elif lhs.name.names[-1] == seek_name:
|
||||
return results
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
@recursion.RecursionDecorator
|
||||
@cache.memoize_default(default=())
|
||||
def follow_statement(stmt, seek_name=None):
|
||||
"""
|
||||
The starting point of the completion. A statement always owns a call list,
|
||||
which are the calls, that a statement does.
|
||||
In case multiple names are defined in the statement, `seek_name` returns
|
||||
the result for this name.
|
||||
|
||||
:param stmt: A `pr.Statement`.
|
||||
:param seek_name: A string.
|
||||
"""
|
||||
debug.dbg('follow_stmt %s (%s)' % (stmt, seek_name))
|
||||
commands = stmt.get_commands()
|
||||
debug.dbg('calls: %s' % commands)
|
||||
|
||||
result = follow_call_list(commands)
|
||||
|
||||
# Assignment checking is only important if the statement defines multiple
|
||||
# variables.
|
||||
if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
|
||||
new_result = []
|
||||
for ass_commands, op in stmt.assignment_details:
|
||||
new_result += find_assignments(ass_commands[0], result, seek_name)
|
||||
result = new_result
|
||||
return set(result)
|
||||
|
||||
|
||||
@common.rethrow_uncaught
|
||||
def follow_call_list(call_list, follow_array=False):
|
||||
"""
|
||||
`call_list` can be either `pr.Array` or `list of list`.
|
||||
It is used to evaluate a two dimensional object, that has calls, arrays and
|
||||
operators in it.
|
||||
"""
|
||||
def evaluate_list_comprehension(lc, parent=None):
|
||||
input = lc.input
|
||||
nested_lc = lc.input.token_list[0]
|
||||
if isinstance(nested_lc, pr.ListComprehension):
|
||||
# is nested LC
|
||||
input = nested_lc.stmt
|
||||
module = input.get_parent_until()
|
||||
# create a for loop, which does the same as list comprehensions
|
||||
loop = pr.ForFlow(module, [input], lc.stmt.start_pos, lc.middle, True)
|
||||
|
||||
loop.parent = parent or lc.get_parent_until(pr.IsScope)
|
||||
|
||||
if isinstance(nested_lc, pr.ListComprehension):
|
||||
loop = evaluate_list_comprehension(nested_lc, loop)
|
||||
return loop
|
||||
|
||||
result = []
|
||||
calls_iterator = iter(call_list)
|
||||
for call in calls_iterator:
|
||||
if pr.Array.is_type(call, pr.Array.NOARRAY):
|
||||
r = list(itertools.chain.from_iterable(follow_statement(s)
|
||||
for s in call))
|
||||
call_path = call.generate_call_path()
|
||||
next(call_path, None) # the first one has been used already
|
||||
result += follow_paths(call_path, r, call.parent,
|
||||
position=call.start_pos)
|
||||
elif isinstance(call, pr.ListComprehension):
|
||||
loop = evaluate_list_comprehension(call)
|
||||
# Caveat: parents are being changed, but this doesn't matter,
|
||||
# because nothing else uses it.
|
||||
call.stmt.parent = loop
|
||||
result += follow_statement(call.stmt)
|
||||
else:
|
||||
if isinstance(call, pr.Lambda):
|
||||
result.append(er.Function(call))
|
||||
# With things like params, these can also be functions...
|
||||
elif isinstance(call, pr.Base) and call.isinstance(er.Function,
|
||||
er.Class, er.Instance, dynamic.ArrayInstance):
|
||||
result.append(call)
|
||||
# The string tokens are just operations (+, -, etc.)
|
||||
elif not isinstance(call, (str, unicode)):
|
||||
if isinstance(call, pr.Call) and str(call.name) == 'if':
|
||||
# Ternary operators.
|
||||
while True:
|
||||
try:
|
||||
call = next(calls_iterator)
|
||||
except StopIteration:
|
||||
break
|
||||
with common.ignored(AttributeError):
|
||||
if str(call.name) == 'else':
|
||||
break
|
||||
continue
|
||||
result += follow_call(call)
|
||||
elif call == '*':
|
||||
if [r for r in result if isinstance(r, er.Array)
|
||||
or isinstance(r, er.Instance)
|
||||
and str(r.name) == 'str']:
|
||||
# if it is an iterable, ignore * operations
|
||||
next(calls_iterator)
|
||||
return set(result)
|
||||
|
||||
|
||||
def follow_call(call):
|
||||
"""Follow a call is following a function, variable, string, etc."""
|
||||
path = call.generate_call_path()
|
||||
|
||||
# find the statement of the Scope
|
||||
s = call
|
||||
while not s.parent.isinstance(pr.IsScope):
|
||||
s = s.parent
|
||||
return follow_call_path(path, s.parent, s.start_pos)
|
||||
|
||||
|
||||
def follow_call_path(path, scope, position):
|
||||
"""Follows a path generated by `pr.StatementElement.generate_call_path()`"""
|
||||
current = next(path)
|
||||
|
||||
if isinstance(current, pr.Array):
|
||||
result = [er.Array(current)]
|
||||
else:
|
||||
if isinstance(current, pr.NamePart):
|
||||
# This is the first global lookup.
|
||||
scopes = find_name(scope, current, position=position,
|
||||
search_global=True)
|
||||
else:
|
||||
# for pr.Literal
|
||||
scopes = find_name(builtin.Builtin.scope, current.type_as_string())
|
||||
# Make instances of those number/string objects.
|
||||
scopes = [er.Instance(s, (current.value,)) for s in scopes]
|
||||
result = imports.strip_imports(scopes)
|
||||
|
||||
return follow_paths(path, result, scope, position=position)
|
||||
|
||||
|
||||
def follow_paths(path, results, call_scope, position=None):
|
||||
"""
|
||||
In each result, `path` must be followed. Copies the path iterator.
|
||||
"""
|
||||
results_new = []
|
||||
if results:
|
||||
if len(results) > 1:
|
||||
iter_paths = itertools.tee(path, len(results))
|
||||
else:
|
||||
iter_paths = [path]
|
||||
|
||||
for i, r in enumerate(results):
|
||||
fp = follow_path(iter_paths[i], r, call_scope, position=position)
|
||||
if fp is not None:
|
||||
results_new += fp
|
||||
else:
|
||||
# This means stop iteration.
|
||||
return results
|
||||
return results_new
|
||||
|
||||
|
||||
def follow_path(path, scope, call_scope, position=None):
|
||||
"""
|
||||
Uses a generator and tries to complete the path, e.g.::
|
||||
|
||||
foo.bar.baz
|
||||
|
||||
`follow_path` is only responsible for completing `.bar.baz`, the rest is
|
||||
done in the `follow_call` function.
|
||||
"""
|
||||
# current is either an Array or a Scope.
|
||||
try:
|
||||
current = next(path)
|
||||
except StopIteration:
|
||||
return None
|
||||
debug.dbg('follow %s in scope %s' % (current, scope))
|
||||
|
||||
result = []
|
||||
if isinstance(current, pr.Array):
|
||||
# This must be an execution, either () or [].
|
||||
if current.type == pr.Array.LIST:
|
||||
if hasattr(scope, 'get_index_types'):
|
||||
result = scope.get_index_types(current)
|
||||
elif current.type not in [pr.Array.DICT]:
|
||||
# Scope must be a class or func - make an instance or execution.
|
||||
debug.dbg('exe', scope)
|
||||
result = er.Execution(scope, current).get_return_types()
|
||||
else:
|
||||
# Curly braces are not allowed, because they make no sense.
|
||||
debug.warning('strange function call with {}', current, scope)
|
||||
else:
|
||||
# The function must not be decorated with something else.
|
||||
if scope.isinstance(er.Function):
|
||||
scope = scope.get_magic_method_scope()
|
||||
else:
|
||||
# This is the typical lookup while chaining things.
|
||||
if filter_private_variable(scope, call_scope, current):
|
||||
return []
|
||||
result = imports.strip_imports(find_name(scope, current,
|
||||
position=position))
|
||||
return follow_paths(path, set(result), call_scope, position=position)
|
||||
|
||||
|
||||
def filter_private_variable(scope, call_scope, var_name):
|
||||
"""private variables begin with a double underline `__`"""
|
||||
if isinstance(var_name, (str, unicode)) and isinstance(scope, er.Instance)\
|
||||
and var_name.startswith('__') and not var_name.endswith('__'):
|
||||
s = call_scope.get_parent_until((pr.Class, er.Instance))
|
||||
if s != scope and s != scope.base.base:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def goto(stmt, call_path=None):
|
||||
if call_path is None:
|
||||
commands = stmt.get_commands()
|
||||
if len(commands) == 0:
|
||||
return [], ''
|
||||
# Only the first command is important, the rest should basically not
|
||||
# happen except in broken code (e.g. docstrings that aren't code).
|
||||
call = commands[0]
|
||||
if isinstance(call, (str, unicode)):
|
||||
call_path = [call]
|
||||
else:
|
||||
call_path = list(call.generate_call_path())
|
||||
|
||||
scope = stmt.get_parent_until(pr.IsScope)
|
||||
pos = stmt.start_pos
|
||||
call_path, search = call_path[:-1], call_path[-1]
|
||||
pos = pos[0], pos[1] + 1
|
||||
|
||||
if call_path:
|
||||
scopes = follow_call_path(iter(call_path), scope, pos)
|
||||
search_global = False
|
||||
pos = None
|
||||
else:
|
||||
scopes = [scope]
|
||||
search_global = True
|
||||
follow_res = []
|
||||
for s in scopes:
|
||||
follow_res += find_name(s, search, pos,
|
||||
search_global=search_global, is_goto=True)
|
||||
return follow_res, search
|
||||
jedi/evaluate/__init__.py (new file, 376 lines)
@@ -0,0 +1,376 @@
"""
Evaluation of Python code in |jedi| is based on three assumptions:

* Code is recursive (to weaken this assumption, the :mod:`dynamic` module
  exists).
* No magic is being used:

  - metaclasses
  - ``setattr()`` / ``__import__()``
  - writing to ``globals()``, ``locals()``, ``object.__dict__``
* The programmer is not a total dick, e.g. like `this
  <https://github.com/davidhalter/jedi/issues/24>`_ :-)

That said, there's mainly one entry point in this script: ``eval_statement``.
This is where autocompletion starts. Everything you want to complete is either
a ``Statement`` or some special name like ``class``, which is easy to complete.

Therefore you need to understand what follows after ``eval_statement``. Let's
make an example::

    import datetime
    datetime.date.toda# <-- cursor here

First of all, this module doesn't care about completion. It really just cares
about ``datetime.date``. At the end of the procedure ``eval_statement`` will
return the ``datetime`` class.

To *visualize* this (simplified):

- ``eval_statement`` - ``<Statement: datetime.date>``

    - Unpacking of the statement into ``[[<Call: datetime.date>]]``
- ``eval_expression_list``, calls ``eval_call`` with ``<Call: datetime.date>``
- ``eval_call`` - searches the ``datetime`` name within the module.

This is exactly where it starts to get complicated. Now recursions start to
kick in. The statement has not been resolved fully, but now we need to resolve
the datetime import. So it continues:

- follow import, which happens in the :mod:`imports` module.
- now the same ``eval_call`` as above calls ``follow_path`` to follow the
  second part of the statement ``date``.
- After ``follow_path`` returns with the desired ``datetime.date`` class, the
  result is being returned and the recursion finishes.

Now what would happen if we wanted ``datetime.date.foo.bar``? Just two more
calls to ``follow_path`` (which calls itself with a recursion). What if the
import contained another Statement like this::

    from foo import bar
    Date = bar.baz

Well... You get it. Just another ``eval_statement`` recursion. It's really
easy. Just that Python is not that easy sometimes. To understand tuple
assignments and different class scopes, a lot more code had to be written. Yet
we're still not talking about Descriptors and Nested List Comprehensions, just
the simple stuff.

So if you want to change something, write a test and then just change what you
want. This module has been tested by about 600 tests. Don't be afraid to break
something. The tests are good enough.

I need to mention now that this recursive approach is really good because it
only *evaluates* what needs to be *evaluated*. All the statements and modules
that are not used are just being ignored. It's a little bit similar to the
backtracking algorithm.


.. todo:: nonlocal statement, needed or can be ignored? (py3k)
"""
import itertools

from jedi._compatibility import next, hasattr, unicode
from jedi import common
from jedi.parser import representation as pr
from jedi import debug
from jedi.evaluate import representation as er
from jedi.evaluate import imports
from jedi.evaluate import recursion
from jedi.evaluate import iterable
from jedi.evaluate.cache import memoize_default
from jedi.evaluate import stdlib
from jedi.evaluate import finder
from jedi.evaluate import compiled


class Evaluator(object):
|
||||
def __init__(self):
|
||||
self.memoize_cache = {} # for memoize decorators
|
||||
self.recursion_detector = recursion.RecursionDetector()
|
||||
self.execution_recursion_detector = recursion.ExecutionRecursionDetector()
|
||||
|
||||
def find_types(self, scope, name_str, position=None, search_global=False,
|
||||
is_goto=False, resolve_decorator=True):
|
||||
"""
|
||||
This is the search function. The most important part to debug.
|
||||
`remove_statements` and `filter_statements` really are the core part of
|
||||
this completion.
|
||||
|
||||
:param position: Position of the last statement -> tuple of line, column
|
||||
:return: List of Names. Their parents are the types.
|
||||
"""
|
||||
f = finder.NameFinder(self, scope, name_str, position)
|
||||
scopes = f.scopes(search_global)
|
||||
if is_goto:
|
||||
return f.filter_name(scopes)
|
||||
return f.find(scopes, resolve_decorator)
|
||||
|
||||
@memoize_default(default=(), evaluator_is_first_arg=True)
|
||||
@recursion.recursion_decorator
|
||||
@debug.increase_indent
|
||||
def eval_statement(self, stmt, seek_name=None):
|
||||
"""
|
||||
The starting point of the completion. A statement always owns a call
|
||||
list, which are the calls, that a statement does. In case multiple
|
||||
names are defined in the statement, `seek_name` returns the result for
|
||||
this name.
|
||||
|
||||
:param stmt: A `pr.Statement`.
|
||||
"""
|
||||
debug.dbg('eval_statement %s (%s)', stmt, seek_name)
|
||||
expression_list = stmt.expression_list()
|
||||
|
||||
result = self.eval_expression_list(expression_list)
|
||||
|
||||
# Assignment checking is only important if the statement defines multiple
|
||||
# variables.
|
||||
if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
|
||||
new_result = []
|
||||
for ass_expression_list, op in stmt.assignment_details:
|
||||
new_result += finder.find_assignments(ass_expression_list[0], result, seek_name)
|
||||
result = new_result
|
||||
return set(result)
|
||||
|
||||
def eval_expression_list(self, expression_list, follow_array=False):
|
||||
"""
|
||||
`expression_list` can be either `pr.Array` or `list of list`.
|
||||
It is used to evaluate a two dimensional object, that has calls, arrays and
|
||||
operators in it.
|
||||
"""
|
||||
def evaluate_list_comprehension(lc, parent=None):
|
||||
input = lc.input
|
||||
nested_lc = lc.input.token_list[0]
|
||||
if isinstance(nested_lc, pr.ListComprehension):
|
||||
# is nested LC
|
||||
input = nested_lc.stmt
|
||||
module = input.get_parent_until()
|
||||
# create a for loop, which does the same as list comprehensions
|
||||
loop = pr.ForFlow(module, [input], lc.stmt.start_pos, lc.middle, True)
|
||||
|
||||
loop.parent = parent or lc.get_parent_until(pr.IsScope)
|
||||
|
||||
if isinstance(nested_lc, pr.ListComprehension):
|
||||
loop = evaluate_list_comprehension(nested_lc, loop)
|
||||
return loop
|
||||
|
||||
debug.dbg('eval_expression_list: %s', expression_list)
|
||||
result = []
|
||||
calls_iterator = iter(expression_list)
|
||||
for call in calls_iterator:
|
||||
if pr.Array.is_type(call, pr.Array.NOARRAY):
|
||||
r = list(itertools.chain.from_iterable(self.eval_statement(s)
|
||||
for s in call))
|
||||
call_path = call.generate_call_path()
|
||||
next(call_path, None) # the first one has been used already
|
||||
result += self.follow_path(call_path, r, call.parent,
|
||||
position=call.start_pos)
|
||||
elif isinstance(call, pr.ListComprehension):
|
||||
loop = evaluate_list_comprehension(call)
|
||||
# Caveat: parents are being changed, but this doesn't matter,
|
||||
# because nothing else uses it.
|
||||
call.stmt.parent = loop
|
||||
result += self.eval_statement(call.stmt)
|
||||
else:
|
||||
if isinstance(call, pr.Lambda):
|
||||
result.append(er.Function(self, call))
|
||||
# With things like params, these can also be functions...
|
||||
elif isinstance(call, pr.Base) and call.isinstance(
|
||||
er.Function, er.Class, er.Instance, iterable.ArrayInstance):
|
||||
result.append(call)
|
||||
# The string tokens are just operations (+, -, etc.)
|
||||
elif isinstance(call, compiled.CompiledObject):
|
||||
result.append(call)
|
||||
elif not isinstance(call, (str, unicode)):
|
||||
if isinstance(call, pr.Call) and str(call.name) == 'if':
|
||||
# Ternary operators.
|
||||
while True:
|
||||
try:
|
||||
call = next(calls_iterator)
|
||||
except StopIteration:
|
||||
break
|
||||
with common.ignored(AttributeError):
|
||||
if str(call.name) == 'else':
|
||||
break
|
||||
continue
|
||||
result += self.eval_call(call)
|
||||
elif call == '*':
|
||||
if [r for r in result if isinstance(r, iterable.Array)
|
||||
or isinstance(r, compiled.CompiledObject)
|
||||
and isinstance(r.obj, (str, unicode))]:
|
||||
# if it is an iterable, ignore * operations
|
||||
next(calls_iterator)
|
||||
return set(result)
|
||||
|
||||
def eval_call(self, call):
|
||||
"""Follow a call is following a function, variable, string, etc."""
|
||||
path = call.generate_call_path()
|
||||
|
||||
# find the statement of the Scope
|
||||
s = call
|
||||
while not s.parent.isinstance(pr.IsScope):
|
||||
s = s.parent
|
||||
return self.eval_call_path(path, s.parent, s.start_pos)
|
||||
|
||||
def eval_call_path(self, path, scope, position):
|
||||
"""
|
||||
Follows a path generated by `pr.StatementElement.generate_call_path()`.
|
||||
"""
|
||||
current = next(path)
|
||||
|
||||
if isinstance(current, pr.Array):
|
||||
types = [iterable.Array(self, current)]
|
||||
else:
|
||||
if isinstance(current, pr.NamePart):
|
||||
# This is the first global lookup.
|
||||
types = self.find_types(scope, current, position=position,
|
||||
search_global=True)
|
||||
else:
|
||||
# for pr.Literal
|
||||
types = [compiled.create(current.value)]
|
||||
types = imports.strip_imports(self, types)
|
||||
|
||||
return self.follow_path(path, types, scope, position=position)
|
||||
|
||||
def follow_path(self, path, types, call_scope, position=None):
|
||||
"""
|
||||
Follows a path like::
|
||||
|
||||
self.follow_path(iter(['Foo', 'bar']), [a_type], from_somewhere)
|
||||
|
||||
to follow a call like ``module.a_type.Foo.bar`` (in ``from_somewhere``).
|
||||
"""
|
||||
results_new = []
|
||||
iter_paths = itertools.tee(path, len(types))
|
||||
|
||||
for i, typ in enumerate(types):
|
||||
fp = self._follow_path(iter_paths[i], typ, call_scope, position=position)
|
||||
if fp is not None:
|
||||
results_new += fp
|
||||
else:
|
||||
# This means stop iteration.
|
||||
return types
|
||||
return results_new
|
||||
|
||||
def _follow_path(self, path, typ, scope, position=None):
|
||||
"""
|
||||
Uses a generator and tries to complete the path, e.g.::
|
||||
|
||||
foo.bar.baz
|
||||
|
||||
`_follow_path` is only responsible for completing `.bar.baz`, the rest
|
||||
is done in the `follow_call` function.
|
||||
"""
|
||||
# current is either an Array or a Scope.
|
||||
try:
|
||||
current = next(path)
|
||||
except StopIteration:
|
||||
return None
|
||||
debug.dbg('_follow_path: %s in scope %s', current, typ)
|
||||
|
||||
result = []
|
||||
if isinstance(current, pr.Array):
|
||||
# This must be an execution, either () or [].
|
||||
if current.type == pr.Array.LIST:
|
||||
if hasattr(typ, 'get_index_types'):
|
||||
result = typ.get_index_types(current)
|
||||
elif current.type not in [pr.Array.DICT]:
|
||||
# Scope must be a class or func - make an instance or execution.
|
||||
result = self.execute(typ, current)
|
||||
else:
|
||||
# Curly braces are not allowed, because they make no sense.
|
||||
debug.warning('strange function call with {} %s %s', current, typ)
|
||||
else:
|
||||
# The function must not be decorated with something else.
|
||||
if typ.isinstance(er.Function):
|
||||
typ = typ.get_magic_function_scope()
|
||||
else:
|
||||
# This is the typical lookup while chaining things.
|
||||
if filter_private_variable(typ, scope, current):
|
||||
return []
|
||||
types = self.find_types(typ, current, position=position)
|
||||
result = imports.strip_imports(self, types)
|
||||
return self.follow_path(path, set(result), scope, position=position)
|
||||
|
||||
@debug.increase_indent
|
||||
def execute(self, obj, params=(), evaluate_generator=False):
|
||||
if obj.isinstance(er.Function):
|
||||
obj = obj.get_decorated_func()
|
||||
|
||||
debug.dbg('execute: %s %s', obj, params)
|
||||
try:
|
||||
return stdlib.execute(self, obj, params)
|
||||
except stdlib.NotInStdLib:
|
||||
pass
|
||||
|
||||
if isinstance(obj, iterable.GeneratorMethod):
|
||||
return obj.execute()
|
||||
elif obj.isinstance(compiled.CompiledObject):
|
||||
if obj.is_executable_class():
|
||||
return [er.Instance(self, obj, params)]
|
||||
else:
|
||||
return list(obj.execute_function(self, params))
|
||||
elif obj.isinstance(er.Class):
|
||||
# There maybe executions of executions.
|
||||
return [er.Instance(self, obj, params)]
|
||||
else:
|
||||
stmts = []
|
||||
if obj.isinstance(er.Function):
|
||||
stmts = er.FunctionExecution(self, obj, params).get_return_types(evaluate_generator)
|
||||
else:
|
||||
if hasattr(obj, 'execute_subscope_by_name'):
|
||||
try:
|
||||
stmts = obj.execute_subscope_by_name('__call__', params)
|
||||
except KeyError:
|
||||
debug.warning("no __call__ func available %s", obj)
|
||||
else:
|
||||
debug.warning("no execution possible %s", obj)
|
||||
|
||||
debug.dbg('execute result: %s in %s', stmts, obj)
|
||||
return imports.strip_imports(self, stmts)
|
||||
|
||||
def goto(self, stmt, call_path=None):
|
||||
if call_path is None:
|
||||
expression_list = stmt.expression_list()
|
||||
if len(expression_list) == 0:
|
||||
return [], ''
|
||||
# Only the first command is important, the rest should basically not
|
||||
# happen except in broken code (e.g. docstrings that aren't code).
|
||||
call = expression_list[0]
|
||||
if isinstance(call, (str, unicode)):
|
||||
call_path = [call]
|
||||
else:
|
||||
call_path = list(call.generate_call_path())
|
||||
|
||||
scope = stmt.get_parent_until(pr.IsScope)
|
||||
pos = stmt.start_pos
|
||||
call_path, search = call_path[:-1], call_path[-1]
|
||||
pos = pos[0], pos[1] + 1
|
||||
|
||||
if call_path:
|
||||
scopes = self.eval_call_path(iter(call_path), scope, pos)
|
||||
search_global = False
|
||||
pos = None
|
||||
else:
|
||||
scopes = [scope]
|
||||
search_global = True
|
||||
follow_res = []
|
||||
for s in scopes:
|
||||
follow_res += self.find_types(s, search, pos,
|
||||
search_global=search_global, is_goto=True)
|
||||
return follow_res, search
|
||||
|
||||
|
||||
def filter_private_variable(scope, call_scope, var_name):
|
||||
"""private variables begin with a double underline `__`"""
|
||||
if isinstance(var_name, (str, unicode)) and isinstance(scope, er.Instance)\
|
||||
and var_name.startswith('__') and not var_name.endswith('__'):
|
||||
s = call_scope.get_parent_until((pr.Class, er.Instance, compiled.CompiledObject))
|
||||
if s != scope:
|
||||
if isinstance(scope.base, compiled.CompiledObject):
|
||||
if s != scope.base:
|
||||
return True
|
||||
else:
|
||||
if s != scope.base.base:
|
||||
return True
|
||||
return False
|
||||
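# --- Illustrative user code (not part of this module) -----------------------
# What filter_private_variable() guards against, conceptually:
#
# class Foo(object):
#     def __init__(self):
#         self.__secret = 1     # double underscore -> treated as private
#     def inside(self):
#         return self.__secret  # lookup/completion still works in here
#
# Foo().__secret                # looked up from outside -> filtered out
# -----------------------------------------------------------------------------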
jedi/evaluate/cache.py (new file, 51 lines)
@@ -0,0 +1,51 @@
"""
- the popular ``memoize_default`` works like a typical memoize and returns the
  default otherwise.
- ``CachedMetaClass`` uses ``memoize_default`` to do the same with classes.
"""


def memoize_default(default, evaluator_is_first_arg=False, second_arg_is_evaluator=False):
    """ This is a typical memoization decorator, BUT there is one difference:
    To prevent recursion it sets defaults.

    Preventing recursion is in this case the much bigger use than speed. I
    don't think that there is a big speed difference, but there are many cases
    where recursion could happen (think about a = b; b = a).
    """
    def func(function):
        def wrapper(obj, *args, **kwargs):
            if evaluator_is_first_arg:
                cache = obj.memoize_cache
            elif second_arg_is_evaluator:  # needed for meta classes
                cache = args[0].memoize_cache
            else:
                cache = obj._evaluator.memoize_cache

            try:
                memo = cache[function]
            except KeyError:
                memo = {}
                cache[function] = memo

            key = (obj, args, frozenset(kwargs.items()))
            if key in memo:
                return memo[key]
            else:
                memo[key] = default
                rv = function(obj, *args, **kwargs)
                memo[key] = rv
                return rv
        return wrapper
    return func


class CachedMetaClass(type):
    """
    This is basically almost the same as the decorator above, it just caches
    class initializations. I haven't found any other way, so I'm doing it with
    meta classes.
    """
    @memoize_default(None, second_arg_is_evaluator=True)
    def __call__(self, *args, **kwargs):
        return super(CachedMetaClass, self).__call__(*args, **kwargs)
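# --- Illustrative sketch (not part of this module) --------------------------
# Typical usage: the decorated object either is the evaluator itself
# (`evaluator_is_first_arg=True`) or carries one in `_evaluator`, which
# provides the per-evaluator `memoize_cache` dict:
#
# class NameLookupHelper(object):          # hypothetical helper
#     def __init__(self, evaluator):
#         self._evaluator = evaluator
#
#     @memoize_default(default=[])
#     def lookup(self, name_str):
#         ...                              # recursive calls get [] back
#                                          # instead of recursing forever
# -----------------------------------------------------------------------------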
jedi/evaluate/compiled/__init__.py (new file, 308 lines)
@@ -0,0 +1,308 @@
"""
Imitate the parser representation.
"""
import inspect
import re
import sys
import os

from jedi._compatibility import builtins as _builtins
from jedi import debug
from jedi.parser.representation import Base
from jedi.cache import underscore_memoization
from jedi.evaluate.sys_path import get_sys_path
from . import fake


class CompiledObject(Base):
|
||||
# comply with the parser
|
||||
start_pos = 0, 0
|
||||
asserts = []
|
||||
path = None # modules have this attribute - set it to None.
|
||||
|
||||
def __init__(self, obj, parent=None):
|
||||
self.obj = obj
|
||||
self.parent = parent
|
||||
self.doc = inspect.getdoc(obj)
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: %s>' % (type(self).__name__, self.obj)
|
||||
|
||||
def get_parent_until(self, *args, **kwargs):
|
||||
# compiled modules only use functions and classes/methods (2 levels)
|
||||
return getattr(self.parent, 'parent', self.parent) or self.parent or self
|
||||
|
||||
@underscore_memoization
|
||||
def _parse_function_doc(self):
|
||||
if self.doc is None:
|
||||
return '', ''
|
||||
|
||||
return _parse_function_doc(self.doc)
|
||||
|
||||
def type(self):
|
||||
cls = self._cls().obj
|
||||
if inspect.isclass(cls):
|
||||
return 'class'
|
||||
elif inspect.ismodule(cls):
|
||||
return 'module'
|
||||
elif inspect.isbuiltin(cls) or inspect.ismethod(cls) \
|
||||
or inspect.ismethoddescriptor(cls):
|
||||
return 'def'
|
||||
|
||||
def is_executable_class(self):
|
||||
return inspect.isclass(self.obj)
|
||||
|
||||
@underscore_memoization
|
||||
def _cls(self):
|
||||
# Ensures that a CompiledObject is returned that is not an instance (like list)
|
||||
if fake.is_class_instance(self.obj):
|
||||
try:
|
||||
c = self.obj.__class__
|
||||
except AttributeError:
|
||||
# happens with numpy.core.umath._UFUNC_API (you get it
|
||||
# automatically by doing `import numpy`.
|
||||
c = type(None)
|
||||
return CompiledObject(c, self.parent)
|
||||
return self
|
||||
|
||||
def get_defined_names(self):
|
||||
cls = self._cls()
|
||||
for name in dir(cls.obj):
|
||||
yield CompiledName(cls, name)
|
||||
|
||||
def instance_names(self):
|
||||
return self.get_defined_names()
|
||||
|
||||
def get_subscope_by_name(self, name):
|
||||
if name in dir(self._cls().obj):
|
||||
return CompiledName(self._cls(), name).parent
|
||||
else:
|
||||
raise KeyError("CompiledObject doesn't have an attribute '%s'." % name)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
# might not exist sometimes (raises AttributeError)
|
||||
return self._cls().obj.__name__
|
||||
|
||||
def execute_function(self, evaluator, params):
|
||||
if self.type() != 'def':
|
||||
return
|
||||
|
||||
for name in self._parse_function_doc()[1].split():
|
||||
try:
|
||||
bltn_obj = _create_from_name(builtin, builtin, name)
|
||||
except AttributeError:
|
||||
continue
|
||||
else:
|
||||
if isinstance(bltn_obj, CompiledObject):
|
||||
# We want everything except None.
|
||||
if bltn_obj.obj is not None:
|
||||
yield bltn_obj
|
||||
else:
|
||||
for result in evaluator.execute(bltn_obj, params):
|
||||
yield result
|
||||
|
||||
@property
|
||||
@underscore_memoization
|
||||
def subscopes(self):
|
||||
"""
|
||||
Returns only the faked scopes - the other ones are not important for
|
||||
internal analysis.
|
||||
"""
|
||||
module = self.get_parent_until()
|
||||
faked_subscopes = []
|
||||
for name in dir(self._cls().obj):
|
||||
f = fake.get_faked(module.obj, self.obj, name)
|
||||
if f:
|
||||
f.parent = self
|
||||
faked_subscopes.append(f)
|
||||
return faked_subscopes
|
||||
|
||||
def get_self_attributes(self):
|
||||
return [] # Instance compatibility
|
||||
|
||||
def get_imports(self):
|
||||
return [] # Builtins don't have imports
|
||||
|
||||
|
||||
class CompiledName(object):
|
||||
def __init__(self, obj, name):
|
||||
self._obj = obj
|
||||
self.name = name
|
||||
self.start_pos = 0, 0 # an illegal start_pos, to make sorting easy.
|
||||
|
||||
def get_parent_until(self):
|
||||
return self.parent.get_parent_until()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: (%s).%s>' % (type(self).__name__, self._obj.name, self.name)
|
||||
|
||||
@property
|
||||
@underscore_memoization
|
||||
def parent(self):
|
||||
module = self._obj.get_parent_until()
|
||||
return _create_from_name(module, self._obj, self.name)
|
||||
|
||||
@property
|
||||
def names(self):
|
||||
return [self.name] # compatibility with parser.representation.Name
|
||||
|
||||
def get_code(self):
|
||||
return self.name
|
||||
|
||||
|
||||
def load_module(path, name):
|
||||
if not name:
|
||||
name = os.path.basename(path)
|
||||
name = name.rpartition('.')[0] # cut file type (normally .so)
|
||||
|
||||
# sometimes there are endings like `_sqlite3.cpython-32mu`
|
||||
name = re.sub(r'\..*', '', name)
|
||||
|
||||
dot_path = []
|
||||
if path:
|
||||
p = path
|
||||
# if path is not in sys.path, we need to make a well defined import
|
||||
# like `from numpy.core import umath.`
|
||||
while p and p not in sys.path:
|
||||
p, sep, mod = p.rpartition(os.path.sep)
|
||||
dot_path.insert(0, mod.partition('.')[0])
|
||||
if p:
|
||||
name = ".".join(dot_path)
|
||||
path = p
|
||||
else:
|
||||
path = os.path.dirname(path)
|
||||
|
||||
sys_path = get_sys_path()
|
||||
if path:
|
||||
sys_path.insert(0, path)
|
||||
|
||||
temp, sys.path = sys.path, sys_path
|
||||
try:
|
||||
module = __import__(name, {}, {}, dot_path[:-1])
|
||||
except AttributeError:
|
||||
# use sys.modules, because you cannot access some modules
|
||||
# directly. -> github issue #59
|
||||
module = sys.modules[name]
|
||||
sys.path = temp
|
||||
return CompiledObject(module)
|
||||
|
||||
|
||||
docstr_defaults = {
|
||||
'floating point number': 'float',
|
||||
'character': 'str',
|
||||
'integer': 'int',
|
||||
'dictionary': 'dict',
|
||||
'string': 'str',
|
||||
}
|
||||
|
||||
|
||||
def _parse_function_doc(doc):
|
||||
"""
|
||||
Takes a function and returns the params and return value as a tuple.
|
||||
This is nothing more than a docstring parser.
|
||||
|
||||
TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
|
||||
TODO docstrings like 'tuple of integers'
|
||||
"""
|
||||
# parse round parentheses: def func(a, (b,c))
|
||||
try:
|
||||
count = 0
|
||||
start = doc.index('(')
|
||||
for i, s in enumerate(doc[start:]):
|
||||
if s == '(':
|
||||
count += 1
|
||||
elif s == ')':
|
||||
count -= 1
|
||||
if count == 0:
|
||||
end = start + i
|
||||
break
|
||||
param_str = doc[start + 1:end]
|
||||
except (ValueError, UnboundLocalError):
|
||||
# ValueError for doc.index
|
||||
# UnboundLocalError for undefined end in last line
|
||||
debug.dbg('no brackets found - no param')
|
||||
end = 0
|
||||
param_str = ''
|
||||
else:
|
||||
# remove square brackets, that show an optional param ( = None)
|
||||
def change_options(m):
|
||||
args = m.group(1).split(',')
|
||||
for i, a in enumerate(args):
|
||||
if a and '=' not in a:
|
||||
args[i] += '=None'
|
||||
return ','.join(args)
|
||||
|
||||
while True:
|
||||
param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
|
||||
change_options, param_str)
|
||||
if changes == 0:
|
||||
break
|
||||
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
|
||||
|
||||
# parse return value
|
||||
r = re.search('-[>-]* ', doc[end:end + 7])
|
||||
if r is None:
|
||||
ret = ''
|
||||
else:
|
||||
index = end + r.end()
|
||||
# get result type, which can contain newlines
|
||||
pattern = re.compile(r'(,\n|[^\n-])+')
|
||||
ret_str = pattern.match(doc, index).group(0).strip()
|
||||
# New object -> object()
|
||||
ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)
|
||||
|
||||
ret = docstr_defaults.get(ret_str, ret_str)
|
||||
|
||||
return param_str, ret
|
||||
|
||||
|
||||
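# --- Illustrative example (not part of this module) -------------------------
# For a typical builtin docstring the parser above should yield roughly:
#
# _parse_function_doc("pow(x, y[, z]) -> number")
# # -> ('x, y, z=None', 'number')   (optional bracketed params get '=None')
# -----------------------------------------------------------------------------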
class Builtin(CompiledObject):
|
||||
def get_defined_names(self):
|
||||
# Filter None, because it's really just a keyword, nobody wants to
|
||||
# access it.
|
||||
return [d for d in super(Builtin, self).get_defined_names() if d.name != 'None']
|
||||
|
||||
|
||||
def _a_generator(foo):
|
||||
"""Used to have an object to return for generators."""
|
||||
yield 42
|
||||
yield foo
|
||||
|
||||
builtin = Builtin(_builtins)
|
||||
magic_function_class = CompiledObject(type(load_module), parent=builtin)
|
||||
generator_obj = CompiledObject(_a_generator(1.0))
|
||||
|
||||
|
||||
def _create_from_name(module, parent, name):
|
||||
faked = fake.get_faked(module.obj, parent.obj, name)
|
||||
# only functions are necessary.
|
||||
if faked is not None:
|
||||
faked.parent = parent
|
||||
return faked
|
||||
|
||||
try:
|
||||
obj = getattr(parent.obj, name)
|
||||
except AttributeError:
|
||||
# happens e.g. in properties of
|
||||
# PyQt4.QtGui.QStyleOptionComboBox.currentText
|
||||
# -> just set it to None
|
||||
obj = None
|
||||
return CompiledObject(obj, parent)
|
||||
|
||||
|
||||
def create(obj, parent=builtin, module=None):
|
||||
"""
|
||||
A very weird interface class to this module. The more options provided the
|
||||
more acurate loading compiled objects is.
|
||||
"""
|
||||
if not inspect.ismodule(obj):
|
||||
faked = fake.get_faked(module and module.obj, obj)
|
||||
if faked is not None:
|
||||
faked.parent = parent
|
||||
return faked
|
||||
|
||||
return CompiledObject(obj, parent)
|
||||
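# --- Illustrative sketch (not part of this module) --------------------------
# How the rest of the evaluator typically obtains compiled objects:
#
# obj = create(42)                               # CompiledObject wrapping 42
# names = list(obj.get_defined_names())          # CompiledName entries, from dir(int)
# str_cls = builtin.get_subscope_by_name('str')  # lookup on the builtins module
# -----------------------------------------------------------------------------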
jedi/evaluate/compiled/fake.py (new file, 117 lines)
@@ -0,0 +1,117 @@
"""
Loads functions that are mixed in to the standard library. E.g. builtins are
written in C (binaries), but my autocompletion only understands Python code. By
mixing in Python code, the autocompletion should work much better for builtins.
"""

import os
import inspect

from jedi._compatibility import is_py3, builtins
from jedi.parser import Parser
from jedi.parser import token as token_pr
from jedi.parser.representation import Class
from jedi.evaluate.helpers import FakeName

modules = {}


def _load_faked_module(module):
|
||||
module_name = module.__name__
|
||||
if module_name == '__builtin__' and not is_py3:
|
||||
module_name = 'builtins'
|
||||
|
||||
try:
|
||||
return modules[module_name]
|
||||
except KeyError:
|
||||
path = os.path.dirname(os.path.abspath(__file__))
|
||||
try:
|
||||
with open(os.path.join(path, 'fake', module_name) + '.pym') as f:
|
||||
source = f.read()
|
||||
except IOError:
|
||||
modules[module_name] = None
|
||||
return
|
||||
module = Parser(source, module_name).module
|
||||
modules[module_name] = module
|
||||
|
||||
if module_name == 'builtins' and not is_py3:
|
||||
# There are two implementations of `open` for either python 2/3.
|
||||
# -> Rename the python2 version (`look at fake/builtins.pym`).
|
||||
open_func = search_scope(module, 'open')
|
||||
open_func.name = FakeName('open_python3')
|
||||
open_func = search_scope(module, 'open_python2')
|
||||
open_func.name = FakeName('open')
|
||||
return module
|
||||
|
||||
|
||||
def search_scope(scope, obj_name):
|
||||
for s in scope.subscopes:
|
||||
if str(s.name) == obj_name:
|
||||
return s
|
||||
|
||||
|
||||
def get_module(obj):
|
||||
if inspect.ismodule(obj):
|
||||
return obj
|
||||
try:
|
||||
obj = obj.__objclass__
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
try:
|
||||
imp_plz = obj.__module__
|
||||
except AttributeError:
|
||||
# Unfortunately in some cases like `int` there's no __module__
|
||||
return builtins
|
||||
else:
|
||||
return __import__(imp_plz)
|
||||
|
||||
|
||||
def _faked(module, obj, name):
|
||||
# Crazy underscore actions to try to escape all the internal madness.
|
||||
if module is None:
|
||||
module = get_module(obj)
|
||||
|
||||
faked_mod = _load_faked_module(module)
|
||||
if faked_mod is None:
|
||||
return
|
||||
|
||||
# Having the module as a `parser.representation.module`, we need to scan
|
||||
# for methods.
|
||||
if name is None:
|
||||
if inspect.isbuiltin(obj):
|
||||
return search_scope(faked_mod, obj.__name__)
|
||||
elif not inspect.isclass(obj):
|
||||
# object is a method or descriptor
|
||||
cls = search_scope(faked_mod, obj.__objclass__.__name__)
|
||||
if cls is None:
|
||||
return
|
||||
return search_scope(cls, obj.__name__)
|
||||
else:
|
||||
if obj == module:
|
||||
return search_scope(faked_mod, name)
|
||||
else:
|
||||
cls = search_scope(faked_mod, obj.__name__)
|
||||
if cls is None:
|
||||
return
|
||||
return search_scope(cls, name)
|
||||
|
||||
|
||||
def get_faked(module, obj, name=None):
|
||||
obj = obj.__class__ if is_class_instance(obj) else obj
|
||||
result = _faked(module, obj, name)
|
||||
if not isinstance(result, Class) and result is not None:
|
||||
# Set the docstr which was previously not set (faked modules don't
|
||||
# contain it).
|
||||
result.docstr = None
|
||||
if obj.__doc__:
|
||||
result.docstr = token_pr.TokenDocstring.fake_docstring(obj.__doc__)
|
||||
return result
|
||||
|
||||
|
||||
def is_class_instance(obj):
|
||||
"""Like inspect.* methods."""
|
||||
return not (inspect.isclass(obj) or inspect.ismodule(obj)
|
||||
or inspect.isbuiltin(obj) or inspect.ismethod(obj)
|
||||
or inspect.ismethoddescriptor(obj) or inspect.iscode(obj)
|
||||
or inspect.isgenerator(obj))
|
||||
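# --- Illustrative sketch (not part of this module) --------------------------
# The compiled module uses this roughly as follows (assuming the builtin in
# question actually has a definition in fake/builtins.pym):
#
# from jedi._compatibility import builtins
# fake_def = get_faked(builtins, builtins, 'next')
# # -> a parser Function node from fake/builtins.pym, or None if not faked
# -----------------------------------------------------------------------------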
@@ -1,7 +1,7 @@
|
||||
def proxy(object, callback=None):
|
||||
return object
|
||||
|
||||
class ref():
|
||||
class weakref():
|
||||
def __init__(self, object, callback=None):
|
||||
self.__object = object
|
||||
def __call__(self):
|
||||
@@ -45,6 +45,15 @@ class xrange():
|
||||
return 1
|
||||
|
||||
|
||||
def open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True):
|
||||
import io
|
||||
return io.TextIOWrapper(file, mode, buffering, encoding, errors, newline, closefd)
|
||||
|
||||
|
||||
def open_python2(name, mode=None, buffering=None):
|
||||
return file(name, mode, buffering)
|
||||
|
||||
|
||||
#--------------------------------------------------------
|
||||
# descriptors
|
||||
#--------------------------------------------------------
|
||||
@@ -1,3 +1,3 @@
|
||||
class TextIOWrapper():
|
||||
def __next__(self):
|
||||
return ''
|
||||
return 'hacked io return'
|
||||
@@ -16,10 +16,8 @@ annotations.
|
||||
|
||||
import re
|
||||
|
||||
from jedi import cache
|
||||
from jedi.evaluate.cache import memoize_default
|
||||
from jedi.parser import Parser
|
||||
import evaluate
|
||||
import evaluate_representation as er
|
||||
|
||||
DOCSTRING_PARAM_PATTERNS = [
|
||||
r'\s*:type\s+%s:\s*([^\n]+)', # Sphinx
|
||||
@@ -34,8 +32,8 @@ DOCSTRING_RETURN_PATTERNS = [
|
||||
REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
|
||||
|
||||
|
||||
@cache.memoize_default()
|
||||
def follow_param(param):
|
||||
@memoize_default(None, evaluator_is_first_arg=True)
|
||||
def follow_param(evaluator, param):
|
||||
func = param.parent_function
|
||||
# print func, param, param.parent_function
|
||||
if not func.docstr:
|
||||
@@ -44,7 +42,7 @@ def follow_param(param):
|
||||
func.docstr.as_string(),
|
||||
str(param.get_name())
|
||||
)
|
||||
user_position = (1, 0)
|
||||
position = (1, 0)
|
||||
|
||||
if param_str is not None:
|
||||
|
||||
@@ -54,14 +52,13 @@ def follow_param(param):
|
||||
param_str = 'import %s\n%s' % (
|
||||
param_str.rsplit('.', 1)[0],
|
||||
param_str)
|
||||
user_position = (2, 0)
|
||||
position = (2, 0)
|
||||
|
||||
p = Parser(
|
||||
param_str, None, user_position, no_docstr=True
|
||||
)
|
||||
if p.user_stmt is None:
|
||||
p = Parser(param_str, no_docstr=True)
|
||||
stmt = p.module.get_statement_for_position(position)
|
||||
if stmt is None:
|
||||
return []
|
||||
return evaluate.follow_statement(p.user_stmt)
|
||||
return evaluator.eval_statement(stmt)
|
||||
return []
|
||||
|
||||
|
||||
@@ -113,27 +110,22 @@ def _strip_rest_role(type_str):
|
||||
return type_str
|
||||
|
||||
|
||||
def find_return_types(func):
|
||||
def find_return_types(evaluator, func):
|
||||
def search_return_in_docstr(code):
|
||||
for p in DOCSTRING_RETURN_PATTERNS:
|
||||
match = p.search(code)
|
||||
if match:
|
||||
return match.group(1)
|
||||
|
||||
if isinstance(func, er.InstanceElement):
|
||||
func = func.var
|
||||
|
||||
if isinstance(func, er.Function):
|
||||
func = func.base_func
|
||||
|
||||
if not func.docstr:
|
||||
return []
|
||||
type_str = search_return_in_docstr(func.docstr.as_string())
|
||||
if not type_str:
|
||||
return []
|
||||
|
||||
p = Parser(type_str, None, (1, 0), no_docstr=True)
|
||||
if p.user_stmt is None:
|
||||
p = Parser(type_str, None, no_docstr=True)
|
||||
stmt = p.module.get_statement_for_position((1, 0))
|
||||
if stmt is None:
|
||||
return []
|
||||
p.user_stmt.parent = func
|
||||
return list(evaluate.follow_statement(p.user_stmt))
|
||||
stmt.parent = func
|
||||
return list(evaluator.eval_statement(stmt))
|
||||
jedi/evaluate/dynamic.py (new file, 185 lines)
@@ -0,0 +1,185 @@
"""
To understand Python on a deeper level, |jedi| needs to understand some of the
dynamic features of Python. However, this is probably the most complicated part:

- Array modifications (e.g. ``list.append``)
- Parameter completion in functions
- Flow checks (e.g. ``if isinstance(a, str)`` -> a is a str)

Array modifications
*******************

If the content of an array (``set``/``list``) is wanted somewhere, the current
module will be checked for appearances of ``arr.append``, ``arr.insert``, etc.
If the ``arr`` name points to an actual array, the content will be added.

This can be really CPU intensive, as you can imagine, because |jedi| has to
follow **every** ``append``. However this works pretty well, because in *slow*
cases, the recursion detector and other settings will stop this process.

It is important to note that:

1. Array modifications work only in the current module.
2. Only array additions are being checked; ``list.pop``, etc. are being ignored.

Parameter completion
********************

One of the really important features of |jedi| is to have an option to
understand code like this::

    def foo(bar):
        bar. # completion here
    foo(1)

There's no doubt whether bar is an ``int`` or not, but if there's also a call
like ``foo('str')``, what would happen? Well, we'll just show both. Because
that's what a human would expect.

It works as follows:

- A param is being encountered.
- Search for function calls named ``foo``.
- Execute these calls and check the injected params. This works with a
  ``ParamListener``.

Flow checks
***********

Flow checks are not really mature. There's only a check for ``isinstance``. It
would check whether a flow has the form of ``if isinstance(a, type_or_tuple)``.
Unfortunately every other thing is being ignored (e.g. ``a == ''`` would be easy
to check for -> a is a string). There's big potential in these checks.
"""
from jedi.parser import representation as pr
|
||||
from jedi import settings
|
||||
from jedi.evaluate import helpers
|
||||
from jedi.evaluate.cache import memoize_default
|
||||
from jedi.evaluate import imports
|
||||
|
||||
# This is something like the sys.path, but only for searching params. It means
|
||||
# that this is the order in which Jedi searches params.
|
||||
search_param_modules = ['.']
|
||||
|
||||
|
||||
class ParamListener(object):
|
||||
"""
|
||||
This listener is used to get the params for a function.
|
||||
"""
|
||||
def __init__(self):
|
||||
self.param_possibilities = []
|
||||
|
||||
def execute(self, params):
|
||||
self.param_possibilities.append(params)
|
||||
|
||||
|
||||
@memoize_default([], evaluator_is_first_arg=True)
|
||||
def search_params(evaluator, param):
|
||||
"""
|
||||
This is a dynamic search for params. If you try to complete a type:
|
||||
|
||||
>>> def func(foo):
|
||||
... foo
|
||||
>>> func(1)
|
||||
>>> func("")
|
||||
|
||||
It is not known what the type is, because it cannot be guessed with
recursive madness. Therefore one has to analyze the statements that are
calling the function, as well as the incoming params.
|
||||
"""
|
||||
if not settings.dynamic_params:
|
||||
return []
|
||||
|
||||
def get_params_for_module(module):
|
||||
"""
|
||||
Returns the values of a param, or an empty array.
|
||||
"""
|
||||
@memoize_default([], evaluator_is_first_arg=True)
|
||||
def get_posibilities(evaluator, module, func_name):
|
||||
try:
|
||||
possible_stmts = module.used_names[func_name]
|
||||
except KeyError:
|
||||
return []
|
||||
|
||||
for stmt in possible_stmts:
|
||||
if isinstance(stmt, pr.Import):
|
||||
continue
|
||||
calls = helpers.scan_statement_for_calls(stmt, func_name)
|
||||
for c in calls:
|
||||
# no execution means that params cannot be set
|
||||
call_path = list(c.generate_call_path())
|
||||
pos = c.start_pos
|
||||
scope = stmt.parent
|
||||
|
||||
# this whole stuff is just to not execute certain parts
|
||||
# (speed improvement), basically we could just call
|
||||
# ``eval_call_path`` on the call_path and it would
|
||||
# also work.
|
||||
def listRightIndex(lst, value):
|
||||
return len(lst) - lst[-1::-1].index(value) - 1
|
||||
|
||||
# Need to take right index, because there could be a
|
||||
# func usage before.
|
||||
i = listRightIndex(call_path, func_name)
|
||||
first, last = call_path[:i], call_path[i + 1:]
|
||||
if not last and not call_path.index(func_name) != i:
|
||||
continue
|
||||
scopes = [scope]
|
||||
if first:
|
||||
scopes = evaluator.eval_call_path(iter(first), scope, pos)
|
||||
pos = None
|
||||
from jedi.evaluate import representation as er
|
||||
for scope in scopes:
|
||||
s = evaluator.find_types(scope, func_name, position=pos,
|
||||
search_global=not first,
|
||||
resolve_decorator=False)
|
||||
|
||||
c = [getattr(escope, 'base_func', None) or escope.base
|
||||
for escope in s
|
||||
if escope.isinstance(er.Function, er.Class)]
|
||||
if compare in c:
|
||||
# only if we have the correct function we execute
|
||||
# it, otherwise just ignore it.
|
||||
evaluator.follow_path(iter(last), s, scope)
|
||||
|
||||
return listener.param_possibilities
|
||||
|
||||
result = []
|
||||
for params in get_posibilities(evaluator, module, func_name):
|
||||
for p in params:
|
||||
if str(p) == param_name:
|
||||
result += evaluator.eval_statement(p.parent)
|
||||
return result
|
||||
|
||||
func = param.get_parent_until(pr.Function)
|
||||
current_module = param.get_parent_until()
|
||||
func_name = str(func.name)
|
||||
compare = func
|
||||
if func_name == '__init__' and isinstance(func.parent, pr.Class):
|
||||
func_name = str(func.parent.name)
|
||||
compare = func.parent
|
||||
|
||||
# get the param name
|
||||
if param.assignment_details:
|
||||
# first assignment details, others would be a syntax error
|
||||
expression_list, op = param.assignment_details[0]
|
||||
else:
|
||||
expression_list = param.expression_list()
|
||||
offset = 1 if expression_list[0] in ['*', '**'] else 0
|
||||
param_name = str(expression_list[offset].name)
|
||||
|
||||
# add the listener
|
||||
listener = ParamListener()
|
||||
func.listeners.add(listener)
|
||||
|
||||
result = []
|
||||
# This is like backtracking: Get the first possible result.
|
||||
for mod in imports.get_modules_containing_name([current_module], func_name):
|
||||
result = get_params_for_module(mod)
|
||||
if result:
|
||||
break
|
||||
|
||||
# cleanup: remove the listener; important: should not stick.
|
||||
func.listeners.remove(listener)
|
||||
|
||||
return result
|
||||
jedi/evaluate/finder.py (new file, 515 lines)
@@ -0,0 +1,515 @@
|
||||
import copy
|
||||
import sys
|
||||
|
||||
from jedi._compatibility import hasattr, unicode, u, reraise
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import debug
|
||||
from jedi import common
|
||||
from jedi import settings
|
||||
from jedi.evaluate import representation as er
|
||||
from jedi.evaluate import dynamic
|
||||
from jedi.evaluate import compiled
|
||||
from jedi.evaluate import docstrings
|
||||
from jedi.evaluate import iterable
|
||||
from jedi.evaluate import imports
|
||||
|
||||
|
||||
class NameFinder(object):
|
||||
def __init__(self, evaluator, scope, name_str, position=None):
|
||||
self._evaluator = evaluator
|
||||
self.scope = scope
|
||||
self.name_str = name_str
|
||||
self.position = position
|
||||
|
||||
def find(self, scopes, resolve_decorator=True):
|
||||
names = self.filter_name(scopes)
|
||||
types = self._names_to_types(names, resolve_decorator)
|
||||
debug.dbg('_names_to_types: %s, old: %s', names, types)
|
||||
return self._resolve_descriptors(types)
|
||||
|
||||
def scopes(self, search_global=False):
|
||||
if search_global:
|
||||
return get_names_of_scope(self._evaluator, self.scope, self.position)
|
||||
else:
|
||||
if isinstance(self.scope, er.Instance):
|
||||
return self.scope.scope_generator()
|
||||
else:
|
||||
if isinstance(self.scope, (er.Class, pr.Module)):
|
||||
# classes are only available directly via chaining?
|
||||
# strange stuff...
|
||||
names = self.scope.get_defined_names()
|
||||
else:
|
||||
names = _get_defined_names_for_position(self.scope, self.position)
|
||||
return iter([(self.scope, names)])
|
||||
|
||||
def filter_name(self, scope_generator):
|
||||
"""
|
||||
Filters all variables of a scope (which are defined in the
|
||||
`scope_generator`), until the name fits.
|
||||
"""
|
||||
result = []
|
||||
for nscope, name_list in scope_generator:
|
||||
break_scopes = []
|
||||
# here is the position stuff happening (sorting of variables)
|
||||
for name in sorted(name_list, key=lambda n: n.start_pos, reverse=True):
|
||||
p = name.parent.parent if name.parent else None
|
||||
if isinstance(p, er.InstanceElement) \
|
||||
and isinstance(p.var, pr.Class):
|
||||
p = p.var
|
||||
if self.name_str == name.get_code() and p not in break_scopes:
|
||||
if not self._name_is_array_assignment(name):
|
||||
result.append(name) # `arr[1] =` is not the definition
|
||||
# for comparison we need the raw class
|
||||
s = nscope.base if isinstance(nscope, er.Class) else nscope
|
||||
# this means that a definition was found and is not e.g.
|
||||
# in if/else.
|
||||
if result and not self._name_is_no_break_scope(name):
|
||||
if not name.parent or p == s:
|
||||
break
|
||||
break_scopes.append(p)
|
||||
if result:
|
||||
break
|
||||
|
||||
if not result and isinstance(self.scope, er.Instance):
|
||||
# __getattr__ / __getattribute__
|
||||
for r in self._check_getattr(self.scope):
|
||||
if not isinstance(r, compiled.CompiledObject):
|
||||
new_name = copy.copy(r.name)
|
||||
new_name.parent = r
|
||||
result.append(new_name)
|
||||
|
||||
debug.dbg('sfn filter "%s" in (%s-%s): %s@%s', self.name_str,
|
||||
self.scope, nscope, u(result), self.position)
|
||||
return result
|
||||
|
||||
def _check_getattr(self, inst):
|
||||
"""Checks for both __getattr__ and __getattribute__ methods"""
|
||||
result = []
|
||||
# str is important to lose the NamePart!
|
||||
name = compiled.create(str(self.name_str))
|
||||
with common.ignored(KeyError):
|
||||
result = inst.execute_subscope_by_name('__getattr__', [name])
|
||||
if not result:
|
||||
# this is a little bit special. `__getattribute__` is executed
|
||||
# before anything else. But: I know no use case, where this
|
||||
# could be practical and the jedi would return wrong types. If
|
||||
# you ever have something, let me know!
|
||||
with common.ignored(KeyError):
|
||||
result = inst.execute_subscope_by_name('__getattribute__', [name])
|
||||
return result
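# --- Illustrative sketch (not part of this diff) -----------------------------
# The fallback above covers attribute access that only exists dynamically:
class Proxy(object):
    def __getattr__(self, name):
        return ''

p = Proxy()
# Completing ``p.anything.`` falls back to __getattr__ and offers str methods;
# ``__getattribute__`` is only consulted when __getattr__ yields nothing, as
# the code above shows.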
|
||||
|
||||
def _name_is_no_break_scope(self, name):
|
||||
"""
|
||||
Returns True if the name does not break the scope search, e.g. for
augmented assignments, instance variables that are not class vars, or
multi-level imports.
|
||||
"""
|
||||
par = name.parent
|
||||
if par.isinstance(pr.Statement):
|
||||
details = par.assignment_details
|
||||
if details and details[0][1] != '=':
|
||||
return True
|
||||
|
||||
if isinstance(name, er.InstanceElement) \
|
||||
and not name.is_class_var:
|
||||
return True
|
||||
elif isinstance(par, pr.Import) and len(par.namespace) > 1:
|
||||
# TODO multi-level import non-breakable
|
||||
return True
|
||||
return False
|
||||
|
||||
def _name_is_array_assignment(self, name):
|
||||
if name.parent.isinstance(pr.Statement):
|
||||
def is_execution(calls):
|
||||
for c in calls:
|
||||
if isinstance(c, (unicode, str)):
|
||||
continue
|
||||
if c.isinstance(pr.Array):
|
||||
if is_execution(c):
|
||||
return True
|
||||
elif c.isinstance(pr.Call):
|
||||
# Compare start_pos, because names may be different
|
||||
# because of executions.
|
||||
if c.name.start_pos == name.start_pos \
|
||||
and c.execution:
|
||||
return True
|
||||
return False
|
||||
|
||||
is_exe = False
|
||||
for assignee, op in name.parent.assignment_details:
|
||||
is_exe |= is_execution(assignee)
|
||||
|
||||
if is_exe:
|
||||
# filter array[3] = ...
|
||||
# TODO check executions for dict contents
|
||||
return True
|
||||
return False
|
||||
|
||||
def _names_to_types(self, names, resolve_decorator):
|
||||
types = []
|
||||
# Add isinstance and other if/assert knowledge.
|
||||
flow_scope = self.scope
|
||||
while flow_scope:
|
||||
# TODO check if result is in scope -> no evaluation necessary
|
||||
n = check_flow_information(self._evaluator, flow_scope,
|
||||
self.name_str, self.position)
|
||||
if n:
|
||||
return n
|
||||
flow_scope = flow_scope.parent
|
||||
|
||||
for name in names:
|
||||
typ = name.parent
|
||||
if typ.isinstance(pr.ForFlow):
|
||||
types += self._handle_for_loops(typ)
|
||||
elif isinstance(typ, pr.Param):
|
||||
types += self._eval_param(typ)
|
||||
elif typ.isinstance(pr.Statement):
|
||||
types += self._remove_statements(typ)
|
||||
else:
|
||||
if isinstance(typ, pr.Class):
|
||||
typ = er.Class(self._evaluator, typ)
|
||||
elif isinstance(typ, pr.Function):
|
||||
typ = er.Function(self._evaluator, typ)
|
||||
if typ.isinstance(er.Function) and resolve_decorator:
|
||||
typ = typ.get_decorated_func()
|
||||
types.append(typ)
|
||||
return types
|
||||
|
||||
def _remove_statements(self, stmt):
|
||||
"""
|
||||
This is the part where statements are being stripped.
|
||||
|
||||
Due to lazy evaluation, statements like a = func; b = a; b() have to be
|
||||
evaluated.
|
||||
"""
|
||||
evaluator = self._evaluator
|
||||
types = []
|
||||
if stmt.is_global():
|
||||
# global keyword handling.
|
||||
for token_name in stmt.token_list[1:]:
|
||||
if isinstance(token_name, pr.Name):
|
||||
return evaluator.find_types(stmt.parent, str(token_name))
|
||||
else:
|
||||
# Remove the statement docstr stuff for now, that has to be
|
||||
# implemented with the evaluator class.
|
||||
#if stmt.docstr:
|
||||
#res_new.append(stmt)
|
||||
|
||||
check_instance = None
|
||||
if isinstance(stmt, er.InstanceElement) and stmt.is_class_var:
|
||||
check_instance = stmt.instance
|
||||
stmt = stmt.var
|
||||
|
||||
types += evaluator.eval_statement(stmt, seek_name=self.name_str)
|
||||
|
||||
if check_instance is not None:
|
||||
# class renames
|
||||
types = [er.InstanceElement(evaluator, check_instance, a, True)
|
||||
if isinstance(a, (er.Function, pr.Function))
|
||||
else a for a in types]
|
||||
return types
|
||||
|
||||
def _eval_param(self, r):
|
||||
evaluator = self._evaluator
|
||||
res_new = []
|
||||
func = r.parent
|
||||
|
||||
cls = func.parent.get_parent_until((pr.Class, pr.Function))
|
||||
|
||||
if isinstance(cls, pr.Class) and r.position_nr == 0:
|
||||
# This is where we add self - if it has never been
|
||||
# instantiated.
|
||||
if isinstance(self.scope, er.InstanceElement):
|
||||
res_new.append(self.scope.instance)
|
||||
else:
|
||||
for inst in evaluator.execute(er.Class(evaluator, cls)):
|
||||
inst.is_generated = True
|
||||
res_new.append(inst)
|
||||
return res_new
|
||||
|
||||
# Instances are typically faked, if the instance is not called from
|
||||
# outside. Here we check it for __init__ functions and return.
|
||||
if isinstance(func, er.InstanceElement) \
|
||||
and func.instance.is_generated and str(func.name) == '__init__':
|
||||
r = func.var.params[r.position_nr]
|
||||
|
||||
# Add docstring knowledge.
|
||||
doc_params = docstrings.follow_param(evaluator, r)
|
||||
if doc_params:
|
||||
return doc_params
|
||||
|
||||
if not r.is_generated:
|
||||
# Param owns no information itself.
|
||||
res_new += dynamic.search_params(evaluator, r)
|
||||
if not res_new:
|
||||
c = r.expression_list()[0]
|
||||
if c in ('*', '**'):
|
||||
t = 'tuple' if c == '*' else 'dict'
|
||||
typ = evaluator.find_types(compiled.builtin, t)[0]
|
||||
res_new = evaluator.execute(typ)
|
||||
if not r.assignment_details:
|
||||
# this means that there are no default params,
|
||||
# so just ignore it.
|
||||
return res_new
|
||||
return set(res_new) | evaluator.eval_statement(r, seek_name=self.name_str)
|
||||
|
||||
def _handle_for_loops(self, loop):
|
||||
# Take the first statement (for has always only
|
||||
# one, remember `in`). And follow it.
|
||||
if not loop.inputs:
|
||||
return []
|
||||
result = iterable.get_iterator_types(self._evaluator.eval_statement(loop.inputs[0]))
|
||||
if len(loop.set_vars) > 1:
|
||||
expression_list = loop.set_stmt.expression_list()
|
||||
# loops with loop.set_vars > 0 only have one command
|
||||
result = _assign_tuples(expression_list[0], result, self.name_str)
|
||||
return result
|
||||
|
||||
def _resolve_descriptors(self, types):
|
||||
"""Processes descriptors"""
|
||||
result = []
|
||||
for r in types:
|
||||
if isinstance(self.scope, (er.Instance, er.Class)) \
|
||||
and hasattr(r, 'get_descriptor_return'):
|
||||
# handle descriptors
|
||||
with common.ignored(KeyError):
|
||||
result += r.get_descriptor_return(self.scope)
|
||||
continue
|
||||
result.append(r)
|
||||
return result
|
||||
|
||||
|
||||
def check_flow_information(evaluator, flow, search_name, pos):
|
||||
""" Try to find out the type of a variable just with the information that
|
||||
is given by the flows: e.g. It is also responsible for assert checks.::
|
||||
|
||||
if isinstance(k, str):
|
||||
k. # <- completion here
|
||||
|
||||
ensures that `k` is a string.
|
||||
"""
|
||||
if not settings.dynamic_flow_information:
|
||||
return None
|
||||
|
||||
result = []
|
||||
if isinstance(flow, pr.IsScope) and not result:
|
||||
for ass in reversed(flow.asserts):
|
||||
if pos is None or ass.start_pos > pos:
|
||||
continue
|
||||
result = _check_isinstance_type(evaluator, ass, search_name)
|
||||
if result:
|
||||
break
|
||||
|
||||
if isinstance(flow, pr.Flow) and not result:
|
||||
if flow.command in ['if', 'while'] and len(flow.inputs) == 1:
|
||||
result = _check_isinstance_type(evaluator, flow.inputs[0], search_name)
|
||||
return result
|
||||
|
||||
|
||||
def _check_isinstance_type(evaluator, stmt, search_name):
|
||||
try:
|
||||
expression_list = stmt.expression_list()
|
||||
# this might be removed if we analyze and, etc
|
||||
assert len(expression_list) == 1
|
||||
call = expression_list[0]
|
||||
assert isinstance(call, pr.Call) and str(call.name) == 'isinstance'
|
||||
assert bool(call.execution)
|
||||
|
||||
# isinstance check
|
||||
isinst = call.execution.values
|
||||
assert len(isinst) == 2 # has two params
|
||||
obj, classes = [statement.expression_list() for statement in isinst]
|
||||
assert len(obj) == 1
|
||||
assert len(classes) == 1
|
||||
assert isinstance(obj[0], pr.Call)
|
||||
# names fit?
|
||||
assert str(obj[0].name) == search_name
|
||||
assert isinstance(classes[0], pr.StatementElement) # can be type or tuple
|
||||
except AssertionError:
|
||||
return []
|
||||
|
||||
result = []
|
||||
for c in evaluator.eval_call(classes[0]):
|
||||
for typ in (c.get_index_types() if isinstance(c, iterable.Array) else [c]):
|
||||
result += evaluator.execute(typ)
|
||||
return result
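# --- Illustrative sketch (not part of this diff) -----------------------------
# The shape of code the assertions above accept: a single ``isinstance`` call
# with a name and a type (or tuple of types) guarding the current position.
def refine(k):
    if isinstance(k, str):
        return k.upper()         # here ``k`` is narrowed to str
    if isinstance(k, (int, float)):
        return k.real            # tuple case: every class in it is executed
    assert isinstance(k, bytes)  # assert checks are handled the same way
    return k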
|
||||
|
||||
|
||||
def _get_defined_names_for_position(scope, position=None, start_scope=None):
|
||||
"""
|
||||
Return filtered version of ``scope.get_defined_names()``.
|
||||
|
||||
This function basically does what :meth:`scope.get_defined_names
|
||||
<parsing_representation.Scope.get_defined_names>` does.
|
||||
|
||||
- If `position` is given, delete all names defined after `position`.
|
||||
- For special objects like instances, `position` is ignored and all
|
||||
names are returned.
|
||||
|
||||
:type scope: :class:`parsing_representation.IsScope`
|
||||
:param scope: Scope in which names are searched.
|
||||
:param position: The position as a line/column tuple, default is infinity.
|
||||
"""
|
||||
names = scope.get_defined_names()
|
||||
# Instances have special rules, always return all the possible completions,
|
||||
# because class variables are always valid and the `self.` variables, too.
|
||||
if (not position or isinstance(scope, (iterable.Array, er.Instance))
|
||||
or start_scope != scope
|
||||
and isinstance(start_scope, (pr.Function, er.FunctionExecution))):
|
||||
return names
|
||||
names_new = []
|
||||
for n in names:
|
||||
if n.start_pos[0] is not None and n.start_pos < position:
|
||||
names_new.append(n)
|
||||
return names_new
|
||||
|
||||
|
||||
def get_names_of_scope(evaluator, scope, position=None, star_search=True, include_builtin=True):
|
||||
"""
|
||||
Get all completions (names) possible for the current scope. The star search
|
||||
option is only here to provide an optimization. Otherwise the whole thing
|
||||
would probably start a little recursive madness.
|
||||
|
||||
This function is used to include names from outer scopes. For example, when
|
||||
the current scope is function:
|
||||
|
||||
>>> from jedi.parser import Parser
|
||||
>>> parser = Parser('''
|
||||
... x = ['a', 'b', 'c']
|
||||
... def func():
|
||||
... y = None
|
||||
... ''')
|
||||
>>> scope = parser.module.subscopes[0]
|
||||
>>> scope
|
||||
<Function: func@3-4>
|
||||
|
||||
`get_names_of_scope` is a generator. First it yields names from most inner
|
||||
scope.
|
||||
|
||||
>>> from jedi.evaluate import Evaluator
|
||||
>>> pairs = list(get_names_of_scope(Evaluator(), scope))
|
||||
>>> pairs[0]
|
||||
(<Function: func@3-4>, [<Name: y@4,4>])
|
||||
|
||||
Then it yields the names from the scope one level further out. For this
example, this is the outermost scope.
|
||||
|
||||
>>> pairs[1]
|
||||
(<SubModule: None@1-4>, [<Name: x@2,0>, <Name: func@3,4>])
|
||||
|
||||
Finally, it yields names from builtin, if `include_builtin` is
|
||||
true (default).
|
||||
|
||||
>>> pairs[2] #doctest: +ELLIPSIS
|
||||
(<Builtin: ...builtin...>, [<CompiledName: ...>, ...])
|
||||
|
||||
:rtype: [(pr.Scope, [pr.Name])]
|
||||
:return: Return a generator that yields pairs of scopes and names.
|
||||
"""
|
||||
in_func_scope = scope
|
||||
non_flow = scope.get_parent_until(pr.Flow, reverse=True)
|
||||
while scope:
|
||||
if isinstance(scope, pr.SubModule) and scope.parent:
|
||||
# we don't want submodules to report if we have modules.
|
||||
scope = scope.parent
|
||||
continue
|
||||
# `pr.Class` is used, because the parent is never `Class`.
|
||||
# Ignore the Flows, because the classes and functions care for that.
|
||||
# InstanceElement of Class is ignored, if it is not the start scope.
|
||||
if not (scope != non_flow and scope.isinstance(pr.Class)
|
||||
or scope.isinstance(pr.Flow)
|
||||
or scope.isinstance(er.Instance)
|
||||
and non_flow.isinstance(er.Function)
|
||||
or isinstance(scope, compiled.CompiledObject)
|
||||
and scope.type() == 'class' and in_func_scope != scope):
|
||||
try:
|
||||
if isinstance(scope, er.Instance):
|
||||
for g in scope.scope_generator():
|
||||
yield g
|
||||
else:
|
||||
yield scope, _get_defined_names_for_position(scope, position, in_func_scope)
|
||||
except StopIteration:
|
||||
reraise(common.MultiLevelStopIteration, sys.exc_info()[2])
|
||||
if scope.isinstance(pr.ForFlow) and scope.is_list_comp:
|
||||
# is a list comprehension
|
||||
yield scope, scope.get_set_vars(is_internal_call=True)
|
||||
|
||||
scope = scope.parent
|
||||
# This is used, because subscopes (Flow scopes) would distort the
|
||||
# results.
|
||||
if scope and scope.isinstance(er.Function, pr.Function, er.FunctionExecution):
|
||||
in_func_scope = scope
|
||||
|
||||
# Add star imports.
|
||||
if star_search:
|
||||
for s in imports.remove_star_imports(evaluator, non_flow.get_parent_until()):
|
||||
for g in get_names_of_scope(evaluator, s, star_search=False):
|
||||
yield g
|
||||
|
||||
# Add builtins to the global scope.
|
||||
if include_builtin:
|
||||
yield compiled.builtin, compiled.builtin.get_defined_names()
|
||||
|
||||
|
||||
def _assign_tuples(tup, results, seek_name):
|
||||
"""
|
||||
This is a normal assignment checker. In Python, functions and other things
can return tuples:
>>> a, b = 1, ""
>>> a, (b, c) = 1, ("", 1.0)

Here, if `seek_name` is "a", the number type will be returned.
The first part (before `=`) is the assignment targets, the second one the results.
|
||||
|
||||
:type tup: pr.Array
|
||||
"""
|
||||
def eval_results(index):
|
||||
types = []
|
||||
for r in results:
|
||||
try:
|
||||
func = r.get_exact_index_types
|
||||
except AttributeError:
|
||||
debug.warning("invalid tuple lookup %s of result %s in %s",
|
||||
tup, results, seek_name)
|
||||
else:
|
||||
with common.ignored(IndexError):
|
||||
types += func(index)
|
||||
return types
|
||||
|
||||
result = []
|
||||
for i, stmt in enumerate(tup):
|
||||
# Used in assignments. There is just one call and no other things,
|
||||
# therefore we can just assume, that the first part is important.
|
||||
command = stmt.expression_list()[0]
|
||||
|
||||
if tup.type == pr.Array.NOARRAY:
|
||||
|
||||
# unnecessary braces -> just remove.
|
||||
r = results
|
||||
else:
|
||||
r = eval_results(i)
|
||||
|
||||
# LHS of tuples can be nested, so resolve it recursively
|
||||
result += find_assignments(command, r, seek_name)
|
||||
return result
|
||||
|
||||
|
||||
def find_assignments(lhs, results, seek_name):
|
||||
"""
|
||||
Check if `seek_name` is in the left hand side `lhs` of assignment.
|
||||
|
||||
`lhs` can simply be a variable (`pr.Call`) or a tuple/list (`pr.Array`)
|
||||
representing the following cases::
|
||||
|
||||
a = 1 # lhs is pr.Call
|
||||
(a, b) = 2 # lhs is pr.Array
|
||||
|
||||
:type lhs: pr.Call
|
||||
:type results: list
|
||||
:type seek_name: str
|
||||
"""
|
||||
if isinstance(lhs, pr.Array):
|
||||
return _assign_tuples(lhs, results, seek_name)
|
||||
elif lhs.name.names[-1] == seek_name:
|
||||
return results
|
||||
else:
|
||||
return []
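# --- Illustrative sketch (not part of this diff) -----------------------------
# Nested targets that _assign_tuples()/find_assignments() walk recursively:
a, (b, c) = 1, ('', 1.0)
# seek_name 'a' -> int, 'b' -> str, 'c' -> float; extra parentheses around a
# single target (pr.Array.NOARRAY) are simply unwrapped.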
|
||||
@@ -1,5 +1,3 @@
|
||||
from __future__ import with_statement
|
||||
|
||||
import copy
|
||||
|
||||
from jedi import common
|
||||
@@ -62,14 +60,6 @@ def fast_parent_copy(obj):
|
||||
return recursion(obj)
|
||||
|
||||
|
||||
def check_arr_index(arr, pos):
|
||||
positions = arr.arr_el_pos
|
||||
for index, comma_pos in enumerate(positions):
|
||||
if pos < comma_pos:
|
||||
return index
|
||||
return len(positions)
|
||||
|
||||
|
||||
def array_for_pos(stmt, pos, array_types=None):
|
||||
"""Searches for the array and position of a tuple"""
|
||||
def search_array(arr, pos):
|
||||
@@ -105,7 +95,7 @@ def array_for_pos(stmt, pos, array_types=None):
|
||||
if stmt.start_pos >= pos >= stmt.end_pos:
|
||||
return None, 0
|
||||
|
||||
for command in stmt.get_commands():
|
||||
for command in stmt.expression_list():
|
||||
arr = None
|
||||
if isinstance(command, pr.Array):
|
||||
arr, index = search_array(command, pos)
|
||||
@@ -132,11 +122,54 @@ def search_call_signatures(stmt, pos):
|
||||
return None, 0, False
|
||||
|
||||
|
||||
class FakeStatement(pr.Statement):
|
||||
class SubModule():
|
||||
line_offset = 0
|
||||
def scan_statement_for_calls(stmt, search_name, assignment_details=False):
|
||||
""" Returns the function Calls that match search_name in an Array. """
|
||||
def scan_array(arr, search_name):
|
||||
result = []
|
||||
if arr.type == pr.Array.DICT:
|
||||
for key_stmt, value_stmt in arr.items():
|
||||
result += scan_statement_for_calls(key_stmt, search_name)
|
||||
result += scan_statement_for_calls(value_stmt, search_name)
|
||||
else:
|
||||
for stmt in arr:
|
||||
result += scan_statement_for_calls(stmt, search_name)
|
||||
return result
|
||||
|
||||
def __init__(self, content):
|
||||
cls = type(self)
|
||||
check = list(stmt.expression_list())
|
||||
if assignment_details:
|
||||
for expression_list, op in stmt.assignment_details:
|
||||
check += expression_list
|
||||
|
||||
result = []
|
||||
for c in check:
|
||||
if isinstance(c, pr.Array):
|
||||
result += scan_array(c, search_name)
|
||||
elif isinstance(c, pr.Call):
|
||||
s_new = c
|
||||
while s_new is not None:
|
||||
n = s_new.name
|
||||
if isinstance(n, pr.Name) and search_name in n.names:
|
||||
result.append(c)
|
||||
|
||||
if s_new.execution is not None:
|
||||
result += scan_array(s_new.execution, search_name)
|
||||
s_new = s_new.next
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class FakeSubModule():
|
||||
line_offset = 0
|
||||
|
||||
|
||||
class FakeStatement(pr.Statement):
|
||||
def __init__(self, expression_list, start_pos=(0, 0)):
|
||||
p = start_pos
|
||||
super(FakeStatement, self).__init__(FakeSubModule, expression_list, p, p)
|
||||
self._expression_list = expression_list
|
||||
|
||||
|
||||
class FakeName(pr.Name):
|
||||
def __init__(self, name, parent=None):
|
||||
p = 0, 0
|
||||
super(cls, self).__init__(cls.SubModule, [content], p, p)
|
||||
super(FakeName, self).__init__(FakeSubModule, [(name, p)], p, p, parent)
|
||||
@@ -11,24 +11,21 @@ correct implementation is delegated to _compatibility.
|
||||
This module also supports import autocompletion, which means to complete
|
||||
statements like ``from datetim`` (cursor at the end would return ``datetime``).
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import os
|
||||
import pkgutil
|
||||
import sys
|
||||
import itertools
|
||||
|
||||
from jedi._compatibility import find_module
|
||||
from jedi import modules
|
||||
from jedi import common
|
||||
from jedi import debug
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import cache
|
||||
import builtin
|
||||
import evaluate
|
||||
|
||||
# for debugging purposes only
|
||||
imports_processed = 0
|
||||
from jedi.parser import fast
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.evaluate import sys_path
|
||||
from jedi import settings
|
||||
from jedi.common import source_to_unicode
|
||||
from jedi.evaluate import compiled
|
||||
|
||||
|
||||
class ModuleNotFound(Exception):
|
||||
@@ -45,8 +42,9 @@ class ImportPath(pr.Base):
|
||||
|
||||
GlobalNamespace = GlobalNamespace()
|
||||
|
||||
def __init__(self, import_stmt, is_like_search=False, kill_count=0,
|
||||
def __init__(self, evaluator, import_stmt, is_like_search=False, kill_count=0,
|
||||
direct_resolve=False, is_just_from=False):
|
||||
self._evaluator = evaluator
|
||||
self.import_stmt = import_stmt
|
||||
self.is_like_search = is_like_search
|
||||
self.direct_resolve = direct_resolve
|
||||
@@ -96,7 +94,7 @@ class ImportPath(pr.Base):
|
||||
n = pr.Name(i._sub_module, names, zero, zero, self.import_stmt)
|
||||
new = pr.Import(i._sub_module, zero, zero, n)
|
||||
new.parent = parent
|
||||
debug.dbg('Generated a nested import: %s' % new)
|
||||
debug.dbg('Generated a nested import: %s', new)
|
||||
return new
|
||||
|
||||
def get_defined_names(self, on_import_stmt=False):
|
||||
@@ -114,9 +112,9 @@ class ImportPath(pr.Base):
|
||||
|
||||
if self._is_relative_import():
|
||||
rel_path = self._get_relative_path() + '/__init__.py'
|
||||
with common.ignored(IOError):
|
||||
m = modules.Module(rel_path)
|
||||
names += m.parser.module.get_defined_names()
|
||||
if os.path.exists(rel_path):
|
||||
m = load_module(rel_path)
|
||||
names += m.get_defined_names()
|
||||
else:
|
||||
if on_import_stmt and isinstance(scope, pr.Module) \
|
||||
and scope.path.endswith('__init__.py'):
|
||||
@@ -131,10 +129,11 @@ class ImportPath(pr.Base):
|
||||
# ``sys.modules`` modification.
|
||||
p = (0, 0)
|
||||
names.append(pr.Name(self.GlobalNamespace, [('path', p)],
|
||||
p, p, self.import_stmt))
|
||||
p, p, self.import_stmt))
|
||||
continue
|
||||
for s, scope_names in evaluate.get_names_of_scope(scope,
|
||||
include_builtin=False):
|
||||
from jedi.evaluate import finder
|
||||
for s, scope_names in finder.get_names_of_scope(self._evaluator,
|
||||
scope, include_builtin=False):
|
||||
for n in scope_names:
|
||||
if self.import_stmt.from_ns is None \
|
||||
or self.is_partial_import:
|
||||
@@ -181,13 +180,13 @@ class ImportPath(pr.Base):
|
||||
in_path.append(new)
|
||||
|
||||
module = self.import_stmt.get_parent_until()
|
||||
return in_path + modules.sys_path_with_modifications(module)
|
||||
return in_path + sys_path.sys_path_with_modifications(module)
|
||||
|
||||
def follow(self, is_goto=False):
|
||||
"""
|
||||
Returns the imported modules.
|
||||
"""
|
||||
if evaluate.follow_statement.push_stmt(self.import_stmt):
|
||||
if self._evaluator.recursion_detector.push_stmt(self.import_stmt):
|
||||
# check recursion
|
||||
return []
|
||||
|
||||
@@ -195,12 +194,12 @@ class ImportPath(pr.Base):
|
||||
try:
|
||||
scope, rest = self._follow_file_system()
|
||||
except ModuleNotFound:
|
||||
debug.warning('Module not found: ' + str(self.import_stmt))
|
||||
evaluate.follow_statement.pop_stmt()
|
||||
debug.warning('Module not found: %s', self.import_stmt)
|
||||
self._evaluator.recursion_detector.pop_stmt()
|
||||
return []
|
||||
|
||||
scopes = [scope]
|
||||
scopes += remove_star_imports(scope)
|
||||
scopes += remove_star_imports(self._evaluator, scope)
|
||||
|
||||
# follow the rest of the import (not FS -> classes, functions)
|
||||
if len(rest) > 1 or rest and self.is_like_search:
|
||||
@@ -211,15 +210,15 @@ class ImportPath(pr.Base):
|
||||
# ``os.path``, because it's a very important one in Python
|
||||
# that is being achieved by messing with ``sys.modules`` in
|
||||
# ``os``.
|
||||
scopes = evaluate.follow_path(iter(rest), scope, scope)
|
||||
scopes = self._evaluator.follow_path(iter(rest), [scope], scope)
|
||||
elif rest:
|
||||
if is_goto:
|
||||
scopes = itertools.chain.from_iterable(
|
||||
evaluate.find_name(s, rest[0], is_goto=True)
|
||||
self._evaluator.find_types(s, rest[0], is_goto=True)
|
||||
for s in scopes)
|
||||
else:
|
||||
scopes = itertools.chain.from_iterable(
|
||||
evaluate.follow_path(iter(rest), s, s)
|
||||
self._evaluator.follow_path(iter(rest), [s], s)
|
||||
for s in scopes)
|
||||
scopes = list(scopes)
|
||||
|
||||
@@ -227,9 +226,9 @@ class ImportPath(pr.Base):
|
||||
scopes.append(self._get_nested_import(scope))
|
||||
else:
|
||||
scopes = [ImportPath.GlobalNamespace]
|
||||
debug.dbg('after import', scopes)
|
||||
debug.dbg('after import: %s', scopes)
|
||||
|
||||
evaluate.follow_statement.pop_stmt()
|
||||
self._evaluator.recursion_detector.pop_stmt()
|
||||
return scopes
|
||||
|
||||
def _is_relative_import(self):
|
||||
@@ -288,7 +287,7 @@ class ImportPath(pr.Base):
|
||||
sys_path_mod.append(temp_path)
|
||||
old_path, temp_path = temp_path, os.path.dirname(temp_path)
|
||||
else:
|
||||
sys_path_mod = list(modules.get_sys_path())
|
||||
sys_path_mod = list(sys_path.get_sys_path())
|
||||
|
||||
return self._follow_sys_path(sys_path_mod)
|
||||
|
||||
@@ -297,19 +296,17 @@ class ImportPath(pr.Base):
|
||||
Find a module with a path (of the module, like usb.backend.libusb10).
|
||||
"""
|
||||
def follow_str(ns_path, string):
|
||||
debug.dbg('follow_module', ns_path, string)
|
||||
debug.dbg('follow_module %s %s', ns_path, string)
|
||||
path = None
|
||||
if ns_path:
|
||||
path = ns_path
|
||||
elif self._is_relative_import():
|
||||
path = self._get_relative_path()
|
||||
|
||||
global imports_processed
|
||||
imports_processed += 1
|
||||
if path is not None:
|
||||
importing = find_module(string, [path])
|
||||
else:
|
||||
debug.dbg('search_module', string, self.file_path)
|
||||
debug.dbg('search_module %s %s', string, self.file_path)
|
||||
# Override the sys.path. It works only good that way.
|
||||
# Injecting the path directly into `find_module` did not work.
|
||||
sys.path, temp = sys_path, sys.path
|
||||
@@ -364,17 +361,12 @@ class ImportPath(pr.Base):
|
||||
else:
|
||||
source = current_namespace[0].read()
|
||||
current_namespace[0].close()
|
||||
if path.endswith('.py'):
|
||||
f = modules.Module(path, source)
|
||||
else:
|
||||
f = builtin.BuiltinModule(path=path)
|
||||
return load_module(path, source), rest
|
||||
else:
|
||||
f = builtin.BuiltinModule(name=path)
|
||||
|
||||
return f.parser.module, rest
|
||||
return load_module(name=path), rest
|
||||
|
||||
|
||||
def strip_imports(scopes):
|
||||
def strip_imports(evaluator, scopes):
|
||||
"""
|
||||
Here we strip the imports - they don't necessarily get resolved.
Is this really used anymore? Maybe merge with remove_star_imports?
|
||||
@@ -382,26 +374,87 @@ def strip_imports(scopes):
|
||||
result = []
|
||||
for s in scopes:
|
||||
if isinstance(s, pr.Import):
|
||||
result += ImportPath(s).follow()
|
||||
result += ImportPath(evaluator, s).follow()
|
||||
else:
|
||||
result.append(s)
|
||||
return result
|
||||
|
||||
|
||||
@cache.cache_star_import
|
||||
def remove_star_imports(scope, ignored_modules=()):
|
||||
def remove_star_imports(evaluator, scope, ignored_modules=()):
|
||||
"""
|
||||
Check a module for star imports:
|
||||
>>> from module import *
|
||||
Check a module for star imports::
|
||||
|
||||
from module import *
|
||||
|
||||
and follow these modules.
|
||||
"""
|
||||
modules = strip_imports(i for i in scope.get_imports() if i.star)
|
||||
modules = strip_imports(evaluator, (i for i in scope.get_imports() if i.star))
|
||||
new = []
|
||||
for m in modules:
|
||||
if m not in ignored_modules:
|
||||
new += remove_star_imports(m, modules)
|
||||
new += remove_star_imports(evaluator, m, modules)
|
||||
modules += new
|
||||
|
||||
# Filter duplicate modules.
|
||||
return set(modules)
|
||||
|
||||
|
||||
def load_module(path=None, source=None, name=None):
|
||||
def load(source):
|
||||
if path is not None and path.endswith('.py'):
|
||||
if source is None:
|
||||
with open(path) as f:
|
||||
source = f.read()
|
||||
else:
|
||||
return compiled.load_module(path, name)
|
||||
p = path or name
|
||||
p = fast.FastParser(common.source_to_unicode(source), p)
|
||||
cache.save_parser(path, name, p)
|
||||
return p.module
|
||||
|
||||
cached = cache.load_parser(path, name)
|
||||
return load(source) if cached is None else cached.module
|
||||
|
||||
|
||||
def get_modules_containing_name(mods, name):
|
||||
"""
|
||||
Search a name in the directories of modules.
|
||||
"""
|
||||
def check_python_file(path):
|
||||
try:
|
||||
return cache.parser_cache[path].parser.module
|
||||
except KeyError:
|
||||
try:
|
||||
return check_fs(path)
|
||||
except IOError:
|
||||
return None
|
||||
|
||||
def check_fs(path):
|
||||
with open(path) as f:
|
||||
source = source_to_unicode(f.read())
|
||||
if name in source:
|
||||
return load_module(path, source)
|
||||
|
||||
# skip non python modules
|
||||
mods = set(m for m in mods if not isinstance(m, compiled.CompiledObject))
|
||||
mod_paths = set()
|
||||
for m in mods:
|
||||
mod_paths.add(m.path)
|
||||
yield m
|
||||
|
||||
if settings.dynamic_params_for_other_modules:
|
||||
paths = set(settings.additional_dynamic_modules)
|
||||
for p in mod_paths:
|
||||
if p is not None:
|
||||
d = os.path.dirname(p)
|
||||
for entry in os.listdir(d):
|
||||
if entry not in mod_paths:
|
||||
if entry.endswith('.py'):
|
||||
paths.add(d + os.path.sep + entry)
|
||||
|
||||
for p in sorted(paths):
|
||||
# make testing easier, sort it - same results on every interpreter
|
||||
c = check_python_file(p)
|
||||
if c is not None and c not in mods:
|
||||
yield c
|
||||
jedi/evaluate/iterable.py (new file, 392 lines)
@@ -0,0 +1,392 @@
|
||||
import itertools
|
||||
|
||||
from jedi import common
|
||||
from jedi import debug
|
||||
from jedi import settings
|
||||
from jedi._compatibility import use_metaclass, is_py3, unicode
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.evaluate import compiled
|
||||
from jedi.evaluate import helpers
|
||||
from jedi.evaluate.cache import CachedMetaClass, memoize_default
|
||||
from jedi.cache import underscore_memoization
|
||||
|
||||
|
||||
class Generator(use_metaclass(CachedMetaClass, pr.Base)):
|
||||
"""Handling of `yield` functions."""
|
||||
def __init__(self, evaluator, func, var_args):
|
||||
super(Generator, self).__init__()
|
||||
self._evaluator = evaluator
|
||||
self.func = func
|
||||
self.var_args = var_args
|
||||
|
||||
@underscore_memoization
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
Yields the names defined on a generator; the ones that can return its
content (``__next__``, ``send``, ``next``) are wrapped in ``GeneratorMethod``.
|
||||
"""
|
||||
executes_generator = '__next__', 'send', 'next'
|
||||
for name in compiled.generator_obj.get_defined_names():
|
||||
if name.name in executes_generator:
|
||||
parent = GeneratorMethod(self, name.parent)
|
||||
yield helpers.FakeName(name.name, parent)
|
||||
else:
|
||||
yield name
|
||||
|
||||
def iter_content(self):
|
||||
""" returns the content of __iter__ """
|
||||
return self._evaluator.execute(self.func, self.var_args, True)
|
||||
|
||||
def get_index_types(self, index=None):
|
||||
debug.warning('Tried to get array access on a generator: %s', self)
|
||||
return []
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'parent', 'get_imports',
|
||||
'asserts', 'doc', 'docstr', 'get_parent_until',
|
||||
'get_code', 'subscopes']:
|
||||
raise AttributeError("Accessing %s of %s is not allowed."
|
||||
% (self, name))
|
||||
return getattr(self.func, name)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self.func)
|
||||
|
||||
|
||||
class GeneratorMethod(object):
|
||||
"""``__next__`` and ``send`` methods."""
|
||||
def __init__(self, generator, builtin_func):
|
||||
self._builtin_func = builtin_func
|
||||
self._generator = generator
|
||||
|
||||
def execute(self):
|
||||
return self._generator.iter_content()
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._builtin_func, name)
|
||||
|
||||
|
||||
class Array(use_metaclass(CachedMetaClass, pr.Base)):
|
||||
"""
|
||||
Used as a mirror to pr.Array, if needed. It defines some getter
|
||||
methods which are important in this module.
|
||||
"""
|
||||
def __init__(self, evaluator, array):
|
||||
self._evaluator = evaluator
|
||||
self._array = array
|
||||
|
||||
def get_index_types(self, index_arr=None):
|
||||
""" Get the types of a specific index or all, if not given """
|
||||
if index_arr is not None:
|
||||
if index_arr and [x for x in index_arr if ':' in x.expression_list()]:
|
||||
# array slicing
|
||||
return [self]
|
||||
|
||||
index_possibilities = self._follow_values(index_arr)
|
||||
if len(index_possibilities) == 1:
|
||||
# This is indexing only one element, with a fixed index number,
|
||||
# otherwise it just ignores the index (e.g. [1+1]).
|
||||
index = index_possibilities[0]
|
||||
if isinstance(index, compiled.CompiledObject) \
|
||||
and isinstance(index.obj, (int, str, unicode)):
|
||||
with common.ignored(KeyError, IndexError, TypeError):
|
||||
return self.get_exact_index_types(index.obj)
|
||||
|
||||
result = list(self._follow_values(self._array.values))
|
||||
result += check_array_additions(self._evaluator, self)
|
||||
return set(result)
|
||||
|
||||
def get_exact_index_types(self, mixed_index):
|
||||
""" Here the index is an int/str. Raises IndexError/KeyError """
|
||||
index = mixed_index
|
||||
if self.type == pr.Array.DICT:
|
||||
index = None
|
||||
for i, key_statement in enumerate(self._array.keys):
|
||||
# Because we only want the key to be a string.
|
||||
key_expression_list = key_statement.expression_list()
|
||||
if len(key_expression_list) != 1: # cannot deal with complex strings
|
||||
continue
|
||||
key = key_expression_list[0]
|
||||
if isinstance(key, pr.Literal):
|
||||
key = key.value
|
||||
elif isinstance(key, pr.Name):
|
||||
key = str(key)
|
||||
else:
|
||||
continue
|
||||
|
||||
if mixed_index == key:
|
||||
index = i
|
||||
break
|
||||
if index is None:
|
||||
raise KeyError('No key found in dictionary')
|
||||
|
||||
# Can raise an IndexError
|
||||
values = [self._array.values[index]]
|
||||
return self._follow_values(values)
|
||||
|
||||
def _follow_values(self, values):
|
||||
""" helper function for the index getters """
|
||||
return list(itertools.chain.from_iterable(self._evaluator.eval_statement(v)
|
||||
for v in values))
|
||||
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
This method generates all `ArrayMethod` for one pr.Array.
|
||||
It returns e.g. for a list: append, pop, ...
|
||||
"""
|
||||
# `array.type` is a string with the type, e.g. 'list'.
|
||||
scope = self._evaluator.find_types(compiled.builtin, self._array.type)[0]
|
||||
scope = self._evaluator.execute(scope)[0] # builtins only have one class
|
||||
names = scope.get_defined_names()
|
||||
return [ArrayMethod(n) for n in names]
|
||||
|
||||
@common.safe_property
|
||||
def parent(self):
|
||||
return compiled.builtin
|
||||
|
||||
def get_parent_until(self):
|
||||
return compiled.builtin
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['type', 'start_pos', 'get_only_subelement', 'parent',
|
||||
'get_parent_until', 'items']:
|
||||
raise AttributeError('Strange access on %s: %s.' % (self, name))
|
||||
return getattr(self._array, name)
|
||||
|
||||
def __getitem__(self):
|
||||
return self._array.__getitem__()
|
||||
|
||||
def __iter__(self):
|
||||
return self._array.__iter__()
|
||||
|
||||
def __len__(self):
|
||||
return self._array.__len__()
|
||||
|
||||
def __repr__(self):
|
||||
return "<e%s of %s>" % (type(self).__name__, self._array)
|
||||
|
||||
|
||||
class ArrayMethod(object):
|
||||
"""
|
||||
A name, e.g. `list.append`, it is used to access the original array
|
||||
methods.
|
||||
"""
|
||||
def __init__(self, name):
|
||||
super(ArrayMethod, self).__init__()
|
||||
self.name = name
|
||||
|
||||
def __getattr__(self, name):
|
||||
# Set access privileges:
|
||||
if name not in ['parent', 'names', 'start_pos', 'end_pos', 'get_code']:
|
||||
raise AttributeError('Strange access on %s: %s.' % (self, name))
|
||||
return getattr(self.name, name)
|
||||
|
||||
def get_parent_until(self):
|
||||
return compiled.builtin
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self.name)
|
||||
|
||||
|
||||
def get_iterator_types(inputs):
|
||||
"""Returns the types of any iterator (arrays, yields, __iter__, etc)."""
|
||||
iterators = []
|
||||
# Take the first statement (for has always only
|
||||
# one, remember `in`). And follow it.
|
||||
for it in inputs:
|
||||
if isinstance(it, (Generator, Array, ArrayInstance)):
|
||||
iterators.append(it)
|
||||
else:
|
||||
if not hasattr(it, 'execute_subscope_by_name'):
|
||||
debug.warning('iterator/for loop input wrong: %s', it)
|
||||
continue
|
||||
try:
|
||||
iterators += it.execute_subscope_by_name('__iter__')
|
||||
except KeyError:
|
||||
debug.warning('iterators: No __iter__ method found.')
|
||||
|
||||
result = []
|
||||
from jedi.evaluate.representation import Instance
|
||||
for gen in iterators:
|
||||
if isinstance(gen, Array):
|
||||
# Array is a little bit special, since this is an internal
|
||||
# array, but there's also the list builtin, which is
|
||||
# another thing.
|
||||
result += gen.get_index_types()
|
||||
elif isinstance(gen, Instance):
|
||||
# __iter__ returned an instance.
|
||||
name = '__next__' if is_py3 else 'next'
|
||||
try:
|
||||
result += gen.execute_subscope_by_name(name)
|
||||
except KeyError:
|
||||
debug.warning('Instance has no __next__ function in %s.', gen)
|
||||
else:
|
||||
# is a generator
|
||||
result += gen.iter_content()
|
||||
return result
|
||||
|
||||
|
||||
def check_array_additions(evaluator, array):
|
||||
""" Just a mapper function for the internal _check_array_additions """
|
||||
if not pr.Array.is_type(array._array, pr.Array.LIST, pr.Array.SET):
|
||||
# TODO also check for dict updates
|
||||
return []
|
||||
|
||||
is_list = array._array.type == 'list'
|
||||
current_module = array._array.get_parent_until()
|
||||
res = _check_array_additions(evaluator, array, current_module, is_list)
|
||||
return res
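# --- Illustrative sketch (not part of this diff) -----------------------------
# The kinds of additions the search below can pick up for a list; ``pop`` and
# friends are deliberately ignored (see the module docstring of dynamic.py).
def _example():
    a = ['']
    a.append(1)        # tracked
    a.insert(0, 1.0)   # tracked (the second argument is evaluated)
    a.extend([2])      # tracked via get_iterator_types()
    a.pop()            # not tracked
    return a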
|
||||
|
||||
|
||||
@memoize_default([], evaluator_is_first_arg=True)
|
||||
def _check_array_additions(evaluator, compare_array, module, is_list):
|
||||
"""
|
||||
Checks if a `pr.Array` has "add" statements:
|
||||
>>> a = [""]
|
||||
>>> a.append(1)
|
||||
"""
|
||||
if not settings.dynamic_array_additions or isinstance(module, compiled.CompiledObject):
|
||||
return []
|
||||
|
||||
def check_calls(calls, add_name):
|
||||
"""
|
||||
Calls are processed here. The part before the call is searched and
|
||||
compared with the original Array.
|
||||
"""
|
||||
result = []
|
||||
for c in calls:
|
||||
call_path = list(c.generate_call_path())
|
||||
separate_index = call_path.index(add_name)
|
||||
if add_name == call_path[-1] or separate_index == 0:
|
||||
# this means that there is no execution -> [].append
|
||||
# or the keyword is at the start -> append()
|
||||
continue
|
||||
backtrack_path = iter(call_path[:separate_index])
|
||||
|
||||
position = c.start_pos
|
||||
scope = c.get_parent_until(pr.IsScope)
|
||||
|
||||
found = evaluator.eval_call_path(backtrack_path, scope, position)
|
||||
if not compare_array in found:
|
||||
continue
|
||||
|
||||
params = call_path[separate_index + 1]
|
||||
if not params.values:
|
||||
continue # no params: just ignore it
|
||||
if add_name in ['append', 'add']:
|
||||
for param in params:
|
||||
result += evaluator.eval_statement(param)
|
||||
elif add_name in ['insert']:
|
||||
try:
|
||||
second_param = params[1]
|
||||
except IndexError:
|
||||
continue
|
||||
else:
|
||||
result += evaluator.eval_statement(second_param)
|
||||
elif add_name in ['extend', 'update']:
|
||||
for param in params:
|
||||
iterators = evaluator.eval_statement(param)
|
||||
result += get_iterator_types(iterators)
|
||||
return result
|
||||
|
||||
from jedi.evaluate import representation as er
|
||||
|
||||
def get_execution_parent(element, *stop_classes):
|
||||
""" Used to get an Instance/FunctionExecution parent """
|
||||
if isinstance(element, Array):
|
||||
stmt = element._array.parent
|
||||
else:
|
||||
# is an Instance with an ArrayInstance inside
|
||||
stmt = element.var_args[0].var_args.parent
|
||||
if isinstance(stmt, er.InstanceElement):
|
||||
stop_classes = list(stop_classes) + [er.Function]
|
||||
return stmt.get_parent_until(stop_classes)
|
||||
|
||||
temp_param_add = settings.dynamic_params_for_other_modules
|
||||
settings.dynamic_params_for_other_modules = False
|
||||
|
||||
search_names = ['append', 'extend', 'insert'] if is_list else \
|
||||
['add', 'update']
|
||||
comp_arr_parent = get_execution_parent(compare_array, er.FunctionExecution)
|
||||
|
||||
possible_stmts = []
|
||||
res = []
|
||||
for n in search_names:
|
||||
try:
|
||||
possible_stmts += module.used_names[n]
|
||||
except KeyError:
|
||||
continue
|
||||
for stmt in possible_stmts:
|
||||
# Check if the original scope is an execution. If it is, one
|
||||
# can search for the same statement, that is in the module
|
||||
# dict. Executions are somewhat special in jedi, since they
|
||||
# literally copy the contents of a function.
|
||||
if isinstance(comp_arr_parent, er.FunctionExecution):
|
||||
stmt = comp_arr_parent. \
|
||||
get_statement_for_position(stmt.start_pos)
|
||||
if stmt is None:
|
||||
continue
|
||||
# InstanceElements are special, because they don't get copied,
|
||||
# but have this wrapper around them.
|
||||
if isinstance(comp_arr_parent, er.InstanceElement):
|
||||
stmt = er.InstanceElement(comp_arr_parent.instance, stmt)
|
||||
|
||||
if evaluator.recursion_detector.push_stmt(stmt):
|
||||
# check recursion
|
||||
continue
|
||||
|
||||
res += check_calls(helpers.scan_statement_for_calls(stmt, n), n)
|
||||
evaluator.recursion_detector.pop_stmt()
|
||||
# reset settings
|
||||
settings.dynamic_params_for_other_modules = temp_param_add
|
||||
return res
|
||||
|
||||
|
||||
def check_array_instances(evaluator, instance):
|
||||
"""Used for set() and list() instances."""
|
||||
if not settings.dynamic_arrays_instances:
|
||||
return instance.var_args
|
||||
ai = ArrayInstance(evaluator, instance)
|
||||
return [ai]
|
||||
|
||||
|
||||
class ArrayInstance(pr.Base):
|
||||
"""
|
||||
Used for the usage of set() and list().
|
||||
This is definitely a hack, but a good one :-)
|
||||
It makes it possible to use set/list conversions.
|
||||
"""
|
||||
def __init__(self, evaluator, instance):
|
||||
self._evaluator = evaluator
|
||||
self.instance = instance
|
||||
self.var_args = instance.var_args
|
||||
|
||||
def iter_content(self):
|
||||
"""
|
||||
The index is just ignored here, because of all the appends, etc.;
lists/sets are too complicated to handle that.
|
||||
"""
|
||||
items = []
|
||||
from jedi.evaluate.representation import Instance
|
||||
for stmt in self.var_args:
|
||||
for typ in self._evaluator.eval_statement(stmt):
|
||||
if isinstance(typ, Instance) and len(typ.var_args):
|
||||
array = typ.var_args[0]
|
||||
if isinstance(array, ArrayInstance):
|
||||
# prevent recursions
|
||||
# TODO compare Modules
|
||||
if self.var_args.start_pos != array.var_args.start_pos:
|
||||
items += array.iter_content()
|
||||
else:
|
||||
debug.warning('ArrayInstance recursion %s', self.var_args)
|
||||
continue
|
||||
items += get_iterator_types([typ])
|
||||
|
||||
# TODO check if exclusion of tuple is a problem here.
|
||||
if isinstance(self.var_args, tuple) or self.var_args.parent is None:
|
||||
return [] # generated var_args should not be checked for arrays
|
||||
|
||||
module = self.var_args.get_parent_until()
|
||||
is_list = str(self.instance.name) == 'list'
|
||||
items += _check_array_additions(self._evaluator, self.instance, module, is_list)
|
||||
return items
|
||||
jedi/evaluate/param.py (new file, 175 lines)
@@ -0,0 +1,175 @@
|
||||
import copy
|
||||
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.evaluate import iterable
|
||||
from jedi.evaluate import common
|
||||
from jedi.evaluate import helpers
|
||||
|
||||
|
||||
def get_params(evaluator, func, var_args):
|
||||
def gen_param_name_copy(param, keys=(), values=(), array_type=None):
|
||||
"""
|
||||
Create a param with the original scope (of varargs) as parent.
|
||||
"""
|
||||
if isinstance(var_args, pr.Array):
|
||||
parent = var_args.parent
|
||||
start_pos = var_args.start_pos
|
||||
else:
|
||||
parent = func
|
||||
start_pos = 0, 0
|
||||
|
||||
new_param = copy.copy(param)
|
||||
new_param.is_generated = True
|
||||
if parent is not None:
|
||||
new_param.parent = parent
|
||||
|
||||
# create an Array (-> needed for *args/**kwargs tuples/dicts)
|
||||
arr = pr.Array(helpers.FakeSubModule, start_pos, array_type, parent)
|
||||
arr.values = values
|
||||
key_stmts = []
|
||||
for key in keys:
|
||||
key_stmts.append(helpers.FakeStatement([key], start_pos))
|
||||
arr.keys = key_stmts
|
||||
arr.type = array_type
|
||||
|
||||
new_param._expression_list = [arr]
|
||||
|
||||
name = copy.copy(param.get_name())
|
||||
name.parent = new_param
|
||||
return name
|
||||
|
||||
result = []
|
||||
start_offset = 0
|
||||
from jedi.evaluate.representation import InstanceElement
|
||||
if isinstance(func, InstanceElement):
|
||||
# Care for self -> just exclude it and add the instance
|
||||
start_offset = 1
|
||||
self_name = copy.copy(func.params[0].get_name())
|
||||
self_name.parent = func.instance
|
||||
result.append(self_name)
|
||||
|
||||
param_dict = {}
|
||||
for param in func.params:
|
||||
param_dict[str(param.get_name())] = param
|
||||
# There may be calls, which don't fit all the params, this just ignores it.
|
||||
var_arg_iterator = common.PushBackIterator(_var_args_iterator(evaluator, var_args))
|
||||
|
||||
non_matching_keys = []
|
||||
keys_used = set()
|
||||
keys_only = False
|
||||
for param in func.params[start_offset:]:
|
||||
# The value and key can both be null. There, the defaults apply.
|
||||
# args / kwargs will just be empty arrays / dicts, respectively.
|
||||
# Wrong value count is just ignored. If you try to test cases that are
|
||||
# not allowed in Python, Jedi will maybe not show any completions.
|
||||
key, value = next(var_arg_iterator, (None, None))
|
||||
while key:
|
||||
keys_only = True
|
||||
try:
|
||||
key_param = param_dict[str(key)]
|
||||
except KeyError:
|
||||
non_matching_keys.append((key, value))
|
||||
else:
|
||||
keys_used.add(str(key))
|
||||
result.append(gen_param_name_copy(key_param, values=[value]))
|
||||
key, value = next(var_arg_iterator, (None, None))
|
||||
|
||||
expression_list = param.expression_list()
|
||||
keys = []
|
||||
values = []
|
||||
array_type = None
|
||||
ignore_creation = False
|
||||
if expression_list[0] == '*':
|
||||
# *args param
|
||||
array_type = pr.Array.TUPLE
|
||||
if value:
|
||||
values.append(value)
|
||||
for key, value in var_arg_iterator:
|
||||
# Iterate until a key argument is found.
|
||||
if key:
|
||||
var_arg_iterator.push_back((key, value))
|
||||
break
|
||||
values.append(value)
|
||||
elif expression_list[0] == '**':
|
||||
# **kwargs param
|
||||
array_type = pr.Array.DICT
|
||||
if non_matching_keys:
|
||||
keys, values = zip(*non_matching_keys)
|
||||
elif not keys_only:
|
||||
# normal param
|
||||
if value is not None:
|
||||
values = [value]
|
||||
else:
|
||||
if param.assignment_details:
|
||||
# No value: return the default values.
|
||||
ignore_creation = True
|
||||
result.append(param.get_name())
|
||||
param.is_generated = True
|
||||
else:
|
||||
# If there is no assignment detail, that means there is no
|
||||
# assignment, just the result. Therefore nothing has to be
|
||||
# returned.
|
||||
values = []
|
||||
|
||||
# Just ignore all the params that are without a key, after one keyword
|
||||
# argument was set.
|
||||
if not ignore_creation and (not keys_only or expression_list[0] == '**'):
|
||||
keys_used.add(str(key))
|
||||
result.append(gen_param_name_copy(param, keys=keys, values=values,
|
||||
array_type=array_type))
|
||||
|
||||
if keys_only:
|
||||
# sometimes param arguments are not completely written (which would
|
||||
# create an Exception, but we have to handle that).
|
||||
for k in set(param_dict) - keys_used:
|
||||
result.append(gen_param_name_copy(param_dict[k]))
|
||||
return result
|
||||
|
||||
|
||||
def _var_args_iterator(evaluator, var_args):
|
||||
"""
|
||||
Yields a key/value pair, the key is None, if its not a named arg.
|
||||
"""
|
||||
# `var_args` is typically an Array, and not a list.
|
||||
for stmt in var_args:
|
||||
if not isinstance(stmt, pr.Statement):
|
||||
if stmt is None:
|
||||
yield None, None
|
||||
continue
|
||||
old = stmt
|
||||
# generate a statement if it's not already one.
|
||||
stmt = helpers.FakeStatement([old])
|
||||
|
||||
# *args
|
||||
expression_list = stmt.expression_list()
|
||||
if not len(expression_list):
|
||||
continue
|
||||
if expression_list[0] == '*':
|
||||
# *args must be some sort of an array, otherwise -> ignore
|
||||
for array in evaluator.eval_expression_list(expression_list[1:]):
|
||||
if isinstance(array, iterable.Array):
|
||||
for field_stmt in array: # yield from plz!
|
||||
yield None, field_stmt
|
||||
elif isinstance(array, iterable.Generator):
|
||||
for field_stmt in array.iter_content():
|
||||
yield None, helpers.FakeStatement([field_stmt])
|
||||
# **kwargs
|
||||
elif expression_list[0] == '**':
|
||||
for array in evaluator.eval_expression_list(expression_list[1:]):
|
||||
if isinstance(array, iterable.Array):
|
||||
for key_stmt, value_stmt in array.items():
|
||||
# first index, is the key if syntactically correct
|
||||
call = key_stmt.expression_list()[0]
|
||||
if isinstance(call, pr.Name):
|
||||
yield call, value_stmt
|
||||
elif isinstance(call, pr.Call):
|
||||
yield call.name, value_stmt
|
||||
# Normal arguments (including key arguments).
|
||||
else:
|
||||
if stmt.assignment_details:
|
||||
key_arr, op = stmt.assignment_details[0]
|
||||
# named parameter
|
||||
if key_arr and isinstance(key_arr[0], pr.Call):
|
||||
yield key_arr[0].name, stmt
|
||||
else:
|
||||
yield None, stmt
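# Illustrative sketch (not jedi code): the iterator above yields
# (key, value) pairs where the key is None for positional arguments.
# A plain-Python analogue of that pairing, assuming only simple positional
# and keyword arguments, looks like this:
def _pair_arguments(args, kwargs):
    """Yield (key, value) pairs; the key is None for positional args."""
    for value in args:
        yield None, value
    for key, value in kwargs.items():
        yield key, value

# Example: list(_pair_arguments((1, 2), {'x': 3}))
# -> [(None, 1), (None, 2), ('x', 3)]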
|
||||
@@ -10,34 +10,38 @@ calls.
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import debug
|
||||
from jedi import settings
|
||||
import evaluate_representation as er
|
||||
import builtin
|
||||
from jedi.evaluate import compiled
|
||||
from jedi.evaluate import iterable
|
||||
|
||||
|
||||
class RecursionDecorator(object):
|
||||
def recursion_decorator(func):
|
||||
def run(evaluator, stmt, *args, **kwargs):
|
||||
rec_detect = evaluator.recursion_detector
|
||||
# print stmt, len(self.node_statements())
|
||||
if rec_detect.push_stmt(stmt):
|
||||
return []
|
||||
else:
|
||||
result = func(evaluator, stmt, *args, **kwargs)
|
||||
rec_detect.pop_stmt()
|
||||
return result
|
||||
return run
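# Usage sketch (illustrative only, not jedi code): the decorator above
# guards an evaluation function so that, if the same statement is already
# being evaluated, an empty result is returned instead of recursing.
# A self-contained analogue that tracks the currently active items:
def _guard_recursion(func):
    active = set()

    def run(item, *args, **kwargs):
        if id(item) in active:          # already being evaluated -> stop here
            return []
        active.add(id(item))
        try:
            return func(item, *args, **kwargs)
        finally:
            active.discard(id(item))
    return run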
|
||||
|
||||
|
||||
class RecursionDetector(object):
|
||||
"""
|
||||
A decorator to detect recursions in statements. In a recursion a statement
|
||||
at the same place, in the same module may not be executed two times.
|
||||
"""
|
||||
def __init__(self, func):
|
||||
self.func = func
|
||||
self.reset()
|
||||
|
||||
def __call__(self, stmt, *args, **kwargs):
|
||||
# print stmt, len(self.node_statements())
|
||||
if self.push_stmt(stmt):
|
||||
return []
|
||||
else:
|
||||
result = self.func(stmt, *args, **kwargs)
|
||||
self.pop_stmt()
|
||||
return result
|
||||
def __init__(self):
|
||||
self.top = None
|
||||
self.current = None
|
||||
|
||||
def push_stmt(self, stmt):
|
||||
self.current = RecursionNode(stmt, self.current)
|
||||
self.current = _RecursionNode(stmt, self.current)
|
||||
check = self._check_recursion()
|
||||
if check: # TODO remove False!!!!
|
||||
debug.warning('catched stmt recursion: %s against %s @%s'
|
||||
% (stmt, check.stmt, stmt.start_pos))
|
||||
debug.warning('catched stmt recursion: %s against %s @%s', stmt,
|
||||
check.stmt, stmt.start_pos)
|
||||
self.pop_stmt()
|
||||
return True
|
||||
return False
|
||||
@@ -57,10 +61,6 @@ class RecursionDecorator(object):
|
||||
if not test:
|
||||
return False
|
||||
|
||||
def reset(self):
|
||||
self.top = None
|
||||
self.current = None
|
||||
|
||||
def node_statements(self):
|
||||
result = []
|
||||
n = self.current
|
||||
@@ -70,7 +70,7 @@ class RecursionDecorator(object):
|
||||
return result
|
||||
|
||||
|
||||
class RecursionNode(object):
|
||||
class _RecursionNode(object):
|
||||
""" A node of the RecursionDecorator. """
|
||||
def __init__(self, stmt, parent):
|
||||
self.script = stmt.get_parent_until()
|
||||
@@ -82,7 +82,7 @@ class RecursionNode(object):
|
||||
# The same's true for the builtins, because the builtins are really
|
||||
# simple.
|
||||
self.is_ignored = isinstance(stmt, pr.Param) \
|
||||
or (self.script == builtin.Builtin.scope)
|
||||
or (self.script == compiled.builtin)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not other:
|
||||
@@ -96,32 +96,45 @@ class RecursionNode(object):
|
||||
and not self.is_ignored and not other.is_ignored
|
||||
|
||||
|
||||
class ExecutionRecursionDecorator(object):
|
||||
def execution_recursion_decorator(func):
|
||||
def run(execution, evaluate_generator=False):
|
||||
detector = execution._evaluator.execution_recursion_detector
|
||||
if detector.push_execution(execution, evaluate_generator):
|
||||
result = []
|
||||
else:
|
||||
result = func(execution, evaluate_generator)
|
||||
detector.pop_execution()
|
||||
return result
|
||||
|
||||
return run
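# Simplified analogue (illustrative only, not jedi code): besides detecting
# re-entrant executions, the detector used above also limits the total
# number of executions. The constant below is assumed for illustration and
# is not a jedi setting.
_MAX_EXECUTIONS = 250

class _SimpleExecutionLimiter(object):
    def __init__(self):
        self.count = 0

    def push(self):
        """Return True if this execution should be skipped."""
        self.count += 1
        return self.count > _MAX_EXECUTIONS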
|
||||
|
||||
|
||||
class ExecutionRecursionDetector(object):
|
||||
"""
|
||||
Catches recursions of executions.
|
||||
It is designed like a Singelton. Only one instance should exist.
|
||||
"""
|
||||
def __init__(self, func):
|
||||
self.func = func
|
||||
self.reset()
|
||||
def __init__(self):
|
||||
self.recursion_level = 0
|
||||
self.parent_execution_funcs = []
|
||||
self.execution_funcs = set()
|
||||
self.execution_count = 0
|
||||
|
||||
def __call__(self, execution, evaluate_generator=False):
|
||||
debug.dbg('Execution recursions: %s' % execution, self.recursion_level,
|
||||
debug.dbg('Execution recursions: %s', execution, self.recursion_level,
|
||||
self.execution_count, len(self.execution_funcs))
|
||||
if self.check_recursion(execution, evaluate_generator):
|
||||
result = []
|
||||
else:
|
||||
result = self.func(execution, evaluate_generator)
|
||||
self.cleanup()
|
||||
self.pop_execution()
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def cleanup(cls):
|
||||
def pop_execution(cls):
|
||||
cls.parent_execution_funcs.pop()
|
||||
cls.recursion_level -= 1
|
||||
|
||||
@classmethod
|
||||
def check_recursion(cls, execution, evaluate_generator):
|
||||
def push_execution(cls, execution, evaluate_generator):
|
||||
in_par_execution_funcs = execution.base in cls.parent_execution_funcs
|
||||
in_execution_funcs = execution.base in cls.execution_funcs
|
||||
cls.recursion_level += 1
|
||||
@@ -132,10 +145,10 @@ class ExecutionRecursionDecorator(object):
|
||||
if cls.execution_count > settings.max_executions:
|
||||
return True
|
||||
|
||||
if isinstance(execution.base, (er.Generator, er.Array)):
|
||||
if isinstance(execution.base, (iterable.Array, iterable.Generator)):
|
||||
return False
|
||||
module = execution.get_parent_until()
|
||||
if evaluate_generator or module == builtin.Builtin.scope:
|
||||
if evaluate_generator or module == compiled.builtin:
|
||||
return False
|
||||
|
||||
if in_par_execution_funcs:
|
||||
@@ -147,10 +160,3 @@ class ExecutionRecursionDecorator(object):
|
||||
if cls.execution_count > settings.max_executions_without_builtins:
|
||||
return True
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def reset(cls):
|
||||
cls.recursion_level = 0
|
||||
cls.parent_execution_funcs = []
|
||||
cls.execution_funcs = set()
|
||||
cls.execution_count = 0
|
||||
jedi/evaluate/representation.py (new file, 502 lines)
@@ -0,0 +1,502 @@
"""
|
||||
Like described in the :mod:`parsing_representation` module, there's a need for
|
||||
an ast like module to represent the states of parsed modules.
|
||||
|
||||
But now there are also structures in Python that need a little bit more than
|
||||
that. An ``Instance`` for example is only a ``Class`` before it is
|
||||
instantiated. This class represents these cases.
|
||||
|
||||
So, why is there also a ``Class`` class here? Well, there are decorators and
|
||||
they change classes in Python 3.
|
||||
"""
|
||||
import copy
|
||||
|
||||
from jedi._compatibility import use_metaclass, unicode
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import debug
|
||||
from jedi import common
|
||||
from jedi.evaluate.cache import memoize_default, CachedMetaClass
|
||||
from jedi.evaluate import compiled
|
||||
from jedi.evaluate import recursion
|
||||
from jedi.evaluate import iterable
|
||||
from jedi.evaluate import docstrings
|
||||
from jedi.evaluate import helpers
|
||||
from jedi.evaluate import param
|
||||
|
||||
|
||||
class Executable(pr.IsScope):
|
||||
"""
|
||||
An instance is also an executable - because __init__ is called
|
||||
:param var_args: The param input array, consist of `pr.Array` or list.
|
||||
"""
|
||||
def __init__(self, evaluator, base, var_args=()):
|
||||
self._evaluator = evaluator
|
||||
self.base = base
|
||||
self.var_args = var_args
|
||||
|
||||
def get_parent_until(self, *args, **kwargs):
|
||||
return self.base.get_parent_until(*args, **kwargs)
|
||||
|
||||
@common.safe_property
|
||||
def parent(self):
|
||||
return self.base.parent
|
||||
|
||||
|
||||
class Instance(use_metaclass(CachedMetaClass, Executable)):
|
||||
"""
|
||||
This class is used to evaluate instances.
|
||||
"""
|
||||
def __init__(self, evaluator, base, var_args=()):
|
||||
super(Instance, self).__init__(evaluator, base, var_args)
|
||||
if str(base.name) in ['list', 'set'] \
|
||||
and compiled.builtin == base.get_parent_until():
|
||||
# compare the module path with the builtin name.
|
||||
self.var_args = iterable.check_array_instances(evaluator, self)
|
||||
else:
|
||||
# need to execute the __init__ function, because the dynamic param
|
||||
# searching needs it.
|
||||
with common.ignored(KeyError):
|
||||
self.execute_subscope_by_name('__init__', self.var_args)
|
||||
# Generated instances are classes that are just generated by self
# (no var_args used).
|
||||
self.is_generated = False
|
||||
|
||||
@memoize_default(None)
|
||||
def _get_method_execution(self, func):
|
||||
func = InstanceElement(self._evaluator, self, func, True)
|
||||
return FunctionExecution(self._evaluator, func, self.var_args)
|
||||
|
||||
def _get_func_self_name(self, func):
|
||||
"""
|
||||
Returns the name of the first param in a class method (which is
|
||||
normally self.
|
||||
"""
|
||||
try:
|
||||
return str(func.params[0].get_name())
|
||||
except IndexError:
|
||||
return None
|
||||
|
||||
@memoize_default([])
|
||||
def get_self_attributes(self):
|
||||
def add_self_dot_name(name):
|
||||
"""
|
||||
Need to copy and rewrite the name, because names are now
|
||||
``instance_usage.variable`` instead of ``self.variable``.
|
||||
"""
|
||||
n = copy.copy(name)
|
||||
n.names = n.names[1:]
|
||||
names.append(InstanceElement(self._evaluator, self, n))
|
||||
|
||||
names = []
|
||||
# This loop adds the names of the self object, copies them and removes
|
||||
# the self.
|
||||
for sub in self.base.subscopes:
|
||||
if isinstance(sub, pr.Class):
|
||||
continue
|
||||
# Get the self name, if there's one.
|
||||
self_name = self._get_func_self_name(sub)
|
||||
if not self_name:
|
||||
continue
|
||||
|
||||
if sub.name.get_code() == '__init__':
|
||||
# ``__init__`` is special because the params need to be injected
# this way. Therefore an execution is necessary.
|
||||
if not sub.decorators:
|
||||
# __init__ decorators should generally just be ignored,
|
||||
# because to follow them and their self variables is too
|
||||
# complicated.
|
||||
sub = self._get_method_execution(sub)
|
||||
for n in sub.get_set_vars():
|
||||
# Only names with the self name are being added.
# It is also important that they have a len() of 2,
# because otherwise they are just something else
|
||||
if n.names[0] == self_name and len(n.names) == 2:
|
||||
add_self_dot_name(n)
|
||||
|
||||
if not isinstance(self.base, compiled.CompiledObject):
|
||||
for s in self.base.get_super_classes():
|
||||
for inst in self._evaluator.execute(s):
|
||||
names += inst.get_self_attributes()
|
||||
return names
|
||||
|
||||
def get_subscope_by_name(self, name):
|
||||
sub = self.base.get_subscope_by_name(name)
|
||||
return InstanceElement(self._evaluator, self, sub, True)
|
||||
|
||||
def execute_subscope_by_name(self, name, args=()):
|
||||
method = self.get_subscope_by_name(name)
|
||||
return self._evaluator.execute(method, args)
|
||||
|
||||
def get_descriptor_return(self, obj):
|
||||
""" Throws a KeyError if there's no method. """
|
||||
# Arguments in __get__ descriptors are obj, class.
|
||||
# `method` is the new parent of the array, don't know if that's good.
|
||||
args = [obj, obj.base] if isinstance(obj, Instance) else [None, obj]
|
||||
return self.execute_subscope_by_name('__get__', args)
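# Reference example of the standard descriptor protocol (illustrative only,
# not jedi code), which is what the argument list above mirrors: __get__
# receives the instance and the owning class.
class _Ten(object):
    def __get__(self, obj, objtype=None):
        return 10

class _Holder(object):
    ten = _Ten()

assert _Holder().ten == 10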
|
||||
|
||||
@memoize_default([])
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
Get the instance vars of a class. This includes the vars of all
|
||||
classes
|
||||
"""
|
||||
names = self.get_self_attributes()
|
||||
|
||||
for var in self.base.instance_names():
|
||||
names.append(InstanceElement(self._evaluator, self, var, True))
|
||||
return names
|
||||
|
||||
def scope_generator(self):
|
||||
"""
|
||||
An Instance has two scopes: The scope with self names and the class
|
||||
scope. Instance variables have priority over the class scope.
|
||||
"""
|
||||
yield self, self.get_self_attributes()
|
||||
|
||||
names = []
|
||||
for var in self.base.instance_names():
|
||||
names.append(InstanceElement(self._evaluator, self, var, True))
|
||||
yield self, names
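# Plain-Python illustration (not jedi code) of the priority order the
# generator above models: instance attributes shadow class attributes.
class _Example(object):
    x = 'class value'

    def __init__(self):
        self.x = 'instance value'

assert _Example().x == 'instance value'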
|
||||
|
||||
def get_index_types(self, index=None):
|
||||
args = [] if index is None else [index]
|
||||
try:
|
||||
return self.execute_subscope_by_name('__getitem__', args)
|
||||
except KeyError:
|
||||
debug.warning('No __getitem__, cannot access the array.')
|
||||
return []
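# Reference behaviour (plain Python, illustrative only) that
# get_index_types relies on: subscription is dispatched to __getitem__.
class _Squares(object):
    def __getitem__(self, index):
        return index * index

assert _Squares()[4] == 16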
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'name', 'get_imports',
|
||||
'doc', 'docstr', 'asserts']:
|
||||
raise AttributeError("Instance %s: Don't touch this (%s)!"
|
||||
% (self, name))
|
||||
return getattr(self.base, name)
|
||||
|
||||
def __repr__(self):
|
||||
return "<e%s of %s (var_args: %s)>" % \
|
||||
(type(self).__name__, self.base, len(self.var_args or []))
|
||||
|
||||
|
||||
class InstanceElement(use_metaclass(CachedMetaClass, pr.Base)):
|
||||
"""
|
||||
InstanceElement is a wrapper for any object, that is used as an instance
|
||||
variable (e.g. self.variable or class methods).
|
||||
"""
|
||||
def __init__(self, evaluator, instance, var, is_class_var=False):
|
||||
self._evaluator = evaluator
|
||||
if isinstance(var, pr.Function):
|
||||
var = Function(evaluator, var)
|
||||
elif isinstance(var, pr.Class):
|
||||
var = Class(evaluator, var)
|
||||
self.instance = instance
|
||||
self.var = var
|
||||
self.is_class_var = is_class_var
|
||||
|
||||
@common.safe_property
|
||||
@memoize_default(None)
|
||||
def parent(self):
|
||||
par = self.var.parent
|
||||
if isinstance(par, Class) and par == self.instance.base \
|
||||
or isinstance(par, pr.Class) \
|
||||
and par == self.instance.base.base:
|
||||
par = self.instance
|
||||
elif not isinstance(par, (pr.Module, compiled.CompiledObject)):
|
||||
par = InstanceElement(self.instance._evaluator, self.instance, par, self.is_class_var)
|
||||
return par
|
||||
|
||||
def get_parent_until(self, *args, **kwargs):
|
||||
return pr.Simple.get_parent_until(self, *args, **kwargs)
|
||||
|
||||
def get_decorated_func(self):
|
||||
""" Needed because the InstanceElement should not be stripped """
|
||||
func = self.var.get_decorated_func(self.instance)
|
||||
if func == self.var:
|
||||
return self
|
||||
return func
|
||||
|
||||
def expression_list(self):
|
||||
# Copy and modify the array.
|
||||
return [InstanceElement(self.instance._evaluator, self.instance, command, self.is_class_var)
|
||||
if not isinstance(command, unicode) else command
|
||||
for command in self.var.expression_list()]
|
||||
|
||||
def __iter__(self):
|
||||
for el in self.var.__iter__():
|
||||
yield InstanceElement(self.instance._evaluator, self.instance, el, self.is_class_var)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.var, name)
|
||||
|
||||
def isinstance(self, *cls):
|
||||
return isinstance(self.var, cls)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self.var)
|
||||
|
||||
|
||||
class Class(use_metaclass(CachedMetaClass, pr.IsScope)):
|
||||
"""
|
||||
This class is not only important to extend `pr.Class`, it is also a
|
||||
important for descriptors (if the descriptor methods are evaluated or not).
|
||||
"""
|
||||
def __init__(self, evaluator, base):
|
||||
self._evaluator = evaluator
|
||||
self.base = base
|
||||
|
||||
@memoize_default(default=())
|
||||
def get_super_classes(self):
|
||||
supers = []
|
||||
# TODO care for mro stuff (multiple super classes).
|
||||
for s in self.base.supers:
|
||||
# Super classes are statements.
|
||||
for cls in self._evaluator.eval_statement(s):
|
||||
if not isinstance(cls, Class):
|
||||
debug.warning('Received non class, as a super class')
|
||||
continue # Just ignore other stuff (user input error).
|
||||
supers.append(cls)
|
||||
if not supers and self.base.parent != compiled.builtin:
|
||||
# add `object` to classes
|
||||
supers += self._evaluator.find_types(compiled.builtin, 'object')
|
||||
return supers
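# Sanity-check example (plain Python, illustrative only): on Python 3 a
# class without explicit bases still inherits from object, which is what
# the `object` fallback above emulates.
class _Plain:
    pass

assert _Plain.__bases__ == (object,)   # holds on Python 3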
|
||||
|
||||
@memoize_default(default=())
|
||||
def instance_names(self):
|
||||
def in_iterable(name, iterable):
|
||||
""" checks if the name is in the variable 'iterable'. """
|
||||
for i in iterable:
|
||||
# Only the last name is important, because these names have a
|
||||
# maximal length of 2, with the first one being `self`.
|
||||
if i.names[-1] == name.names[-1]:
|
||||
return True
|
||||
return False
|
||||
|
||||
result = self.base.get_defined_names()
|
||||
super_result = []
|
||||
# TODO mro!
|
||||
for cls in self.get_super_classes():
|
||||
# Get the inherited names.
|
||||
if isinstance(cls, compiled.CompiledObject):
|
||||
super_result += cls.get_defined_names()
|
||||
else:
|
||||
for i in cls.instance_names():
|
||||
if not in_iterable(i, result):
|
||||
super_result.append(i)
|
||||
result += super_result
|
||||
return result
|
||||
|
||||
@memoize_default(default=())
|
||||
def get_defined_names(self):
|
||||
result = self.instance_names()
|
||||
type_cls = self._evaluator.find_types(compiled.builtin, 'type')[0]
|
||||
return result + list(type_cls.get_defined_names())
|
||||
|
||||
def get_subscope_by_name(self, name):
|
||||
for sub in reversed(self.subscopes):
|
||||
if sub.name.get_code() == name:
|
||||
return sub
|
||||
raise KeyError("Couldn't find subscope.")
|
||||
|
||||
@common.safe_property
|
||||
def name(self):
|
||||
return self.base.name
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'parent', 'asserts', 'docstr',
|
||||
'doc', 'get_imports', 'get_parent_until', 'get_code',
|
||||
'subscopes']:
|
||||
raise AttributeError("Don't touch this: %s of %s !" % (name, self))
|
||||
return getattr(self.base, name)
|
||||
|
||||
def __repr__(self):
|
||||
return "<e%s of %s>" % (type(self).__name__, self.base)
|
||||
|
||||
|
||||
class Function(use_metaclass(CachedMetaClass, pr.IsScope)):
|
||||
"""
|
||||
Needed because of decorators. Decorators are evaluated here.
|
||||
"""
|
||||
def __init__(self, evaluator, func, is_decorated=False):
|
||||
""" This should not be called directly """
|
||||
self._evaluator = evaluator
|
||||
self.base_func = func
|
||||
self.is_decorated = is_decorated
|
||||
|
||||
@memoize_default(None)
|
||||
def _decorated_func(self, instance=None):
|
||||
"""
|
||||
Returns the function, that is to be executed in the end.
|
||||
This is also the places where the decorators are processed.
|
||||
"""
|
||||
f = self.base_func
|
||||
|
||||
# Only enter it, if has not already been processed.
|
||||
if not self.is_decorated:
|
||||
for dec in reversed(self.base_func.decorators):
|
||||
debug.dbg('decorator: %s %s', dec, f)
|
||||
dec_results = set(self._evaluator.eval_statement(dec))
|
||||
if not len(dec_results):
|
||||
debug.warning('decorator not found: %s on %s', dec, self.base_func)
|
||||
return None
|
||||
decorator = dec_results.pop()
|
||||
if dec_results:
|
||||
debug.warning('multiple decorators found %s %s',
|
||||
self.base_func, dec_results)
|
||||
# Create param array.
|
||||
old_func = Function(self._evaluator, f, is_decorated=True)
|
||||
if instance is not None and decorator.isinstance(Function):
|
||||
old_func = InstanceElement(self._evaluator, instance, old_func)
|
||||
instance = None
|
||||
|
||||
wrappers = self._evaluator.execute(decorator, (old_func,))
|
||||
if not len(wrappers):
|
||||
debug.warning('no wrappers found %s', self.base_func)
|
||||
return None
|
||||
if len(wrappers) > 1:
|
||||
# TODO resolve issue with multiple wrappers -> multiple types
|
||||
debug.warning('multiple wrappers found %s %s',
|
||||
self.base_func, wrappers)
|
||||
f = wrappers[0]
|
||||
|
||||
debug.dbg('decorator end %s', f)
|
||||
if f != self.base_func and isinstance(f, pr.Function):
|
||||
f = Function(self._evaluator, f)
|
||||
return f
|
||||
|
||||
def get_decorated_func(self, instance=None):
|
||||
decorated_func = self._decorated_func(instance)
|
||||
if decorated_func == self.base_func:
|
||||
return self
|
||||
if decorated_func is None:
|
||||
# If the decorator func is not found, just ignore the decorator
|
||||
# function, because sometimes decorators are just really
|
||||
# complicated.
|
||||
return Function(self._evaluator, self.base_func, True)
|
||||
return decorated_func
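# Minimal decorator example (illustrative only, not jedi code) of what
# _decorated_func resolves: after decoration the name `_greet` is bound to
# the wrapper, not to the original function.
def _shout(func):
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs).upper()
    return wrapper

@_shout
def _greet(name):
    return 'hello %s' % name

assert _greet('jedi') == 'HELLO JEDI'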
|
||||
|
||||
def get_magic_function_names(self):
|
||||
return compiled.magic_function_class.get_defined_names()
|
||||
|
||||
def get_magic_function_scope(self):
|
||||
return compiled.magic_function_class
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.base_func, name)
|
||||
|
||||
def __repr__(self):
|
||||
dec = ''
|
||||
if self._decorated_func() != self.base_func:
|
||||
dec = " is " + repr(self._decorated_func())
|
||||
return "<e%s of %s%s>" % (type(self).__name__, self.base_func, dec)
|
||||
|
||||
|
||||
class FunctionExecution(Executable):
|
||||
"""
|
||||
This class is used to evaluate functions and their returns.
|
||||
|
||||
This is the most complicated class, because it contains the logic to
|
||||
transfer parameters. It is even more complicated, because there may be
|
||||
multiple calls to functions and recursion has to be avoided. But this is
|
||||
responsibility of the decorators.
|
||||
"""
|
||||
@memoize_default(default=())
|
||||
@recursion.execution_recursion_decorator
|
||||
def get_return_types(self, evaluate_generator=False):
|
||||
func = self.base
|
||||
# Feed the listeners, with the params.
|
||||
for listener in func.listeners:
|
||||
listener.execute(self._get_params())
|
||||
if func.is_generator and not evaluate_generator:
|
||||
return [iterable.Generator(self._evaluator, func, self.var_args)]
|
||||
else:
|
||||
stmts = docstrings.find_return_types(self._evaluator, func)
|
||||
for r in self.returns:
|
||||
if r is not None:
|
||||
stmts += self._evaluator.eval_statement(r)
|
||||
return stmts
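# Plain-Python illustration (not jedi code) of the distinction handled
# above: calling a generator function returns a generator object rather
# than the yielded values, which is why evaluate_generator is
# special-cased.
def _gen():
    yield 1

def _normal():
    return 1

assert list(_gen()) == [1]   # values only appear when the generator is iterated
assert _normal() == 1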
|
||||
|
||||
@memoize_default(default=())
|
||||
def _get_params(self):
|
||||
"""
|
||||
This returns the params for an TODO and is injected as a
|
||||
'hack' into the pr.Function class.
|
||||
This needs to be here, because Instance can have __init__ functions,
|
||||
which act the same way as normal functions.
|
||||
"""
|
||||
return param.get_params(self._evaluator, self.base, self.var_args)
|
||||
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
Call the default method with the own instance (self implements all
|
||||
the necessary functions). Add also the params.
|
||||
"""
|
||||
return self._get_params() + pr.Scope.get_set_vars(self)
|
||||
|
||||
get_set_vars = get_defined_names
|
||||
|
||||
def _copy_properties(self, prop):
|
||||
"""
|
||||
Literally copies a property of a Function. Copying is very expensive,
|
||||
because it is something like `copy.deepcopy`. However, these copied
|
||||
objects can be used for the executions, as if they were in the
|
||||
execution.
|
||||
"""
|
||||
# Copy all these lists into this local function.
|
||||
attr = getattr(self.base, prop)
|
||||
objects = []
|
||||
for element in attr:
|
||||
if element is None:
|
||||
copied = element
|
||||
else:
|
||||
copied = helpers.fast_parent_copy(element)
|
||||
copied.parent = self._scope_copy(copied.parent)
|
||||
if isinstance(copied, pr.Function):
|
||||
copied = Function(self._evaluator, copied)
|
||||
objects.append(copied)
|
||||
return objects
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'imports', '_sub_module']:
|
||||
raise AttributeError('Tried to access %s: %s. Why?' % (name, self))
|
||||
return getattr(self.base, name)
|
||||
|
||||
@memoize_default(None)
|
||||
def _scope_copy(self, scope):
|
||||
""" Copies a scope (e.g. if) in an execution """
|
||||
# TODO method uses different scopes than the subscopes property.
|
||||
|
||||
# just check the start_pos, sometimes it's difficult with closures
|
||||
# to compare the scopes directly.
|
||||
if scope.start_pos == self.start_pos:
|
||||
return self
|
||||
else:
|
||||
copied = helpers.fast_parent_copy(scope)
|
||||
copied.parent = self._scope_copy(copied.parent)
|
||||
return copied
|
||||
|
||||
@common.safe_property
|
||||
@memoize_default([])
|
||||
def returns(self):
|
||||
return self._copy_properties('returns')
|
||||
|
||||
@common.safe_property
|
||||
@memoize_default([])
|
||||
def asserts(self):
|
||||
return self._copy_properties('asserts')
|
||||
|
||||
@common.safe_property
|
||||
@memoize_default([])
|
||||
def statements(self):
|
||||
return self._copy_properties('statements')
|
||||
|
||||
@common.safe_property
|
||||
@memoize_default([])
|
||||
def subscopes(self):
|
||||
return self._copy_properties('subscopes')
|
||||
|
||||
def get_statement_for_position(self, pos):
|
||||
return pr.Scope.get_statement_for_position(self, pos)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % \
|
||||
(type(self).__name__, self.base)
|
||||
jedi/evaluate/stdlib.py (new file, 91 lines)
@@ -0,0 +1,91 @@
"""
|
||||
Implementations of standard library functions, because it's not possible to
|
||||
understand them with Jedi.
|
||||
"""
|
||||
|
||||
from jedi._compatibility import unicode
|
||||
from jedi.evaluate import compiled
|
||||
from jedi.evaluate import representation as er
|
||||
from jedi.evaluate import iterable
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import debug
|
||||
|
||||
|
||||
class NotInStdLib(LookupError):
|
||||
pass
|
||||
|
||||
|
||||
def execute(evaluator, obj, params):
|
||||
if not isinstance(obj, (iterable.Generator, iterable.Array)):
|
||||
obj_name = str(obj.name)
|
||||
if obj.parent == compiled.builtin:
|
||||
# for now we just support builtin functions.
|
||||
try:
|
||||
return _implemented['builtins'][obj_name](evaluator, obj, params)
|
||||
except KeyError:
|
||||
pass
|
||||
raise NotInStdLib()
|
||||
|
||||
|
||||
def _follow_param(evaluator, params, index):
|
||||
try:
|
||||
stmt = params[index]
|
||||
except IndexError:
|
||||
return []
|
||||
else:
|
||||
if isinstance(stmt, pr.Statement):
|
||||
return evaluator.eval_statement(stmt)
|
||||
else:
|
||||
return [stmt] # just some arbitrary object
|
||||
|
||||
|
||||
def builtins_getattr(evaluator, obj, params):
|
||||
stmts = []
|
||||
# follow the first param
|
||||
objects = _follow_param(evaluator, params, 0)
|
||||
names = _follow_param(evaluator, params, 1)
|
||||
for obj in objects:
|
||||
if not isinstance(obj, (er.Instance, er.Class, pr.Module, compiled.CompiledObject)):
|
||||
debug.warning('getattr called without instance')
|
||||
continue
|
||||
|
||||
for name in names:
|
||||
s = unicode, str
|
||||
if isinstance(name, compiled.CompiledObject) and isinstance(name.obj, s):
|
||||
stmts += evaluator.follow_path(iter([name.obj]), [obj], obj)
|
||||
else:
|
||||
debug.warning('getattr called without str')
|
||||
continue
|
||||
return stmts
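# Reference behaviour (standard Python, illustrative only) that
# builtins_getattr approximates: getattr takes an object and a string
# attribute name.
class _C(object):
    attr = 42

assert getattr(_C(), 'attr') == 42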
|
||||
|
||||
|
||||
def builtins_type(evaluator, obj, params):
|
||||
if len(params) == 1:
|
||||
# otherwise it would be a metaclass... maybe someday...
|
||||
objects = _follow_param(evaluator, params, 0)
|
||||
return [o.base for o in objects if isinstance(o, er.Instance)]
|
||||
return []
|
||||
|
||||
|
||||
def builtins_super(evaluator, obj, params):
|
||||
# TODO make this able to detect multiple inheritance super
|
||||
accept = (pr.Function,)
|
||||
func = params.get_parent_until(accept)
|
||||
if func.isinstance(*accept):
|
||||
cls = func.get_parent_until(accept + (pr.Class,),
|
||||
include_current=False)
|
||||
if isinstance(cls, pr.Class):
|
||||
cls = er.Class(evaluator, cls)
|
||||
su = cls.get_super_classes()
|
||||
if su:
|
||||
return evaluator.execute(su[0])
|
||||
return []
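# Plain-Python reference (illustrative only) for the behaviour approximated
# above: super() inside a method resolves to the (first) base class.
class _Base(object):
    def who(self):
        return 'base'

class _Child(_Base):
    def who(self):
        return super(_Child, self).who()

assert _Child().who() == 'base'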
|
||||
|
||||
|
||||
_implemented = {
|
||||
'builtins': {
|
||||
'getattr': builtins_getattr,
|
||||
'type': builtins_type,
|
||||
'super': builtins_super,
|
||||
}
|
||||
}
|
||||
jedi/evaluate/sys_path.py (new file, 120 lines)
@@ -0,0 +1,120 @@
import os
|
||||
import sys
|
||||
|
||||
from jedi._compatibility import exec_function
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import debug
|
||||
from jedi import common
|
||||
|
||||
|
||||
def get_sys_path():
|
||||
def check_virtual_env(sys_path):
|
||||
""" Add virtualenv's site-packages to the `sys.path`."""
|
||||
venv = os.getenv('VIRTUAL_ENV')
|
||||
if not venv:
|
||||
return
|
||||
venv = os.path.abspath(venv)
|
||||
p = os.path.join(
|
||||
venv, 'lib', 'python%d.%d' % sys.version_info[:2], 'site-packages')
|
||||
sys_path.insert(0, p)
|
||||
|
||||
check_virtual_env(sys.path)
|
||||
return [p for p in sys.path if p != ""]
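# Example (illustrative only; the virtualenv layout is an assumption) of
# the directory the helper above prepends for an active virtualenv, e.g.
# $VIRTUAL_ENV/lib/python2.7/site-packages. os and sys are already
# imported at the top of this module.
def _venv_site_packages(venv):
    return os.path.join(venv, 'lib',
                        'python%d.%d' % sys.version_info[:2],
                        'site-packages')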
|
||||
|
||||
|
||||
#@cache.memoize_default([]) TODO add some sort of cache again.
|
||||
def sys_path_with_modifications(module):
|
||||
def execute_code(code):
|
||||
c = "import os; from os.path import *; result=%s"
|
||||
variables = {'__file__': module.path}
|
||||
try:
|
||||
exec_function(c % code, variables)
|
||||
except Exception:
|
||||
debug.warning('sys path detected, but failed to evaluate')
|
||||
return None
|
||||
try:
|
||||
res = variables['result']
|
||||
if isinstance(res, str):
|
||||
return os.path.abspath(res)
|
||||
else:
|
||||
return None
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def check_module(module):
|
||||
try:
|
||||
possible_stmts = module.used_names['path']
|
||||
except KeyError:
|
||||
return get_sys_path()
|
||||
|
||||
sys_path = list(get_sys_path()) # copy
|
||||
for p in possible_stmts:
|
||||
if not isinstance(p, pr.Statement):
|
||||
continue
|
||||
expression_list = p.expression_list()
|
||||
# sys.path command is just one thing.
|
||||
if len(expression_list) != 1 or not isinstance(expression_list[0], pr.Call):
|
||||
continue
|
||||
call = expression_list[0]
|
||||
n = call.name
|
||||
if not isinstance(n, pr.Name) or len(n.names) != 3:
|
||||
continue
|
||||
if n.names[:2] != ('sys', 'path'):
|
||||
continue
|
||||
array_cmd = n.names[2]
|
||||
if call.execution is None:
|
||||
continue
|
||||
exe = call.execution
|
||||
if not (array_cmd == 'insert' and len(exe) == 2
|
||||
or array_cmd == 'append' and len(exe) == 1):
|
||||
continue
|
||||
|
||||
if array_cmd == 'insert':
|
||||
exe_type, exe.type = exe.type, pr.Array.NOARRAY
|
||||
exe_pop = exe.values.pop(0)
|
||||
res = execute_code(exe.get_code())
|
||||
if res is not None:
|
||||
sys_path.insert(0, res)
|
||||
debug.dbg('sys path inserted: %s', res)
|
||||
exe.type = exe_type
|
||||
exe.values.insert(0, exe_pop)
|
||||
elif array_cmd == 'append':
|
||||
res = execute_code(exe.get_code())
|
||||
if res is not None:
|
||||
sys_path.append(res)
|
||||
debug.dbg('sys path added: %s', res)
|
||||
return sys_path
|
||||
|
||||
if module.path is None:
|
||||
# Support for modules without a path is bad, therefore return the
|
||||
# normal path.
|
||||
return list(get_sys_path())
|
||||
|
||||
curdir = os.path.abspath(os.curdir)
|
||||
with common.ignored(OSError):
|
||||
os.chdir(os.path.dirname(module.path))
|
||||
|
||||
result = check_module(module)
|
||||
result += _detect_django_path(module.path)
|
||||
|
||||
# cleanup, back to old directory
|
||||
os.chdir(curdir)
|
||||
return result
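# Example (hypothetical user code, illustrative only) of the source pattern
# check_module() above looks for: a direct sys.path modification relative
# to __file__. Shown as comments so it does not alter sys.path here.
#
#     import os
#     import sys
#     sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'lib'))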
|
||||
|
||||
|
||||
def _detect_django_path(module_path):
|
||||
""" Detects the path of the very well known Django library (if used) """
|
||||
result = []
|
||||
while True:
|
||||
new = os.path.dirname(module_path)
|
||||
# If the module_path doesn't change anymore, we're finished -> /
|
||||
if new == module_path:
|
||||
break
|
||||
else:
|
||||
module_path = new
|
||||
|
||||
with common.ignored(IOError):
|
||||
with open(module_path + os.path.sep + 'manage.py'):
|
||||
debug.dbg('Found django path: %s', module_path)
|
||||
result.append(module_path)
|
||||
return result
|
||||
@@ -1,945 +0,0 @@
|
||||
"""
|
||||
Like described in the :mod:`parsing_representation` module, there's a need for
|
||||
an ast like module to represent the states of parsed modules.
|
||||
|
||||
But now there are also structures in Python that need a little bit more than
|
||||
that. An ``Instance`` for example is only a ``Class`` before it is
|
||||
instantiated. This class represents these cases.
|
||||
|
||||
So, why is there also a ``Class`` class here? Well, there are decorators and
|
||||
they change classes in Python 3.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import copy
|
||||
import itertools
|
||||
|
||||
from jedi._compatibility import use_metaclass, next, hasattr, unicode
|
||||
from jedi.parser import representation as pr
|
||||
from jedi import cache
|
||||
from jedi import helpers
|
||||
from jedi import debug
|
||||
from jedi import common
|
||||
import recursion
|
||||
import docstrings
|
||||
import imports
|
||||
import evaluate
|
||||
import builtin
|
||||
import dynamic
|
||||
|
||||
|
||||
class Executable(pr.IsScope):
|
||||
"""
|
||||
An instance is also an executable - because __init__ is called
|
||||
:param var_args: The param input array, consist of `pr.Array` or list.
|
||||
"""
|
||||
def __init__(self, base, var_args=()):
|
||||
self.base = base
|
||||
self.var_args = var_args
|
||||
|
||||
def get_parent_until(self, *args, **kwargs):
|
||||
return self.decorated.get_parent_until(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def parent(self):
|
||||
return self.decorated.parent
|
||||
|
||||
@property
|
||||
def decorated(self):
|
||||
"""
|
||||
Instance doesn't care about decorators and Execution overrides this
|
||||
"""
|
||||
return self.base
|
||||
|
||||
|
||||
class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
|
||||
"""
|
||||
This class is used to evaluate instances.
|
||||
"""
|
||||
def __init__(self, base, var_args=()):
|
||||
super(Instance, self).__init__(base, var_args)
|
||||
if str(base.name) in ['list', 'set'] \
|
||||
and builtin.Builtin.scope == base.get_parent_until():
|
||||
# compare the module path with the builtin name.
|
||||
self.var_args = dynamic.check_array_instances(self)
|
||||
else:
|
||||
# need to execute the __init__ function, because the dynamic param
|
||||
# searching needs it.
|
||||
with common.ignored(KeyError):
|
||||
self.execute_subscope_by_name('__init__', self.var_args)
|
||||
# Generated instances are classes that are just generated by self
|
||||
# (No var_args) used.
|
||||
self.is_generated = False
|
||||
|
||||
@cache.memoize_default()
|
||||
def _get_method_execution(self, func):
|
||||
func = InstanceElement(self, func, True)
|
||||
return Execution(func, self.var_args)
|
||||
|
||||
def _get_func_self_name(self, func):
|
||||
"""
|
||||
Returns the name of the first param in a class method (which is
|
||||
normally self.
|
||||
"""
|
||||
try:
|
||||
return str(func.params[0].get_name())
|
||||
except IndexError:
|
||||
return None
|
||||
|
||||
@cache.memoize_default([])
|
||||
def _get_self_attributes(self):
|
||||
def add_self_dot_name(name):
|
||||
"""
|
||||
Need to copy and rewrite the name, because names are now
|
||||
``instance_usage.variable`` instead of ``self.variable``.
|
||||
"""
|
||||
n = copy.copy(name)
|
||||
n.names = n.names[1:]
|
||||
names.append(InstanceElement(self, n))
|
||||
|
||||
names = []
|
||||
# This loop adds the names of the self object, copies them and removes
|
||||
# the self.
|
||||
for sub in self.base.subscopes:
|
||||
if isinstance(sub, pr.Class):
|
||||
continue
|
||||
# Get the self name, if there's one.
|
||||
self_name = self._get_func_self_name(sub)
|
||||
if not self_name:
|
||||
continue
|
||||
|
||||
if sub.name.get_code() == '__init__':
|
||||
# ``__init__`` is special because the params need are injected
|
||||
# this way. Therefore an execution is necessary.
|
||||
if not sub.decorators:
|
||||
# __init__ decorators should generally just be ignored,
|
||||
# because to follow them and their self variables is too
|
||||
# complicated.
|
||||
sub = self._get_method_execution(sub)
|
||||
for n in sub.get_set_vars():
|
||||
# Only names with the selfname are being added.
|
||||
# It is also important, that they have a len() of 2,
|
||||
# because otherwise, they are just something else
|
||||
if n.names[0] == self_name and len(n.names) == 2:
|
||||
add_self_dot_name(n)
|
||||
|
||||
for s in self.base.get_super_classes():
|
||||
names += Instance(s)._get_self_attributes()
|
||||
|
||||
return names
|
||||
|
||||
def get_subscope_by_name(self, name):
|
||||
sub = self.base.get_subscope_by_name(name)
|
||||
return InstanceElement(self, sub, True)
|
||||
|
||||
def execute_subscope_by_name(self, name, args=()):
|
||||
method = self.get_subscope_by_name(name)
|
||||
return Execution(method, args).get_return_types()
|
||||
|
||||
def get_descriptor_return(self, obj):
|
||||
""" Throws a KeyError if there's no method. """
|
||||
# Arguments in __get__ descriptors are obj, class.
|
||||
# `method` is the new parent of the array, don't know if that's good.
|
||||
args = [obj, obj.base] if isinstance(obj, Instance) else [None, obj]
|
||||
return self.execute_subscope_by_name('__get__', args)
|
||||
|
||||
@cache.memoize_default([])
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
Get the instance vars of a class. This includes the vars of all
|
||||
classes
|
||||
"""
|
||||
names = self._get_self_attributes()
|
||||
|
||||
class_names = self.base.instance_names()
|
||||
for var in class_names:
|
||||
names.append(InstanceElement(self, var, True))
|
||||
return names
|
||||
|
||||
def scope_generator(self):
|
||||
"""
|
||||
An Instance has two scopes: The scope with self names and the class
|
||||
scope. Instance variables have priority over the class scope.
|
||||
"""
|
||||
yield self, self._get_self_attributes()
|
||||
|
||||
names = []
|
||||
class_names = self.base.instance_names()
|
||||
for var in class_names:
|
||||
names.append(InstanceElement(self, var, True))
|
||||
yield self, names
|
||||
|
||||
def get_index_types(self, index=None):
|
||||
args = [] if index is None else [index]
|
||||
try:
|
||||
return self.execute_subscope_by_name('__getitem__', args)
|
||||
except KeyError:
|
||||
debug.warning('No __getitem__, cannot access the array.')
|
||||
return []
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'name', 'get_imports',
|
||||
'doc', 'docstr', 'asserts']:
|
||||
raise AttributeError("Instance %s: Don't touch this (%s)!"
|
||||
% (self, name))
|
||||
return getattr(self.base, name)
|
||||
|
||||
def __repr__(self):
|
||||
return "<e%s of %s (var_args: %s)>" % \
|
||||
(type(self).__name__, self.base, len(self.var_args or []))
|
||||
|
||||
|
||||
class InstanceElement(use_metaclass(cache.CachedMetaClass, pr.Base)):
|
||||
"""
|
||||
InstanceElement is a wrapper for any object, that is used as an instance
|
||||
variable (e.g. self.variable or class methods).
|
||||
"""
|
||||
def __init__(self, instance, var, is_class_var=False):
|
||||
if isinstance(var, pr.Function):
|
||||
var = Function(var)
|
||||
elif isinstance(var, pr.Class):
|
||||
var = Class(var)
|
||||
self.instance = instance
|
||||
self.var = var
|
||||
self.is_class_var = is_class_var
|
||||
|
||||
@property
|
||||
@cache.memoize_default()
|
||||
def parent(self):
|
||||
par = self.var.parent
|
||||
if isinstance(par, Class) and par == self.instance.base \
|
||||
or isinstance(par, pr.Class) \
|
||||
and par == self.instance.base.base:
|
||||
par = self.instance
|
||||
elif not isinstance(par, pr.Module):
|
||||
par = InstanceElement(self.instance, par, self.is_class_var)
|
||||
return par
|
||||
|
||||
def get_parent_until(self, *args, **kwargs):
|
||||
return pr.Simple.get_parent_until(self, *args, **kwargs)
|
||||
|
||||
def get_decorated_func(self):
|
||||
""" Needed because the InstanceElement should not be stripped """
|
||||
func = self.var.get_decorated_func(self.instance)
|
||||
if func == self.var:
|
||||
return self
|
||||
return func
|
||||
|
||||
def get_commands(self):
|
||||
# Copy and modify the array.
|
||||
return [InstanceElement(self.instance, command, self.is_class_var)
|
||||
if not isinstance(command, unicode) else command
|
||||
for command in self.var.get_commands()]
|
||||
|
||||
def __iter__(self):
|
||||
for el in self.var.__iter__():
|
||||
yield InstanceElement(self.instance, el, self.is_class_var)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.var, name)
|
||||
|
||||
def isinstance(self, *cls):
|
||||
return isinstance(self.var, cls)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self.var)
|
||||
|
||||
|
||||
class Class(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
|
||||
"""
|
||||
This class is not only important to extend `pr.Class`, it is also a
|
||||
important for descriptors (if the descriptor methods are evaluated or not).
|
||||
"""
|
||||
def __init__(self, base):
|
||||
self.base = base
|
||||
|
||||
@cache.memoize_default(default=())
|
||||
def get_super_classes(self):
|
||||
supers = []
|
||||
# TODO care for mro stuff (multiple super classes).
|
||||
for s in self.base.supers:
|
||||
# Super classes are statements.
|
||||
for cls in evaluate.follow_statement(s):
|
||||
if not isinstance(cls, Class):
|
||||
debug.warning('Received non class, as a super class')
|
||||
continue # Just ignore other stuff (user input error).
|
||||
supers.append(cls)
|
||||
if not supers and self.base.parent != builtin.Builtin.scope:
|
||||
# add `object` to classes
|
||||
supers += evaluate.find_name(builtin.Builtin.scope, 'object')
|
||||
return supers
|
||||
|
||||
@cache.memoize_default(default=())
|
||||
def instance_names(self):
|
||||
def in_iterable(name, iterable):
|
||||
""" checks if the name is in the variable 'iterable'. """
|
||||
for i in iterable:
|
||||
# Only the last name is important, because these names have a
|
||||
# maximal length of 2, with the first one being `self`.
|
||||
if i.names[-1] == name.names[-1]:
|
||||
return True
|
||||
return False
|
||||
|
||||
result = self.base.get_defined_names()
|
||||
super_result = []
|
||||
# TODO mro!
|
||||
for cls in self.get_super_classes():
|
||||
# Get the inherited names.
|
||||
for i in cls.instance_names():
|
||||
if not in_iterable(i, result):
|
||||
super_result.append(i)
|
||||
result += super_result
|
||||
return result
|
||||
|
||||
@cache.memoize_default(default=())
|
||||
def get_defined_names(self):
|
||||
result = self.instance_names()
|
||||
type_cls = evaluate.find_name(builtin.Builtin.scope, 'type')[0]
|
||||
return result + type_cls.base.get_defined_names()
|
||||
|
||||
def get_subscope_by_name(self, name):
|
||||
for sub in reversed(self.subscopes):
|
||||
if sub.name.get_code() == name:
|
||||
return sub
|
||||
raise KeyError("Couldn't find subscope.")
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.base.name
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'parent', 'asserts', 'docstr',
|
||||
'doc', 'get_imports', 'get_parent_until', 'get_code',
|
||||
'subscopes']:
|
||||
raise AttributeError("Don't touch this: %s of %s !" % (name, self))
|
||||
return getattr(self.base, name)
|
||||
|
||||
def __repr__(self):
|
||||
return "<e%s of %s>" % (type(self).__name__, self.base)
|
||||
|
||||
|
||||
class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
|
||||
"""
|
||||
Needed because of decorators. Decorators are evaluated here.
|
||||
"""
|
||||
def __init__(self, func, is_decorated=False):
|
||||
""" This should not be called directly """
|
||||
self.base_func = func
|
||||
self.is_decorated = is_decorated
|
||||
|
||||
@cache.memoize_default()
|
||||
def _decorated_func(self, instance=None):
|
||||
"""
|
||||
Returns the function, that is to be executed in the end.
|
||||
This is also the places where the decorators are processed.
|
||||
"""
|
||||
f = self.base_func
|
||||
|
||||
# Only enter it, if has not already been processed.
|
||||
if not self.is_decorated:
|
||||
for dec in reversed(self.base_func.decorators):
|
||||
debug.dbg('decorator:', dec, f)
|
||||
dec_results = set(evaluate.follow_statement(dec))
|
||||
if not len(dec_results):
|
||||
debug.warning('decorator not found: %s on %s' %
|
||||
(dec, self.base_func))
|
||||
return None
|
||||
decorator = dec_results.pop()
|
||||
if dec_results:
|
||||
debug.warning('multiple decorators found', self.base_func,
|
||||
dec_results)
|
||||
# Create param array.
|
||||
old_func = Function(f, is_decorated=True)
|
||||
if instance is not None and decorator.isinstance(Function):
|
||||
old_func = InstanceElement(instance, old_func)
|
||||
instance = None
|
||||
|
||||
wrappers = Execution(decorator, (old_func,)).get_return_types()
|
||||
if not len(wrappers):
|
||||
debug.warning('no wrappers found', self.base_func)
|
||||
return None
|
||||
if len(wrappers) > 1:
|
||||
debug.warning('multiple wrappers found', self.base_func,
|
||||
wrappers)
|
||||
# This is here, that the wrapper gets executed.
|
||||
f = wrappers[0]
|
||||
|
||||
debug.dbg('decorator end', f)
|
||||
if f != self.base_func and isinstance(f, pr.Function):
|
||||
f = Function(f)
|
||||
return f
|
||||
|
||||
def get_decorated_func(self, instance=None):
|
||||
decorated_func = self._decorated_func(instance)
|
||||
if decorated_func == self.base_func:
|
||||
return self
|
||||
if decorated_func is None:
|
||||
# If the decorator func is not found, just ignore the decorator
|
||||
# function, because sometimes decorators are just really
|
||||
# complicated.
|
||||
return Function(self.base_func, True)
|
||||
return decorated_func
|
||||
|
||||
def get_magic_method_names(self):
|
||||
return builtin.Builtin.magic_function_scope.get_defined_names()
|
||||
|
||||
def get_magic_method_scope(self):
|
||||
return builtin.Builtin.magic_function_scope
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.base_func, name)
|
||||
|
||||
def __repr__(self):
|
||||
dec = ''
|
||||
if self._decorated_func() != self.base_func:
|
||||
dec = " is " + repr(self._decorated_func())
|
||||
return "<e%s of %s%s>" % (type(self).__name__, self.base_func, dec)
|
||||
|
||||
|
||||
class Execution(Executable):
|
||||
"""
|
||||
This class is used to evaluate functions and their returns.
|
||||
|
||||
This is the most complicated class, because it contains the logic to
|
||||
transfer parameters. It is even more complicated, because there may be
|
||||
multiple calls to functions and recursion has to be avoided. But this is
|
||||
responsibility of the decorators.
|
||||
"""
|
||||
def follow_var_arg(self, index):
|
||||
try:
|
||||
stmt = self.var_args[index]
|
||||
except IndexError:
|
||||
return []
|
||||
else:
|
||||
if isinstance(stmt, pr.Statement):
|
||||
return evaluate.follow_statement(stmt)
|
||||
else:
|
||||
return [stmt] # just some arbitrary object
|
||||
|
||||
@property
|
||||
@cache.memoize_default()
|
||||
def decorated(self):
|
||||
"""Get the decorated version of the input"""
|
||||
base = self.base
|
||||
if self.base.isinstance(Function):
|
||||
base = base.get_decorated_func()
|
||||
return base
|
||||
|
||||
@cache.memoize_default(default=())
|
||||
@recursion.ExecutionRecursionDecorator
|
||||
def get_return_types(self, evaluate_generator=False):
|
||||
""" Get the return types of a function. """
|
||||
base = self.decorated
|
||||
stmts = []
|
||||
if base.parent == builtin.Builtin.scope \
|
||||
and not isinstance(base, (Generator, Array)):
|
||||
func_name = str(base.name)
|
||||
|
||||
# some implementations of builtins:
|
||||
if func_name == 'getattr':
|
||||
# follow the first param
|
||||
objects = self.follow_var_arg(0)
|
||||
names = self.follow_var_arg(1)
|
||||
for obj in objects:
|
||||
if not isinstance(obj, (Instance, Class, pr.Module)):
|
||||
debug.warning('getattr called without instance')
|
||||
continue
|
||||
|
||||
for arr_name in names:
|
||||
if not isinstance(arr_name, Instance):
|
||||
debug.warning('getattr called without str')
|
||||
continue
|
||||
if len(arr_name.var_args) != 1:
|
||||
debug.warning('jedi getattr is too simple')
|
||||
key = arr_name.var_args[0]
|
||||
stmts += evaluate.follow_path(iter([key]), obj, base)
|
||||
return stmts
|
||||
elif func_name == 'type':
|
||||
# otherwise it would be a metaclass
|
||||
if len(self.var_args) == 1:
|
||||
objects = self.follow_var_arg(0)
|
||||
return [o.base for o in objects if isinstance(o, Instance)]
|
||||
elif func_name == 'super':
|
||||
# TODO make this able to detect multiple inheritance supers
|
||||
accept = (pr.Function,)
|
||||
func = self.var_args.get_parent_until(accept)
|
||||
if func.isinstance(*accept):
|
||||
cls = func.get_parent_until(accept + (pr.Class,),
|
||||
include_current=False)
|
||||
if isinstance(cls, pr.Class):
|
||||
cls = Class(cls)
|
||||
su = cls.get_super_classes()
|
||||
if su:
|
||||
return [Instance(su[0])]
|
||||
return []
|
||||
|
||||
if base.isinstance(Class):
|
||||
# There maybe executions of executions.
|
||||
return [Instance(base, self.var_args)]
|
||||
elif isinstance(base, Generator):
|
||||
return base.iter_content()
|
||||
else:
|
||||
try:
|
||||
base.returns # Test if it is a function
|
||||
except AttributeError:
|
||||
if hasattr(base, 'execute_subscope_by_name'):
|
||||
try:
|
||||
stmts = base.execute_subscope_by_name('__call__',
|
||||
self.var_args)
|
||||
except KeyError:
|
||||
debug.warning("no __call__ func available", base)
|
||||
else:
|
||||
debug.warning("no execution possible", base)
|
||||
else:
|
||||
stmts = self._get_function_returns(base, evaluate_generator)
|
||||
|
||||
debug.dbg('exec result: %s in %s' % (stmts, self))
|
||||
|
||||
return imports.strip_imports(stmts)
|
||||
|
||||
def _get_function_returns(self, func, evaluate_generator):
|
||||
""" A normal Function execution """
|
||||
# Feed the listeners, with the params.
|
||||
for listener in func.listeners:
|
||||
listener.execute(self.get_params())
|
||||
if func.is_generator and not evaluate_generator:
|
||||
return [Generator(func, self.var_args)]
|
||||
else:
|
||||
stmts = docstrings.find_return_types(func)
|
||||
for r in self.returns:
|
||||
if r is not None:
|
||||
stmts += evaluate.follow_statement(r)
|
||||
return stmts
|
||||
|
||||
@cache.memoize_default(default=())
|
||||
def get_params(self):
|
||||
"""
|
||||
This returns the params for an Execution/Instance and is injected as a
|
||||
'hack' into the pr.Function class.
|
||||
This needs to be here, because Instance can have __init__ functions,
|
||||
which act the same way as normal functions.
|
||||
"""
|
||||
def gen_param_name_copy(param, keys=(), values=(), array_type=None):
|
||||
"""
|
||||
Create a param with the original scope (of varargs) as parent.
|
||||
"""
|
||||
if isinstance(self.var_args, pr.Array):
|
||||
parent = self.var_args.parent
|
||||
start_pos = self.var_args.start_pos
|
||||
else:
|
||||
parent = self.decorated
|
||||
start_pos = 0, 0
|
||||
|
||||
new_param = copy.copy(param)
|
||||
new_param.is_generated = True
|
||||
if parent is not None:
|
||||
new_param.parent = parent
|
||||
|
||||
# create an Array (-> needed for *args/**kwargs tuples/dicts)
|
||||
arr = pr.Array(self._sub_module, start_pos, array_type, parent)
|
||||
arr.values = values
|
||||
key_stmts = []
|
||||
for key in keys:
|
||||
stmt = pr.Statement(self._sub_module, [], start_pos, None)
|
||||
stmt._commands = [key]
|
||||
key_stmts.append(stmt)
|
||||
arr.keys = key_stmts
|
||||
arr.type = array_type
|
||||
|
||||
new_param._commands = [arr]
|
||||
|
||||
name = copy.copy(param.get_name())
|
||||
name.parent = new_param
|
||||
return name
|
||||
|
||||
result = []
|
||||
start_offset = 0
|
||||
if isinstance(self.decorated, InstanceElement):
|
||||
# Care for self -> just exclude it and add the instance
|
||||
start_offset = 1
|
||||
self_name = copy.copy(self.decorated.params[0].get_name())
|
||||
self_name.parent = self.decorated.instance
|
||||
result.append(self_name)
|
||||
|
||||
param_dict = {}
|
||||
for param in self.decorated.params:
|
||||
param_dict[str(param.get_name())] = param
|
||||
# There may be calls, which don't fit all the params, this just ignores
|
||||
# it.
|
||||
var_arg_iterator = self.get_var_args_iterator()
|
||||
|
||||
non_matching_keys = []
|
||||
keys_used = set()
|
||||
keys_only = False
|
||||
for param in self.decorated.params[start_offset:]:
|
||||
# The value and key can both be null. There, the defaults apply.
|
||||
# args / kwargs will just be empty arrays / dicts, respectively.
|
||||
# Wrong value count is just ignored. If you try to test cases that
|
||||
# are not allowed in Python, Jedi will maybe not show any
|
||||
# completions.
|
||||
key, value = next(var_arg_iterator, (None, None))
|
||||
while key:
|
||||
keys_only = True
|
||||
try:
|
||||
key_param = param_dict[str(key)]
|
||||
except KeyError:
|
||||
non_matching_keys.append((key, value))
|
||||
else:
|
||||
keys_used.add(str(key))
|
||||
result.append(gen_param_name_copy(key_param,
|
||||
values=[value]))
|
||||
key, value = next(var_arg_iterator, (None, None))
|
||||
|
||||
commands = param.get_commands()
|
||||
keys = []
|
||||
values = []
|
||||
array_type = None
|
||||
ignore_creation = False
|
||||
if commands[0] == '*':
|
||||
# *args param
|
||||
array_type = pr.Array.TUPLE
|
||||
if value:
|
||||
values.append(value)
|
||||
for key, value in var_arg_iterator:
|
||||
# Iterate until a key argument is found.
|
||||
if key:
|
||||
var_arg_iterator.push_back((key, value))
|
||||
break
|
||||
values.append(value)
|
||||
elif commands[0] == '**':
|
||||
# **kwargs param
|
||||
array_type = pr.Array.DICT
|
||||
if non_matching_keys:
|
||||
keys, values = zip(*non_matching_keys)
|
||||
elif not keys_only:
|
||||
# normal param
|
||||
if value is not None:
|
||||
values = [value]
|
||||
else:
|
||||
if param.assignment_details:
|
||||
# No value: return the default values.
|
||||
ignore_creation = True
|
||||
result.append(param.get_name())
|
||||
param.is_generated = True
|
||||
else:
|
||||
# If there is no assignment detail, that means there is
|
||||
# no assignment, just the result. Therefore nothing has
|
||||
# to be returned.
|
||||
values = []
|
||||
|
||||
# Just ignore all the params that are without a key, after one
|
||||
# keyword argument was set.
|
||||
if not ignore_creation and (not keys_only or commands[0] == '**'):
|
||||
keys_used.add(str(key))
|
||||
result.append(gen_param_name_copy(param, keys=keys,
|
||||
values=values, array_type=array_type))
|
||||
|
||||
if keys_only:
|
||||
# sometimes param arguments are not completely written (which would
|
||||
# create an Exception, but we have to handle that).
|
||||
for k in set(param_dict) - keys_used:
|
||||
result.append(gen_param_name_copy(param_dict[k]))
|
||||
return result
|
||||
|
||||
def get_var_args_iterator(self):
|
||||
"""
|
||||
Yields key/value pairs; the key is None if it is not a named argument.
|
||||
"""
|
||||
def iterate():
|
||||
# `var_args` is typically an Array, and not a list.
|
||||
for stmt in self.var_args:
|
||||
if not isinstance(stmt, pr.Statement):
|
||||
if stmt is None:
|
||||
yield None, None
|
||||
continue
|
||||
old = stmt
|
||||
# generate a statement if it's not already one.
|
||||
module = builtin.Builtin.scope
|
||||
stmt = pr.Statement(module, [], (0, 0), None)
|
||||
stmt._commands = [old]
|
||||
|
||||
# *args
|
||||
commands = stmt.get_commands()
|
||||
if not len(commands):
|
||||
continue
|
||||
if commands[0] == '*':
|
||||
arrays = evaluate.follow_call_list(commands[1:])
|
||||
# *args must be some sort of an array, otherwise -> ignore
|
||||
|
||||
for array in arrays:
|
||||
if isinstance(array, Array):
|
||||
for field_stmt in array: # yield from plz!
|
||||
yield None, field_stmt
|
||||
elif isinstance(array, Generator):
|
||||
for field_stmt in array.iter_content():
|
||||
yield None, helpers.FakeStatement(field_stmt)
|
||||
# **kwargs
|
||||
elif commands[0] == '**':
|
||||
arrays = evaluate.follow_call_list(commands[1:])
|
||||
for array in arrays:
|
||||
if isinstance(array, Array):
|
||||
for key_stmt, value_stmt in array.items():
|
||||
# The first index is the key, if syntactically correct.
|
||||
call = key_stmt.get_commands()[0]
|
||||
if isinstance(call, pr.Name):
|
||||
yield call, value_stmt
|
||||
elif isinstance(call, pr.Call):
|
||||
yield call.name, value_stmt
|
||||
# Normal arguments (including key arguments).
|
||||
else:
|
||||
if stmt.assignment_details:
|
||||
key_arr, op = stmt.assignment_details[0]
|
||||
# named parameter
|
||||
if key_arr and isinstance(key_arr[0], pr.Call):
|
||||
yield key_arr[0].name, stmt
|
||||
else:
|
||||
yield None, stmt
|
||||
|
||||
return iter(common.PushBackIterator(iterate()))
|
||||
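The iterator is wrapped in ``common.PushBackIterator`` so that the ``*args``
handling above can un-read a keyword argument it was not meant to consume. A
minimal standalone sketch of such a push-back wrapper (just the idea, not
jedi's actual class)::

    class PushBackIterator(object):
        """Iterator wrapper that lets callers un-read items."""
        def __init__(self, iterator):
            self._iterator = iter(iterator)
            self._pushed = []

        def push_back(self, value):
            self._pushed.append(value)

        def __iter__(self):
            return self

        def __next__(self):
            if self._pushed:
                return self._pushed.pop()
            return next(self._iterator)
        next = __next__  # Python 2 alias

    it = PushBackIterator([1, 2, 3])
    first = next(it)     # 1
    it.push_back(first)  # un-read it
    assert list(it) == [1, 2, 3]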
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
Call the default method with the own instance (self implements all
|
||||
the necessary functions). Add also the params.
|
||||
"""
|
||||
return self.get_params() + pr.Scope.get_set_vars(self)
|
||||
|
||||
get_set_vars = get_defined_names
|
||||
|
||||
@common.rethrow_uncaught
|
||||
def copy_properties(self, prop):
|
||||
"""
|
||||
Literally copies a property of a Function. Copying is very expensive,
because it is something like `copy.deepcopy`. However, the copied
objects can be used in executions as if they were part of the
execution.
|
||||
"""
|
||||
# Copy all these lists into this local function.
|
||||
attr = getattr(self.decorated, prop)
|
||||
objects = []
|
||||
for element in attr:
|
||||
if element is None:
|
||||
copied = element
|
||||
else:
|
||||
copied = helpers.fast_parent_copy(element)
|
||||
copied.parent = self._scope_copy(copied.parent)
|
||||
if isinstance(copied, pr.Function):
|
||||
copied = Function(copied)
|
||||
objects.append(copied)
|
||||
return objects
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'imports', '_sub_module']:
|
||||
raise AttributeError('Tried to access %s: %s. Why?' % (name, self))
|
||||
return getattr(self.decorated, name)
|
||||
|
||||
@cache.memoize_default()
|
||||
@common.rethrow_uncaught
|
||||
def _scope_copy(self, scope):
|
||||
""" Copies a scope (e.g. if) in an execution """
|
||||
# TODO method uses different scopes than the subscopes property.
|
||||
|
||||
# just check the start_pos, sometimes it's difficult with closures
|
||||
# to compare the scopes directly.
|
||||
if scope.start_pos == self.start_pos:
|
||||
return self
|
||||
else:
|
||||
copied = helpers.fast_parent_copy(scope)
|
||||
copied.parent = self._scope_copy(copied.parent)
|
||||
return copied
|
||||
|
||||
@property
|
||||
@cache.memoize_default()
|
||||
def returns(self):
|
||||
return self.copy_properties('returns')
|
||||
|
||||
@property
|
||||
@cache.memoize_default()
|
||||
def asserts(self):
|
||||
return self.copy_properties('asserts')
|
||||
|
||||
@property
|
||||
@cache.memoize_default()
|
||||
def statements(self):
|
||||
return self.copy_properties('statements')
|
||||
|
||||
@property
|
||||
@cache.memoize_default()
|
||||
def subscopes(self):
|
||||
return self.copy_properties('subscopes')
|
||||
|
||||
def get_statement_for_position(self, pos):
|
||||
return pr.Scope.get_statement_for_position(self, pos)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % \
|
||||
(type(self).__name__, self.decorated)
|
||||
|
||||
|
||||
class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
|
||||
""" Cares for `yield` statements. """
|
||||
def __init__(self, func, var_args):
|
||||
super(Generator, self).__init__()
|
||||
self.func = func
|
||||
self.var_args = var_args
|
||||
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
Returns a list of names that a generator defines; some of them
(``__next__``, ``send``) can return the content of the generator.
|
||||
"""
|
||||
names = []
|
||||
none_pos = (0, 0)
|
||||
executes_generator = ('__next__', 'send')
|
||||
for n in ('close', 'throw') + executes_generator:
|
||||
name = pr.Name(builtin.Builtin.scope, [(n, none_pos)],
|
||||
none_pos, none_pos)
|
||||
if n in executes_generator:
|
||||
name.parent = self
|
||||
else:
|
||||
name.parent = builtin.Builtin.scope
|
||||
names.append(name)
|
||||
debug.dbg('generator names', names)
|
||||
return names
|
||||
|
||||
def iter_content(self):
|
||||
""" returns the content of __iter__ """
|
||||
return Execution(self.func, self.var_args).get_return_types(True)
|
||||
|
||||
def get_index_types(self, index=None):
|
||||
debug.warning('Tried to get array access on a generator', self)
|
||||
return []
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'parent', 'get_imports',
|
||||
'asserts', 'doc', 'docstr', 'get_parent_until', 'get_code',
|
||||
'subscopes']:
|
||||
raise AttributeError("Accessing %s of %s is not allowed."
|
||||
% (self, name))
|
||||
return getattr(self.func, name)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self.func)
|
||||
|
||||
|
||||
class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
|
||||
"""
|
||||
Used as a mirror to pr.Array, if needed. It defines some getter
|
||||
methods which are important in this module.
|
||||
"""
|
||||
def __init__(self, array):
|
||||
self._array = array
|
||||
|
||||
def get_index_types(self, index_arr=None):
|
||||
""" Get the types of a specific index or all, if not given """
|
||||
if index_arr is not None:
|
||||
if index_arr and [x for x in index_arr if ':' in x.get_commands()]:
|
||||
# array slicing
|
||||
return [self]
|
||||
|
||||
index_possibilities = self._follow_values(index_arr)
|
||||
if len(index_possibilities) == 1:
|
||||
# This is indexing only one element, with a fixed index number,
|
||||
# otherwise it just ignores the index (e.g. [1+1]).
|
||||
index = index_possibilities[0]
|
||||
if isinstance(index, Instance) \
|
||||
and str(index.name) in ['int', 'str'] \
|
||||
and len(index.var_args) == 1:
|
||||
# TODO this is just very hackish and a lot of use cases are
|
||||
# being ignored
|
||||
with common.ignored(KeyError, IndexError,
|
||||
UnboundLocalError, TypeError):
|
||||
return self.get_exact_index_types(index.var_args[0])
|
||||
|
||||
result = list(self._follow_values(self._array.values))
|
||||
result += dynamic.check_array_additions(self)
|
||||
return set(result)
|
||||
|
||||
def get_exact_index_types(self, mixed_index):
|
||||
""" Here the index is an int/str. Raises IndexError/KeyError """
|
||||
index = mixed_index
|
||||
if self.type == pr.Array.DICT:
|
||||
index = None
|
||||
for i, key_statement in enumerate(self._array.keys):
|
||||
# Because we only want the key to be a string.
|
||||
key_commands = key_statement.get_commands()
|
||||
if len(key_commands) != 1: # cannot deal with complex strings
|
||||
continue
|
||||
key = key_commands[0]
|
||||
if isinstance(key, pr.String):
|
||||
str_key = key.value
|
||||
elif isinstance(key, pr.Name):
|
||||
str_key = str(key)
|
||||
|
||||
if mixed_index == str_key:
|
||||
index = i
|
||||
break
|
||||
if index is None:
|
||||
raise KeyError('No key found in dictionary')
|
||||
|
||||
# Can raise an IndexError
|
||||
values = [self._array.values[index]]
|
||||
return self._follow_values(values)
|
||||
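For dict literals the lookup above first translates the string key into a
positional index and then follows the value statement at that position. A
plain-Python sketch of that shape (ordinary lists stand in for the key/value
statements; this is not the jedi API)::

    def exact_index_value(keys, values, mixed_index):
        # keys is None for list/tuple literals, a list of keys for dicts.
        index = mixed_index
        if keys is not None:
            matches = [i for i, key in enumerate(keys) if key == mixed_index]
            if not matches:
                raise KeyError('No key found in dictionary')
            index = matches[0]
        return values[index]  # may raise IndexError for list-like access

    assert exact_index_value(['a', 'b'], [1, 2], 'b') == 2
    assert exact_index_value(None, [10, 20], 0) == 10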
|
||||
def _follow_values(self, values):
|
||||
""" helper function for the index getters """
|
||||
return list(itertools.chain.from_iterable(evaluate.follow_statement(v)
|
||||
for v in values))
|
||||
|
||||
def get_defined_names(self):
|
||||
"""
|
||||
This method generates all `ArrayMethod` objects for one pr.Array.
For a list it returns e.g. append, pop, ...
|
||||
"""
|
||||
# `array.type` is a string with the type, e.g. 'list'.
|
||||
scope = evaluate.find_name(builtin.Builtin.scope, self._array.type)[0]
|
||||
scope = Instance(scope)
|
||||
names = scope.get_defined_names()
|
||||
return [ArrayMethod(n) for n in names]
|
||||
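Conceptually, this resolves the methods of an array literal on the matching
builtin type; plain introspection shows the kind of names that come back
(illustrative only, not the jedi machinery)::

    names = [n for n in dir(list) if not n.startswith('_')]
    # e.g. ['append', 'clear', 'copy', 'count', 'extend', 'index',
    #       'insert', 'pop', 'remove', 'reverse', 'sort']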
|
||||
@property
|
||||
def parent(self):
|
||||
return builtin.Builtin.scope
|
||||
|
||||
def get_parent_until(self):
|
||||
return builtin.Builtin.scope
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['type', 'start_pos', 'get_only_subelement', 'parent',
|
||||
'get_parent_until', 'items']:
|
||||
raise AttributeError('Strange access on %s: %s.' % (self, name))
|
||||
return getattr(self._array, name)
|
||||
|
||||
def __getitem__(self):
|
||||
return self._array.__getitem__()
|
||||
|
||||
def __iter__(self):
|
||||
return self._array.__iter__()
|
||||
|
||||
def __len__(self):
|
||||
return self._array.__len__()
|
||||
|
||||
def __repr__(self):
|
||||
return "<e%s of %s>" % (type(self).__name__, self._array)
|
||||
|
||||
|
||||
class ArrayMethod(object):
    """
    A name like `list.append`, used to access the original array methods.
    """
    def __init__(self, name):
        super(ArrayMethod, self).__init__()
        self.name = name

    def __getattr__(self, name):
        # Set access privileges:
        if name not in ['parent', 'names', 'start_pos', 'end_pos', 'get_code']:
            raise AttributeError('Strange access on %s: %s.' % (self, name))
        return getattr(self.name, name)

    def get_parent_until(self):
        return builtin.Builtin.scope

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.name)
|
||||
@@ -1,172 +0,0 @@
|
||||
"""
|
||||
Module to handle interpreted Python objects.
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import tokenize
|
||||
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.parser import token
|
||||
|
||||
|
||||
class ObjectImporter(object):
|
||||
|
||||
"""
|
||||
Import objects from a "raw" namespace such as the one returned by :func:`locals`.
|
||||
"""
|
||||
|
||||
def __init__(self, scope):
|
||||
self.scope = scope
|
||||
|
||||
count = itertools.count()
|
||||
self._genname = lambda: '*jedi-%s*' % next(count)
|
||||
"""
|
||||
Generate unique variable names to avoid name collisions.
To avoid colliding with already defined names, the generated
names are not valid Python identifiers.
|
||||
"""
|
||||
|
||||
def import_raw_namespace(self, raw_namespace):
|
||||
"""
|
||||
Import interpreted Python objects in a namespace.
|
||||
|
||||
Three kinds of objects are treated here.
|
||||
|
||||
1. Functions and classes. The objects imported like this::
|
||||
|
||||
from os.path import join
|
||||
|
||||
2. Modules. The objects imported like this::
|
||||
|
||||
import os
|
||||
|
||||
3. Instances. The objects created like this::
|
||||
|
||||
from datetime import datetime
|
||||
dt = datetime(2013, 1, 1)
|
||||
|
||||
:type raw_namespace: dict
|
||||
:arg raw_namespace: e.g., the dict given by `locals`
|
||||
"""
|
||||
scope = self.scope
|
||||
for (variable, obj) in raw_namespace.items():
|
||||
objname = getattr(obj, '__name__', None)
|
||||
|
||||
# Import functions and classes
|
||||
module = getattr(obj, '__module__', None)
|
||||
if module and objname:
|
||||
fakeimport = self.make_fakeimport(module, objname, variable)
|
||||
scope.add_import(fakeimport)
|
||||
continue
|
||||
|
||||
# Import modules
|
||||
if getattr(obj, '__file__', None) and objname:
|
||||
fakeimport = self.make_fakeimport(objname)
|
||||
scope.add_import(fakeimport)
|
||||
continue
|
||||
|
||||
# Import instances
|
||||
objclass = getattr(obj, '__class__', None)
|
||||
module = getattr(objclass, '__module__', None)
|
||||
if objclass and module:
|
||||
alias = self._genname()
|
||||
fakeimport = self.make_fakeimport(module, objclass.__name__,
|
||||
alias)
|
||||
fakestmt = self.make_fakestatement(variable, alias, call=True)
|
||||
scope.add_import(fakeimport)
|
||||
scope.add_statement(fakestmt)
|
||||
continue
|
||||
|
||||
def make_fakeimport(self, module, variable=None, alias=None):
|
||||
"""
|
||||
Make a fake import object.
|
||||
|
||||
The following statements are created depending on what parameters
|
||||
are given:
|
||||
|
||||
- only `module`: ``import <module>``
|
||||
- `module` and `variable`: ``from <module> import <variable>``
|
||||
- all: ``from <module> import <variable> as <alias>``
|
||||
|
||||
:type module: str
|
||||
:arg module: ``<module>`` part in ``from <module> import ...``
|
||||
:type variable: str
|
||||
:arg variable: ``<variable>`` part in ``from ... import <variable>``
|
||||
:type alias: str
|
||||
:arg alias: ``<alias>`` part in ``... import ... as <alias>``.
|
||||
|
||||
:rtype: :class:`parsing_representation.Import`
|
||||
"""
|
||||
submodule = self.scope._sub_module
|
||||
if variable:
|
||||
varname = pr.Name(
|
||||
module=submodule,
|
||||
names=[(variable, (-1, 0))],
|
||||
start_pos=(-1, 0),
|
||||
end_pos=(None, None))
|
||||
else:
|
||||
varname = None
|
||||
modname = pr.Name(
|
||||
module=submodule,
|
||||
names=[(module, (-1, 0))],
|
||||
start_pos=(-1, 0),
|
||||
end_pos=(None, None))
|
||||
if alias:
|
||||
aliasname = pr.Name(
|
||||
module=submodule,
|
||||
names=[(alias, (-1, 0))],
|
||||
start_pos=(-1, 0),
|
||||
end_pos=(None, None))
|
||||
else:
|
||||
aliasname = None
|
||||
if varname:
|
||||
fakeimport = pr.Import(
|
||||
module=submodule,
|
||||
namespace=varname,
|
||||
from_ns=modname,
|
||||
alias=aliasname,
|
||||
start_pos=(-1, 0),
|
||||
end_pos=(None, None))
|
||||
else:
|
||||
fakeimport = pr.Import(
|
||||
module=submodule,
|
||||
namespace=modname,
|
||||
alias=aliasname,
|
||||
start_pos=(-1, 0),
|
||||
end_pos=(None, None))
|
||||
return fakeimport
|
||||
|
||||
def make_fakestatement(self, lhs, rhs, call=False):
|
||||
"""
|
||||
Make a fake statement object that represents ``lhs = rhs``.
|
||||
|
||||
:type call: bool
|
||||
:arg call: When `call` is true, make a fake statement that represents
|
||||
``lhs = rhs()``.
|
||||
|
||||
:rtype: :class:`parsing_representation.Statement`
|
||||
"""
|
||||
submodule = self.scope._sub_module
|
||||
lhsname = pr.Name(
|
||||
module=submodule,
|
||||
names=[(lhs, (0, 0))],
|
||||
start_pos=(0, 0),
|
||||
end_pos=(None, None))
|
||||
rhsname = pr.Name(
|
||||
module=submodule,
|
||||
names=[(rhs, (0, 0))],
|
||||
start_pos=(0, 0),
|
||||
end_pos=(None, None))
|
||||
token_list = [lhsname, token.Token.from_tuple(
|
||||
(tokenize.OP, '=', (0, 0))
|
||||
), rhsname]
|
||||
if call:
|
||||
token_list.extend([
|
||||
token.Token.from_tuple((tokenize.OP, '(', (0, 0))),
|
||||
token.Token.from_tuple((tokenize.OP, ')', (0, 0))),
|
||||
])
|
||||
return pr.Statement(
|
||||
module=submodule,
|
||||
token_list=token_list,
|
||||
start_pos=(0, 0),
|
||||
end_pos=(None, None))
|
||||
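The branching in ``import_raw_namespace`` boils down to a three-way
classification of every interpreted object. A standalone mirror of those
checks (illustrative only; it does not build the fake imports)::

    import os
    from datetime import datetime

    def classify(obj):
        if getattr(obj, '__module__', None) and getattr(obj, '__name__', None):
            return 'function or class'  # -> from <module> import <name>
        if getattr(obj, '__file__', None) and getattr(obj, '__name__', None):
            return 'module'             # -> import <name>
        if getattr(obj, '__class__', None):
            return 'instance'           # -> import the class, then <var> = Class()
        return 'unknown'

    assert classify(os.path.join) == 'function or class'
    assert classify(os) == 'module'
    assert classify(datetime(2013, 1, 1)) == 'instance'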
jedi/modules.py
@@ -1,418 +0,0 @@
|
||||
"""
|
||||
Don't confuse these classes with the :mod:`parsing_representation` module; the
modules here can access that representation via ``module.parser.module``.
``Module`` exists mainly for caching purposes.
|
||||
|
||||
Basically :mod:`modules` offers the classes:
|
||||
|
||||
- ``CachedModule``, a base class for all cached modules.
|
||||
- ``Module``, the class for all normal Python modules (not builtins; those are
at home in :mod:`builtin`).
|
||||
- ``ModuleWithCursor``, holds the module information for :class:`api.Script`.
|
||||
|
||||
Apart from those classes there's a ``sys.path`` fetching function, as well as
|
||||
`Virtual Env` and `Django` detection.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import re
|
||||
import tokenizer as tokenize
|
||||
import sys
|
||||
import os
|
||||
from ast import literal_eval
|
||||
|
||||
from jedi._compatibility import exec_function, unicode
|
||||
from jedi import cache
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.parser import fast
|
||||
from jedi import debug
|
||||
from jedi import common
|
||||
|
||||
|
||||
class CachedModule(object):
|
||||
"""
|
||||
The base type for all modules, which is not to be confused with
|
||||
`parsing_representation.Module`. Caching happens here.
|
||||
"""
|
||||
|
||||
def __init__(self, path=None, name=None):
|
||||
self.path = path and os.path.abspath(path)
|
||||
self.name = name
|
||||
self._parser = None
|
||||
|
||||
@property
|
||||
def parser(self):
|
||||
""" get the parser lazy """
|
||||
if self._parser is None:
|
||||
self._parser = cache.load_module(self.path, self.name) \
|
||||
or self._load_module()
|
||||
return self._parser
|
||||
|
||||
def _get_source(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _load_module(self):
|
||||
source = self._get_source()
|
||||
p = self.path or self.name
|
||||
p = fast.FastParser(source, p)
|
||||
cache.save_module(self.path, self.name, p)
|
||||
return p
|
||||
|
||||
|
||||
class Module(CachedModule):
|
||||
"""
|
||||
Manages all files that are parsed and caches them.
|
||||
|
||||
:param path: The module path of the file.
|
||||
:param source: The source code of the file.
|
||||
"""
|
||||
def __init__(self, path, source=None):
|
||||
super(Module, self).__init__(path=path)
|
||||
if source is None:
|
||||
with open(path) as f:
|
||||
source = f.read()
|
||||
self.source = source_to_unicode(source)
|
||||
self._line_cache = None
|
||||
|
||||
def _get_source(self):
|
||||
""" Just one time """
|
||||
s = self.source
|
||||
del self.source # memory efficiency
|
||||
return s
|
||||
|
||||
|
||||
class ModuleWithCursor(Module):
|
||||
"""
|
||||
Manages all files that are parsed and caches them.
The params source and path are important; at least one of them has to
be given.
|
||||
|
||||
:param source: The source code of the file.
|
||||
:param path: The module path of the file or None.
|
||||
:param position: The position, the user is currently in. Only important \
|
||||
for the main file.
|
||||
"""
|
||||
def __init__(self, path, source, position):
|
||||
super(ModuleWithCursor, self).__init__(path, source)
|
||||
self.position = position
|
||||
self.source = source
|
||||
self._path_until_cursor = None
|
||||
|
||||
# These two are only used because there is no nonlocal in Python 2.
|
||||
self._line_temp = None
|
||||
self._relevant_temp = None
|
||||
|
||||
@property
|
||||
def parser(self):
|
||||
""" get the parser lazy """
|
||||
if not self._parser:
|
||||
with common.ignored(KeyError):
|
||||
parser = cache.parser_cache[self.path].parser
|
||||
cache.invalidate_star_import_cache(parser.module)
|
||||
# Call the parser here already, because it will be used anyway.
# Also, the position is important here (it is not used by
# default), therefore fill the cache here.
|
||||
self._parser = fast.FastParser(self.source, self.path,
|
||||
self.position)
|
||||
# don't pickle that module, because it's changing fast
|
||||
cache.save_module(self.path, self.name, self._parser,
|
||||
pickling=False)
|
||||
return self._parser
|
||||
|
||||
def get_path_until_cursor(self):
|
||||
""" Get the path under the cursor. """
|
||||
if self._path_until_cursor is None: # small caching
|
||||
self._path_until_cursor, self._start_cursor_pos = \
|
||||
self._get_path_until_cursor(self.position)
|
||||
return self._path_until_cursor
|
||||
|
||||
def _get_path_until_cursor(self, start_pos=None):
|
||||
def fetch_line():
|
||||
if self._is_first:
|
||||
self._is_first = False
|
||||
self._line_length = self._column_temp
|
||||
line = self._first_line
|
||||
else:
|
||||
line = self.get_line(self._line_temp)
|
||||
self._line_length = len(line)
|
||||
line = line + '\n'
|
||||
# add lines with a backslash at the end
|
||||
while True:
|
||||
self._line_temp -= 1
|
||||
last_line = self.get_line(self._line_temp)
|
||||
#print self._line_temp, repr(last_line)
|
||||
if last_line and last_line[-1] == '\\':
|
||||
line = last_line[:-1] + ' ' + line
|
||||
self._line_length = len(last_line)
|
||||
else:
|
||||
break
|
||||
return line[::-1]
|
||||
|
||||
self._is_first = True
|
||||
self._line_temp, self._column_temp = start_cursor = start_pos
|
||||
self._first_line = self.get_line(self._line_temp)[:self._column_temp]
|
||||
|
||||
open_brackets = ['(', '[', '{']
|
||||
close_brackets = [')', ']', '}']
|
||||
|
||||
gen = tokenize.generate_tokens(fetch_line)
|
||||
string = ''
|
||||
level = 0
|
||||
force_point = False
|
||||
last_type = None
|
||||
try:
|
||||
for token_type, tok, start, end, line in gen:
|
||||
# print 'tok', token_type, tok, force_point
|
||||
if last_type == token_type == tokenize.NAME:
|
||||
string += ' '
|
||||
|
||||
if level > 0:
|
||||
if tok in close_brackets:
|
||||
level += 1
|
||||
if tok in open_brackets:
|
||||
level -= 1
|
||||
elif tok == '.':
|
||||
force_point = False
|
||||
elif force_point:
|
||||
# The line is reversed, therefore a number may get recognized
# as a floating point number.
|
||||
if token_type == tokenize.NUMBER and tok[0] == '.':
|
||||
force_point = False
|
||||
else:
|
||||
break
|
||||
elif tok in close_brackets:
|
||||
level += 1
|
||||
elif token_type in [tokenize.NAME, tokenize.STRING]:
|
||||
force_point = True
|
||||
elif token_type == tokenize.NUMBER:
|
||||
pass
|
||||
else:
|
||||
self._column_temp = self._line_length - end[1]
|
||||
break
|
||||
|
||||
x = start_pos[0] - end[0] + 1
|
||||
l = self.get_line(x)
|
||||
l = self._first_line if x == start_pos[0] else l
|
||||
start_cursor = x, len(l) - end[1]
|
||||
self._column_temp = self._line_length - end[1]
|
||||
string += tok
|
||||
last_type = token_type
|
||||
except tokenize.TokenError:
|
||||
debug.warning("Tokenize couldn't finish", sys.exc_info)
|
||||
|
||||
# string can still contain spaces at the end
|
||||
return string[::-1].strip(), start_cursor
|
||||
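A simplified sketch of what the reverse tokenization above computes for the
common single-line case (the real code tokenizes the reversed source so it can
also cross brackets and backslash continuations; ``path_until_cursor`` is a
hypothetical helper, not part of jedi)::

    import re

    def path_until_cursor(line, column):
        # Grab the dotted name that ends right at the cursor column.
        return re.search(r'[\w.]*$', line[:column]).group(0)

    assert path_until_cursor('x = json.loa', 12) == 'json.loa'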
|
||||
def get_path_under_cursor(self):
|
||||
"""
|
||||
Return the path under the cursor. If there is a rest of the path left
after the cursor, it is added to the part before it.
|
||||
"""
|
||||
return self.get_path_until_cursor() + self.get_path_after_cursor()
|
||||
|
||||
def get_path_after_cursor(self):
|
||||
line = self.get_line(self.position[0])
|
||||
return re.search("[\w\d]*", line[self.position[1]:]).group(0)
|
||||
|
||||
def get_operator_under_cursor(self):
|
||||
line = self.get_line(self.position[0])
|
||||
after = re.match("[^\w\s]+", line[self.position[1]:])
|
||||
before = re.match("[^\w\s]+", line[:self.position[1]][::-1])
|
||||
return (before.group(0) if before is not None else '') \
|
||||
+ (after.group(0) if after is not None else '')
|
||||
|
||||
def get_context(self, yield_positions=False):
|
||||
pos = self._start_cursor_pos
|
||||
while True:
|
||||
# remove unimportant whitespace
|
||||
line = self.get_line(pos[0])
|
||||
while True:
|
||||
if pos[1] == 0:
|
||||
line = self.get_line(pos[0] - 1)
|
||||
if line and line[-1] == '\\':
|
||||
pos = pos[0] - 1, len(line) - 1
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
if line[pos[1] - 1].isspace():
|
||||
pos = pos[0], pos[1] - 1
|
||||
else:
|
||||
break
|
||||
|
||||
try:
|
||||
result, pos = self._get_path_until_cursor(start_pos=pos)
|
||||
if yield_positions:
|
||||
yield pos
|
||||
else:
|
||||
yield result
|
||||
except StopIteration:
|
||||
if yield_positions:
|
||||
yield None
|
||||
else:
|
||||
yield ''
|
||||
|
||||
def get_line(self, line_nr):
|
||||
if not self._line_cache:
|
||||
self._line_cache = self.source.splitlines()
|
||||
if self.source:
|
||||
if self.source[-1] == '\n':
|
||||
self._line_cache.append('')
|
||||
else: # ''.splitlines() == []
|
||||
self._line_cache = ['']
|
||||
|
||||
if line_nr == 0:
|
||||
# This is a fix for the zeroth line. We need a newline there, for
|
||||
# the backwards parser.
|
||||
return ''
|
||||
if line_nr < 0:
|
||||
raise StopIteration()
|
||||
try:
|
||||
return self._line_cache[line_nr - 1]
|
||||
except IndexError:
|
||||
raise StopIteration()
|
||||
|
||||
def get_position_line(self):
|
||||
return self.get_line(self.position[0])[:self.position[1]]
|
||||
|
||||
|
||||
def get_sys_path():
|
||||
def check_virtual_env(sys_path):
|
||||
""" Add virtualenv's site-packages to the `sys.path`."""
|
||||
venv = os.getenv('VIRTUAL_ENV')
|
||||
if not venv:
|
||||
return
|
||||
venv = os.path.abspath(venv)
|
||||
p = os.path.join(
|
||||
venv, 'lib', 'python%d.%d' % sys.version_info[:2], 'site-packages')
|
||||
sys_path.insert(0, p)
|
||||
|
||||
check_virtual_env(sys.path)
|
||||
return [p for p in sys.path if p != ""]
|
||||
|
||||
|
||||
@cache.memoize_default([])
|
||||
def sys_path_with_modifications(module):
|
||||
def execute_code(code):
|
||||
c = "import os; from os.path import *; result=%s"
|
||||
variables = {'__file__': module.path}
|
||||
try:
|
||||
exec_function(c % code, variables)
|
||||
except Exception:
|
||||
debug.warning('sys path detected, but failed to evaluate')
|
||||
return None
|
||||
try:
|
||||
res = variables['result']
|
||||
if isinstance(res, str):
|
||||
return os.path.abspath(res)
|
||||
else:
|
||||
return None
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def check_module(module):
|
||||
try:
|
||||
possible_stmts = module.used_names['path']
|
||||
except KeyError:
|
||||
return get_sys_path()
|
||||
|
||||
sys_path = list(get_sys_path()) # copy
|
||||
for p in possible_stmts:
|
||||
if not isinstance(p, pr.Statement):
|
||||
continue
|
||||
commands = p.get_commands()
|
||||
# A sys.path statement consists of exactly one command.
|
||||
if len(commands) != 1 or not isinstance(commands[0], pr.Call):
|
||||
continue
|
||||
call = commands[0]
|
||||
n = call.name
|
||||
if not isinstance(n, pr.Name) or len(n.names) != 3:
|
||||
continue
|
||||
if n.names[:2] != ('sys', 'path'):
|
||||
continue
|
||||
array_cmd = n.names[2]
|
||||
if call.execution is None:
|
||||
continue
|
||||
exe = call.execution
|
||||
if not (array_cmd == 'insert' and len(exe) == 2
|
||||
or array_cmd == 'append' and len(exe) == 1):
|
||||
continue
|
||||
|
||||
if array_cmd == 'insert':
|
||||
exe_type, exe.type = exe.type, pr.Array.NOARRAY
|
||||
exe_pop = exe.values.pop(0)
|
||||
res = execute_code(exe.get_code())
|
||||
if res is not None:
|
||||
sys_path.insert(0, res)
|
||||
debug.dbg('sys path inserted: %s' % res)
|
||||
exe.type = exe_type
|
||||
exe.values.insert(0, exe_pop)
|
||||
elif array_cmd == 'append':
|
||||
res = execute_code(exe.get_code())
|
||||
if res is not None:
|
||||
sys_path.append(res)
|
||||
debug.dbg('sys path added: %s' % res)
|
||||
return sys_path
|
||||
|
||||
if module.path is None:
|
||||
# Modules without a path are not well supported, therefore return the
# normal sys.path.
|
||||
return list(get_sys_path())
|
||||
|
||||
curdir = os.path.abspath(os.curdir)
|
||||
with common.ignored(OSError):
|
||||
os.chdir(os.path.dirname(module.path))
|
||||
|
||||
result = check_module(module)
|
||||
result += detect_django_path(module.path)
|
||||
|
||||
# cleanup, back to old directory
|
||||
os.chdir(curdir)
|
||||
return result
|
||||
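``check_module`` above only recognizes very specific statement shapes, roughly
the two patterns below (a sketch of source code that would be picked up; the
``lib``/``src`` directories are made up for the example)::

    import sys
    from os.path import dirname, join

    sys.path.insert(0, join(dirname(__file__), 'lib'))  # insert: two arguments
    sys.path.append(join(dirname(__file__), 'src'))     # append: one argument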
|
||||
|
||||
def detect_django_path(module_path):
|
||||
""" Detects the path of the very well known Django library (if used) """
|
||||
result = []
|
||||
while True:
|
||||
new = os.path.dirname(module_path)
|
||||
# If the module_path doesn't change anymore, we're finished -> /
|
||||
if new == module_path:
|
||||
break
|
||||
else:
|
||||
module_path = new
|
||||
|
||||
with common.ignored(IOError):
|
||||
with open(module_path + os.path.sep + 'manage.py'):
|
||||
debug.dbg('Found django path: %s' % module_path)
|
||||
result.append(module_path)
|
||||
return result
|
||||
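A standalone sketch of the directory walk above (same idea, but using
``os.path.isfile`` instead of opening ``manage.py``)::

    import os

    def find_django_paths(module_path):
        result = []
        while True:
            new = os.path.dirname(module_path)
            if new == module_path:  # reached the filesystem root
                break
            module_path = new
            if os.path.isfile(os.path.join(module_path, 'manage.py')):
                result.append(module_path)
        return result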
|
||||
|
||||
def source_to_unicode(source, encoding=None):
|
||||
def detect_encoding():
|
||||
""" For the implementation of encoding definitions in Python, look at:
|
||||
http://www.python.org/dev/peps/pep-0263/
|
||||
http://docs.python.org/2/reference/lexical_analysis.html#encoding-\
|
||||
declarations
|
||||
"""
|
||||
byte_mark = literal_eval(r"b'\xef\xbb\xbf'")
|
||||
if source.startswith(byte_mark):
|
||||
# UTF-8 byte-order mark
|
||||
return 'utf-8'
|
||||
|
||||
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', str(source)).group(0)
|
||||
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
|
||||
first_two_lines)
|
||||
if possible_encoding:
|
||||
return possible_encoding.group(1)
|
||||
else:
|
||||
# the default if nothing else has been set -> PEP 263
|
||||
return encoding if encoding is not None else 'iso-8859-1'
|
||||
|
||||
if isinstance(source, unicode):
|
||||
# only cast str/bytes
|
||||
return source
|
||||
|
||||
# cast to unicode by default
|
||||
return unicode(source, detect_encoding(), 'replace')
|
||||
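The encoding detection above boils down to the PEP 263 regex applied to the
first two lines (plus the BOM check). A minimal sketch of just the regex part,
assuming the same iso-8859-1 fallback::

    import re

    def detect_encoding(first_two_lines, default='iso-8859-1'):
        match = re.search(r"coding[=:]\s*([-\w.]+)", first_two_lines)
        return match.group(1) if match else default

    assert detect_encoding('#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n') == 'utf-8'
    assert detect_encoding('print(1)\n') == 'iso-8859-1'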
@@ -15,9 +15,6 @@ within the statement. This lowers memory usage and cpu time and reduces the
|
||||
complexity of the ``Parser`` (there's another parser sitting inside
|
||||
``Statement``, which produces ``Array`` and ``Call``).
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import tokenizer as tokenize
|
||||
import keyword
|
||||
|
||||
from jedi._compatibility import next, StringIO
|
||||
@@ -25,6 +22,7 @@ from jedi import debug
|
||||
from jedi import common
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.parser import token as token_pr
|
||||
from jedi.parser import tokenize
|
||||
|
||||
|
||||
class Parser(object):
|
||||
@@ -36,18 +34,12 @@ class Parser(object):
|
||||
:type source: str
|
||||
:param module_path: The path of the module in the file system, may be None.
|
||||
:type module_path: str
|
||||
:param user_position: The line/column, the user is currently on.
|
||||
:type user_position: tuple(int, int)
|
||||
:param no_docstr: If True, a string at the beginning is not a docstr.
|
||||
:param is_fast_parser: -> for fast_parser
|
||||
:param top_module: Use this module as a parent instead of `self.module`.
|
||||
"""
|
||||
def __init__(self, source, module_path=None, user_position=None,
|
||||
no_docstr=False, offset=(0, 0), is_fast_parser=None,
|
||||
top_module=None):
|
||||
self.user_position = user_position
|
||||
self.user_scope = None
|
||||
self.user_stmt = None
|
||||
def __init__(self, source, module_path=None, no_docstr=False,
|
||||
offset=(0, 0), is_fast_parser=None, top_module=None):
|
||||
self.no_docstr = no_docstr
|
||||
|
||||
self.start_pos = self.end_pos = 1 + offset[0], offset[1]
|
||||
@@ -58,8 +50,7 @@ class Parser(object):
|
||||
|
||||
source = source + '\n' # end with \n, because the parser needs it
|
||||
buf = StringIO(source)
|
||||
self._gen = common.NoErrorTokenizer(buf.readline, offset,
|
||||
is_fast_parser)
|
||||
self._gen = tokenize.NoErrorTokenizer(buf.readline, offset, is_fast_parser)
|
||||
self.top_module = top_module or self.module
|
||||
try:
|
||||
self._parse()
|
||||
@@ -97,19 +88,6 @@ class Parser(object):
|
||||
self.module.used_names[tok_name] = set([simple])
|
||||
self.module.temp_used_names = []
|
||||
|
||||
if not self.user_position:
|
||||
return
|
||||
# the position is right
|
||||
if simple.start_pos <= self.user_position <= simple.end_pos:
|
||||
if self.user_stmt is not None:
|
||||
# if there is already a user position (another import, because
|
||||
# imports are splitted) the names are checked.
|
||||
for n in simple.get_set_vars():
|
||||
if n.start_pos < self.user_position <= n.end_pos:
|
||||
self.user_stmt = simple
|
||||
else:
|
||||
self.user_stmt = simple
|
||||
|
||||
def _parse_dot_name(self, pre_used_token=None):
|
||||
"""
|
||||
The dot name parser parses a name, variable or function and returns
|
||||
@@ -255,11 +233,7 @@ class Parser(object):
|
||||
return None
|
||||
|
||||
# because of 2 line func param definitions
|
||||
scope = pr.Function(self.module, fname, params, first_pos, annotation)
|
||||
if self.user_scope and scope != self.user_scope \
|
||||
and self.user_position > first_pos:
|
||||
self.user_scope = scope
|
||||
return scope
|
||||
return pr.Function(self.module, fname, params, first_pos, annotation)
|
||||
|
||||
def _parse_class(self):
|
||||
"""
|
||||
@@ -272,11 +246,8 @@ class Parser(object):
|
||||
first_pos = self.start_pos
|
||||
token_type, cname = self.next()
|
||||
if token_type != tokenize.NAME:
|
||||
debug.warning(
|
||||
"class: syntax err, token is not a name@%s (%s: %s)" % (
|
||||
self.start_pos[0], tokenize.tok_name[token_type], cname
|
||||
)
|
||||
)
|
||||
debug.warning("class: syntax err, token is not a name@%s (%s: %s)",
|
||||
self.start_pos[0], tokenize.tok_name[token_type], cname)
|
||||
return None
|
||||
|
||||
cname = pr.Name(self.module, [(cname, self.start_pos)], self.start_pos,
|
||||
@@ -289,15 +260,10 @@ class Parser(object):
|
||||
token_type, _next = self.next()
|
||||
|
||||
if _next != ':':
|
||||
debug.warning("class syntax: %s@%s" % (cname, self.start_pos[0]))
|
||||
debug.warning("class syntax: %s@%s", cname, self.start_pos[0])
|
||||
return None
|
||||
|
||||
# because of 2 line class initializations
|
||||
scope = pr.Class(self.module, cname, super, first_pos)
|
||||
if self.user_scope and scope != self.user_scope \
|
||||
and self.user_position > first_pos:
|
||||
self.user_scope = scope
|
||||
return scope
|
||||
return pr.Class(self.module, cname, super, first_pos)
|
||||
|
||||
def _parse_statement(self, pre_used_token=None, added_breaks=None,
|
||||
stmt_class=pr.Statement, names_are_set_vars=False):
|
||||
@@ -448,15 +414,6 @@ class Parser(object):
|
||||
s = s.parent
|
||||
raise
|
||||
|
||||
if self.user_position and (
|
||||
self.start_pos[0] == self.user_position[0]
|
||||
or self.user_scope is None
|
||||
and self.start_pos[0] >= self.user_position[0]
|
||||
):
|
||||
debug.dbg('user scope found [%s] = %s' %
|
||||
(self.parserline.replace('\n', ''), repr(self._scope)))
|
||||
self.user_scope = self._scope
|
||||
|
||||
self._current = typ, tok
|
||||
return self._current
|
||||
|
||||
@@ -480,8 +437,8 @@ class Parser(object):
|
||||
# This iterator stuff is not intentional. It grew historically.
|
||||
for token_type, tok in self.iterator:
|
||||
self.module.temp_used_names = []
|
||||
# debug.dbg('main: tok=[%s] type=[%s] indent=[%s]'\
|
||||
# % (tok, tokenize.tok_name[token_type], start_position[0]))
|
||||
# debug.dbg('main: tok=[%s] type=[%s] indent=[%s]', \
|
||||
# tok, tokenize.tok_name[token_type], start_position[0])
|
||||
|
||||
while token_type == tokenize.DEDENT and self._scope != self.module:
|
||||
token_type, tok = self.next()
|
||||
@@ -511,8 +468,7 @@ class Parser(object):
|
||||
if tok == 'def':
|
||||
func = self._parse_function()
|
||||
if func is None:
|
||||
debug.warning("function: syntax error@%s" %
|
||||
self.start_pos[0])
|
||||
debug.warning("function: syntax error@%s", self.start_pos[0])
|
||||
continue
|
||||
self.freshscope = True
|
||||
self._scope = self._scope.add_scope(func, self._decorators)
|
||||
@@ -556,7 +512,7 @@ class Parser(object):
|
||||
tok = 'import'
|
||||
mod = None
|
||||
if not mod and not relative_count or tok != "import":
|
||||
debug.warning("from: syntax error@%s" % self.start_pos[0])
|
||||
debug.warning("from: syntax error@%s", self.start_pos[0])
|
||||
defunct = True
|
||||
if tok != 'import':
|
||||
self._gen.push_last_back()
|
||||
@@ -577,25 +533,18 @@ class Parser(object):
|
||||
elif tok == 'for':
|
||||
set_stmt, tok = self._parse_statement(added_breaks=['in'],
|
||||
names_are_set_vars=True)
|
||||
if tok == 'in':
|
||||
statement, tok = self._parse_statement()
|
||||
if tok == ':':
|
||||
s = [] if statement is None else [statement]
|
||||
f = pr.ForFlow(self.module, s, first_pos, set_stmt)
|
||||
self._scope = self._scope.add_statement(f)
|
||||
else:
|
||||
debug.warning('syntax err, for flow started @%s',
|
||||
self.start_pos[0])
|
||||
if statement is not None:
|
||||
statement.parent = use_as_parent_scope
|
||||
if set_stmt is not None:
|
||||
set_stmt.parent = use_as_parent_scope
|
||||
else:
|
||||
debug.warning('syntax err, for flow incomplete @%s',
|
||||
self.start_pos[0])
|
||||
if set_stmt is not None:
|
||||
set_stmt.parent = use_as_parent_scope
|
||||
if tok != 'in':
|
||||
debug.warning('syntax err, for flow incomplete @%s', self.start_pos[0])
|
||||
|
||||
try:
|
||||
statement, tok = self._parse_statement()
|
||||
except StopIteration:
|
||||
statement, tok = None, None
|
||||
s = [] if statement is None else [statement]
|
||||
f = pr.ForFlow(self.module, s, first_pos, set_stmt)
|
||||
self._scope = self._scope.add_statement(f)
|
||||
if tok != ':':
|
||||
debug.warning('syntax err, for flow started @%s', self.start_pos[0])
|
||||
elif tok in ['if', 'while', 'try', 'with'] + extended_flow:
|
||||
added_breaks = []
|
||||
command = tok
|
||||
@@ -604,8 +553,7 @@ class Parser(object):
|
||||
# multiple inputs because of with
|
||||
inputs = []
|
||||
first = True
|
||||
while first or command == 'with' \
|
||||
and tok not in [':', '\n']:
|
||||
while first or command == 'with' and tok not in [':', '\n']:
|
||||
statement, tok = \
|
||||
self._parse_statement(added_breaks=added_breaks)
|
||||
if command == 'except' and tok == ',':
|
||||
@@ -619,25 +567,21 @@ class Parser(object):
|
||||
inputs.append(statement)
|
||||
first = False
|
||||
|
||||
if tok == ':':
|
||||
f = pr.Flow(self.module, command, inputs, first_pos)
|
||||
if command in extended_flow:
|
||||
# the last statement has to be another part of
|
||||
# the flow statement, because a dedent releases the
|
||||
# main scope, so just take the last statement.
|
||||
try:
|
||||
s = self._scope.statements[-1].set_next(f)
|
||||
except (AttributeError, IndexError):
|
||||
# If set_next doesn't exist, just add it.
|
||||
s = self._scope.add_statement(f)
|
||||
else:
|
||||
f = pr.Flow(self.module, command, inputs, first_pos)
|
||||
if command in extended_flow:
|
||||
# the last statement has to be another part of
|
||||
# the flow statement, because a dedent releases the
|
||||
# main scope, so just take the last statement.
|
||||
try:
|
||||
s = self._scope.statements[-1].set_next(f)
|
||||
except (AttributeError, IndexError):
|
||||
# If set_next doesn't exist, just add it.
|
||||
s = self._scope.add_statement(f)
|
||||
self._scope = s
|
||||
else:
|
||||
for i in inputs:
|
||||
i.parent = use_as_parent_scope
|
||||
debug.warning('syntax err, flow started @%s',
|
||||
self.start_pos[0])
|
||||
s = self._scope.add_statement(f)
|
||||
self._scope = s
|
||||
if tok != ':':
|
||||
debug.warning('syntax err, flow started @%s', self.start_pos[0])
|
||||
# returns
|
||||
elif tok in ['return', 'yield']:
|
||||
s = self.start_pos
|
||||
@@ -692,7 +636,7 @@ class Parser(object):
|
||||
else:
|
||||
if token_type not in [tokenize.COMMENT, tokenize.INDENT,
|
||||
tokenize.NEWLINE, tokenize.NL]:
|
||||
debug.warning('token not classified', tok, token_type,
|
||||
self.start_pos[0])
|
||||
debug.warning('token not classified %s %s %s', tok,
|
||||
token_type, self.start_pos[0])
|
||||
continue
|
||||
self.no_docstr = False
|
||||
|
||||
@@ -7,13 +7,11 @@ import re
|
||||
|
||||
from jedi._compatibility import use_metaclass
|
||||
from jedi import settings
|
||||
from jedi import common
|
||||
from jedi.parser import Parser
|
||||
from jedi.parser import representation as pr
|
||||
from jedi.parser import tokenize
|
||||
from jedi import cache
|
||||
from jedi import common
|
||||
|
||||
|
||||
SCOPE_CONTENTS = ['asserts', 'subscopes', 'imports', 'statements', 'returns']
|
||||
|
||||
|
||||
class Module(pr.Simple, pr.Module):
|
||||
@@ -28,10 +26,8 @@ class Module(pr.Simple, pr.Module):
|
||||
def reset_caches(self):
|
||||
""" This module does a whole lot of caching, because it uses different
|
||||
parsers. """
|
||||
self._used_names = None
|
||||
for p in self.parsers:
|
||||
p.user_scope = None
|
||||
p.user_stmt = None
|
||||
with common.ignored(AttributeError):
|
||||
del self._used_names
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name.startswith('__'):
|
||||
@@ -40,18 +36,16 @@ class Module(pr.Simple, pr.Module):
|
||||
return getattr(self.parsers[0].module, name)
|
||||
|
||||
@property
|
||||
@cache.underscore_memoization
|
||||
def used_names(self):
|
||||
if self._used_names is None:
|
||||
dct = {}
|
||||
for p in self.parsers:
|
||||
for k, statement_set in p.module.used_names.items():
|
||||
if k in dct:
|
||||
dct[k] |= statement_set
|
||||
else:
|
||||
dct[k] = set(statement_set)
|
||||
|
||||
self._used_names = dct
|
||||
return self._used_names
|
||||
used_names = {}
|
||||
for p in self.parsers:
|
||||
for k, statement_set in p.module.used_names.items():
|
||||
if k in used_names:
|
||||
used_names[k] |= statement_set
|
||||
else:
|
||||
used_names[k] = set(statement_set)
|
||||
return used_names
|
||||
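The new ``used_names`` property is a plain union of dict-of-set mappings; the
same merge outside the class looks roughly like this (hypothetical helper, not
part of jedi)::

    def merge_used_names(dicts):
        merged = {}
        for used_names in dicts:
            for key, statement_set in used_names.items():
                merged.setdefault(key, set()).update(statement_set)
        return merged

    assert merge_used_names([{'a': {1}}, {'a': {2}, 'b': {3}}]) == {'a': {1, 2}, 'b': {3}}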
|
||||
def __repr__(self):
|
||||
return "<%s: %s@%s-%s>" % (type(self).__name__, self.name,
|
||||
@@ -60,17 +54,16 @@ class Module(pr.Simple, pr.Module):
|
||||
|
||||
class CachedFastParser(type):
|
||||
""" This is a metaclass for caching `FastParser`. """
|
||||
def __call__(self, source, module_path=None, user_position=None):
|
||||
def __call__(self, source, module_path=None):
|
||||
if not settings.fast_parser:
|
||||
return Parser(source, module_path, user_position)
|
||||
return Parser(source, module_path)
|
||||
|
||||
pi = cache.parser_cache.get(module_path, None)
|
||||
if pi is None or isinstance(pi.parser, Parser):
|
||||
p = super(CachedFastParser, self).__call__(source, module_path,
|
||||
user_position)
|
||||
p = super(CachedFastParser, self).__call__(source, module_path)
|
||||
else:
|
||||
p = pi.parser # pi is a `cache.ParserCacheItem`
|
||||
p.update(source, user_position)
|
||||
p.update(source)
|
||||
return p
|
||||
|
||||
|
||||
@@ -95,7 +88,7 @@ class ParserNode(object):
|
||||
|
||||
scope = self.content_scope
|
||||
self._contents = {}
|
||||
for c in SCOPE_CONTENTS:
|
||||
for c in pr.SCOPE_CONTENTS:
|
||||
self._contents[c] = list(getattr(scope, c))
|
||||
self._is_generator = scope.is_generator
|
||||
|
||||
@@ -107,7 +100,6 @@ class ParserNode(object):
|
||||
for key, c in self._contents.items():
|
||||
setattr(scope, key, list(c))
|
||||
scope.is_generator = self._is_generator
|
||||
self.parser.user_scope = self.parser.module
|
||||
|
||||
if self.parent is None:
|
||||
# Global vars of the first one can be deleted, in the global scope
|
||||
@@ -147,7 +139,7 @@ class ParserNode(object):
|
||||
def _set_items(self, parser, set_parent=False):
|
||||
# insert parser objects into current structure
|
||||
scope = self.content_scope
|
||||
for c in SCOPE_CONTENTS:
|
||||
for c in pr.SCOPE_CONTENTS:
|
||||
content = getattr(scope, c)
|
||||
items = getattr(parser.module, c)
|
||||
if set_parent:
|
||||
@@ -174,6 +166,11 @@ class ParserNode(object):
|
||||
self._set_items(node.parser, set_parent=set_parent)
|
||||
node.old_children = node.children
|
||||
node.children = []
|
||||
|
||||
scope = self.content_scope
|
||||
while scope is not None:
|
||||
scope.end_pos = node.content_scope.end_pos
|
||||
scope = scope.parent
|
||||
return node
|
||||
|
||||
def add_parser(self, parser, code):
|
||||
@@ -181,11 +178,9 @@ class ParserNode(object):
|
||||
|
||||
|
||||
class FastParser(use_metaclass(CachedFastParser)):
|
||||
def __init__(self, code, module_path=None, user_position=None):
|
||||
def __init__(self, code, module_path=None):
|
||||
# set values like `pr.Module`.
|
||||
self.module_path = module_path
|
||||
self.user_position = user_position
|
||||
self._user_scope = None
|
||||
|
||||
self.current_node = None
|
||||
self.parsers = []
|
||||
@@ -199,34 +194,9 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
self.parsers[:] = []
|
||||
raise
|
||||
|
||||
@property
|
||||
def user_scope(self):
|
||||
if self._user_scope is None:
|
||||
for p in self.parsers:
|
||||
if p.user_scope:
|
||||
if isinstance(p.user_scope, pr.SubModule):
|
||||
continue
|
||||
self._user_scope = p.user_scope
|
||||
|
||||
if isinstance(self._user_scope, pr.SubModule) \
|
||||
or self._user_scope is None:
|
||||
self._user_scope = self.module
|
||||
return self._user_scope
|
||||
|
||||
@property
|
||||
def user_stmt(self):
|
||||
if self._user_stmt is None:
|
||||
for p in self.parsers:
|
||||
if p.user_stmt:
|
||||
self._user_stmt = p.user_stmt
|
||||
break
|
||||
return self._user_stmt
|
||||
|
||||
def update(self, code, user_position=None):
|
||||
self.user_position = user_position
|
||||
def update(self, code):
|
||||
self.reset_caches()
|
||||
|
||||
|
||||
try:
|
||||
self._parse(code)
|
||||
except:
|
||||
@@ -234,14 +204,6 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
self.parsers[:] = []
|
||||
raise
|
||||
|
||||
def _scan_user_scope(self, sub_module):
|
||||
""" Scan with self.user_position. """
|
||||
for scope in sub_module.statements + sub_module.subscopes:
|
||||
if isinstance(scope, pr.Scope):
|
||||
if scope.start_pos <= self.user_position <= scope.end_pos:
|
||||
return self._scan_user_scope(scope) or scope
|
||||
return None
|
||||
|
||||
def _split_parts(self, code):
|
||||
"""
|
||||
Split the code into different parts. This makes it possible to parse
|
||||
@@ -257,7 +219,7 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
parts.append(txt)
|
||||
current_lines[:] = []
|
||||
|
||||
r_keyword = '^[ \t]*(def|class|@|%s)' % '|'.join(common.FLOWS)
|
||||
r_keyword = '^[ \t]*(def|class|@|%s)' % '|'.join(tokenize.FLOWS)
|
||||
|
||||
self._lines = code.splitlines()
|
||||
current_lines = []
|
||||
@@ -292,7 +254,7 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
if not in_flow:
|
||||
m = re.match(r_keyword, l)
|
||||
if m:
|
||||
in_flow = m.group(1) in common.FLOWS
|
||||
in_flow = m.group(1) in tokenize.FLOWS
|
||||
if not is_decorator and not in_flow:
|
||||
add_part()
|
||||
add_to_last = False
|
||||
@@ -372,12 +334,6 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
else:
|
||||
self.current_node = self.current_node.add_node(node)
|
||||
|
||||
if self.current_node.parent and (isinstance(p.user_scope,
|
||||
pr.SubModule) or p.user_scope is None) \
|
||||
and self.user_position \
|
||||
and p.start_pos <= self.user_position < p.end_pos:
|
||||
p.user_scope = self.current_node.parent.content_scope
|
||||
|
||||
self.parsers.append(p)
|
||||
|
||||
is_first = False
|
||||
@@ -406,10 +362,9 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
if nodes[index].code != code:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
p = Parser(parser_code, self.module_path,
|
||||
self.user_position, offset=(line_offset, 0),
|
||||
is_fast_parser=True, top_module=self.module,
|
||||
no_docstr=no_docstr)
|
||||
p = Parser(parser_code, self.module_path, offset=(line_offset, 0),
|
||||
is_fast_parser=True, top_module=self.module,
|
||||
no_docstr=no_docstr)
|
||||
p.module.parent = self.module
|
||||
else:
|
||||
if nodes[index] != self.current_node:
|
||||
@@ -419,22 +374,10 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
p = node.parser
|
||||
m = p.module
|
||||
m.line_offset += line_offset + 1 - m.start_pos[0]
|
||||
if self.user_position is not None and \
|
||||
m.start_pos[0] <= self.user_position[0] <= m.end_pos[0]:
|
||||
# It's important to take care of the whole user
|
||||
# positioning stuff, if no reparsing is being done.
|
||||
p.user_stmt = m.get_statement_for_position(
|
||||
self.user_position, include_imports=True)
|
||||
if p.user_stmt:
|
||||
p.user_scope = p.user_stmt.parent
|
||||
else:
|
||||
p.user_scope = self._scan_user_scope(m) or m
|
||||
|
||||
return p, node
|
||||
|
||||
def reset_caches(self):
|
||||
self._user_scope = None
|
||||
self._user_stmt = None
|
||||
self.module.reset_caches()
|
||||
if self.current_node is not None:
|
||||
self.current_node.reset_contents()
|
||||
|
||||
@@ -33,20 +33,23 @@ statements in this scope. Check this out:
|
||||
|
||||
See also :attr:`Scope.subscopes` and :attr:`Scope.statements`.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import os
|
||||
import re
|
||||
import tokenizer as tokenize
|
||||
from inspect import cleandoc
|
||||
from ast import literal_eval
|
||||
|
||||
from jedi._compatibility import next, Python3Method, encoding, unicode, is_py3k
|
||||
from jedi._compatibility import next, Python3Method, encoding, unicode, is_py3
|
||||
from jedi import common
|
||||
from jedi import debug
|
||||
from jedi import cache
|
||||
from jedi.parser import tokenize
|
||||
from jedi.parser import token as token_pr
|
||||
|
||||
|
||||
SCOPE_CONTENTS = ['asserts', 'subscopes', 'imports', 'statements', 'returns']
|
||||
|
||||
|
||||
|
||||
class GetCodeState(object):
|
||||
"""A helper class for passing the state of get_code in a thread-safe
|
||||
manner"""
|
||||
@@ -55,7 +58,6 @@ class GetCodeState(object):
|
||||
def __init__(self):
|
||||
self.last_pos = (0, 0)
|
||||
|
||||
|
||||
class Base(object):
|
||||
"""
|
||||
This is just here to have an isinstance check, which is also used on
|
||||
@@ -156,7 +158,7 @@ class Simple(Base):
|
||||
|
||||
def __repr__(self):
|
||||
code = self.get_code().replace('\n', ' ')
|
||||
if not is_py3k:
|
||||
if not is_py3:
|
||||
code = code.encode(encoding, 'replace')
|
||||
return "<%s: %s@%s,%s>" % \
|
||||
(type(self).__name__, code, self.start_pos[0], self.start_pos[1])
|
||||
@@ -313,6 +315,10 @@ class Scope(Simple, IsScope):
|
||||
if self.isinstance(Function):
|
||||
checks += self.params + self.decorators
|
||||
checks += [r for r in self.returns if r is not None]
|
||||
if self.isinstance(Flow):
|
||||
checks += self.inputs
|
||||
if self.isinstance(ForFlow) and self.set_stmt is not None:
|
||||
checks.append(self.set_stmt)
|
||||
|
||||
for s in checks:
|
||||
if isinstance(s, Flow):
|
||||
@@ -368,7 +374,6 @@ class SubModule(Scope, Module):
|
||||
super(SubModule, self).__init__(self, start_pos)
|
||||
self.path = path
|
||||
self.global_vars = []
|
||||
self._name = None
|
||||
self.used_names = {}
|
||||
self.temp_used_names = []
|
||||
# this may be changed depending on fast_parser
|
||||
@@ -394,25 +399,19 @@ class SubModule(Scope, Module):
|
||||
return n
|
||||
|
||||
@property
|
||||
@cache.underscore_memoization
|
||||
def name(self):
|
||||
""" This is used for the goto functions. """
|
||||
if self._name is not None:
|
||||
return self._name
|
||||
if self.path is None:
|
||||
string = '' # no path -> empty name
|
||||
else:
|
||||
sep = (re.escape(os.path.sep),) * 2
|
||||
r = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep,
|
||||
self.path)
|
||||
r = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep, self.path)
|
||||
# remove PEP 3149 names
|
||||
string = re.sub('\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1))
|
||||
# positions are not real therefore choose (0, 0)
|
||||
names = [(string, (0, 0))]
|
||||
self._name = Name(self, names, (0, 0), (0, 0), self.use_as_parent)
|
||||
return self._name
|
||||
|
||||
def is_builtin(self):
|
||||
return not (self.path is None or self.path.endswith('.py'))
|
||||
return Name(self, names, (0, 0), (0, 0), self.use_as_parent)
|
||||
|
||||
@property
|
||||
def has_explicit_absolute_import(self):
|
||||
@@ -525,7 +524,7 @@ class Function(Scope):
|
||||
try:
|
||||
n.append(p.get_name())
|
||||
except IndexError:
|
||||
debug.warning("multiple names in param %s" % n)
|
||||
debug.warning("multiple names in param %s", n)
|
||||
return n
|
||||
|
||||
def get_call_signature(self, width=72, funcname=None):
|
||||
@@ -795,7 +794,7 @@ class Statement(Simple):
|
||||
:type start_pos: 2-tuple of int
|
||||
:param start_pos: Position (line, column) of the Statement.
|
||||
"""
|
||||
__slots__ = ('token_list', '_set_vars', 'as_names', '_commands',
|
||||
__slots__ = ('token_list', '_set_vars', 'as_names', '_expression_list',
|
||||
'_assignment_details', 'docstr', '_names_are_set_vars')
|
||||
|
||||
def __init__(self, module, token_list, start_pos, end_pos, parent=None,
|
||||
@@ -817,7 +816,6 @@ class Statement(Simple):
|
||||
self.as_names = list(as_names)
|
||||
|
||||
# cache
|
||||
self._commands = None
|
||||
self._assignment_details = []
|
||||
# this is important for other scripts
|
||||
|
||||
@@ -834,7 +832,7 @@ class Statement(Simple):
|
||||
return '%s %s ' % (''.join(pieces), assignment)
|
||||
|
||||
code = ''.join(assemble(*a) for a in self.assignment_details)
|
||||
code += assemble(self.get_commands())
|
||||
code += assemble(self.expression_list())
|
||||
if self.docstr:
|
||||
code += '\n"""%s"""' % self.docstr.as_string()
|
||||
|
||||
@@ -846,12 +844,12 @@ class Statement(Simple):
def get_set_vars(self):
""" Get the names for the statement. """
if self._set_vars is None:
self._set_vars = []

def search_calls(calls):
for call in calls:
if isinstance(call, Array):
for stmt in call:
search_calls(stmt.get_commands())
search_calls(stmt.expression_list())
elif isinstance(call, Call):
c = call
# Check if there's an execution in it, if so this is
@@ -865,12 +863,13 @@ class Statement(Simple):
continue
self._set_vars.append(call.name)

self._set_vars = []
for calls, operation in self.assignment_details:
search_calls(calls)

if not self.assignment_details and self._names_are_set_vars:
# In the case of Param, it's also a defining name without ``=``
search_calls(self.get_commands())
search_calls(self.expression_list())
return self._set_vars + self.as_names

def is_global(self):
@@ -889,17 +888,14 @@ class Statement(Simple):
would result in ``[(Name(x), '='), (Array([Name(y), Name(z)]), '=')]``.
"""
# parse statement which creates the assignment details.
self.get_commands()
self.expression_list()
return self._assignment_details

def get_commands(self):
if self._commands is None:
self._commands = ['time neeeeed'] # avoid recursions
self._commands = self._parse_statement()
return self._commands

def _parse_statement(self):
@cache.underscore_memoization
def expression_list(self):
"""
Parse a statement.

This is not done in the main parser, because it might be slow and
most of the statements won't need this data anyway. This is something
'like' a lazy execution.
@@ -966,10 +962,10 @@ class Statement(Simple):
# it's not possible to set it earlier
tok.parent = self
else:
tok = tok_temp.token
tok = tok_temp.token
start_tok_pos = tok_temp.start_pos
last_end_pos = end_pos
end_pos = tok_temp.end_pos
last_end_pos = end_pos
end_pos = tok_temp.end_pos
if first:
first = False
start_pos = start_tok_pos
@@ -1062,7 +1058,7 @@ class Statement(Simple):
stmt = Statement(self._sub_module, token_list,
start_pos, arr.end_pos)
arr.parent = stmt
stmt.token_list = stmt._commands = [arr]
stmt.token_list = stmt._expression_list = [arr]
else:
for t in stmt.token_list:
if isinstance(t, Name):
@@ -1075,12 +1071,12 @@ class Statement(Simple):

middle, tok = parse_stmt_or_arr(token_iterator, ['in'], True)
if tok != 'in' or middle is None:
debug.warning('list comprehension middle @%s' % str(start_pos))
debug.warning('list comprehension middle @%s', start_pos)
return None, tok

in_clause, tok = parse_stmt_or_arr(token_iterator)
if in_clause is None:
debug.warning('list comprehension in @%s' % str(start_pos))
debug.warning('list comprehension in @%s', start_pos)
return None, tok

return ListComprehension(st, middle, in_clause, self), tok
@@ -1101,9 +1097,9 @@ class Statement(Simple):
end_pos = tok.end_pos
else:
token_type = tok_temp.token_type
tok = tok_temp.token
start_pos = tok_temp.start_pos
end_pos = tok_temp.end_pos
tok = tok_temp.token
start_pos = tok_temp.start_pos
end_pos = tok_temp.end_pos
if is_assignment(tok):
# This means, there is an assignment here.
# Add assignments, which can be more than one
@@ -1126,9 +1122,7 @@ class Statement(Simple):

is_literal = token_type in [tokenize.STRING, tokenize.NUMBER]
if isinstance(tok, Name) or is_literal:
cls = Call
if is_literal:
cls = String if token_type == tokenize.STRING else Number
cls = Literal if is_literal else Call

call = cls(self._sub_module, tok, start_pos, end_pos, self)
if is_chain:
@@ -1149,7 +1143,7 @@ class Statement(Simple):
if result and isinstance(result[-1], StatementElement):
is_chain = True
elif tok == ',': # implies a tuple
# commands is now an array not a statement anymore
# expression is now an array not a statement anymore
t = result[0]
start_pos = t[2] if isinstance(t, tuple) else t.start_pos

@@ -1172,7 +1166,7 @@ class Statement(Simple):
self.parent,
set_name_parents=False
)
stmt._commands = result
stmt._expression_list = result
arr, break_tok = parse_array(token_iterator, Array.TUPLE,
stmt.start_pos, stmt)
result = [arr]
@@ -1213,7 +1207,7 @@ class Param(Statement):
""" get the name of the param """
n = self.get_set_vars()
if len(n) > 1:
debug.warning("Multiple param names (%s)." % n)
debug.warning("Multiple param names (%s).", n)
return n[0]


@@ -1292,25 +1286,14 @@ class Literal(StatementElement):
def get_code(self):
return self.literal + super(Literal, self).get_code()

def type_as_string(self):
return type(self.value).__name__

def __repr__(self):
if is_py3k:
if is_py3:
s = self.literal
else:
s = self.literal.encode('ascii', 'replace')
return "<%s: %s>" % (type(self).__name__, s)


class String(Literal):
pass


class Number(Literal):
pass


class Array(StatementElement):
"""
Describes the different python types for an array, but also empty
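
The hunks above rename the lazily computed ``get_commands()`` to ``expression_list()`` and have the parser build a plain ``Literal`` for strings and numbers. A minimal sketch of how the renamed API is exercised, modelled on the parser tests further down in this commit (the exact ``Parser`` call is an assumption about this revision, not part of the diff)::

    from jedi.parser import Parser

    # parse one statement and inspect it lazily (illustrative sketch only)
    stmt = Parser("x = (y, z) = 2, ''\n", no_docstr=True).module.statements[0]
    exprs = stmt.expression_list()      # replaces the old stmt.get_commands()
    details = stmt.assignment_details   # filled as a side effect, see the docstring above
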
@@ -151,8 +151,16 @@ class TokenDocstring(TokenNoCompat):

as_string() will clean the token representing the docstring.
"""
def __init__(self, token):
self.__setstate__(token.__getstate__())
def __init__(self, token=None, state=None):
if token:
self.__setstate__(token.__getstate__())
else:
self.__setstate__(state)

@classmethod
def fake_docstring(cls, docstr):
# TODO: fixme when tests are up again
return TokenDocstring(state=(0, '"""\n%s\n"""' % docstr, 0, 0))

def as_string(self):
"""Returns a literal cleaned version of the token"""
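
With the widened ``__init__``, a ``TokenDocstring`` can now be built either from a real token or from a pickled ``state`` tuple, and ``fake_docstring()`` wraps a plain string in triple quotes for the compiled (fake) objects mentioned in the commit message. A rough usage sketch, with names taken from the hunk; the exact cleaned output is not asserted here::

    # build a docstring token without running the tokenizer (sketch)
    doc = TokenDocstring.fake_docstring('Return the next item from the iterator.')
    text = doc.as_string()   # the literal-cleaned docstring text
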
@@ -7,6 +7,7 @@ if the indentation is not right. The fast parser of jedi however requires
Basically this is a stripped down version of the standard library module, so
you can read the documentation there.
"""
from __future__ import absolute_import

import string
import re
@@ -14,6 +15,8 @@ from token import *
import collections
cookie_re = re.compile("coding[:=]\s*([-\w.]+)")

from jedi import common

namechars = string.ascii_letters + '_'


@@ -283,3 +286,102 @@ def generate_tokens(readline):
for indent in indents[1:]: # pop remaining indent levels
yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')


# From here on we have custom stuff (everything before was originally Python
# internal code).
FLOWS = ['if', 'else', 'elif', 'while', 'with', 'try', 'except', 'finally']


class NoErrorTokenizer(object):
def __init__(self, readline, offset=(0, 0), is_fast_parser=False):
self.readline = readline
self.gen = generate_tokens(readline)
self.offset = offset
self.closed = False
self.is_first = True
self.push_backs = []

# fast parser options
self.is_fast_parser = is_fast_parser
self.current = self.previous = [None, None, (0, 0), (0, 0), '']
self.in_flow = False
self.new_indent = False
self.parser_indent = self.old_parser_indent = 0
self.is_decorator = False
self.first_stmt = True

def push_last_back(self):
self.push_backs.append(self.current)

def next(self):
""" Python 2 Compatibility """
return self.__next__()

def __next__(self):
if self.closed:
raise common.MultiLevelStopIteration()
if self.push_backs:
return self.push_backs.pop(0)

self.last_previous = self.previous
self.previous = self.current
self.current = next(self.gen)
c = list(self.current)

if c[0] == ENDMARKER:
self.current = self.previous
self.previous = self.last_previous
raise common.MultiLevelStopIteration()

# this is exactly the same check as in fast_parser, but this time with
# tokenize and therefore precise.
breaks = ['def', 'class', '@']

if self.is_first:
c[2] = self.offset[0] + c[2][0], self.offset[1] + c[2][1]
c[3] = self.offset[0] + c[3][0], self.offset[1] + c[3][1]
self.is_first = False
else:
c[2] = self.offset[0] + c[2][0], c[2][1]
c[3] = self.offset[0] + c[3][0], c[3][1]
self.current = c

def close():
if not self.first_stmt:
self.closed = True
raise common.MultiLevelStopIteration()
# ignore indents/comments
if self.is_fast_parser \
and self.previous[0] in (INDENT, NL, None, NEWLINE, DEDENT) \
and c[0] not in (COMMENT, INDENT, NL, NEWLINE, DEDENT):
# print c, tok_name[c[0]]

tok = c[1]
indent = c[2][1]
if indent < self.parser_indent: # -> dedent
self.parser_indent = indent
self.new_indent = False
if not self.in_flow or indent < self.old_parser_indent:
close()
self.in_flow = False
elif self.new_indent:
self.parser_indent = indent
self.new_indent = False

if not self.in_flow:
if tok in FLOWS or tok in breaks:
self.in_flow = tok in FLOWS
if not self.is_decorator and not self.in_flow:
close()
self.is_decorator = '@' == tok
if not self.is_decorator:
self.old_parser_indent = self.parser_indent
self.parser_indent += 1 # new scope: must be higher
self.new_indent = True

if tok != '@':
if self.first_stmt and not self.new_indent:
self.parser_indent = indent
self.first_stmt = False
return c
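
``NoErrorTokenizer`` only implements ``next()``/``__next__()`` and signals the end of input by raising ``common.MultiLevelStopIteration``, so callers drive it manually rather than with a ``for`` loop. A small sketch of that loop, assuming the copied ``generate_tokens()`` accepts a ``StringIO.readline`` just like the stdlib version::

    from io import StringIO
    from jedi import common
    from jedi.parser import tokenize

    source = u"def f():\n    return 1\n"
    tok = tokenize.NoErrorTokenizer(StringIO(source).readline)
    tokens = []
    while True:
        try:
            tokens.append(tok.next())   # 5-tuple: type, string, start_pos, end_pos, line
        except common.MultiLevelStopIteration:
            break
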
jedi/parser/user_context.py (new file, 232 lines)
@@ -0,0 +1,232 @@
import re
import os
import sys

from jedi import cache
from jedi.parser import tokenize
from jedi.parser.fast import FastParser
from jedi.parser import representation
from jedi import debug


class UserContext(object):
"""
:param source: The source code of the file.
:param position: The position, the user is currently in. Only important \
for the main file.
"""
def __init__(self, source, position):
self.source = source
self.position = position
self._line_cache = None

# this two are only used, because there is no nonlocal in Python 2
self._line_temp = None
self._relevant_temp = None

@cache.underscore_memoization
def get_path_until_cursor(self):
""" Get the path under the cursor. """
path, self._start_cursor_pos = self._calc_path_until_cursor(self.position)
return path

def _calc_path_until_cursor(self, start_pos=None):
def fetch_line():
if self._is_first:
self._is_first = False
self._line_length = self._column_temp
line = self._first_line
else:
line = self.get_line(self._line_temp)
self._line_length = len(line)
line = line + '\n'
# add lines with a backslash at the end
while True:
self._line_temp -= 1
last_line = self.get_line(self._line_temp)
#print self._line_temp, repr(last_line)
if last_line and last_line[-1] == '\\':
line = last_line[:-1] + ' ' + line
self._line_length = len(last_line)
else:
break
return line[::-1]

self._is_first = True
self._line_temp, self._column_temp = start_cursor = start_pos
self._first_line = self.get_line(self._line_temp)[:self._column_temp]

open_brackets = ['(', '[', '{']
close_brackets = [')', ']', '}']

gen = tokenize.generate_tokens(fetch_line)
string = ''
level = 0
force_point = False
last_type = None
try:
for token_type, tok, start, end, line in gen:
# print 'tok', token_type, tok, force_point
if last_type == token_type == tokenize.NAME:
string += ' '

if level > 0:
if tok in close_brackets:
level += 1
if tok in open_brackets:
level -= 1
elif tok == '.':
force_point = False
elif force_point:
# it is reversed, therefore a number is getting recognized
# as a floating point number
if token_type == tokenize.NUMBER and tok[0] == '.':
force_point = False
else:
break
elif tok in close_brackets:
level += 1
elif token_type in [tokenize.NAME, tokenize.STRING]:
force_point = True
elif token_type == tokenize.NUMBER:
pass
else:
self._column_temp = self._line_length - end[1]
break

x = start_pos[0] - end[0] + 1
l = self.get_line(x)
l = self._first_line if x == start_pos[0] else l
start_cursor = x, len(l) - end[1]
self._column_temp = self._line_length - end[1]
string += tok
last_type = token_type
except tokenize.TokenError:
debug.warning("Tokenize couldn't finish: %s", sys.exc_info)

# string can still contain spaces at the end
return string[::-1].strip(), start_cursor

def get_path_under_cursor(self):
"""
Return the path under the cursor. If there is a rest of the path left,
it will be added to the stuff before it.
"""
return self.get_path_until_cursor() + self.get_path_after_cursor()

def get_path_after_cursor(self):
line = self.get_line(self.position[0])
return re.search("[\w\d]*", line[self.position[1]:]).group(0)

def get_operator_under_cursor(self):
line = self.get_line(self.position[0])
after = re.match("[^\w\s]+", line[self.position[1]:])
before = re.match("[^\w\s]+", line[:self.position[1]][::-1])
return (before.group(0) if before is not None else '') \
+ (after.group(0) if after is not None else '')

def get_context(self, yield_positions=False):
pos = self._start_cursor_pos
while True:
# remove non important white space
line = self.get_line(pos[0])
while True:
if pos[1] == 0:
line = self.get_line(pos[0] - 1)
if line and line[-1] == '\\':
pos = pos[0] - 1, len(line) - 1
continue
else:
break

if line[pos[1] - 1].isspace():
pos = pos[0], pos[1] - 1
else:
break

try:
result, pos = self._calc_path_until_cursor(start_pos=pos)
if yield_positions:
yield pos
else:
yield result
except StopIteration:
if yield_positions:
yield None
else:
yield ''

def get_line(self, line_nr):
if not self._line_cache:
self._line_cache = self.source.splitlines()
if self.source:
if self.source[-1] == '\n':
self._line_cache.append('')
else: # ''.splitlines() == []
self._line_cache = ['']

if line_nr == 0:
# This is a fix for the zeroth line. We need a newline there, for
# the backwards parser.
return ''
if line_nr < 0:
raise StopIteration()
try:
return self._line_cache[line_nr - 1]
except IndexError:
raise StopIteration()

def get_position_line(self):
return self.get_line(self.position[0])[:self.position[1]]


class UserContextParser(object):
def __init__(self, source, path, position, user_context):
self._source = source
self._path = path and os.path.abspath(path)
self._position = position
self._user_context = user_context

@cache.underscore_memoization
def _parser(self):
cache.invalidate_star_import_cache(self._path)
parser = FastParser(self._source, self._path)
# Don't pickle that module, because the main module is changing quickly
cache.save_parser(self._path, None, parser, pickling=False)
return parser

@cache.underscore_memoization
def _get_user_stmt(self):
return self.module().get_statement_for_position(self._position,
include_imports=True)

def user_stmt(self, is_completion=False):
user_stmt = self._get_user_stmt()

debug.speed('parsed')

if is_completion and not user_stmt:
# for statements like `from x import ` (cursor not in statement)
pos = next(self._user_context.get_context(yield_positions=True))
last_stmt = pos and \
self.module().get_statement_for_position(pos, include_imports=True)
if isinstance(last_stmt, representation.Import):
user_stmt = last_stmt
return user_stmt

@cache.underscore_memoization
def user_scope(self):
user_stmt = self._get_user_stmt()
if user_stmt is None:
def scan(scope):
for s in scope.statements + scope.subscopes:
if isinstance(s, representation.Scope):
if s.start_pos <= self._position <= s.end_pos:
return scan(s) or s

return scan(self.module()) or self.module()
else:
return user_stmt.parent

def module(self):
return self._parser().module
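
``UserContext`` works backwards from a cursor position over the raw source: ``fetch_line()`` feeds reversed lines into the tokenizer and ``get_path_until_cursor()`` reassembles the dotted path in front of the cursor. Roughly, as a sketch (the returned strings are expectations, not taken from the diff)::

    from jedi.parser.user_context import UserContext

    source = u"import os\nos.path.joi"
    ctx = UserContext(source, (2, 11))      # cursor at line 2, column 11
    ctx.get_line(2)                         # 'os.path.joi'
    ctx.get_path_until_cursor()             # expected: 'os.path.joi'
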
@@ -12,13 +12,10 @@ following functions (sometimes bug-prone):
- extract variable
- inline variable
"""
from __future__ import with_statement

import difflib

from jedi import common
from jedi import modules
from jedi import helpers
from jedi.evaluate import helpers
from jedi.parser import representation as pr


@@ -86,7 +83,7 @@ def _rename(names, replace_str):
with open(current_path) as f:
source = f.read()

new_lines = modules.source_to_unicode(source).splitlines()
new_lines = common.source_to_unicode(source).splitlines()
old_lines = new_lines[:]

nr, indent = name.line, name.column
@@ -104,10 +101,10 @@ def extract(script, new_name):
:type source: str
:return: list of changed lines/changed files
"""
new_lines = modules.source_to_unicode(script.source).splitlines()
new_lines = common.source_to_unicode(script.source).splitlines()
old_lines = new_lines[:]

user_stmt = script._parser.user_stmt
user_stmt = script._parser.user_stmt()

# TODO care for multiline extracts
dct = {}
@@ -163,7 +160,7 @@ def inline(script):
"""
:type script: api.Script
"""
new_lines = modules.source_to_unicode(script.source).splitlines()
new_lines = common.source_to_unicode(script.source).splitlines()

dct = {}

@@ -176,17 +173,17 @@ def inline(script):
if not stmt.start_pos <= (r.line, r.column) <= stmt.end_pos]
inlines = sorted(inlines, key=lambda x: (x.module_path, x.line, x.column),
reverse=True)
commands = stmt.get_commands()
expression_list = stmt.expression_list()
# don't allow multiline refactorings for now.
assert stmt.start_pos[0] == stmt.end_pos[0]
index = stmt.start_pos[0] - 1

line = new_lines[index]
replace_str = line[commands[0].start_pos[1]:stmt.end_pos[1] + 1]
replace_str = line[expression_list[0].start_pos[1]:stmt.end_pos[1] + 1]
replace_str = replace_str.strip()
# tuples need parentheses
if commands and isinstance(commands[0], pr.Array):
arr = commands[0]
if expression_list and isinstance(expression_list[0], pr.Array):
arr = expression_list[0]
if replace_str[0] not in ['(', '[', '{'] and len(arr) > 1:
replace_str = '(%s)' % replace_str
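
Only the internals change here; the public entry points keep the signatures visible in the hunk headers, ``extract(script, new_name)`` and ``inline(script)``. For orientation, a hedged sketch of how they are driven from the API (the shape of the returned refactoring object is an assumption and is not shown in this diff)::

    import jedi
    from jedi import refactoring

    script = jedi.Script("a = (1, 2)\nb = a\n", 2, 5, 'example.py')
    # ref = refactoring.inline(script)   # assumed to return a refactoring result object
    # ref.new_files()                    # assumed: mapping of path -> new source
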
@@ -4,6 +4,10 @@ Utilities for end-users.

from __future__ import absolute_import
import __main__
from collections import namedtuple
import re
import os
import sys

from jedi import Interpreter

@@ -32,15 +36,15 @@ def setup_readline(namespace_module=__main__):

This will fallback to the readline completer if Jedi is not installed.
The readline completer will only complete names in the global namespace,
so for example,
so for example::

>>> ran<TAB> # doctest: +SKIP
ran<TAB>

will complete to ``range``

with both Jedi and readline, but
with both Jedi and readline, but::

>>> range(10).cou<TAB> # doctest: +SKIP
range(10).cou<TAB>

will show complete to ``range(10).count`` only with Jedi.

@@ -61,7 +65,6 @@ def setup_readline(namespace_module=__main__):
library module.
"""
if state == 0:
import os, sys
sys.path.insert(0, os.getcwd())
# Calling python doesn't have a path, so add to sys.path.
try:
@@ -95,3 +98,14 @@ def setup_readline(namespace_module=__main__):
readline.parse_and_bind("set completion-prefix-display-length 2")
# No delimiters, Jedi handles that.
readline.set_completer_delims('')


def version_info():
"""
Returns a namedtuple of Jedi's version, similar to Python's
``sys.version_info``.
"""
Version = namedtuple('Version', 'major, minor, micro, releaselevel, serial')
from jedi import __version__
tupl = re.findall('[a-z]+|\d+', __version__)
return Version(*[x if i == 3 else int(x) for i, x in enumerate(tupl)])
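
``version_info()`` splits ``__version__`` with ``re.findall('[a-z]+|\d+', ...)``, so runs of digits and runs of letters become separate fields before everything except the release level is converted to ``int``. Plain ``re`` behaviour, shown here only as illustration::

    >>> import re
    >>> re.findall('[a-z]+|\d+', '0.8.0a1')
    ['0', '8', '0', 'a', '1']
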
@@ -2,7 +2,7 @@
addopts = --doctest-modules

# Ignore broken files in blackbox test directories
norecursedirs = .* docs completion refactor absolute_import namespace_package
norecursedirs = .* docs completion refactor absolute_import namespace_package scripts

# Activate `clean_jedi_cache` fixture for all tests. This should be
# fine as long as we are using `clean_jedi_cache` as a session scoped
scripts/memory_check.py (new executable file, 54 lines)
@@ -0,0 +1,54 @@
#! /usr/bin/env python
"""
This is a convenience script to test the speed and memory usage of Jedi with
large libraries.

Each library is preloaded by jedi, recording the time and memory consumed by
each operation.

You can provide additional libraries via command line arguments.

Note: This requires the psutil library, available on PyPI.
"""
import time
import sys
import psutil
import jedi


def used_memory():
"""Return the total MB of System Memory in use."""
return psutil.virtual_memory().used / 2**20


def profile_preload(mod):
"""Preload a module into Jedi, recording time and memory used."""
base = used_memory()
t0 = time.time()
jedi.preload_module(mod)
elapsed = time.time() - t0
used = used_memory() - base
return elapsed, used


def main(mods):
"""Preload the modules, and print the time and memory used."""
t0 = time.time()
baseline = used_memory()
print('Time (s) | Mem (MB) | Package')
print('------------------------------')
for mod in mods:
elapsed, used = profile_preload(mod)
if used > 0:
print('%8.1f | %8d | %s' % (elapsed, used, mod))
print('------------------------------')
elapsed = time.time() - t0
used = used_memory() - baseline
print('%8.1f | %8d | %s' % (elapsed, used, 'Total'))


if __name__ == '__main__':
mods = ['re', 'numpy', 'scipy', 'scipy.sparse', 'scipy.stats',
'wx', 'decimal', 'PyQt4.QtGui', 'PySide.QtGui', 'Tkinter']
mods += sys.argv[1:]
main(mods)
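
Because ``main()`` appends ``sys.argv[1:]`` to the default list, additional packages can simply be passed on the command line (for example ``python scripts/memory_check.py lxml django``). The helpers can also be reused on their own::

    # using the functions defined above; the numbers will vary per machine
    elapsed, used = profile_preload('json')
    print('json: %.2f s, %d MB' % (elapsed, used))
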
setup.py (9 lines changed)
@@ -13,10 +13,9 @@ __AUTHOR_EMAIL__ = 'davidhalter88@gmail.com'
readme = open('README.rst').read() + '\n\n' + open('CHANGELOG.rst').read()

import jedi
VERSION = '.'.join(str(x) for x in jedi.__version__)

setup(name='jedi',
version=VERSION,
version=jedi.__version__,
description='An autocompletion tool for Python that can be used for text editors.',
author=__AUTHOR__,
author_email=__AUTHOR_EMAIL__,
@@ -26,8 +25,8 @@ setup(name='jedi',
license='MIT',
keywords='python completion refactoring vim',
long_description=readme,
packages=['jedi', 'jedi.parser'],
package_data={'jedi': ['mixin/*.pym']},
packages=['jedi', 'jedi.parser', 'jedi.evaluate', 'jedi.evaluate.compiled', 'jedi.api'],
package_data={'jedi': ['evlaluate/evaluate/compiled/fake/*.pym']},
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
@@ -45,4 +44,4 @@ setup(name='jedi',
'Topic :: Text Editors :: Integrated Development Environments (IDE)',
'Topic :: Utilities',
],
)
)
@@ -1,7 +1,6 @@
# -----------------
# yield statement
# -----------------

def gen():
yield 1
yield ""
@@ -13,6 +12,7 @@ next(gen_exe)
#? int() str() list
next(gen_exe, list)


def gen_ret(value):
yield value

@@ -22,6 +22,7 @@ next(gen_ret(1))
#? []
next(gen_ret())


# -----------------
# generators should not be indexable
# -----------------
@@ -39,6 +40,7 @@ for a in get():
#? int() str()
a


class Get():
def __iter__(self):
yield 1
@@ -63,6 +65,7 @@ g = iter([1.0])
#? float()
next(g)


# -----------------
# __next__
# -----------------
@@ -90,6 +93,7 @@ for c in Counter(3, 8):
#? int()
print c


# -----------------
# tuples
# -----------------
@@ -105,3 +109,30 @@ a, b = next(gen())
a
#? str() float()
b

# -----------------
# More complicated access
# -----------------

# `close` is a method wrapper.
#? ['__call__']
gen().close.__call__

#?
gen().throw()

#? ['co_consts']
gen().gi_code.co_consts

#? []
gen.gi_code.co_consts

# `send` is also a method wrapper.
#? ['__call__']
gen().send.__call__

#? tuple()
gen().send()

#?
gen()()
@@ -12,11 +12,11 @@ sys.path.append(sys.path[1] + '/thirdparty')
# syntax err
sys.path.append('a' +* '/thirdparty')

#? ['evaluate', 'evaluate_representation']
#? ['evaluate']
import evaluate

#? ['goto']
evaluate.goto
#? ['Evaluator']
evaluate.Evaluator

#? ['jedi_']
import jedi_

@@ -136,9 +136,9 @@ class NestedClass():
def __getattr__(self, name):
return self

# Shouldn't find a definition, because there's no name defined (used ``getattr``).

#< (0, 14),
# Shouldn't find a definition, because there's other `instance`.
# TODO reenable that test
##< (0, 14),
NestedClass().instance


@@ -7,7 +7,7 @@ from __future__ import with_statement
import os
import re

from jedi._compatibility import reduce
from functools import reduce
import jedi
from jedi import refactoring
@@ -101,7 +101,8 @@ import re
from ast import literal_eval

import jedi
from jedi._compatibility import unicode, reduce, StringIO, is_py3k
from functools import reduce
from jedi._compatibility import unicode, StringIO, is_py3


TEST_COMPLETIONS = 0
@@ -216,7 +217,7 @@ def collect_file_tests(lines, lines_to_execute):
test_type = None
for line_nr, line in enumerate(lines):
line_nr += 1 # py2.5 doesn't know about the additional enumerate param
if not is_py3k:
if not is_py3:
line = unicode(line, 'UTF-8')
if correct:
r = re.match('^(\d+)\s*(.*)$', correct)
@@ -276,7 +277,6 @@ def collect_dir_tests(base_dir, test_files, check_thirdparty=False):
yield case


docoptstr = """
Using run.py to make debugging easier with integration tests.

@@ -339,7 +339,7 @@ if __name__ == '__main__':
return 0
else:
print("\ttest fail @%d, actual = %s, desired = %s"
% (case.line_nr - 1, actual, desired))
% (case.line_nr - 1, actual, desired))
return 1

import traceback
@@ -2,27 +2,25 @@
Test all things related to the ``jedi.api`` module.
"""

from jedi import common, api
from jedi import api
from pytest import raises


def test_preload_modules():
def check_loaded(*modules):
# + 1 for builtin, +1 for None module (currently used)
assert len(new) == len(modules) + 2
for i in modules + ('__builtin__',):
assert [i in k for k in new.keys() if k is not None]
# +1 for None module (currently used)
assert len(parser_cache) == len(modules) + 1
for i in modules:
assert [i in k for k in parser_cache.keys() if k is not None]

from jedi import cache
temp_cache, cache.parser_cache = cache.parser_cache, {}
new = cache.parser_cache
with common.ignored(KeyError): # performance of tests -> no reload
new['__builtin__'] = temp_cache['__builtin__']
parser_cache = cache.parser_cache

api.preload_module('datetime')
check_loaded('datetime')
api.preload_module('sys')
check_loaded() # compiled (c_builtin) modules shouldn't be in the cache.
api.preload_module('json', 'token')
check_loaded('datetime', 'json', 'token')
check_loaded('json', 'token')

cache.parser_cache = temp_cache

@@ -30,6 +28,7 @@ def test_preload_modules():
def test_empty_script():
assert api.Script('')


def test_line_number_errors():
"""
Script should raise a ValueError if line/column numbers are not in a
@@ -11,9 +11,10 @@ import jedi

def test_is_keyword():
results = Script('import ', 1, 1, None).goto_definitions()
assert len(results) == 1 and results[0].is_keyword == True
assert len(results) == 1 and results[0].is_keyword is True
results = Script('str', 1, 1, None).goto_definitions()
assert len(results) == 1 and results[0].is_keyword == False
assert len(results) == 1 and results[0].is_keyword is False


def make_definitions():
"""
@@ -71,6 +72,7 @@ def test_function_call_signature_in_doc():
doc = defs[0].doc
assert "f(x, y = 1, z = 'a')" in doc


def test_class_call_signature():
defs = Script("""
class Foo:

@@ -1,13 +0,0 @@
from jedi import builtin


def test_parse_function_doc_illegal_docstr():

def test_func(a):
"""
test_func(o

doesn't have a closing bracket.
"""

assert ('', '') == builtin._parse_function_doc(test_func)
@@ -8,15 +8,16 @@ import pytest

import jedi
from jedi import settings, cache
from jedi.cache import ParserCacheItem, _ModulePickling
from jedi.cache import ParserCacheItem, ParserPickling


ModulePickling = _ModulePickling()
ParserPicklingCls = type(ParserPickling)
ParserPickling = ParserPicklingCls()


def test_modulepickling_change_cache_dir(monkeypatch, tmpdir):
"""
ModulePickling should not save old cache when cache_directory is changed.
ParserPickling should not save old cache when cache_directory is changed.

See: `#168 <https://github.com/davidhalter/jedi/pull/168>`_
"""
@@ -29,19 +30,19 @@ def test_modulepickling_change_cache_dir(monkeypatch, tmpdir):
path_2 = 'fake path 2'

monkeypatch.setattr(settings, 'cache_directory', dir_1)
ModulePickling.save_module(path_1, item_1)
cached = load_stored_item(ModulePickling, path_1, item_1)
ParserPickling.save_parser(path_1, item_1)
cached = load_stored_item(ParserPickling, path_1, item_1)
assert cached == item_1.parser

monkeypatch.setattr(settings, 'cache_directory', dir_2)
ModulePickling.save_module(path_2, item_2)
cached = load_stored_item(ModulePickling, path_1, item_1)
ParserPickling.save_parser(path_2, item_2)
cached = load_stored_item(ParserPickling, path_1, item_1)
assert cached is None


def load_stored_item(cache, path, item):
"""Load `item` stored at `path` in `cache`."""
return cache.load_module(path, item.change_time - 1)
return cache.load_parser(path, item.change_time - 1)


@pytest.mark.usefixtures("isolated_jedi_cache")
@@ -49,13 +50,13 @@ def test_modulepickling_delete_incompatible_cache():
item = ParserCacheItem('fake parser')
path = 'fake path'

cache1 = _ModulePickling()
cache1 = ParserPicklingCls()
cache1.version = 1
cache1.save_module(path, item)
cache1.save_parser(path, item)
cached1 = load_stored_item(cache1, path, item)
assert cached1 == item.parser

cache2 = _ModulePickling()
cache2 = ParserPicklingCls()
cache2.version = 2
cached2 = load_stored_item(cache2, path, item)
assert cached2 is None
@@ -66,7 +67,7 @@ def test_star_import_cache_duration():
old, jedi.settings.star_import_cache_validity = \
jedi.settings.star_import_cache_validity, new

cache.star_import_cache = {} # first empty...
cache._star_import_cache = {} # first empty...
# path needs to be not-None (otherwise caching effects are not visible)
jedi.Script('', 1, 0, '').completions()
time.sleep(2 * new)
@@ -74,6 +75,6 @@ def test_star_import_cache_duration():

# reset values
jedi.settings.star_import_cache_validity = old
length = len(cache.star_import_cache)
cache.star_import_cache = {}
length = len(cache._star_import_cache)
cache._star_import_cache = {}
assert length == 1
@@ -15,9 +15,11 @@ class TestCallSignatures(TestCase):
assert signatures[0].call_name == expected_name
assert signatures[0].index == expected_index

def test_call_signatures(self):
def run(source, name, index=0, column=None, line=1):
self._run(source, name, index, line, column)
def _run_simple(self, source, name, index=0, column=None, line=1):
self._run(source, name, index, line, column)

def test_simple(self):
run = self._run_simple

# simple
s1 = "abs(a, str("
@@ -60,19 +62,20 @@ class TestCallSignatures(TestCase):

run("import time; abc = time; abc.sleep(", 'sleep', 0)

# jedi-vim #9
run("with open(", 'open', 0)

# jedi-vim #11
run("for sorted(", 'sorted', 0)
run("for s in sorted(", 'sorted', 0)

# jedi #57
s = "def func(alpha, beta): pass\n" \
"func(alpha='101',"
run(s, 'func', 0, column=13, line=2)

def test_function_definition_complex(self):
def test_flows(self):
# jedi-vim #9
self._run_simple("with open(", 'open', 0)

# jedi-vim #11
self._run_simple("for sorted(", 'sorted', 0)
self._run_simple("for s in sorted(", 'sorted', 0)

def test_complex(self):
s = """
def abc(a,b):
pass
@@ -106,7 +109,7 @@ class TestCallSignatures(TestCase):
# just don't throw an exception (if numpy doesn't exist, just ignore it)
assert Script(s).call_signatures() == []

def test_function_definition_empty_paren_pre_space(self):
def test_call_signatures_empty_parentheses_pre_space(self):
s = textwrap.dedent("""\
def f(a, b):
pass
test/test_compiled.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from jedi._compatibility import builtins
from jedi.parser.representation import Function
from jedi.evaluate import compiled
from jedi.evaluate import Evaluator


def test_simple():
e = Evaluator()
bltn = compiled.CompiledObject(builtins)
obj = compiled.CompiledObject('_str_', bltn)
upper = e.find_types(obj, 'upper')
assert len(upper) == 1
objs = list(e.execute(upper[0]))
assert len(objs) == 1
assert objs[0].obj is str


def test_fake_loading():
assert isinstance(compiled.create(next), Function)

string = compiled.builtin.get_subscope_by_name('str')
from_name = compiled._create_from_name(
compiled.builtin,
string,
'__init__'
)
assert isinstance(from_name, Function)


def test_fake_docstr():
assert compiled.create(next).docstr.as_string() == next.__doc__


def test_parse_function_doc_illegal_docstr():
docstr = """
test_func(o

doesn't have a closing bracket.
"""
assert ('', '') == compiled._parse_function_doc(docstr)
@@ -16,7 +16,8 @@ There are three kinds of test:
import textwrap

import jedi
from jedi import api_classes
from jedi.api import classes
from jedi.evaluate import Evaluator
from .helpers import TestCase


@@ -81,6 +82,6 @@ def test_keyword_full_name_should_be_none():
"""issue #94"""
# Using `from jedi.keywords import Keyword` here does NOT work
# in Python 3. This is due to the import hack jedi using.
Keyword = api_classes.keywords.Keyword
d = api_classes.Definition(Keyword('(', (0, 0)))
Keyword = classes.keywords.Keyword
d = classes.Definition(Evaluator(), Keyword('(', (0, 0)))
assert d.full_name is None
@@ -7,6 +7,7 @@ import itertools

from jedi import Script
from .helpers import cwd_at
from jedi._compatibility import is_py26


def test_goto_definition_on_import():
@@ -21,7 +22,8 @@ def test_complete_on_empty_import():
assert 10 < len(Script("from . import", 1, 5, '').completions()) < 30
assert 10 < len(Script("from . import classes", 1, 5, '').completions()) < 30
assert len(Script("import").completions()) == 0
assert len(Script("import import", path='').completions()) > 0
if not is_py26: # python 2.6 doesn't always come with a library `import*`.
assert len(Script("import import", path='').completions()) > 0

# 111
assert Script("from datetime import").completions()[0].name == 'import'
@@ -1,4 +1,5 @@
from jedi.parser import Parser
from jedi.parser.user_context import UserContextParser
from jedi.parser import representation as pr


@@ -8,7 +9,7 @@ def test_user_statement_on_import():
" time)"

for pos in [(2, 1), (2, 4)]:
u = Parser(s, user_position=pos).user_stmt
u = UserContextParser(s, None, pos, None).user_stmt()
assert isinstance(u, pr.Import)
assert u.defunct is False
assert [str(n) for n in u.get_defined_names()] == ['time']
@@ -17,7 +18,7 @@ def test_user_statement_on_import():
class TestCallAndName():
def get_call(self, source):
stmt = Parser(source, no_docstr=True).module.statements[0]
return stmt.get_commands()[0]
return stmt.expression_list()[0]

def test_name_and_call_positions(self):
call = self.get_call('name\nsomething_else')
@@ -37,15 +38,15 @@ class TestCallAndName():

def test_literal_type(self):
literal = self.get_call('1.0')
assert isinstance(literal, pr.Number)
assert isinstance(literal, pr.Literal)
assert type(literal.value) == float

literal = self.get_call('1')
assert isinstance(literal, pr.Number)
assert isinstance(literal, pr.Literal)
assert type(literal.value) == int

literal = self.get_call('"hello"')
assert isinstance(literal, pr.String)
assert isinstance(literal, pr.Literal)
assert literal.value == 'hello'
@@ -8,9 +8,11 @@ import textwrap

from .helpers import TestCase, cwd_at

import pytest
import jedi
from jedi import Script
from jedi import api
from jedi.evaluate import imports
from jedi.parser import Parser

#jedi.set_debug_function()
@@ -73,15 +75,16 @@ class TestRegression(TestCase):
s = Script("", 1, 0).completions()
assert len(s) > 0

@pytest.mark.skip('Skip for now, test case is not really supported.')
@cwd_at('jedi')
def test_add_dynamic_mods(self):
api.settings.additional_dynamic_modules = ['dynamic.py']
fname = '__main__.py'
api.settings.additional_dynamic_modules = [fname]
# Fictional module that defines a function.
src1 = "def ret(a): return a"
src1 = "def r(a): return a"
# Other fictional modules in another place in the fs.
src2 = 'from .. import setup; setup.ret(1)'
# .parser to load the module
api.modules.Module(os.path.abspath('dynamic.py'), src2).parser
src2 = 'from .. import setup; setup.r(1)'
imports.load_module(os.path.abspath(fname), src2)
result = Script(src1, path='../setup.py').goto_definitions()
assert len(result) == 1
assert result[0].description == 'class int'
@@ -118,7 +121,7 @@ class TestRegression(TestCase):
s = "x()\nx( )\nx( )\nx ( )"
parser = Parser(s)
for i, s in enumerate(parser.module.statements, 3):
for c in s.get_commands():
for c in s.expression_list():
self.assertEqual(c.execution.end_pos[1], i)

def check_definition_by_marker(self, source, after_cursor, names):
@@ -5,7 +5,7 @@ from .helpers import TestCase, cwd_at


class TestSetupReadline(TestCase):
class NameSpace():
class NameSpace(object):
pass

def __init__(self, *args, **kwargs):
@@ -98,3 +98,7 @@ class TestSetupReadline(TestCase):
assert self.completions('colorama')
assert self.completions('colorama.Fore.BLACK') == ['colorama.Fore.BLACK']
del self.namespace.colorama


def test_version_info():
assert utils.version_info()[:2] > (0, 7)