Adapt jedi to parso API changes: the grammar is now loaded with the top-level load_python_grammar() instead of parso.python.load_grammar, parsing goes through grammar.parse() instead of the free-standing parse(..., grammar=...), and the stack/completion helpers take the raw pgen grammar (grammar._pgen_grammar).
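A minimal before/after sketch of the import and parse pattern this diff switches to. The one-line source string is illustrative, and the module layout is whatever parso snapshot this fork bundles, not stock parso.

# Old layout assumed by the previous jedi code, shown only for contrast:
#     from parso.python import load_grammar, parse
#     grammar = load_grammar()
#     module = parse(code, grammar=grammar)

# New layout targeted by this commit:
from parso import load_python_grammar

grammar = load_python_grammar()
module = grammar.parse('x = 1\n')  # parsing now goes through the grammar object
print(module.get_code())           # echoes the parsed source back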
@@ -13,9 +13,8 @@ import os
 import warnings
 import sys
 
-from parso.python import load_grammar
+from parso import load_python_grammar
 from parso.python import tree
-from parso.python import parse
 from parso.utils import source_to_unicode, splitlines
 
 from jedi.parser_utils import get_executable_nodes, get_statement_of_position
@@ -125,7 +124,7 @@ class Script(object):
 
         cache.clear_time_caches()
         debug.reset_time()
-        self._grammar = load_grammar(version='%s.%s' % sys.version_info[:2])
+        self._grammar = load_python_grammar(version='%s.%s' % sys.version_info[:2])
         if sys_path is None:
             venv = os.getenv('VIRTUAL_ENV')
             if venv:
@@ -135,10 +134,9 @@ class Script(object):
 
     @cache.memoize_method
     def _get_module_node(self):
-        return parse(
+        return self._grammar.parse(
             code=self._source,
             path=self.path,
-            grammar=self._grammar,
             cache=False,  # No disk cache, because the current script often changes.
             diff_cache=True,
             cache_path=settings.cache_directory
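For reference, a hedged sketch of the pattern Script uses after these hunks: one grammar per interpreter version, loaded up front, with the current buffer parsed through it using the same cache flags as above. Here source and cache_directory are illustrative stand-ins for jedi's self._source and settings.cache_directory, and the import assumes this fork's parso snapshot.

import sys
import tempfile

from parso import load_python_grammar

source = 'def add(a, b):\n    return a + b\n'
cache_directory = tempfile.mkdtemp()  # stand-in for settings.cache_directory

# One grammar per interpreter version, loaded once in Script.__init__.
grammar = load_python_grammar(version='%s.%s' % sys.version_info[:2])

module_node = grammar.parse(
    code=source,
    cache=False,      # no disk cache, because the current script often changes
    diff_cache=True,  # but reuse the previously parsed tree incrementally
    cache_path=cache_directory,
)
print(module_node.children[0].type)  # expected: 'funcdef'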
@@ -418,7 +418,7 @@ class Completion(BaseDefinition):
             append = '('
 
         if isinstance(self._name, ParamName) and self._stack is not None:
-            node_names = list(self._stack.get_node_names(self._evaluator.grammar))
+            node_names = list(self._stack.get_node_names(self._evaluator.grammar._pgen_grammar))
             if 'trailer' in node_names and 'argument' not in node_names:
                 append += '='
 
@@ -135,14 +135,14 @@ class Completion:
             return self._global_completions()
 
         allowed_keywords, allowed_tokens = \
-            helpers.get_possible_completion_types(grammar, self.stack)
+            helpers.get_possible_completion_types(grammar._pgen_grammar, self.stack)
 
         completion_names = list(self._get_keyword_completion_names(allowed_keywords))
 
         if token.NAME in allowed_tokens or token.INDENT in allowed_tokens:
             # This means that we actually have to do type inference.
 
-            symbol_names = list(self.stack.get_node_names(grammar))
+            symbol_names = list(self.stack.get_node_names(grammar._pgen_grammar))
 
             nodes = list(self.stack.get_nodes())
 
@@ -133,7 +133,7 @@ def get_stack_at_position(grammar, code_lines, module_node, pos):
     safeword = 'ZZZ_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'
     code = code + safeword
 
-    p = Parser(grammar, error_recovery=True)
+    p = Parser(grammar._pgen_grammar, error_recovery=True)
     try:
         p.parse(tokens=tokenize_without_endmarker(code))
     except EndMarkerReached:
@@ -152,7 +152,7 @@ class Stack(list):
                 yield node
 
 
-def get_possible_completion_types(grammar, stack):
+def get_possible_completion_types(pgen_grammar, stack):
     def add_results(label_index):
         try:
             grammar_labels.append(inversed_tokens[label_index])
@@ -160,17 +160,17 @@ def get_possible_completion_types(grammar, stack):
             try:
                 keywords.append(inversed_keywords[label_index])
             except KeyError:
-                t, v = grammar.labels[label_index]
+                t, v = pgen_grammar.labels[label_index]
                 assert t >= 256
                 # See if it's a symbol and if we're in its first set
                 inversed_keywords
-                itsdfa = grammar.dfas[t]
+                itsdfa = pgen_grammar.dfas[t]
                 itsstates, itsfirst = itsdfa
                 for first_label_index in itsfirst.keys():
                     add_results(first_label_index)
 
-    inversed_keywords = dict((v, k) for k, v in grammar.keywords.items())
-    inversed_tokens = dict((v, k) for k, v in grammar.tokens.items())
+    inversed_keywords = dict((v, k) for k, v in pgen_grammar.keywords.items())
+    inversed_tokens = dict((v, k) for k, v in pgen_grammar.tokens.items())
 
     keywords = []
     grammar_labels = []
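The preceding hunks share one change: the parso grammar object now wraps the low-level pgen grammar, so the stack and completion helpers receive grammar._pgen_grammar (a private attribute in this snapshot) instead of the grammar itself. A hedged sketch of the pgen tables those helpers read, assuming this fork's parso and a lib2to3-style pgen layout (labels, dfas, keywords, tokens):

from parso import load_python_grammar

grammar = load_python_grammar()
pgen_grammar = grammar._pgen_grammar  # private attribute this commit reaches into

# Same inversions as get_possible_completion_types: label index -> keyword,
# and label index -> token id.
inversed_keywords = dict((v, k) for k, v in pgen_grammar.keywords.items())
inversed_tokens = dict((v, k) for k, v in pgen_grammar.tokens.items())

print(len(pgen_grammar.dfas), 'symbols,', len(inversed_keywords), 'keywords')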
@@ -105,8 +105,7 @@ class MixedObjectFilter(compiled.CompiledObjectFilter):
 
 @memoize_default(evaluator_is_first_arg=True)
 def _load_module(evaluator, path, python_object):
-    module = parse(
-        grammar=evaluator.grammar,
+    module = evaluator.grammar.parse(
         path=path,
         cache=True,
         diff_cache=True,
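_load_module now parses straight from a file path through the evaluator's grammar. A hedged, self-contained sketch of that call shape; the temp file is illustrative, and cache_path is left at its default rather than settings.cache_directory.

import os
import tempfile

from parso import load_python_grammar

path = os.path.join(tempfile.mkdtemp(), 'example.py')
with open(path, 'w') as f:
    f.write('VALUE = 42\n')

grammar = load_python_grammar()
# cache=True and diff_cache=True mirror the flags in the hunk above.
module = grammar.parse(path=path, cache=True, diff_cache=True)
print(module.get_code().strip())  # VALUE = 42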
@@ -18,7 +18,6 @@ from jedi.evaluate import representation as er
 from jedi.evaluate.instance import InstanceFunctionExecution, \
     AbstractInstanceContext, CompiledInstance, BoundMethod
 from jedi.evaluate import iterable
-from parso.python import parse
 from jedi import debug
 from jedi.evaluate import precedence
 from jedi.evaluate import param
@@ -270,7 +269,7 @@ def collections_namedtuple(evaluator, obj, arguments):
     )
 
     # Parse source
-    generated_class = next(parse(source, grammar=evaluator.grammar).iter_classdefs())
+    generated_class = next(evaluator.grammar.parse(source).iter_classdefs())
     return set([er.ClassContext(evaluator, generated_class, evaluator.BUILTINS)])
 
 
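collections_namedtuple generates source for a namedtuple class and now parses it through the grammar, pulling the classdef out with iter_classdefs(). A sketch with a hand-written class standing in for the generated template; the import again assumes this fork's parso.

from parso import load_python_grammar

source = 'class Point:\n    x = 0\n    y = 1\n'  # stand-in for jedi's generated source

grammar = load_python_grammar()
generated_class = next(grammar.parse(source).iter_classdefs())
print(generated_class.name.value)  # Point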
@@ -5,7 +5,6 @@ from jedi.evaluate.site import addsitedir
 
 from jedi._compatibility import exec_function, unicode
 from parso.python import tree
-from parso.python import parse
 from jedi.evaluate.cache import memoize_default
 from jedi.evaluate.compiled import CompiledObject
 from jedi.evaluate.context import ContextualizedNode
@@ -212,9 +211,8 @@ def sys_path_with_modifications(evaluator, module_context):
 
 def _get_paths_from_buildout_script(evaluator, buildout_script_path):
     try:
-        module_node = parse(
+        module_node = evaluator.grammar.parse(
             path=buildout_script_path,
-            grammar=evaluator.grammar,
             cache=True,
             cache_path=settings.cache_directory
         )
@@ -8,13 +8,14 @@ from jedi.evaluate.sys_path import (_get_parent_dir_with_file,
                                     _check_module)
 from jedi.evaluate import Evaluator
 from jedi.evaluate.representation import ModuleContext
-from parso.python import parse, load_grammar
+from parso.python import parse
+from parso import load_python_grammar
 
 from ..helpers import cwd_at
 
 
 def check_module_test(code):
-    grammar = load_grammar()
+    grammar = load_python_grammar()
     module_context = ModuleContext(Evaluator(grammar), parse(code), path=None)
     return _check_module(module_context)
 
@@ -66,7 +67,7 @@ def test_sys_path_with_modifications():
     """)
 
     path = os.path.abspath(os.path.join(os.curdir, 'module_name.py'))
-    grammar = load_grammar()
+    grammar = load_python_grammar()
     module_node = parse(code, path=path)
     module_context = ModuleContext(Evaluator(grammar), module_node, path=path)
     paths = sys_path_with_modifications(module_context.evaluator, module_context)
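Note that the test keeps the convenience parse() from parso.python for one-off string parses, while the grammar itself now comes from the top-level loader. A small sketch of that split, using a throwaway two-line module:

from parso import load_python_grammar
from parso.python import parse  # still imported by the test, per the hunk above

# One-off string parse via the convenience helper...
module_node = parse('import os\nimport sys\n')
print([imp.get_code().strip() for imp in module_node.iter_imports()])

# ...versus the explicit grammar the Evaluator is built around.
grammar = load_python_grammar()
print(grammar.parse('x = 1\n').get_code())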
@@ -1,7 +1,7 @@
 from textwrap import dedent
 
 from jedi._compatibility import builtins, is_py3
-from parso.python import load_grammar
+from parso import load_python_grammar
 from jedi.evaluate import compiled, instance
 from jedi.evaluate.representation import FunctionContext
 from jedi.evaluate import Evaluator
@@ -10,7 +10,7 @@ from jedi import Script
 
 
 def _evaluator():
-    return Evaluator(load_grammar())
+    return Evaluator(load_python_grammar())
 
 
 def test_simple():
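Finally, the test helper builds the Evaluator from the new loader. A hedged sketch, assuming this jedi fork and its bundled parso are importable:

from parso import load_python_grammar
from jedi.evaluate import Evaluator  # import taken from the hunks above


def _evaluator():
    return Evaluator(load_python_grammar())


evaluator = _evaluator()
print(evaluator.grammar)  # the grammar the rest of jedi parses with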