Mirror of https://github.com/davidhalter/jedi.git (synced 2025-12-23 13:51:27 +08:00)
A lot of stuff is not needed anymore, because of the recent refactorings.
@@ -9,7 +9,6 @@ arguments.

.. warning:: Please, note that Jedi is **not thread safe**.
"""
import re
import os
import warnings
import sys
@@ -5,7 +5,6 @@ import re
from collections import namedtuple

from jedi import common
from jedi.parser import tree as pt
from jedi.evaluate import imports
from jedi import parser
from jedi.parser import tokenize, token
@@ -42,14 +41,6 @@ def get_on_import_stmt(evaluator, user_context, user_stmt, is_like_search=False)
    return i, name


def check_error_statements(module, pos):
    for error_statement in module.error_statements:
        if error_statement.first_type in ('import_from', 'import_name') \
                and error_statement.start_pos < pos <= error_statement.end_pos:
            return importer_from_error_statement(error_statement, pos)
    return None, 0, False, False
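
For readers skimming the removed helper above, here is a small, self-contained sketch (not jedi code; the helper name is hypothetical) of the half-open position test it relied on: a cursor exactly at a statement's start position is treated as outside it, while a cursor at its end position counts as inside.

```python
def cursor_inside(start_pos, end_pos, pos):
    # Positions are (line, column) tuples; Python compares tuples
    # lexicographically, so this mirrors the half-open
    # `start_pos < pos <= end_pos` test in check_error_statements above.
    return start_pos < pos <= end_pos


assert cursor_inside((1, 0), (1, 10), (1, 5))       # inside the statement
assert not cursor_inside((1, 0), (1, 10), (1, 0))   # exactly at the start: outside
assert cursor_inside((1, 0), (1, 10), (1, 10))      # at the end position: inside
assert cursor_inside((1, 0), (3, 0), (2, 99))       # any column on a middle line
```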

def _get_code(code, start_pos, end_pos):
    """
    :param code_start_pos: is where the code starts.
@@ -178,42 +169,6 @@ def get_possible_completion_types(grammar, stack):
    return keywords, grammar_labels


def importer_from_error_statement(error_statement, pos):
    def check_dotted(children):
        for name in children[::2]:
            if name.start_pos <= pos:
                yield name

    names = []
    level = 0
    only_modules = True
    unfinished_dotted = False
    for typ, nodes in error_statement.stack:
        if typ == 'dotted_name':
            names += check_dotted(nodes)
            if nodes[-1] == '.':
                # An unfinished dotted_name
                unfinished_dotted = True
        elif typ == 'import_name':
            if nodes[0].start_pos <= pos <= nodes[0].end_pos:
                # We are on the import.
                return None, 0, False, False
        elif typ == 'import_from':
            for node in nodes:
                if node.start_pos >= pos:
                    break
                elif isinstance(node, pt.Node) and node.type == 'dotted_name':
                    names += check_dotted(node.children)
                elif node in ('.', '...'):
                    level += len(node.value)
                elif isinstance(node, pt.Name):
                    names.append(node)
                elif node == 'import':
                    only_modules = False

    return names, level, only_modules, unfinished_dotted
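
As an aside on the `children[::2]` idiom in `check_dotted` above: a parsed `dotted_name` node alternates name and `.` children, so every second child is a name. A purely illustrative, self-contained example follows, with plain tuples standing in for parser nodes.

```python
# Children of a parsed `os.path.join`, with (value, start_pos) tuples
# standing in for parser Name/Operator nodes.
children = [('os', (1, 0)), ('.', (1, 2)), ('path', (1, 3)),
            ('.', (1, 7)), ('join', (1, 8))]

# Every second child is a name; the interleaved '.' operators are skipped.
names = [value for value, _ in children[::2]]
assert names == ['os', 'path', 'join']
```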

class ContextResults():
    def __init__(self, evaluator, source, module, pos):
        self._evaluator = evaluator
@@ -23,7 +23,7 @@ from jedi.parser import tokenize
from jedi.parser.token import (DEDENT, INDENT, ENDMARKER, NEWLINE, NUMBER,
                               STRING)
from jedi.parser.pgen2.pgen import generate_grammar
from jedi.parser.pgen2.parse import PgenParser, token_to_ilabel
from jedi.parser.pgen2.parse import PgenParser

OPERATOR_KEYWORDS = 'and', 'for', 'if', 'else', 'in', 'is', 'lambda', 'not', 'or'
# Not used yet. In the future I intend to add something like KeywordStatement
@@ -58,50 +58,6 @@ def load_grammar(version='3.4'):
    return _loaded_grammars.setdefault(path, generate_grammar(path))


class ErrorStatement(object):
    type = 'error_stmt'

    def __init__(self, stack, arcs, next_token, position_modifier, next_start_pos):
        self.stack = stack
        self.arcs = arcs
        self._position_modifier = position_modifier
        self.next_token = next_token
        self._next_start_pos = next_start_pos

    def __repr__(self):
        return '<%s %s@%s>' % (
            type(self).__name__,
            repr(self.next_token),
            self.end_pos
        )

    @property
    def end_pos(self):
        s = self._next_start_pos
        return s[0] + self._position_modifier.line, s[1]

    @property
    def start_pos(self):
        return next(self._iter_nodes()).start_pos

    @property
    def first_type(self):
        first_type, nodes = self.stack[0]
        return first_type

    def is_a_valid_token(self, type_, value):
        ilabel = token_to_ilabel(type_, value)
        for i, newstate in self.arcs:
            if ilabel == i:
                return True
        return False

    def _iter_nodes(self):
        for _, nodes in self.stack:
            for node in nodes:
                yield node
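
A self-contained sketch of the `end_pos` arithmetic in the class above: the position modifier shifts only the line number, never the column. The stand-in class below is illustrative, not jedi's actual position modifier.

```python
class FakePositionModifier(object):
    # Only the `line` attribute matters for ErrorStatement.end_pos.
    def __init__(self, line=0):
        self.line = line


def shifted_end_pos(next_start_pos, position_modifier):
    # Mirrors the arithmetic in ErrorStatement.end_pos:
    # shift the line by the modifier, keep the column unchanged.
    line, column = next_start_pos
    return line + position_modifier.line, column


# A token at line 3, column 7, inside a block shifted down 10 lines:
assert shifted_end_pos((3, 7), FakePositionModifier(10)) == (13, 7)
```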

class ParserSyntaxError(object):
    def __init__(self, message, position):
        self.message = message
@@ -146,7 +102,6 @@ class Parser(object):

        self._used_names = {}
        self._scope_names_stack = [{}]
        self._error_statements = []
        self._last_failed_start_pos = (0, 0)
        self._global_names = []
@@ -330,21 +285,20 @@ class ParserWithRecovery(Parser):
        self._indent_counter = 0

        # TODO do print absolute import detection here.
        #try:
        # try:
        # del python_grammar_no_print_statement.keywords["print"]
        #except KeyError:
        # except KeyError:
        # pass # Doesn't exist in the Python 3 grammar.

        #if self.options["print_function"]:
        # if self.options["print_function"]:
        # python_grammar = pygram.python_grammar_no_print_statement
        #else:
        # else:
        super(ParserWithRecovery, self).__init__(grammar, source, tokenizer=tokenizer)

        self.module = self._parsed
        self.module.used_names = self._used_names
        self.module.path = module_path
        self.module.global_names = self._global_names
        self.module.error_statements = self._error_statements

    def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix,
                       add_token_callback):
@@ -422,8 +376,6 @@ class ParserWithRecovery(Parser):
        if nodes and nodes[0] in ('def', 'class', 'lambda'):
            self._scope_names_stack.pop()
        if failed_stack:
            err = ErrorStatement(failed_stack, arcs, value, self.position_modifier, start_pos)
            self._error_statements.append(err)
        stack[start_index - 1][2][1].append(pt.ErrorNode(all_nodes))

        self._last_failed_start_pos = start_pos
@@ -448,8 +400,5 @@ class ParserWithRecovery(Parser):

            yield typ, value, prefix, start_pos

    def _add_syntax_error(self, message, position):
        self.syntax_errors.append(ParserSyntaxError(message, position))

    def __repr__(self):
        return "<%s: %s>" % (type(self).__name__, self.module)