forked from VimPlug/jedi
Revisit reference finding, scan a lot of folders
@@ -43,9 +43,11 @@ class FolderIO(AbstractFolderIO):
                 modified_folder_ios,
                 [FileIO(os.path.join(root, f)) for f in files],
             )
-            modified_iterator = iter(modified_folder_ios)
+            modified_iterator = iter(reversed(modified_folder_ios))
             current = next(modified_iterator, None)
-            for i, folder_io in enumerate(original_folder_ios):
+            i = len(original_folder_ios)
+            for folder_io in reversed(original_folder_ios):
+                i -= 1  # Basically enumerate but reversed
                 if current is folder_io:
                     current = next(modified_iterator, None)
                 else:
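
The old loop enumerated original_folder_ios forward and removed entries from os.walk's dirs list by index, so each deletion shifted the indices of everything still to come and the wrong folders could survive the pruning. Iterating in reverse keeps the remaining indices stable. A simplified single-list sketch of that index-shift problem (drop_unwanted_forward/drop_unwanted_reversed are illustrative names, not jedi code):

def drop_unwanted_forward(items, keep):
    for i, item in enumerate(items):
        if item not in keep:
            del items[i]          # shifts everything after i one slot to the left
    return items

def drop_unwanted_reversed(items, keep):
    i = len(items)
    for item in reversed(list(items)):
        i -= 1                    # basically enumerate, but reversed
        if item not in keep:
            del items[i]
    return items

print(drop_unwanted_forward(['a', 'b', 'c', 'd'], keep={'d'}))   # ['b', 'd'] -- 'b' survives by accident
print(drop_unwanted_reversed(['a', 'b', 'c', 'd'], keep={'d'}))  # ['d']
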
@@ -116,8 +116,14 @@ def _search_function_arguments(module_context, funcdef, string_name):
     found_arguments = False
     i = 0
     inference_state = module_context.inference_state
-    for for_mod_context in get_module_contexts_containing_name(
-            inference_state, [module_context], string_name):
+
+    if settings.dynamic_params_for_other_modules:
+        module_contexts = get_module_contexts_containing_name(
+            inference_state, [module_context], string_name)
+    else:
+        module_contexts = [module_context]
+
+    for for_mod_context in module_contexts:
         for name, trailer in _get_potential_nodes(for_mod_context, string_name):
             i += 1
 
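
With this change the expensive cross-module search for call sites only runs when settings.dynamic_params_for_other_modules is enabled; otherwise only the current module is inspected. A small usage sketch, assuming a recent jedi release where Script.complete(line, column) exists; the snippet and file name are made up:

import jedi
from jedi import settings

# Restrict the parameter search to the current module only.
settings.dynamic_params_for_other_modules = False

code = "def foo(param):\n    param."
script = jedi.Script(code, path='example.py')
print([c.name for c in script.complete(2, 10)])
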
@@ -1,4 +1,4 @@
-import os
+import re
 
 from parso import python_bytes_to_unicode
 
@@ -9,6 +9,8 @@ from jedi.inference.compiled import CompiledObject
 from jedi.inference.filters import ParserTreeFilter
 from jedi.inference.gradual.conversion import convert_names
 
+_IGNORE_FOLDERS = ('.tox', 'venv', '__pycache__')
+
 
 def _resolve_names(definition_names, avoid_names=()):
     for name in definition_names:
@@ -113,11 +115,12 @@ def find_references(module_context, tree_name):
     found_names_dct = _dictionarize(found_names)
 
     module_contexts = set(d.get_root_context() for d in found_names)
-    module_contexts = set(m for m in module_contexts if not m.is_compiled())
 
     non_matching_reference_maps = {}
     potential_modules = get_module_contexts_containing_name(
-        inf, module_contexts, search_name
+        inf,
+        [module_context] + [m for m in module_contexts if m != module_context],
+        search_name
     )
     for module_context in potential_modules:
         for name_leaf in module_context.tree_node.get_used_names().get(search_name, []):
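
The is_compiled() filter moves into get_module_contexts_containing_name (next hunk), and the module the search started from is now passed first so its own file is scanned before the other candidates. A trivial sketch of that ordering expression with placeholder strings instead of module contexts:

def prioritized(current, others):
    # Mirrors [module_context] + [m for m in module_contexts if m != module_context].
    return [current] + [m for m in others if m != current]

print(prioritized('pkg.current', {'pkg.current', 'pkg.a', 'pkg.b'}))
# ['pkg.current', ...] -- the remaining modules follow in arbitrary set order
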
@@ -139,59 +142,80 @@ def find_references(module_context, tree_name):
     return found_names_dct.values()
 
 
+def _check_fs(inference_state, file_io, regex):
+    try:
+        code = file_io.read()
+    except FileNotFoundError:
+        return None
+    code = python_bytes_to_unicode(code, errors='replace')
+    if not regex.search(code):
+        return None
+    new_file_io = KnownContentFileIO(file_io.path, code)
+    m = load_module_from_path(inference_state, new_file_io)
+    if isinstance(m, CompiledObject):
+        return None
+    return m.as_context()
+
+
+def _recurse_find_python_files(folder_io, except_paths):
+    for root_folder_io, folder_ios, file_ios in folder_io.walk():
+        # Delete folders that we don't want to iterate over.
+        folder_ios[:] = [
+            folder_io
+            for folder_io in folder_ios
+            if folder_io.path not in except_paths
+            and folder_io.get_base_name() not in _IGNORE_FOLDERS
+        ]
+
+        for file_io in file_ios:
+            path = file_io.path
+            if path.endswith('.py') or path.endswith('.pyi'):
+                if path not in except_paths:
+                    yield file_io
+
+
+def _find_python_files_in_sys_path(inference_state, folder_io):
+    sys_path = inference_state.get_sys_path()
+    except_paths = set()
+    while True:
+        path = folder_io.path
+        if not any(path.startswith(p) for p in sys_path):
+            break
+        for file_io in _recurse_find_python_files(folder_io, except_paths):
+            yield file_io
+        except_paths.add(path)
+        folder_io = folder_io.get_parent_folder()
+
+
 def get_module_contexts_containing_name(inference_state, module_contexts, name):
     """
     Search a name in the directories of modules.
     """
-    def check_directory(folder_io):
-        for file_name in folder_io.list():
-            if file_name.endswith('.py'):
-                yield folder_io.get_file_io(file_name)
-
-    def check_fs(file_io, base_names):
-        try:
-            code = file_io.read()
-        except FileNotFoundError:
-            return None
-        code = python_bytes_to_unicode(code, errors='replace')
-        if name not in code:
-            return None
-        new_file_io = KnownContentFileIO(file_io.path, code)
-        m = load_module_from_path(inference_state, new_file_io, base_names)
-        if isinstance(m, CompiledObject):
-            return None
-        return m.as_context()
-
-    # skip non python modules
-    used_mod_paths = set()
-    folders_with_names_to_be_checked = []
-    for module_context in module_contexts:
-        path = module_context.py__file__()
-        if path not in used_mod_paths:
+    def iter_file_ios():
+        yielded_paths = [m.py__file__() for m in module_contexts]
+        for module_context in module_contexts:
             file_io = module_context.get_value().file_io
-            if file_io is not None:
-                used_mod_paths.add(path)
-                folders_with_names_to_be_checked.append((
-                    file_io.get_parent_folder(),
-                    module_context.get_value().py__package__()
-                ))
-        yield module_context
+            if file_io is None:
+                continue
+
+            folder_io = file_io.get_parent_folder()
+            for file_io in _find_python_files_in_sys_path(inference_state, folder_io):
+                if file_io.path not in yielded_paths:
+                    yield file_io
+
+    # Skip non python modules
+    for module_context in module_contexts:
+        if module_context.is_compiled():
+            continue
+        yield module_context
 
-    if not settings.dynamic_params_for_other_modules:
+    if len(name) <= 2 or name == 'self':
         return
 
-    def get_file_ios_to_check():
-        for folder_io, base_names in folders_with_names_to_be_checked:
-            for file_io in check_directory(folder_io):
-                if file_io.path not in used_mod_paths:
-                    yield file_io, base_names
-
-        for p in settings.additional_dynamic_modules:
-            p = os.path.abspath(p)
-            if p not in used_mod_paths:
-                yield FileIO(p), None
-
-    for file_io, base_names in get_file_ios_to_check():
-        m = check_fs(file_io, base_names)
+    file_io_count = 0
+    regex = re.compile(r'\b' + re.escape(name) + r'\b')
+    for file_io in iter_file_ios():
+        file_io_count += 1
+        m = _check_fs(inference_state, file_io, regex)
         if m is not None:
             yield m
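
Instead of only listing the directory next to each module, the search now walks the whole tree around a module (up to the sys.path entry it lives under), prunes the folders named in _IGNORE_FOLDERS, and only parses a file when a word-boundary regex finds the name in its source. A self-contained sketch of that scanning strategy using plain os.walk and open(); iter_python_files and files_mentioning are illustrative helpers, not jedi APIs:

import os
import re

_IGNORE_FOLDERS = ('.tox', 'venv', '__pycache__')


def iter_python_files(root, except_paths=()):
    # Yield .py/.pyi paths under root, pruning ignored and already-visited folders.
    for current, dirs, files in os.walk(root):
        dirs[:] = [
            d for d in dirs
            if os.path.join(current, d) not in except_paths
            and d not in _IGNORE_FOLDERS
        ]
        for f in files:
            if f.endswith(('.py', '.pyi')):
                path = os.path.join(current, f)
                if path not in except_paths:
                    yield path


def files_mentioning(name, root):
    # Cheap text prefilter: only yield files whose source matches \bname\b,
    # mirroring the regex check that replaces the old `name in code` test.
    regex = re.compile(r'\b' + re.escape(name) + r'\b')
    for path in iter_python_files(root):
        try:
            with open(path, 'rb') as f:
                code = f.read().decode('utf-8', errors='replace')
        except OSError:
            continue
        if regex.search(code):
            yield path


for p in files_mentioning('find_references', '.'):
    print(p)
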
@@ -17,3 +17,11 @@ def test_folder_io_walk():
     assert root.path == join(root_dir, 'ns2')
     folder_ios.clear()
     assert next(iterator, None) is None
+
+
+def test_folder_io_walk2():
+    root_dir = get_example_dir('namespace_package')
+    iterator = FolderIO(root_dir).walk()
+    root, folder_ios, file_ios = next(iterator)
+    folder_ios.clear()
+    assert next(iterator, None) is None
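
The new test pins down the walk() contract the first hunk relies on: clearing the yielded folder list must stop the traversal from descending further. Plain os.walk gives the same guarantee, which the following pytest-style sketch (using the tmp_path fixture) checks:

import os


def test_os_walk_prune(tmp_path):
    (tmp_path / 'pkg' / 'sub').mkdir(parents=True)
    iterator = os.walk(tmp_path)
    root, dirs, files = next(iterator)
    dirs.clear()                      # prune everything below the root
    assert next(iterator, None) is None
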