# jedi-fork/jedi/inference/references.py (forked from VimPlug/jedi)

import os

from parso import python_bytes_to_unicode

from jedi import settings
from jedi.file_io import FileIO, KnownContentFileIO
from jedi.inference.imports import SubModuleName, load_module_from_path
from jedi.inference.compiled import CompiledObject
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.gradual.conversion import convert_names


def _resolve_names(definition_names, avoid_names=()):
    for name in definition_names:
        if name in avoid_names:
            # Avoiding recursions here, because goto on a module name lands
            # on the same module.
            continue

        if not isinstance(name, SubModuleName):
            # SubModuleNames are not actually existing names but created
            # names when importing something like `import foo.bar.baz`.
            yield name

        if name.api_type == 'module':
            for name in _resolve_names(name.goto(), definition_names):
                yield name


def _dictionarize(names):
    # Key each name by its tree name (or by the name object itself when it has
    # no tree name), so duplicates that refer to the same node collapse.
    return dict(
        (n if n.tree_name is None else n.tree_name, n)
        for n in names
    )


def _find_defining_names(module_context, tree_name):
    found_names = _find_names(module_context, tree_name)

    for name in list(found_names):
        # Convert from/to stubs, because those might also be usages.
        found_names |= set(convert_names(
            [name],
            only_stubs=not name.get_root_context().is_stub(),
            prefer_stub_to_compiled=False
        ))

    found_names |= set(_find_global_variables(found_names, tree_name.value))
    for name in list(found_names):
        if name.api_type == 'param' or name.tree_name is None \
                or name.tree_name.parent.type == 'trailer':
            continue
        found_names |= set(_add_names_in_same_context(name.parent_context,
                                                      name.string_name))
    return set(_resolve_names(found_names))


def _find_names(module_context, tree_name):
    name = module_context.create_name(tree_name)
    found_names = set(name.goto())
    found_names.add(name)
    return set(_resolve_names(found_names))


def _add_names_in_same_context(context, string_name):
    if context.tree_node is None:
        return

    until_position = None
    while True:
        filter_ = ParserTreeFilter(
            parent_context=context,
            until_position=until_position,
        )
        names = set(filter_.get(string_name))
        if not names:
            break
        for name in names:
            yield name
        # Continue the search above the earliest of the names just found.
        ordered = sorted(names, key=lambda x: x.start_pos)
        until_position = ordered[0].start_pos


def _find_global_variables(names, search_name):
    for name in names:
        if name.tree_name is None:
            continue
        module_context = name.get_root_context()
        try:
            method = module_context.get_global_filter
        except AttributeError:
            continue
        else:
            for global_name in method().get(search_name):
                yield global_name
                c = module_context.create_context(global_name.tree_name)
                for name in _add_names_in_same_context(c, global_name.string_name):
                    yield name


def find_references(module_context, tree_name):
    inf = module_context.inference_state
    search_name = tree_name.value

    # We disable flow analysis, because if we have ifs that are only true in
    # certain cases, we want both sides.
    try:
        inf.flow_analysis_enabled = False
        found_names = _find_defining_names(module_context, tree_name)
    finally:
        inf.flow_analysis_enabled = True

    found_names_dct = _dictionarize(found_names)

    module_contexts = set(d.get_root_context() for d in found_names)
    module_contexts = set(m for m in module_contexts if not m.is_compiled())

    # Maps names that do not (yet) connect to a known definition to the name
    # groups they were found in, so those groups can be merged in later if a
    # matching name shows up.
    non_matching_reference_maps = {}
    potential_modules = get_module_contexts_containing_name(
        inf, module_contexts, search_name
    )
    for module_context in potential_modules:
        for name_leaf in module_context.tree_node.get_used_names().get(search_name, []):
            new = _dictionarize(_find_names(module_context, name_leaf))
            if any(tree_name in found_names_dct for tree_name in new):
                found_names_dct.update(new)
                for tree_name in new:
                    for dct in non_matching_reference_maps.get(tree_name, []):
                        # A reference that was previously searched for matches
                        # with a now found name. Merge.
                        found_names_dct.update(dct)
                    try:
                        del non_matching_reference_maps[tree_name]
                    except KeyError:
                        pass
            else:
                for name in new:
                    non_matching_reference_maps.setdefault(name, []).append(new)
    return found_names_dct.values()


def get_module_contexts_containing_name(inference_state, module_contexts, name):
    """
    Search a name in the directories of modules.
    """
    def check_directory(folder_io):
        for file_name in folder_io.list():
            if file_name.endswith('.py'):
                yield folder_io.get_file_io(file_name)

    def check_fs(file_io, base_names):
        try:
            code = file_io.read()
        except FileNotFoundError:
            return None
        code = python_bytes_to_unicode(code, errors='replace')
        if name not in code:
            return None
        new_file_io = KnownContentFileIO(file_io.path, code)
        m = load_module_from_path(inference_state, new_file_io, base_names)
        if isinstance(m, CompiledObject):
            return None
        return m.as_context()

    # Skip non-Python modules.
    used_mod_paths = set()
    folders_with_names_to_be_checked = []
    for module_context in module_contexts:
        path = module_context.py__file__()
        if path not in used_mod_paths:
            file_io = module_context.get_value().file_io
            if file_io is not None:
                used_mod_paths.add(path)
                folders_with_names_to_be_checked.append((
                    file_io.get_parent_folder(),
                    module_context.get_value().py__package__()
                ))
        yield module_context

    if not settings.dynamic_params_for_other_modules:
        return

    def get_file_ios_to_check():
        for folder_io, base_names in folders_with_names_to_be_checked:
            for file_io in check_directory(folder_io):
                if file_io.path not in used_mod_paths:
                    yield file_io, base_names

        for p in settings.additional_dynamic_modules:
            p = os.path.abspath(p)
            if p not in used_mod_paths:
                yield FileIO(p), None

    for file_io, base_names in get_file_ios_to_check():
        m = check_fs(file_io, base_names)
        if m is not None:
            yield m
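

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): find_references() above is
# the internal machinery behind jedi's public Script.get_references() API.
# The snippet below is a minimal, hedged example of reaching it through that
# public API; it assumes a jedi version that exposes Script.get_references()
# and is only meant to illustrate where this module's code ends up being used.
if __name__ == '__main__':
    import jedi

    source = "def foo():\n    pass\n\nfoo()\n"
    script = jedi.Script(source, path='example.py')
    # Ask for all references to `foo` at the call site on line 4, column 0.
    for ref in script.get_references(4, 0):
        print(ref.module_path, ref.line, ref.column)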