diff --git a/jedi/evaluate/__init__.py b/jedi/evaluate/__init__.py
index 29438d31..e61b144b 100644
--- a/jedi/evaluate/__init__.py
+++ b/jedi/evaluate/__init__.py
@@ -95,8 +95,7 @@ class Evaluator(object):
         self.latest_grammar = parso.load_grammar(version='3.6')
 
         self.memoize_cache = {}  # for memoize decorators
-        # To memorize modules -> equals `sys.modules`.
-        self.modules = {}  # like `sys.modules`.
+        self.module_cache = imports.ModuleCache()  # does the job of `sys.modules`.
         self.compiled_cache = {}  # see `evaluate.compiled.create()`
         self.inferred_element_counts = {}
         self.mixed_cache = {}  # see `evaluate.compiled.mixed._create()`
diff --git a/jedi/evaluate/context/module.py b/jedi/evaluate/context/module.py
index 78d80df4..cedaed63 100644
--- a/jedi/evaluate/context/module.py
+++ b/jedi/evaluate/context/module.py
@@ -65,7 +65,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, TreeContext)):
 
     # I'm not sure if the star import cache is really that effective anymore
     # with all the other really fast import caches. Recheck. Also we would need
-    # to push the star imports into Evaluator.modules, if we reenable this.
+    # to push the star imports into Evaluator.module_cache, if we reenable this.
     @evaluator_method_cache([])
     def star_imports(self):
         modules = []
@@ -115,7 +115,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, TreeContext)):
         return None
 
     def py__name__(self):
-        for name, module in self.evaluator.modules.items():
+        for name, module in self.evaluator.module_cache.iterate_modules_with_names():
             if module == self and name != '':
                 return name
 
diff --git a/jedi/evaluate/imports.py b/jedi/evaluate/imports.py
index 586458a6..5f976b49 100644
--- a/jedi/evaluate/imports.py
+++ b/jedi/evaluate/imports.py
@@ -31,6 +31,23 @@ from jedi.evaluate.filters import AbstractNameDefinition
 from jedi.evaluate.base_context import ContextSet, NO_CONTEXTS
 
 
+class ModuleCache():
+    def __init__(self):
+        self._path_cache = {}
+        self._name_cache = {}
+
+    def add(self, module, name):
+        path = module.py__file__()
+        self._path_cache[path] = module
+        self._name_cache[name] = module
+
+    def iterate_modules_with_names(self):
+        return self._name_cache.items()
+
+    def get(self, name):
+        return self._name_cache[name]
+
+
 # This memoization is needed, because otherwise we will infinitely loop on
 # certain imports.
 @evaluator_method_cache(default=NO_CONTEXTS)
@@ -289,7 +306,7 @@ class Importer(object):
 
         module_name = '.'.join(import_parts)
         try:
-            return ContextSet(self._evaluator.modules[module_name])
+            return ContextSet(self._evaluator.module_cache.get(module_name))
         except KeyError:
             pass
 
@@ -367,7 +384,7 @@ class Importer(object):
             # importable.
             return NO_CONTEXTS
 
-        self._evaluator.modules[module_name] = module
+        self._evaluator.module_cache.add(module, module_name)
         return ContextSet(module)
 
     def _generate_name(self, name, in_module=None):
@@ -481,7 +498,7 @@ def add_module(evaluator, module_name, module):
         # the sepatator dots for nested packages. Therefore we return
         # `__main__` in ModuleWrapper.py__name__(), which is similar to
         # Python behavior.
-        evaluator.modules[module_name] = module
+        evaluator.module_cache.add(module, module_name)
 
 
 def get_modules_containing_name(evaluator, modules, name):
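
Review note: for anyone who wants to exercise the new cache in isolation, below is a minimal, self-contained sketch of how the ModuleCache added in jedi/evaluate/imports.py is used by this change. It is not part of the patch: FakeModule and the name 'foo' are hypothetical stand-ins; in Jedi the cached objects are ModuleContext instances and the names come from the import machinery.

    # Standalone sketch of the ModuleCache introduced in this diff. FakeModule
    # is a hypothetical stand-in for jedi's ModuleContext: it only provides
    # py__file__(), the single method that ModuleCache.add() relies on.


    class ModuleCache():
        def __init__(self):
            self._path_cache = {}
            self._name_cache = {}

        def add(self, module, name):
            path = module.py__file__()
            self._path_cache[path] = module
            self._name_cache[name] = module

        def iterate_modules_with_names(self):
            return self._name_cache.items()

        def get(self, name):
            return self._name_cache[name]


    class FakeModule(object):
        def __init__(self, path):
            self._path = path

        def py__file__(self):
            return self._path


    cache = ModuleCache()
    module = FakeModule('/tmp/foo.py')

    # Importer._do_import() registers a freshly loaded module under its dotted name ...
    cache.add(module, 'foo')

    # ... and short-circuits later imports of the same name via get().
    assert cache.get('foo') is module

    # ModuleContext.py__name__() walks the (name, module) pairs to find its own name.
    for name, mod in cache.iterate_modules_with_names():
        if mod is module and name != '':
            print(name)  # prints: foo

One observation on the design: add() also fills _path_cache, but nothing in this diff reads it yet, so it presumably serves as groundwork for path-based lookups in a later change.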