1
0
forked from VimPlug/jedi

Remove the old star import cache, because it's not even used.

This commit is contained in:
Dave Halter
2017-03-18 03:30:23 +01:00
parent aaf6c61e69
commit d0b6d41e99
4 changed files with 1 addition and 38 deletions

View File

@@ -133,7 +133,6 @@ class Script(object):
@cache.memoize_method
def _get_module_node(self):
cache.invalidate_star_import_cache(self._path)
parser = FastParser(self._grammar, self._source, self.path)
save_parser(self.path, parser, pickling=False)

View File

@@ -90,31 +90,3 @@ def memoize_method(method):
dct[key] = result
return result
return wrapper
def _invalidate_star_import_cache_module(module, only_main=False):
    """Drop the star-import validity entry for *module*, if one is cached.

    Needed when a module is reparsed: the old star-import data may no
    longer match the new tree.  ``only_main`` is accepted for interface
    compatibility but is not consulted here.
    """
    validity_cache = _time_caches['star_import_cache_validity']
    try:
        timestamp, cached_modules = validity_cache[module]
    except KeyError:
        # Nothing cached for this module -> nothing to invalidate.
        pass
    else:
        del validity_cache[module]
# This stuff was part of load_parser. However since we're most likely
# not going to use star import caching anymore, just ignore it.
#else:
# In case there is already a module cached and this module
# has to be reparsed, we also need to invalidate the import
# caches.
# _invalidate_star_import_cache_module(parser_cache_item.parser.module)
def invalidate_star_import_cache(path):
    """Invalidate the cached star-import data for the parser cached at
    ``path``, if ``parser_cache`` holds an entry for that path.

    NOTE(review): an earlier docstring claimed "On success returns True",
    but this function always returns ``None`` — callers must not rely on
    the return value.
    """
    try:
        parser_cache_item = parser_cache[path]
    except KeyError:
        # No parser cached for this path -> nothing to invalidate.
        pass
    else:
        _invalidate_star_import_cache_module(parser_cache_item.parser.module)

View File

@@ -196,8 +196,8 @@ class ParserWithRecovery(Parser):
def parse(self, tokenizer):
root_node = super(ParserWithRecovery, self).parse(self._tokenize(tokenizer))
root_node.path = self._module_path
self.module = root_node
self.module.path = self._module_path
return root_node
def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix,

View File

@@ -68,7 +68,6 @@ definitely worse in some cases. But a completion should also be fast.
Caching
~~~~~~~
.. autodata:: star_import_cache_validity
.. autodata:: call_signatures_validity
@@ -217,13 +216,6 @@ scale `max_executions` and `max_until_execution_unique`:
# caching validity (time)
# ----------------
star_import_cache_validity = 60.0
"""
In huge packages like numpy, checking all star imports on every completion
might be slow, therefore we do a star import caching, that lasts a certain
time span (in seconds).
"""
call_signatures_validity = 3.0
"""
Finding function calls might be slow (0.1-0.5s). This is not acceptable for