The parser cache in RAM now has grammar versioning.

Dave Halter
2017-05-26 12:48:59 -04:00
parent f997b91a12
commit dafffdc9b4
4 changed files with 25 additions and 28 deletions
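The core of the change is that the in-memory parser_cache is now keyed by a grammar hash first and the file path second, so modules parsed with different grammar versions no longer overwrite each other. A minimal standalone sketch of that lookup pattern (the helper names and example values below are made up for illustration; they are not parso's API):

# Toy sketch of a grammar-versioned in-RAM cache, mirroring the
# parser_cache[hashed_grammar][path] layout introduced by this commit.
parser_cache = {}


def save(hashed_grammar, path, node):
    # setdefault() creates the per-grammar sub-dict on first use, so entries
    # for different grammar versions never collide on the same path.
    parser_cache.setdefault(hashed_grammar, {})[path] = node


def load(hashed_grammar, path):
    try:
        return parser_cache[hashed_grammar][path]
    except KeyError:
        return None


save('py36-grammar-hash', '/tmp/example.py', 'module parsed with 3.6 grammar')
assert load('py36-grammar-hash', '/tmp/example.py') is not None
assert load('py27-grammar-hash', '/tmp/example.py') is None  # other grammar: miss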


@@ -58,11 +58,9 @@ On Linux, if environment variable ``$XDG_CACHE_HOME`` is set,
``$XDG_CACHE_HOME/parso`` is used instead of the default one.
"""
-# for fast_parser, should not be deleted
parser_cache = {}
class _NodeCacheItem(object):
def __init__(self, node, lines, change_time=None):
self.node = node
@@ -72,7 +70,7 @@ class _NodeCacheItem(object):
self.change_time = change_time
-def load_module(grammar_hash, path, cache_path=None):
+def load_module(hashed_grammar, path, cache_path=None):
"""
Returns a module or None, if it fails.
"""
@@ -82,16 +80,15 @@ def load_module(grammar_hash, path, cache_path=None):
return None
try:
-# TODO Add grammar sha256
-module_cache_item = parser_cache[path]
+module_cache_item = parser_cache[hashed_grammar][path]
if p_time <= module_cache_item.change_time:
return module_cache_item.node
except KeyError:
-return _load_from_file_system(grammar_hash, path, p_time, cache_path=cache_path)
+return _load_from_file_system(hashed_grammar, path, p_time, cache_path=cache_path)
-def _load_from_file_system(grammar_hash, path, p_time, cache_path=None):
-cache_path = _get_hashed_path(grammar_hash, path, cache_path=cache_path)
+def _load_from_file_system(hashed_grammar, path, p_time, cache_path=None):
+cache_path = _get_hashed_path(hashed_grammar, path, cache_path=cache_path)
try:
try:
if p_time > os.path.getmtime(cache_path):
@@ -113,12 +110,12 @@ def _load_from_file_system(grammar_hash, path, p_time, cache_path=None):
except FileNotFoundError:
return None
else:
-parser_cache[path] = module_cache_item
+parser_cache.setdefault(hashed_grammar, {})[path] = module_cache_item
logging.debug('pickle loaded: %s', path)
return module_cache_item.node
-def save_module(grammar_hash, path, module, lines, pickling=True, cache_path=None):
+def save_module(hashed_grammar, path, module, lines, pickling=True, cache_path=None):
try:
p_time = None if path is None else os.path.getmtime(path)
except OSError:
@@ -126,13 +123,13 @@ def save_module(grammar_hash, path, module, lines, pickling=True, cache_path=Non
pickling = False
item = _NodeCacheItem(module, lines, p_time)
-parser_cache[path] = item
+parser_cache.setdefault(hashed_grammar, {})[path] = item
if pickling and path is not None:
-_save_to_file_system(grammar_hash, path, item)
+_save_to_file_system(hashed_grammar, path, item)
-def _save_to_file_system(grammar_hash, path, item, cache_path=None):
-with open(_get_hashed_path(grammar_hash, path, cache_path=cache_path), 'wb') as f:
+def _save_to_file_system(hashed_grammar, path, item, cache_path=None):
+with open(_get_hashed_path(hashed_grammar, path, cache_path=cache_path), 'wb') as f:
pickle.dump(item, f, pickle.HIGHEST_PROTOCOL)
@@ -143,11 +140,11 @@ def clear_cache(cache_path=None):
parser_cache.clear()
-def _get_hashed_path(grammar_hash, path, cache_path=None):
+def _get_hashed_path(hashed_grammar, path, cache_path=None):
directory = _get_cache_directory_path(cache_path=cache_path)
file_hash = hashlib.sha256(path.encode("utf-8")).hexdigest()
-return os.path.join(directory, '%s-%s.pkl' % (grammar_hash, file_hash))
+return os.path.join(directory, '%s-%s.pkl' % (hashed_grammar, file_hash))
def _get_cache_directory_path(cache_path=None):
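The on-disk cache gets the same separation through the file name: _get_hashed_path now embeds the grammar hash next to the SHA-256 of the source path, so every grammar version pickles to its own file. A rough standalone illustration of that naming scheme (the function name and directory below are invented for the example, not parso's code):

import hashlib
import os


def hashed_pickle_path(hashed_grammar, path, directory='/tmp/parso-cache'):
    # Mirrors the "'%s-%s.pkl' % (hashed_grammar, file_hash)" pattern above:
    # the same source file maps to different pickles under different grammars.
    file_hash = hashlib.sha256(path.encode("utf-8")).hexdigest()
    return os.path.join(directory, '%s-%s.pkl' % (hashed_grammar, file_hash))


print(hashed_pickle_path('py36-grammar-hash', '/tmp/example.py'))
print(hashed_pickle_path('py27-grammar-hash', '/tmp/example.py'))  # different file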