#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
"""
caching (Caching Module)
========================

**Author:**

* Dirk Alders <sudo-dirk@mount-mockery.de>

**Description:**

This module provides functions and classes for caching, e.g. properties of other instances.

**Submodules:**

* :class:`caching.property_cache_json`
* :class:`caching.property_cache_pickle`

**Unittest:**

See also the :download:`unittest <caching/_testresults_/unittest.pdf>` documentation.
"""
__DEPENDENCIES__ = []

import hashlib
import hmac
import json
import logging
import os
import pickle
import sys

try:
    from config import APP_NAME as ROOT_LOGGER_NAME
except ImportError:
    ROOT_LOGGER_NAME = 'root'
logger = logging.getLogger(ROOT_LOGGER_NAME).getChild(__name__)

__DESCRIPTION__ = """The Module {\\tt %s} is designed to store information in {\\tt json} or {\\tt pickle} files to provide them much faster than generating them from the original source file.
For more information read the documentation.""" % __name__.replace('_', '\\_')
"""The Module Description"""
__INTERPRETER__ = (2, 3)
"""The Tested Interpreter-Versions"""


class property_cache_pickle(object):
    """
    Class to cache properties whose initialisation takes longer than reading them from a file in pickle format.

    :param source_instance: The source instance holding the data
    :type source_instance: instance
    :param cache_filename: File name where the properties are stored as cache
    :type cache_filename: str
    :param load_all_on_init: Optional parameter controlling the init behaviour. True loads all available properties from the source on init, False does not.
    :param callback_on_data_storage: Optional callback which is executed after the cache file has been written.

    .. note:: source_instance needs to have at least the following methods: uid(), keys(), data_version(), get()

        * uid(): returns the unique id of the source.
        * keys(): returns a list of all available keys.
        * data_version(): returns a version number of the current data (it should be increased if the get method of the source instance returns improved values or the data structure has been changed).
        * get(key, default): returns the property for a key. If the key does not exist, default will be returned.

    Reasons for updating the complete data set (see also the sketch below):

    * The UID of source_instance has changed (in comparison to the cached value).
    * data_version has increased.

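    A minimal sketch of a conforming source instance and its usage (the names
    ``MySource`` and ``my_source.pkl`` are only illustrative assumptions, not part
    of this module):

    .. code-block:: python

        class MySource(object):
            def uid(self):
                return 'my-source-1'                # unique id of the source data

            def keys(self):
                return ['one', 'two']               # all available property keys

            def data_version(self):
                return 1                            # increase when data or structure changes

            def get(self, key, default=None):
                # stands in for an expensive computation
                return {'one': 1, 'two': 2}.get(key, default)

        cache = property_cache_pickle(MySource(), 'my_source.pkl')
        print(cache.get('one'))                     # first call: taken from the source and stored
        print(cache.get('one'))                     # further calls: served from the cache file
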
    **Example:**

    .. literalinclude:: caching/_examples_/property_cache_pickle.py

    On the first execution, this will result in the following output (with a long execution time):

    .. literalinclude:: caching/_examples_/property_cache_pickle_1.log

    Every following execution results in the following output (slow only for getting "two", which is not cached - see the implementation):

    .. literalinclude:: caching/_examples_/property_cache_pickle_2.log
    """
    LOG_PREFIX = 'PickCache:'
    DATA_VERSION_TAG = '_property_cache_data_version_'
    UID_TAG = '_property_cache_uid_'

    def __init__(self, source_instance, cache_filename, load_all_on_init=False, callback_on_data_storage=None):
        self._source_instance = source_instance
        self._cache_filename = cache_filename
        self._load_all_on_init = load_all_on_init
        self._callback_on_data_storage = callback_on_data_storage
        self._cached_props = None

    def get(self, key, default=None):
        """
        Method to get the cached property. If the key does not exist in the cache, the property will be loaded from source_instance and stored in the cache (file).

        :param key: key for the value to get.
        :param default: value to be returned if the key does not exist.
        :returns: value for the given key or the default value.

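        Sketch of the resulting behaviour (``cache`` as created in the class example above; names are illustrative)::

            cache.get('one')          # not yet cached: read from the source instance, then stored in the cache file
            cache.get('one')          # cached: served from the cache file
            cache.get('other', 42)    # key not in source_instance.keys(): returned uncached from the source
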
        """
        if key in self.keys():
            if self._cached_props is None:
                self._init_cache()
            if self._key_filter(key) not in self._cached_props:
                val = self._source_instance.get(key, None)
                logger.debug("%s Loading property for '%s' from source instance (%s)", self.LOG_PREFIX, key, repr(val))
                self._cached_props[self._key_filter(key)] = val
                self._save_cache()
            else:
                logger.debug("%s Providing property for '%s' from cache (%s)", self.LOG_PREFIX, key, repr(self._cached_props.get(self._key_filter(key), default)))
            return self._cached_props.get(self._key_filter(key), default)
        else:
            logger.info("%s Key '%s' is not in cached_keys. Uncached data will be returned.", self.LOG_PREFIX, key)
            return self._source_instance.get(key, default)

    def keys(self):
        """
        Method to get the available keys (from :data:`source_instance`).
        """
        return self._source_instance.keys()

    def _data_version(self):
        if self._cached_props is None:
            return None
        else:
            return self._cached_props.get(self.DATA_VERSION_TAG, None)

    def _init_cache(self):
        # (Re-)build the cache if no cache file could be loaded, the source uid changed
        # or the source data_version increased.
        if not self._load_cache() or self._source_instance.uid() != self._uid() or self._source_instance.data_version() > self._data_version():
            if self._uid() is not None and self._source_instance.uid() != self._uid():
                logger.debug("%s Source uid changed, ignoring previous cache data", self.LOG_PREFIX)
            if self._data_version() is not None and self._source_instance.data_version() > self._data_version():
                logger.debug("%s Data version increased, ignoring previous cache data", self.LOG_PREFIX)
            self._cached_props = dict()
            if self._load_all_on_init:
                self._load_source()
            self._cached_props[self.UID_TAG] = self._source_instance.uid()
            self._cached_props[self.DATA_VERSION_TAG] = self._source_instance.data_version()
            self._save_cache()

    def _load_cache(self):
        if os.path.exists(self._cache_filename):
            with open(self._cache_filename, 'rb') as fh:
                self._cached_props = pickle.load(fh)
            logger.info('%s Loading properties from cache (%s)', self.LOG_PREFIX, self._cache_filename)
            return True
        else:
            logger.debug('%s Cache file does not exist (yet).', self.LOG_PREFIX)
            return False

    def _key_filter(self, key):
        # Keys which would collide with the internal uid / data_version tags get an
        # additional '_' prefix before they are stored in the cache.
        if sys.version_info >= (3, 0):
            tps = [str]
        else:
            tps = [str, unicode]
        if type(key) in tps:
            if key.endswith(self.DATA_VERSION_TAG) or key.endswith(self.UID_TAG):
                return '_' + key
        return key

    def _load_source(self):
        logger.debug('%s Loading all data from source - %s', self.LOG_PREFIX, repr(self._source_instance.keys()))
        for key in self._source_instance.keys():
            val = self._source_instance.get(key)
            self._cached_props[self._key_filter(key)] = val

    def _save_cache(self):
        with open(self._cache_filename, 'wb') as fh:
            pickle.dump(self._cached_props, fh)
        logger.info('%s cache-file stored (%s)', self.LOG_PREFIX, self._cache_filename)
        if self._callback_on_data_storage is not None:
            self._callback_on_data_storage()

    def _uid(self):
        if self._cached_props is None:
            return None
        else:
            return self._cached_props.get(self.UID_TAG, None)


class property_cache_json(property_cache_pickle):
    """
    Class to cache properties whose initialisation takes longer than reading them from a file in json format. See also the parent class :py:class:`property_cache_pickle`.

    :param source_instance: The source instance holding the data
    :type source_instance: instance
    :param cache_filename: File name where the properties are stored as cache
    :type cache_filename: str
    :param load_all_on_init: Optional parameter controlling the init behaviour. True loads all available properties from the source on init, False does not.
    :param callback_on_data_storage: Optional callback which is executed after the cache file has been written.

    .. warning::

        * This class uses json. You should **only** use keys of type string!
        * Unicode types are transferred to strings.

    .. note:: source_instance needs to have at least the following methods: uid(), keys(), data_version(), get()

        * uid(): returns the unique id of the source.
        * keys(): returns a list of all available keys.
        * data_version(): returns a version number of the current data (it should be increased if the get method of the source instance returns improved values or the data structure has been changed).
        * get(key, default): returns the property for a key. If the key does not exist, default will be returned.

    Reasons for updating the complete data set (see also the sketch below):

    * The UID of source_instance has changed (in comparison to the cached value).
    * data_version has increased.

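    A minimal usage sketch (``MySource`` is the illustrative source class shown for
    :py:class:`property_cache_pickle`; the file name is an assumption):

    .. code-block:: python

        cache = property_cache_json(MySource(), 'my_source.json')
        print(cache.get('one'))     # keys have to be strings, because they end up as json object keys
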
    **Example:**

    .. literalinclude:: caching/_examples_/property_cache_json.py

    On the first execution, this will result in the following output (with a long execution time):

    .. literalinclude:: caching/_examples_/property_cache_json_1.log

    Every following execution results in the following output (slow only for getting "two", which is not cached - see the implementation):

    .. literalinclude:: caching/_examples_/property_cache_json_2.log
    """
    LOG_PREFIX = 'JsonCache:'

    def _load_cache(self):
        if os.path.exists(self._cache_filename):
            with open(self._cache_filename, 'r') as fh:
                self._cached_props = json.load(fh)
            logger.info('%s Loading properties from cache (%s)', self.LOG_PREFIX, self._cache_filename)
            return True
        else:
            logger.debug('%s Cache file does not exist (yet).', self.LOG_PREFIX)
            return False

    def _save_cache(self):
        with open(self._cache_filename, 'w') as fh:
            json.dump(self._cached_props, fh, sort_keys=True, indent=4)
        logger.info('%s cache-file stored (%s)', self.LOG_PREFIX, self._cache_filename)
        if self._callback_on_data_storage is not None:
            self._callback_on_data_storage()