Make BaseFileCache into an abstract base class so it's a proper interface

Push the opening and closing of files into the _load and _dump methods
so that we don't invoke the slow codec machinery without reason.
pull/21780/head
Toshio Kuratomi 8 years ago committed by Brian Coca
parent c033e5111f
commit 45251f910c

@ -21,8 +21,6 @@ __metaclass__ = type
import os import os
import time import time
import errno import errno
import codecs
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
from ansible import constants as C from ansible import constants as C
@ -74,12 +72,9 @@ class BaseFileCacheModule(BaseCacheModule):
""" """
A caching module backed by file based storage. A caching module backed by file based storage.
""" """
plugin_name = None
read_mode = 'r'
write_mode = 'w'
encoding = 'utf-8'
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.plugin_name = self.__module__.split('.')[-1]
self._timeout = float(C.CACHE_PLUGIN_TIMEOUT) self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
self._cache = {} self._cache = {}
self._cache_dir = None self._cache_dir = None
@ -89,7 +84,8 @@ class BaseFileCacheModule(BaseCacheModule):
self._cache_dir = os.path.expanduser(os.path.expandvars(C.CACHE_PLUGIN_CONNECTION)) self._cache_dir = os.path.expanduser(os.path.expandvars(C.CACHE_PLUGIN_CONNECTION))
if not self._cache_dir: if not self._cache_dir:
raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option to be set (to a writeable directory path)" % self.plugin_name) raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option"
" to be set (to a writeable directory path)" % self.plugin_name)
if not os.path.exists(self._cache_dir): if not os.path.exists(self._cache_dir):
try: try:
@ -111,15 +107,16 @@ class BaseFileCacheModule(BaseCacheModule):
cachefile = "%s/%s" % (self._cache_dir, key) cachefile = "%s/%s" % (self._cache_dir, key)
try: try:
with codecs.open(cachefile, self.read_mode, encoding=self.encoding) as f: try:
try: value = self._load(cachefile)
value = self._load(f) self._cache[key] = value
self._cache[key] = value return value
return value except ValueError as e:
except ValueError as e: display.warning("error in '%s' cache plugin while trying to read %s : %s."
display.warning("error in '%s' cache plugin while trying to read %s : %s. Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e))) " Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
self.delete(key) self.delete(key)
raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. It has been removed, so you can re-run your command now." % cachefile) raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data."
" It has been removed, so you can re-run your command now." % cachefile)
except (OSError,IOError) as e: except (OSError,IOError) as e:
display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e))) display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
raise KeyError raise KeyError
@ -132,17 +129,9 @@ class BaseFileCacheModule(BaseCacheModule):
cachefile = "%s/%s" % (self._cache_dir, key) cachefile = "%s/%s" % (self._cache_dir, key)
try: try:
f = codecs.open(cachefile, self.write_mode, encoding=self.encoding) self._dump(value, cachefile)
except (OSError,IOError) as e: except (OSError,IOError) as e:
display.warning("error in '%s' cache plugin while trying to write to %s : %s" % (self.plugin_name, cachefile, to_bytes(e))) display.warning("error in '%s' cache plugin while trying to write to %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
pass
else:
self._dump(value, f)
finally:
try:
f.close()
except UnboundLocalError:
pass
def has_expired(self, key): def has_expired(self, key):
@ -212,9 +201,31 @@ class BaseFileCacheModule(BaseCacheModule):
ret[key] = self.get(key) ret[key] = self.get(key)
return ret return ret
def _load(self, f): @abstractmethod
raise AnsibleError("Plugin '%s' must implement _load method" % self.plugin_name) def _load(self, filepath):
"""
Read data from a filepath and return it as a value
:arg filepath: The filepath to read from.
:returns: The value stored in the filepath
This method reads from the file on disk and takes care of any parsing
and transformation of the data before returning it. The value
returned should be what Ansible would expect if it were uncached data.
.. note:: Filehandles have advantages but calling code doesn't know
whether this file is text or binary, should be decoded, or accessed via
a library function. Therefore the API uses a filepath and opens
the file inside of the method.
"""
pass
def _dump(self, value, f): @abstractmethod
raise AnsibleError("Plugin '%s' must implement _dump method" % self.plugin_name) def _dump(self, value, filepath):
"""
Write data to a filepath
:arg value: The value to store
:arg filepath: The filepath to store it at
"""
pass

@ -19,6 +19,8 @@
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
import codecs
try: try:
import simplejson as json import simplejson as json
except ImportError: except ImportError:
@ -31,10 +33,12 @@ class CacheModule(BaseFileCacheModule):
""" """
A caching module backed by json files. A caching module backed by json files.
""" """
plugin_name = 'jsonfile'
def _load(self, f): def _load(self, filepath):
return json.load(f) # Valid JSON is always UTF-8 encoded.
with codecs.open(filepath, 'r', encoding='utf-8') as f:
return json.load(f)
def _dump(self, value, f): def _dump(self, value, filepath):
f.write(jsonify(value, format=True)) with codecs.open(filepath, 'w', encoding='utf-8') as f:
f.write(jsonify(value, format=True))

@ -30,13 +30,13 @@ class CacheModule(BaseFileCacheModule):
""" """
A caching module backed by pickle files. A caching module backed by pickle files.
""" """
plugin_name = 'pickle'
read_mode = 'rb'
write_mode = 'wb'
encoding = None
def _load(self, f): def _load(self, filepath):
return pickle.load(f) # Pickle is a binary format
with open(filepath, 'rb') as f:
return pickle.load(f)
def _dump(self, value, f): def _dump(self, value, filepath):
pickle.dump(value, f) with open(filepath, 'wb') as f:
# Use pickle protocol 2 which is compatible with Python 2.3+.
pickle.dump(value, f, protocol=2)

@ -19,6 +19,9 @@
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
import codecs
import yaml import yaml
from ansible.parsing.yaml.loader import AnsibleLoader from ansible.parsing.yaml.loader import AnsibleLoader
@ -29,10 +32,11 @@ class CacheModule(BaseFileCacheModule):
""" """
A caching module backed by yaml files. A caching module backed by yaml files.
""" """
plugin_name = 'yaml'
def _load(self, f): def _load(self, filepath):
return AnsibleLoader(f).get_single_data() with codecs.open(filepath, 'r', encoding='utf-8') as f:
return AnsibleLoader(f).get_single_data()
def _dump(self, value, f): def _dump(self, value, filepath):
yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False) with codecs.open(filepath, 'w', encoding='utf-8') as f:
yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)

@ -243,7 +243,6 @@ lib/ansible/playbook/role/metadata.py
lib/ansible/plugins/action/set_fact.py lib/ansible/plugins/action/set_fact.py
lib/ansible/plugins/action/set_stats.py lib/ansible/plugins/action/set_stats.py
lib/ansible/plugins/action/synchronize.py lib/ansible/plugins/action/synchronize.py
lib/ansible/plugins/cache/base.py
lib/ansible/plugins/callback/default.py lib/ansible/plugins/callback/default.py
lib/ansible/plugins/callback/logentries.py lib/ansible/plugins/callback/logentries.py
lib/ansible/plugins/callback/oneline.py lib/ansible/plugins/callback/oneline.py

Loading…
Cancel
Save