First commit

- added ApplicationCache class
- added gitignore
- added & configured setup.py
master
Felix Stupp 4 years ago
commit 748edeb53d
Signed by: zocker
GPG Key ID: 93E1BD26F6B02FB7

153
.gitignore vendored

@ -0,0 +1,153 @@
# Created by https://www.toptal.com/developers/gitignore/api/python
# Edit at https://www.toptal.com/developers/gitignore?templates=python
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
pytestdebug.log
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
doc/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
#poetry.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
# .env
.env/
.venv/
env/
venv/
ENV/
env.bak/
venv.bak/
pythonenv*
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# operating system-related files
# file properties cache/storage on macOS
# (gitignore has no trailing comments; a '#' mid-line is part of the pattern)
*.DS_Store
# thumbnail cache on Windows
Thumbs.db
# profiling data
.prof
# End of https://www.toptal.com/developers/gitignore/api/python

@ -0,0 +1,141 @@
#!/usr/bin/env python3
import gzip
import hashlib
import json
import time
from functools import partialmethod, wraps
from io import BufferedReader
from pathlib import Path
from typing import Callable, Optional
# tries to follow XDG Base Directory Specification, if it fails due to a missing module, uses default cache dir ~/.cache
try:
import xdg
CACHE_DIR = xdg.xdg_cache_home()
except ModuleNotFoundError:
CACHE_DIR = Path.home() / ".cache"
GZIP_MAGIC_NUMBER = b'\x1f\x8b'
IDENTITY = lambda x: x
class ApplicationCache:
    """File-based string cache living below the user's cache directory.

    Each entry is a file named after a SHA3-512 key inside
    ``CACHE_DIR/<app_name>``. Payloads larger than ``compress_threshold``
    bytes are gzip-compressed on disk; compressed entries are detected on
    read via the gzip magic number, so both formats can coexist.
    """

    # application identifier; selects the sub-directory below CACHE_DIR
    app_name: str
    # whether large payloads are gzip-compressed before being written
    compress_data: bool
    # minimum payload size in bytes before compression kicks in
    compress_threshold: int
    # fallback entry lifetime in seconds when load() gets no max_age
    default_max_age: int
    # text encoding used to (de)serialize payloads on disk
    encoding: str

    def __init__(
        self,
        app_name: str,
        compress_data: bool = True,
        compress_threshold: int = 1024,
        create_cache_dir: bool = True,
        default_max_age: int = 3600,
        encoding: str = "utf-8",
    ):
        """Configure the cache and (optionally) create its directory.

        Args:
            app_name: application identifier, used as the directory name.
            compress_data: gzip payloads bigger than compress_threshold.
            compress_threshold: size in bytes above which to compress.
            create_cache_dir: create the cache directory if it is missing.
            default_max_age: default entry lifetime in seconds.
            encoding: text encoding for payloads on disk.

        Raises:
            NotADirectoryError: if the cache path exists but is not a
                directory (subclass of the generic Exception raised before,
                so existing handlers keep working).
        """
        self.app_name = app_name
        self.compress_data = compress_data
        self.compress_threshold = compress_threshold
        self.default_max_age = default_max_age
        self.encoding = encoding
        if create_cache_dir:
            # exist_ok avoids the check-then-create race of the previous
            # "if not exists(): mkdir()" sequence
            self.cache_dir.mkdir(parents=True, exist_ok=True)
        if not self.cache_dir.is_dir():
            raise NotADirectoryError(f'Expected "{self.cache_dir}" to be a directory')

    @property
    def cache_dir(self) -> Path:
        """Directory in which all of this application's entries are stored."""
        return CACHE_DIR / self.app_name

    @staticmethod
    def get_hash(args: tuple, kwargs: dict, *arbitrary) -> str:
        """Return a stable SHA3-512 hex digest for a call signature.

        The digest covers the JSON serialization of
        ``(*arbitrary, args, kwargs)``; every value must therefore be
        JSON-serializable. (Parameter renamed from the typo "arbitary";
        varargs names are invisible to callers.)
        """
        payload = (*arbitrary, args, kwargs)
        digest = hashlib.sha3_512()
        digest.update(json.dumps(payload).encode("utf-8"))
        return digest.hexdigest()

    @classmethod
    def gen_key(cls, cache_id: str, args: list, kwargs: dict) -> str:
        """Derive the cache file name for a call in namespace cache_id."""
        return cls.get_hash(args, kwargs, cache_id)

    def compress(self, data: str) -> bytes:
        """Encode data; gzip it when enabled and above the size threshold."""
        bin_data = data.encode(self.encoding)
        if self.compress_data and len(bin_data) > self.compress_threshold:
            return gzip.compress(bin_data)
        return bin_data

    def decompress(self, compressed_data: bytes) -> str:
        """Decode bytes written by compress(), gunzipping when required."""
        bin_data = compressed_data
        # entries written with compression disabled stay readable: only
        # gunzip when the gzip magic number is present
        if bin_data[:2] == GZIP_MAGIC_NUMBER:
            bin_data = gzip.decompress(bin_data)
        return bin_data.decode(self.encoding)

    def load(self, key: str, max_age: Optional[int] = None) -> Optional[str]:
        """Return the cached payload for key, or None if absent or expired.

        Expired entries and non-file leftovers under the key are deleted on
        sight. BUG FIX: an explicit ``max_age=0`` used to fall back silently
        to default_max_age ("x or y" treats 0 as falsy); it now really means
        "expire immediately".
        """
        if max_age is None:
            max_age = self.default_max_age
        cache_path: Path = self.cache_dir / key
        if not cache_path.is_file():
            if cache_path.exists():
                # something other than a regular file squats on the key
                # NOTE(review): unlink() fails on a directory — assumed not
                # to occur for hash-named entries
                cache_path.unlink()
            return None
        cache_stat = cache_path.stat()
        if cache_stat.st_mtime + max_age <= time.time():
            cache_path.unlink()
            return None
        with cache_path.open("rb") as f:
            return self.decompress(f.read())

    def store(self, key: str, data: str):
        """Persist data under key, compressing when worthwhile."""
        cache_path: Path = self.cache_dir / key
        with cache_path.open("wb") as f:
            f.write(self.compress(data))

    def cache_anything(
        self,
        key_prefix: Optional[str] = None,
        max_age: Optional[int] = None,
        packer: Optional[Callable] = None,
        unpacker: Optional[Callable] = None,
    ):
        """Decorator factory caching a function's result as a string.

        Args:
            key_prefix: cache namespace; defaults to the function's __name__.
            max_age: entry lifetime in seconds; None uses default_max_age.
            packer: converts the return value to str for storage
                (identity when omitted/None — None is now an explicit alias
                for the old IDENTITY default).
            unpacker: converts the stored str back to the return value
                (identity when omitted/None).

        The decorated function gains three keyword-only switches:
        no_cache (bypass entirely), cache_no_lookup (always recompute) and
        cache_no_store (do not persist the result).
        """
        pack = IDENTITY if packer is None else packer
        unpack = IDENTITY if unpacker is None else unpacker

        def decorator(fun):
            # BUG FIX: the original wrote "key_prefix = key_prefix or
            # fun.__name__" here, which makes key_prefix local to decorator
            # and raises UnboundLocalError before the fallback can be read.
            # Bind a fresh name instead (the async variant already did).
            prefix = key_prefix or fun.__name__

            @wraps(fun)
            def decorated(*args, no_cache: bool = False, cache_no_lookup: bool = False, cache_no_store: bool = False, **kwargs):
                cache_no_lookup = no_cache or cache_no_lookup
                cache_no_store = no_cache or cache_no_store
                # key generation only needed if the cache is touched at all
                if not (cache_no_lookup and cache_no_store):
                    cache_key = self.gen_key(prefix, args, kwargs)
                if not cache_no_lookup:
                    cached_data = self.load(key=cache_key, max_age=max_age)
                    if cached_data is not None:
                        return unpack(cached_data)
                data = fun(*args, **kwargs)
                if not cache_no_store:
                    self.store(key=cache_key, data=pack(data))
                return data

            return decorated

        return decorator

    def cache_anything_async(
        self,
        cache_id: Optional[str] = None,
        max_age: Optional[int] = None,
        packer: Optional[Callable] = None,
        unpacker: Optional[Callable] = None,
    ):
        """Async twin of cache_anything for coroutine functions.

        Identical caching behavior and keyword-only switches; the wrapped
        coroutine is awaited before its (packed) result is stored.
        (Parameter is named cache_id, not key_prefix, for backward
        compatibility with existing keyword callers.)
        """
        pack = IDENTITY if packer is None else packer
        unpack = IDENTITY if unpacker is None else unpacker

        def decorator(fun):
            prefix = cache_id or fun.__name__

            @wraps(fun)
            async def decorated(*args, no_cache: bool = False, cache_no_lookup: bool = False, cache_no_store: bool = False, **kwargs):
                cache_no_lookup = no_cache or cache_no_lookup
                cache_no_store = no_cache or cache_no_store
                if not (cache_no_lookup and cache_no_store):
                    cache_key = self.gen_key(prefix, args, kwargs)
                if not cache_no_lookup:
                    cached_data = self.load(key=cache_key, max_age=max_age)
                    if cached_data is not None:
                        return unpack(cached_data)
                data = await fun(*args, **kwargs)
                if not cache_no_store:
                    self.store(key=cache_key, data=pack(data))
                return data

            return decorated

        return decorator

    # convenience decorators for common payload types
    cache_int = partialmethod(cache_anything, packer=str, unpacker=int)
    cache_json = partialmethod(cache_anything, packer=json.dumps, unpacker=json.loads)
    cache_str = partialmethod(cache_anything, packer=str, unpacker=str)
    cache_int_async = partialmethod(cache_anything_async, packer=str, unpacker=int)
    cache_json_async = partialmethod(cache_anything_async, packer=json.dumps, unpacker=json.loads)
    cache_str_async = partialmethod(cache_anything_async, packer=str, unpacker=str)

@ -0,0 +1 @@
from .ApplicationCache import ApplicationCache

@ -0,0 +1,28 @@
#!/usr/bin/env python3
"""Packaging configuration for the jsoncache utility package."""
import setuptools

# Trove classifiers describing maturity, audience, license and runtime.
CLASSIFIERS = [
    "Development Status :: 3 - Alpha",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Utilities",
]

setuptools.setup(
    name="jsoncache",
    version='0.1',
    author="Felix Stupp",
    author_email="me+code@banananet.work",
    description="Utility package for caching",
    url="https://git.banananet.work/zocker/python-jsoncache",
    python_requires='>=3.6',
    # XDG support is optional: install as jsoncache[XDG] to follow the
    # XDG Base Directory Specification via pyxdg
    extras_require={"XDG": ["pyxdg >=0.25"]},
    packages=["jsoncache"],
    classifiers=CLASSIFIERS,
)
Loading…
Cancel
Save