passerelle/passerelle/apps/atos_genesys/utils.py

import time
from contextlib import contextmanager

from django.core.cache import cache
from django.db import transaction
from django.utils import six

from passerelle.utils.jsonresponse import APIError

DEFAULT_DURATION = 5 * 60  # 5 minutes
# keep data in cache for 1 day, i.e. we can answer a request from the cache
# for up to 1 day after the last successful refresh
CACHE_DURATION = 86400


@contextmanager
def row_lock(row):
    if row:
        with transaction.atomic():
            # take a row-level lock on the model instance; it is held until
            # the transaction ends, so concurrent updaters wait here
            list(row.__class__.objects.filter(pk=row.pk).select_for_update())
            yield
    else:
        yield
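
# Illustrative sketch of row_lock usage (not part of the module): `resource`
# and `refresh_expensive_data` are hypothetical names; while the block runs,
# other workers calling row_lock(resource) on the same row wait on the
# database lock.
#
#     with row_lock(resource):
#         refresh_expensive_data(resource)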


class RowLockedCache(object):
    """Cache the return value of a function: always return the cached value
    for performance, but if the cache is stale update it asynchronously using
    a thread; prevent multiple updates by using row locks on database models
    and an update cache key.
    """

    def __init__(self, function, logger=None, row=None, duration=DEFAULT_DURATION, key_prefix=None):
        self.function = function
        self.row = row
        self.duration = duration
        self.key_prefix = key_prefix or function.__name__
        self.logger = logger

    def _key(self, *args, **kwargs):
        keys = []
        if self.row:
            keys.append(str(self.row.pk))
        for arg in args:
            if isinstance(arg, six.string_types):
                keys.append(arg)
            else:
                # join() below needs strings, so hash non-string arguments
                keys.append(str(hash(arg)))
        # note: keyword arguments do not contribute to the cache key
        return self.key_prefix + '-' + '-'.join(keys)

    def __call__(self, *args, **kwargs):
        now = time.time()
        key = self._key(*args, **kwargs)
        # Fast path
        cacheline = cache.get(key)
        if cacheline:
            timestamp = cacheline['timestamp']
            if now - timestamp >= self.duration:
                with row_lock(self.row):
                    # Slow path: check the cacheline again, another worker may
                    # have refreshed it while we waited for the row lock
                    cacheline = cache.get(key) or cacheline
                    if now - cacheline['timestamp'] < self.duration:
                        return cacheline['value']
                    try:
                        value = self.function(*args, **kwargs)
                        cache.set(key, {'value': value, 'timestamp': now}, CACHE_DURATION)
                    except APIError as e:
                        if self.logger:
                            self.logger.error('failure to update cache (%s)', e)
                        # keep serving the stale value rather than failing
                        return cacheline['value']
            else:
                # cache is fresh, serve it directly
                return cacheline['value']
        else:
            # cache miss, compute synchronously
            value = self.function(*args, **kwargs)
            cache.set(key, {'value': value, 'timestamp': now}, CACHE_DURATION)
        return value
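
# Illustrative usage sketch (not part of the module). The names `resource`,
# `logger` and `fetch_dossier` are hypothetical: `resource` stands for the
# connector's Django model instance (used for row locking and key scoping) and
# `fetch_dossier` for a function that may raise APIError.
#
#     cached_fetch = RowLockedCache(fetch_dossier, logger=logger, row=resource,
#                                   duration=DEFAULT_DURATION)
#     dossier = cached_fetch('dossier-1234')  # first call computes and caches
#     dossier = cached_fetch('dossier-1234')  # served from cache for 5 minutes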