Merging upstream version 1.2.0.

This commit is contained in:
Mathias Behrle 2015-05-12 14:07:22 +02:00
parent d3c089d289
commit c2db9695c9
12 changed files with 319 additions and 578 deletions

View File

@@ -12,3 +12,4 @@ Contributors
* Tin Tvrtković <tinchester@gmail.com>
* @bcho <bcho@vtmer.com>
* George Sakkis (@gsakkis)

View File

@@ -3,11 +3,19 @@
History
-------
1.2.0 (2015-04-28)
++++++++++++++++++
* Overall code and test refactoring, thanks to @gsakkis
* Allow the del statement for resetting cached properties with ttl instead of del obj._cache[attr], thanks to @gsakkis.
* Uncovered a bug in PyPy, https://bitbucket.org/pypy/pypy/issue/2033/attributeerror-object-attribute-is-read, thanks to @gsakkis
* Fixed threaded_cached_property_with_ttl to actually be thread-safe, thanks to @gsakkis
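For illustration, a minimal sketch of the ``del``-based reset noted above for TTL-backed properties; the ``Monopoly`` class is a hypothetical stand-in modelled on the README example:
>>> from cached_property import cached_property_with_ttl
>>> class Monopoly(object):
...     def __init__(self):
...         self.boardwalk_price = 500
...     @cached_property_with_ttl(ttl=300)  # cache for up to 300 seconds
...     def boardwalk(self):
...         # recomputed only when the cache is empty or expired
...         self.boardwalk_price += 50
...         return self.boardwalk_price
>>> monopoly = Monopoly()
>>> monopoly.boardwalk
550
>>> monopoly.boardwalk
550
>>> del monopoly.boardwalk  # 1.2.0: plain del clears the cached value (no more obj._cache[attr])
>>> monopoly.boardwalk
600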
1.1.0 (2015-04-04)
++++++++++++++++++
* Regression: As the cache was not always clearing, we've broken out the time to expire feature to its own set of specific tools.
* Fixed typo in README, thanks to @zoidbergwill.
* Regression: As the cache was not always clearing, we've broken out the time to expire feature to its own set of specific tools, thanks to @pydanny
* Fixed typo in README, thanks to @zoidbergwill
1.0.0 (2015-02-13)
++++++++++++++++++

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.1
Name: cached-property
Version: 1.1.0
Summary: A cached-property for decorating methods in classes.
Version: 1.2.0
Summary: A decorator for caching properties in classes.
Home-page: https://github.com/pydanny/cached-property
Author: Daniel Greenfeld
Author-email: pydanny@gmail.com
@@ -20,7 +20,7 @@ Description: ===============================
:target: https://pypi.python.org/pypi/cached-property
A cached-property for decorating methods in classes.
A decorator for caching properties in classes.
Why?
-----
@@ -104,7 +104,7 @@ Description: ===============================
>>> monopoly.boardwalk
550
>>> # invalidate the cache
>>> del monopoly.boardwalk
>>> del monopoly['boardwalk']
>>> # request the boardwalk property again
>>> monopoly.boardwalk
600
@@ -194,10 +194,6 @@ Description: ===============================
3
>>> monopoly.dice
3
>>> # This cache clearing does not always work, see note below.
>>> del monopoly['dice']
>>> monopoly.dice
6
**Note:** The ``ttl`` tools do not reliably allow the clearing of the cache. This
is why they are broken out into separate tools. See https://github.com/pydanny/cached-property/issues/16.
@@ -220,11 +216,19 @@ Description: ===============================
History
-------
1.2.0 (2015-04-28)
++++++++++++++++++
* Overall code and test refactoring, thanks to @gsakkis
* Allow the del statement for resetting cached properties with ttl instead of del obj._cache[attr], thanks to @gsakkis.
* Uncovered a bug in PyPy, https://bitbucket.org/pypy/pypy/issue/2033/attributeerror-object-attribute-is-read, thanks to @gsakkis
* Fixed threaded_cached_property_with_ttl to actually be thread-safe, thanks to @gsakkis
1.1.0 (2015-04-04)
++++++++++++++++++
* Regression: As the cache was not always clearing, we've broken out the time to expire feature to its own set of specific tools.
* Fixed typo in README, thanks to @zoidbergwill.
* Regression: As the cache was not always clearing, we've broken out the time to expire feature to its own set of specific tools, thanks to @pydanny
* Fixed typo in README, thanks to @zoidbergwill
1.0.0 (2015-02-13)
++++++++++++++++++

View File

@@ -12,7 +12,7 @@ cached-property
:target: https://pypi.python.org/pypi/cached-property
A cached-property for decorating methods in classes.
A decorator for caching properties in classes.
Why?
-----
@@ -96,7 +96,7 @@ Results of cached functions can be invalidated by outside forces. Let's demonstr
>>> monopoly.boardwalk
550
>>> # invalidate the cache
>>> del monopoly.boardwalk
>>> del monopoly['boardwalk']
>>> # request the boardwalk property again
>>> monopoly.boardwalk
600
@@ -186,10 +186,6 @@ Now use it:
3
>>> monopoly.dice
3
>>> # This cache clearing does not always work, see note below.
>>> del monopoly['dice']
>>> monopoly.dice
6
**Note:** The ``ttl`` tools do not reliably allow the clearing of the cache. This
is why they are broken out into separate tools. See https://github.com/pydanny/cached-property/issues/16.

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.1
Name: cached-property
Version: 1.1.0
Summary: A cached-property for decorating methods in classes.
Version: 1.2.0
Summary: A decorator for caching properties in classes.
Home-page: https://github.com/pydanny/cached-property
Author: Daniel Greenfeld
Author-email: pydanny@gmail.com
@@ -20,7 +20,7 @@ Description: ===============================
:target: https://pypi.python.org/pypi/cached-property
A cached-property for decorating methods in classes.
A decorator for caching properties in classes.
Why?
-----
@@ -104,7 +104,7 @@ Description: ===============================
>>> monopoly.boardwalk
550
>>> # invalidate the cache
>>> del monopoly.boardwalk
>>> del monopoly['boardwalk']
>>> # request the boardwalk property again
>>> monopoly.boardwalk
600
@@ -194,10 +194,6 @@ Description: ===============================
3
>>> monopoly.dice
3
>>> # This cache clearing does not always work, see note below.
>>> del monopoly['dice']
>>> monopoly.dice
6
**Note:** The ``ttl`` tools do not reliably allow the clearing of the cache. This
is why they are broken out into separate tools. See https://github.com/pydanny/cached-property/issues/16.
@@ -220,11 +216,19 @@ Description: ===============================
History
-------
1.2.0 (2015-04-28)
++++++++++++++++++
* Overall code and test refactoring, thanks to @gsakkis
* Allow the del statement for resetting cached properties with ttl instead of del obj._cache[attr], thanks to @gsakkis.
* Uncovered a bug in PyPy, https://bitbucket.org/pypy/pypy/issue/2033/attributeerror-object-attribute-is-read, thanks to @gsakkis
* Fixed threaded_cached_property_with_ttl to actually be thread-safe, thanks to @gsakkis
1.1.0 (2015-04-04)
++++++++++++++++++
* Regression: As the cache was not always clearing, we've broken out the time to expire feature to its own set of specific tools.
* Fixed typo in README, thanks to @zoidbergwill.
* Regression: As the cache was not always clearing, we've broken out the time to expire feature to its own set of specific tools, thanks to @pydanny
* Fixed typo in README, thanks to @zoidbergwill
1.0.0 (2015-02-13)
++++++++++++++++++

View File

@@ -13,6 +13,4 @@ cached_property.egg-info/dependency_links.txt
cached_property.egg-info/not-zip-safe
cached_property.egg-info/top_level.txt
tests/__init__.py
tests/test_cached_property.py
tests/test_cached_property_ttl.py
tests/test_threaded_cached_property.py
tests/test_cached_property.py

View File

@@ -2,7 +2,7 @@
__author__ = 'Daniel Greenfeld'
__email__ = 'pydanny@gmail.com'
__version__ = '1.1.0'
__version__ = '1.2.0'
__license__ = 'BSD'
from time import time
@@ -10,11 +10,11 @@ import threading
class cached_property(object):
""" A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76
"""
"""
A property that is only computed once per instance and then replaces itself
with an ordinary attribute. Deleting the attribute resets the property.
Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76
""" # noqa
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
@@ -27,49 +27,50 @@ class cached_property(object):
return value
class threaded_cached_property(cached_property):
""" A cached_property version for use in environments where multiple
threads might concurrently try to access the property.
"""
class threaded_cached_property(object):
"""
A cached_property version for use in environments where multiple threads
might concurrently try to access the property.
"""
def __init__(self, func):
super(threaded_cached_property, self).__init__(func)
self.__doc__ = getattr(func, '__doc__')
self.func = func
self.lock = threading.RLock()
def __get__(self, obj, cls):
with self.lock:
# Double check if the value was computed before the lock was
# acquired.
prop_name = self.func.__name__
if prop_name in obj.__dict__:
return obj.__dict__[prop_name]
if obj is None:
return self
# If not, do the calculation and release the lock.
return super(threaded_cached_property, self).__get__(obj, cls)
obj_dict = obj.__dict__
name = self.func.__name__
with self.lock:
try:
# check if the value was computed before the lock was acquired
return obj_dict[name]
except KeyError:
# if not, do the calculation and release the lock
return obj_dict.setdefault(name, self.func(obj))
class cached_property_with_ttl(object):
""" A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Setting the ttl to a number expresses
how long the property will last before being timed out.
""" # noqa
"""
A property that is only computed once per instance and then replaces itself
with an ordinary attribute. Setting the ttl to a number expresses how long
the property will last before being timed out.
"""
def __init__(self, ttl=None):
ttl_or_func = ttl
self.ttl = None
if callable(ttl_or_func):
self.prepare_func(ttl_or_func)
if callable(ttl):
func = ttl
ttl = None
else:
self.ttl = ttl_or_func
func = None
self.ttl = ttl
self._prepare_func(func)
def prepare_func(self, func, doc=None):
'''Prepare to cache object method.'''
self.func = func
self.__doc__ = doc or func.__doc__
self.__name__ = func.__name__
self.__module__ = func.__module__
def __call__(self, func, doc=None):
self.prepare_func(func, doc)
def __call__(self, func):
self._prepare_func(func)
return self
def __get__(self, obj, cls):
@@ -77,45 +78,54 @@ class cached_property_with_ttl(object):
return self
now = time()
obj_dict = obj.__dict__
name = self.__name__
try:
value, last_update = obj._cache[self.__name__]
if self.ttl and self.ttl > 0 and now - last_update > self.ttl:
raise AttributeError
except (KeyError, AttributeError):
value = self.func(obj)
try:
cache = obj._cache
except AttributeError:
cache = obj._cache = {}
cache[self.__name__] = (value, now)
value, last_updated = obj_dict[name]
except KeyError:
pass
else:
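# a falsy ttl (None or 0) means the cached value never expires; otherwise expire once its age exceeds ttl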
ttl_expired = self.ttl and self.ttl < now - last_updated
if not ttl_expired:
return value
value = self.func(obj)
obj_dict[name] = (value, now)
return value
def __delete__(self, obj):
obj.__dict__.pop(self.__name__, None)
def __set__(self, obj, value):
obj.__dict__[self.__name__] = (value, time())
def _prepare_func(self, func):
self.func = func
if func:
self.__doc__ = func.__doc__
self.__name__ = func.__name__
self.__module__ = func.__module__
# Aliases to make cached_property_with_ttl easier to use
cached_property_ttl = cached_property_with_ttl
timed_cached_property = cached_property_with_ttl
class threaded_cached_property_with_ttl(cached_property_with_ttl):
""" A cached_property version for use in environments where multiple
threads might concurrently try to access the property.
"""
"""
A cached_property version for use in environments where multiple threads
might concurrently try to access the property.
"""
def __init__(self, ttl=None):
super(threaded_cached_property_with_ttl, self).__init__(ttl)
self.lock = threading.RLock()
def __get__(self, obj, cls):
with self.lock:
# Double check if the value was computed before the lock was
# acquired.
prop_name = self.__name__
if hasattr(obj, '_cache') and prop_name in obj._cache:
return obj._cache[prop_name][0]
# If not, do the calculation and release the lock.
return super(threaded_cached_property_with_ttl, self).__get__(obj, cls)
return super(threaded_cached_property_with_ttl, self).__get__(obj,
cls)
# Alias to make threaded_cached_property_with_ttl easier to use
threaded_cached_property_ttl = threaded_cached_property_with_ttl
timed_threaded_cached_property = threaded_cached_property_with_ttl
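For reference, a minimal usage sketch of the thread-safe TTL variant defined above; the ``Report`` class and its ``summary`` property are hypothetical:
>>> import threading
>>> from cached_property import threaded_cached_property_with_ttl
>>> class Report(object):
...     def __init__(self):
...         self.runs = 0
...     @threaded_cached_property_with_ttl(ttl=60)  # recompute at most once per minute
...     def summary(self):
...         self.runs += 1
...         return 'expensive result #%d' % self.runs
>>> report = Report()
>>> threads = [threading.Thread(target=lambda: report.summary) for _ in range(5)]
>>> for t in threads: t.start()
>>> for t in threads: t.join()
>>> report.summary  # only one thread performed the computation
'expensive result #1'
>>> report.runs
1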

View File

@@ -9,7 +9,7 @@ try:
except ImportError:
from distutils.core import setup
__version__ = '1.1.0'
__version__ = '1.2.0'
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
@@ -23,7 +23,7 @@ if sys.argv[-1] == 'publish':
setup(
name='cached-property',
version=__version__,
description='A cached-property for decorating methods in classes.',
description='A decorator for caching properties in classes.',
long_description=readme + '\n\n' + history,
author='Daniel Greenfeld',
author_email='pydanny@gmail.com',

2
tests/__init__.py Executable file → Normal file
View File

@@ -1 +1 @@
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-

301
tests/test_cached_property.py Executable file → Normal file
View File

@@ -1,132 +1,241 @@
# -*- coding: utf-8 -*-
"""
tests.py
----------------------------------
Tests for `cached-property` module.
"""
from time import sleep
from threading import Lock, Thread
import time
import unittest
from threading import Lock, Thread
from freezegun import freeze_time
from cached_property import cached_property
import cached_property
def CheckFactory(cached_property_decorator, threadsafe=False):
"""
Create dynamically a Check class whose add_cached method is decorated by
the cached_property_decorator.
"""
class Check(object):
def __init__(self):
self.control_total = 0
self.cached_total = 0
self.lock = Lock()
@property
def add_control(self):
self.control_total += 1
return self.control_total
@cached_property_decorator
def add_cached(self):
if threadsafe:
time.sleep(1)
# Need to guard this since += isn't atomic.
with self.lock:
self.cached_total += 1
else:
self.cached_total += 1
return self.cached_total
def run_threads(self, num_threads):
threads = []
for _ in range(num_threads):
thread = Thread(target=lambda: self.add_cached)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
return Check
class TestCachedProperty(unittest.TestCase):
"""Tests for cached_property"""
cached_property_factory = cached_property.cached_property
def assert_control(self, check, expected):
"""
Assert that both `add_control` and `control_total` equal `expected`
"""
self.assertEqual(check.add_control, expected)
self.assertEqual(check.control_total, expected)
def assert_cached(self, check, expected):
"""
Assert that both `add_cached` and `cached_total` equal `expected`
"""
self.assertEqual(check.add_cached, expected)
self.assertEqual(check.cached_total, expected)
def test_cached_property(self):
Check = CheckFactory(self.cached_property_factory)
check = Check()
class Check(object):
# The control shows that we can continue to add 1
self.assert_control(check, 1)
self.assert_control(check, 2)
def __init__(self):
self.total1 = 0
self.total2 = 0
# The cached version demonstrates how nothing is added after the first
self.assert_cached(check, 1)
self.assert_cached(check, 1)
@property
def add_control(self):
self.total1 += 1
return self.total1
# The cache does not expire
with freeze_time("9999-01-01"):
self.assert_cached(check, 1)
@cached_property
def add_cached(self):
self.total2 += 1
return self.total2
c = Check()
# The control shows that we can continue to add 1.
self.assertEqual(c.add_control, 1)
self.assertEqual(c.add_control, 2)
# The cached version demonstrates how nothing new is added
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
# It's customary for descriptors to return themselves if accessed
# through the class, rather than through an instance.
self.assertTrue(isinstance(Check.add_cached, cached_property))
# Typically descriptors return themselves if accessed through the class
# rather than through an instance.
self.assertTrue(isinstance(Check.add_cached,
self.cached_property_factory))
def test_reset_cached_property(self):
class Check(object):
def __init__(self):
self.total = 0
@cached_property
def add_cached(self):
self.total += 1
return self.total
c = Check()
Check = CheckFactory(self.cached_property_factory)
check = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
self.assert_cached(check, 1)
self.assert_cached(check, 1)
# Reset the cache.
del c.add_cached
self.assertEqual(c.add_cached, 2)
self.assertEqual(c.add_cached, 2)
# Clear the cache
del check.add_cached
# Value is cached again after the next access
self.assert_cached(check, 2)
self.assert_cached(check, 2)
def test_none_cached_property(self):
class Check(object):
def __init__(self):
self.total = None
self.cached_total = None
@cached_property
@self.cached_property_factory
def add_cached(self):
return self.total
return self.cached_total
c = Check()
self.assert_cached(Check(), None)
# Run standard cache assertion
self.assertEqual(c.add_cached, None)
class TestThreadingIssues(unittest.TestCase):
def test_set_cached_property(self):
Check = CheckFactory(self.cached_property_factory)
check = Check()
check.add_cached = 'foo'
self.assertEqual(check.add_cached, 'foo')
self.assertEqual(check.cached_total, 0)
def test_threads(self):
""" How well does the standard cached_property implementation work with threads?
Short answer: It doesn't! Use threaded_cached_property instead!
"""
class Check(object):
def __init__(self):
self.total = 0
self.lock = Lock()
@cached_property
def add_cached(self):
sleep(1)
# Need to guard this since += isn't atomic.
with self.lock:
self.total += 1
return self.total
c = Check()
threads = []
num_threads = 10
for x in range(num_threads):
thread = Thread(target=lambda: c.add_cached)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
# Threads means that caching is bypassed.
self.assertNotEqual(c.add_cached, 1)
Check = CheckFactory(self.cached_property_factory, threadsafe=True)
check = Check()
num_threads = 5
# cached_property_with_ttl is *not* thread-safe!
check.run_threads(num_threads)
# This assertion hinges on the fact the system executing the test can
# spawn and start running num_threads threads within the sleep period
# (defined in the Check class as 1 second). If num_threads were to be
# massively increased (try 10000), the actual value returned would be
# between 1 and num_threads, depending on thread scheduling and
# preemption.
self.assertEqual(c.add_cached, num_threads)
self.assert_cached(check, num_threads)
self.assert_cached(check, num_threads)
# The cache does not expire
with freeze_time("9999-01-01"):
check.run_threads(num_threads)
self.assert_cached(check, num_threads)
self.assert_cached(check, num_threads)
class TestThreadedCachedProperty(TestCachedProperty):
"""Tests for threaded_cached_property"""
cached_property_factory = cached_property.threaded_cached_property
def test_threads(self):
Check = CheckFactory(self.cached_property_factory, threadsafe=True)
check = Check()
num_threads = 5
# threaded_cached_property_with_ttl is thread-safe
check.run_threads(num_threads)
self.assert_cached(check, 1)
self.assert_cached(check, 1)
# The cache does not expire
with freeze_time("9999-01-01"):
check.run_threads(num_threads)
self.assert_cached(check, 1)
self.assert_cached(check, 1)
class TestCachedPropertyWithTTL(TestCachedProperty):
"""Tests for cached_property_with_ttl"""
cached_property_factory = cached_property.cached_property_with_ttl
def test_ttl_expiry(self):
Check = CheckFactory(self.cached_property_factory(ttl=100000))
check = Check()
# Run standard cache assertion
self.assert_cached(check, 1)
self.assert_cached(check, 1)
# The cache expires in the future
with freeze_time("9999-01-01"):
self.assert_cached(check, 2)
self.assert_cached(check, 2)
# Things are not reverted when we are back to the present
self.assert_cached(check, 2)
self.assert_cached(check, 2)
def test_threads_ttl_expiry(self):
Check = CheckFactory(self.cached_property_factory(ttl=100000),
threadsafe=True)
check = Check()
num_threads = 5
# Same as in test_threads
check.run_threads(num_threads)
self.assert_cached(check, num_threads)
self.assert_cached(check, num_threads)
# The cache expires in the future
with freeze_time("9999-01-01"):
check.run_threads(num_threads)
self.assert_cached(check, 2 * num_threads)
self.assert_cached(check, 2 * num_threads)
# Things are not reverted when we are back to the present
self.assert_cached(check, 2 * num_threads)
self.assert_cached(check, 2 * num_threads)
class TestThreadedCachedPropertyWithTTL(TestThreadedCachedProperty,
TestCachedPropertyWithTTL):
"""Tests for threaded_cached_property_with_ttl"""
cached_property_factory = cached_property.threaded_cached_property_with_ttl
def test_threads_ttl_expiry(self):
Check = CheckFactory(self.cached_property_factory(ttl=100000),
threadsafe=True)
check = Check()
num_threads = 5
# Same as in test_threads
check.run_threads(num_threads)
self.assert_cached(check, 1)
self.assert_cached(check, 1)
# The cache expires in the future
with freeze_time("9999-01-01"):
check.run_threads(num_threads)
self.assert_cached(check, 2)
self.assert_cached(check, 2)
# Things are not reverted when we are back to the present
self.assert_cached(check, 2)
self.assert_cached(check, 2)

View File

@@ -1,274 +0,0 @@
# -*- coding: utf-8 -*-
"""
test_threaded_cache_property.py
----------------------------------
Tests for `cached-property` module, cached_property_with_ttl.
Tests for `cached-property` module, threaded_cache_property_with_ttl.
"""
import unittest
from freezegun import freeze_time
from cached_property import (
cached_property_with_ttl,
threaded_cached_property_with_ttl
)
from time import sleep
from threading import Lock, Thread
import unittest
from freezegun import freeze_time
from cached_property import cached_property
class TestCachedProperty(unittest.TestCase):
def test_cached_property(self):
class Check(object):
def __init__(self):
self.total1 = 0
self.total2 = 0
@property
def add_control(self):
self.total1 += 1
return self.total1
@cached_property_with_ttl
def add_cached(self):
self.total2 += 1
return self.total2
c = Check()
# The control shows that we can continue to add 1.
self.assertEqual(c.add_control, 1)
self.assertEqual(c.add_control, 2)
# The cached version demonstrates how nothing new is added
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
# Cannot expire the cache.
with freeze_time("9999-01-01"):
self.assertEqual(c.add_cached, 1)
# It's customary for descriptors to return themselves if accessed
# through the class, rather than through an instance.
self.assertTrue(isinstance(Check.add_cached, cached_property_with_ttl))
def test_reset_cached_property(self):
class Check(object):
def __init__(self):
self.total = 0
@cached_property_with_ttl
def add_cached(self):
self.total += 1
return self.total
c = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
# Reset the cache.
del c._cache['add_cached']
self.assertEqual(c.add_cached, 2)
self.assertEqual(c.add_cached, 2)
def test_none_cached_property(self):
class Check(object):
def __init__(self):
self.total = None
@cached_property_with_ttl
def add_cached(self):
return self.total
c = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, None)
class TestThreadingIssues(unittest.TestCase):
def test_threads(self):
""" How well does the standard cached_property implementation work with threads?
Short answer: It doesn't! Use threaded_cached_property instead!
""" # noqa
class Check(object):
def __init__(self):
self.total = 0
self.lock = Lock()
@cached_property_with_ttl
def add_cached(self):
sleep(1)
# Need to guard this since += isn't atomic.
with self.lock:
self.total += 1
return self.total
c = Check()
threads = []
num_threads = 10
for x in range(num_threads):
thread = Thread(target=lambda: c.add_cached)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
# Threads means that caching is bypassed.
self.assertNotEqual(c.add_cached, 1)
# This assertion hinges on the fact the system executing the test can
# spawn and start running num_threads threads within the sleep period
# (defined in the Check class as 1 second). If num_threads were to be
# massively increased (try 10000), the actual value returned would be
# between 1 and num_threads, depending on thread scheduling and
# preemption.
self.assertEqual(c.add_cached, num_threads)
class TestCachedPropertyWithTTL(unittest.TestCase):
def test_ttl_expiry(self):
class Check(object):
def __init__(self):
self.total = 0
@cached_property_with_ttl(ttl=100000)
def add_cached(self):
self.total += 1
return self.total
c = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
# Expire the cache.
with freeze_time("9999-01-01"):
self.assertEqual(c.add_cached, 2)
self.assertEqual(c.add_cached, 2)
class TestCachedProperty(unittest.TestCase):
def test_cached_property(self):
class Check(object):
def __init__(self):
self.total1 = 0
self.total2 = 0
@property
def add_control(self):
self.total1 += 1
return self.total1
@threaded_cached_property_with_ttl
def add_cached(self):
self.total2 += 1
return self.total2
c = Check()
# The control shows that we can continue to add 1.
self.assertEqual(c.add_control, 1)
self.assertEqual(c.add_control, 2)
# The cached version demonstrates how nothing new is added
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
def test_reset_cached_property(self):
class Check(object):
def __init__(self):
self.total = 0
@threaded_cached_property_with_ttl
def add_cached(self):
self.total += 1
return self.total
c = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
# Reset the cache.
del c._cache['add_cached']
self.assertEqual(c.add_cached, 2)
self.assertEqual(c.add_cached, 2)
def test_none_cached_property(self):
class Check(object):
def __init__(self):
self.total = None
@threaded_cached_property_with_ttl
def add_cached(self):
return self.total
c = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, None)
class TestThreadingIssues(unittest.TestCase):
def test_threads(self):
""" How well does this implementation work with threads?"""
class Check(object):
def __init__(self):
self.total = 0
self.lock = Lock()
@threaded_cached_property_with_ttl
def add_cached(self):
sleep(1)
# Need to guard this since += isn't atomic.
with self.lock:
self.total += 1
return self.total
c = Check()
threads = []
for x in range(10):
thread = Thread(target=lambda: c.add_cached)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
self.assertEqual(c.add_cached, 1)

View File

@@ -1,115 +0,0 @@
# -*- coding: utf-8 -*-
"""
test_threaded_cache_property.py
----------------------------------
Tests for `cached-property` module, threaded_cache_property.
"""
from time import sleep
from threading import Thread, Lock
import unittest
from cached_property import threaded_cached_property
class TestCachedProperty(unittest.TestCase):
def test_cached_property(self):
class Check(object):
def __init__(self):
self.total1 = 0
self.total2 = 0
@property
def add_control(self):
self.total1 += 1
return self.total1
@threaded_cached_property
def add_cached(self):
self.total2 += 1
return self.total2
c = Check()
# The control shows that we can continue to add 1.
self.assertEqual(c.add_control, 1)
self.assertEqual(c.add_control, 2)
# The cached version demonstrates how nothing new is added
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
def test_reset_cached_property(self):
class Check(object):
def __init__(self):
self.total = 0
@threaded_cached_property
def add_cached(self):
self.total += 1
return self.total
c = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, 1)
self.assertEqual(c.add_cached, 1)
# Reset the cache.
del c.add_cached
self.assertEqual(c.add_cached, 2)
self.assertEqual(c.add_cached, 2)
def test_none_cached_property(self):
class Check(object):
def __init__(self):
self.total = None
@threaded_cached_property
def add_cached(self):
return self.total
c = Check()
# Run standard cache assertion
self.assertEqual(c.add_cached, None)
class TestThreadingIssues(unittest.TestCase):
def test_threads(self):
""" How well does this implementation work with threads?"""
class Check(object):
def __init__(self):
self.total = 0
self.lock = Lock()
@threaded_cached_property
def add_cached(self):
sleep(1)
# Need to guard this since += isn't atomic.
with self.lock:
self.total += 1
return self.total
c = Check()
threads = []
for x in range(10):
thread = Thread(target=lambda: c.add_cached)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
self.assertEqual(c.add_cached, 1)