Browse Source

Merge branch 'feature/UpdateDiskcache' into develop

tags/release_0.25.1
JackDandy 5 years ago
parent
commit
bd22a6975b
  1. 2
      CHANGES.md
  2. 4
      lib/diskcache_py2/__init__.py
  3. 38
      lib/diskcache_py2/fanout.py
  4. 4
      lib/diskcache_py3/__init__.py
  5. 38
      lib/diskcache_py3/fanout.py

2
CHANGES.md

@@ -2,6 +2,8 @@
* Change abbreviate long titles under menu tab
* Update attr 20.2.0 (4f74fba) to 20.3.0 (f3762ba)
* Update diskcache_py3 5.0.1 (9670fbb) to 5.1.0 (40ce0de)
* Update diskcache_py2 4.1.0 (b0451e0) from 5.1.0 (40ce0de)
* Update Requests library 2.24.0 (2f70990) to 2.25.0 (03957eb)
* Update urllib3 1.25.11 (00f1769) to 1.26.1 (7675532)

4
lib/diskcache_py2/__init__.py

@@ -46,8 +46,8 @@ except Exception:  # pylint: disable=broad-except
pass
__title__ = 'diskcache'
__version__ = '5.0.3'
__build__ = 0x050003
__version__ = '5.1.0'
__build__ = 0x050100
__author__ = 'Grant Jenks'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016-2020 Grant Jenks'

38
lib/diskcache_py2/fanout.py

@@ -1,5 +1,6 @@
"Fanout cache automatically shards keys and values."
import contextlib as cl
import itertools as it
import operator
import os.path as op
@@ -69,9 +70,46 @@ class FanoutCache(object):
def __getattr__(self, name):
    """Proxy lookups for recognised setting names to the first shard.

    Only names present in DEFAULT_SETTINGS, plus 'timeout' and 'disk',
    may be forwarded; any other name trips the assertion below.
    """
    permitted = name in DEFAULT_SETTINGS or name in ('timeout', 'disk')
    assert permitted, 'cannot access {} in cache shard'.format(name)
    first_shard = self._shards[0]
    return getattr(first_shard, name)
@cl.contextmanager
def transact(self, retry=True):
    """Lock every cache shard for the span of a ``with`` block.

    While the locks are held no other write operation is permitted, so
    keep transactions short. Reads may still proceed concurrently, and
    transactions performed inside the block are atomic. Transactions may
    be nested within one thread but must never be shared between
    threads. Entry blocks, retrying each shard as necessary, until a
    transaction is held on every shard.

    >>> cache = FanoutCache()
    >>> with cache.transact():  # Atomically increment two keys.
    ...     _ = cache.incr('total', 123.4)
    ...     _ = cache.incr('count', 1)
    >>> with cache.transact():  # Atomically calculate average.
    ...     average = cache['total'] / cache['count']
    >>> average
    123.4

    :return: context manager for use in `with` statement

    """
    # NOTE(review): cl.ExitStack does not exist in the Python 2 stdlib
    # contextlib -- confirm this diskcache_py2 copy only runs where an
    # ExitStack-providing contextlib is importable.
    assert retry, 'retry must be True in FanoutCache'
    with cl.ExitStack() as stack:
        for one_shard in self._shards:
            stack.enter_context(one_shard.transact(retry=True))
        yield
def set(self, key, value, expire=None, read=False, tag=None, retry=False):
"""Set `key` and `value` item in cache.

4
lib/diskcache_py3/__init__.py

@@ -46,8 +46,8 @@ except Exception:  # pylint: disable=broad-except
pass
__title__ = 'diskcache'
__version__ = '5.0.3'
__build__ = 0x050003
__version__ = '5.1.0'
__build__ = 0x050100
__author__ = 'Grant Jenks'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016-2020 Grant Jenks'

38
lib/diskcache_py3/fanout.py

@@ -1,5 +1,6 @@
"Fanout cache automatically shards keys and values."
import contextlib as cl
import functools
import itertools as it
import operator
@@ -58,9 +59,46 @@ class FanoutCache(object):
def __getattr__(self, name):
    """Forward unknown attribute reads for allowed settings to shard zero.

    A name is forwarded only when it appears in DEFAULT_SETTINGS or is
    one of the explicitly safe names 'timeout' / 'disk'.
    """
    safe = {'timeout', 'disk'}
    allowed = name in DEFAULT_SETTINGS or name in safe
    assert allowed, 'cannot access {} in cache shard'.format(name)
    return getattr(self._shards[0], name)
@cl.contextmanager
def transact(self, retry=True):
    """Atomically lock all cache shards for the duration of a ``with`` block.

    No other write may proceed while the locks are held, so keep the
    body brief; read operations remain concurrent, and reads/writes in
    the block are atomic. Nesting within a single thread is supported;
    sharing a transaction between threads is not. Entry blocks,
    retrying per shard, until every shard transaction is acquired.

    >>> cache = FanoutCache()
    >>> with cache.transact():  # Atomically increment two keys.
    ...     _ = cache.incr('total', 123.4)
    ...     _ = cache.incr('count', 1)
    >>> with cache.transact():  # Atomically calculate average.
    ...     average = cache['total'] / cache['count']
    >>> average
    123.4

    :return: context manager for use in `with` statement

    """
    assert retry, 'retry must be True in FanoutCache'
    with cl.ExitStack() as stack:
        for one_shard in self._shards:
            stack.enter_context(one_shard.transact(retry=True))
        yield
def set(self, key, value, expire=None, read=False, tag=None, retry=False):
"""Set `key` and `value` item in cache.

Loading…
Cancel
Save