Merge branch 'feature/UpdateDiskcache' into dev

JackDandy 2023-04-27 12:25:37 +01:00
commit 3e5c399b05
7 changed files with 136 additions and 40 deletions

View file

@@ -1,5 +1,6 @@
### 3.29.0 (2023-xx-xx xx:xx:00 UTC)
* Update diskcache 5.4.0 (1cb1425) to 5.6.1 (4d30686)
* Update filelock 3.9.0 (ce3e891) to 3.11.0 (d3241b9)
* Update Msgpack 1.0.4 (b5acfd5) to 1.0.5 (0516c2c)
* Update Requests library 2.28.1 (ec553c2) to 2.29.0 (87d63de)

View file

@@ -61,8 +61,8 @@ except Exception: # pylint: disable=broad-except # pragma: no cover
pass
__title__ = 'diskcache'
__version__ = '5.4.0'
__build__ = 0x050400
__version__ = '5.6.1'
__build__ = 0x050601
__author__ = 'Grant Jenks'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016-2022 Grant Jenks'
__copyright__ = 'Copyright 2016-2023 Grant Jenks'
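The new __build__ value appears to pack the version one byte per component (5.6.1 -> 0x050601); a tiny check of that assumption, not part of the diff:

major, minor, micro = 5, 6, 1
assert (major << 16) | (minor << 8) | micro == 0x050601  # matches __build__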

View file

@@ -50,14 +50,14 @@ DEFAULT_SETTINGS = {
'statistics': 0, # False
'tag_index': 0, # False
'eviction_policy': 'least-recently-stored',
'size_limit': 2 ** 30, # 1gb
'size_limit': 2**30, # 1gb
'cull_limit': 10,
'sqlite_auto_vacuum': 1, # FULL
'sqlite_cache_size': 2 ** 13, # 8,192 pages
'sqlite_cache_size': 2**13, # 8,192 pages
'sqlite_journal_mode': 'wal',
'sqlite_mmap_size': 2 ** 26, # 64mb
'sqlite_mmap_size': 2**26, # 64mb
'sqlite_synchronous': 1, # NORMAL
'disk_min_file_size': 2 ** 15, # 32kb
'disk_min_file_size': 2**15, # 32kb
'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL,
}
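These defaults can be overridden per cache by passing any of DEFAULT_SETTINGS as keyword arguments; a small sketch outside the diff (path and values are illustrative):

import diskcache

cache = diskcache.Cache(
    '/tmp/example-cache',                    # hypothetical location
    size_limit=2**28,                        # 256mb instead of the 1gb default
    eviction_policy='least-recently-used',
)
cache.set('key', 'value')
cache.close()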
@@ -171,7 +171,7 @@ class Disk:
:return: corresponding Python key
"""
# pylint: disable=no-self-use,unidiomatic-typecheck
# pylint: disable=unidiomatic-typecheck
if raw:
return bytes(key) if type(key) is sqlite3.Binary else key
else:
@@ -213,7 +213,7 @@ class Disk:
size = op.getsize(full_path)
return size, MODE_TEXT, filename, None
elif read:
reader = ft.partial(value.read, 2 ** 22)
reader = ft.partial(value.read, 2**22)
filename, full_path = self.filename(key, value)
iterator = iter(reader, b'')
size = self._write(full_path, iterator, 'xb')
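The read branch above streams file-like values to disk in 2**22-byte chunks instead of pickling them; a minimal usage sketch (not part of the diff):

import io
import diskcache

cache = diskcache.Cache()                        # temporary directory
with io.BytesIO(b'x' * (2**23)) as src:
    cache.set('blob', src, read=True)            # value stored via the reader path
with cache.get('blob', read=True) as handle:     # returns an open file handle
    print(handle.read(16))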
@@ -229,7 +229,6 @@ class Disk:
return len(result), MODE_PICKLE, filename, None
def _write(self, full_path, iterator, mode, encoding=None):
# pylint: disable=no-self-use
full_dir, _ = op.split(full_path)
for count in range(1, 11):
@@ -265,7 +264,7 @@ class Disk:
:raises: IOError if the value cannot be read
"""
# pylint: disable=no-self-use,unidiomatic-typecheck,consider-using-with
# pylint: disable=unidiomatic-typecheck,consider-using-with
if mode == MODE_RAW:
return bytes(value) if type(value) is sqlite3.Binary else value
elif mode == MODE_BINARY:
@@ -435,6 +434,7 @@ class Cache:
if directory is None:
directory = tempfile.mkdtemp(prefix='diskcache-')
directory = str(directory)
directory = op.expanduser(directory)
directory = op.expandvars(directory)
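The added str(directory) call suggests pathlib.Path directories are now accepted; a small sketch under that assumption (the path is hypothetical):

from pathlib import Path
import diskcache

cache = diskcache.Cache(Path('/tmp') / 'example-cache')
cache['key'] = 'value'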
@@ -1380,6 +1380,7 @@ class Cache:
:raises Timeout: if database timeout occurs
"""
# pylint: disable=unnecessary-dunder-call
try:
return self.__delitem__(key, retry=retry)
except KeyError:

View file

@@ -44,14 +44,15 @@ class DjangoCache(BaseCache):
"""
return self._cache.cache(name)
def deque(self, name):
def deque(self, name, maxlen=None):
"""Return Deque with given `name` in subdirectory.
:param str name: subdirectory name for Deque
:param maxlen: max length (default None, no max)
:return: Deque with given name
"""
return self._cache.deque(name)
return self._cache.deque(name, maxlen=maxlen)
def index(self, name):
"""Return Index with given `name` in subdirectory.

View file

@@ -30,6 +30,7 @@ class FanoutCache:
"""
if directory is None:
directory = tempfile.mkdtemp(prefix='diskcache-')
directory = str(directory)
directory = op.expanduser(directory)
directory = op.expandvars(directory)
@@ -45,7 +46,7 @@
timeout=timeout,
disk=disk,
size_limit=size_limit,
**settings
**settings,
)
for num in range(shards)
)
@@ -573,9 +574,11 @@ class FanoutCache:
break
return result
def cache(self, name):
def cache(self, name, timeout=60, disk=None, **settings):
"""Return Cache with given `name` in subdirectory.
If disk is none (default), uses the fanout cache disk.
>>> fanout_cache = FanoutCache()
>>> cache = fanout_cache.cache('test')
>>> cache.set('abc', 123)
@@ -588,6 +591,9 @@
True
:param str name: subdirectory name for Cache
:param float timeout: SQLite connection timeout
:param disk: Disk type or subclass for serialization
:param settings: any of DEFAULT_SETTINGS
:return: Cache with given name
"""
@@ -598,11 +604,16 @@
except KeyError:
parts = name.split('/')
directory = op.join(self._directory, 'cache', *parts)
temp = Cache(directory=directory, disk=self._disk)
temp = Cache(
directory=directory,
timeout=timeout,
disk=self._disk if disk is None else Disk,
**settings,
)
_caches[name] = temp
return temp
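A short sketch of the new pass-through parameters on FanoutCache.cache(); the subdirectory name and values are illustrative:

import diskcache

fanout = diskcache.FanoutCache()
images = fanout.cache('images', timeout=30, size_limit=2**27)
images.set('logo', b'...')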
def deque(self, name):
def deque(self, name, maxlen=None):
"""Return Deque with given `name` in subdirectory.
>>> cache = FanoutCache()
@@ -616,6 +627,7 @@
1
:param str name: subdirectory name for Deque
:param maxlen: max length (default None, no max)
:return: Deque with given name
"""
@@ -626,8 +638,12 @@
except KeyError:
parts = name.split('/')
directory = op.join(self._directory, 'deque', *parts)
cache = Cache(directory=directory, disk=self._disk)
deque = Deque.fromcache(cache)
cache = Cache(
directory=directory,
disk=self._disk,
eviction_policy='none',
)
deque = Deque.fromcache(cache, maxlen=maxlen)
_deques[name] = deque
return deque
@@ -658,7 +674,11 @@
except KeyError:
parts = name.split('/')
directory = op.join(self._directory, 'index', *parts)
cache = Cache(directory=directory, disk=self._disk)
cache = Cache(
directory=directory,
disk=self._disk,
eviction_policy='none',
)
index = Index.fromcache(cache)
_indexes[name] = index
return index

View file

@@ -76,7 +76,7 @@ class Deque(Sequence):
"""
def __init__(self, iterable=(), directory=None):
def __init__(self, iterable=(), directory=None, maxlen=None):
"""Initialize deque instance.
If directory is None then temporary directory created. The directory
@@ -87,10 +87,11 @@ class Deque(Sequence):
"""
self._cache = Cache(directory, eviction_policy='none')
self.extend(iterable)
self._maxlen = float('inf') if maxlen is None else maxlen
self._extend(iterable)
@classmethod
def fromcache(cls, cache, iterable=()):
def fromcache(cls, cache, iterable=(), maxlen=None):
"""Initialize deque using `cache`.
>>> cache = Cache()
@@ -112,7 +113,8 @@
# pylint: disable=no-member,protected-access
self = cls.__new__(cls)
self._cache = cache
self.extend(iterable)
self._maxlen = float('inf') if maxlen is None else maxlen
self._extend(iterable)
return self
@property
@@ -125,6 +127,31 @@
"""Directory path where deque is stored."""
return self._cache.directory
@property
def maxlen(self):
"""Max length of the deque."""
return self._maxlen
@maxlen.setter
def maxlen(self, value):
"""Set max length of the deque.
Pops items from left while length greater than max.
>>> deque = Deque()
>>> deque.extendleft('abcde')
>>> deque.maxlen = 3
>>> list(deque)
['c', 'd', 'e']
:param value: max length
"""
self._maxlen = value
with self._cache.transact(retry=True):
while len(self._cache) > self._maxlen:
self._popleft()
def _index(self, index, func):
len_self = len(self)
@@ -245,7 +272,7 @@
:return: deque with added items
"""
self.extend(iterable)
self._extend(iterable)
return self
def __iter__(self):
@@ -293,10 +320,11 @@
pass
def __getstate__(self):
return self.directory
return self.directory, self.maxlen
def __setstate__(self, state):
self.__init__(directory=state)
directory, maxlen = state
self.__init__(directory=directory, maxlen=maxlen)
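The two-tuple state above means pickling now round-trips the max length along with the directory; a minimal sketch:

import pickle
from diskcache import Deque

original = Deque('abc', maxlen=5)
clone = pickle.loads(pickle.dumps(original))
assert clone.maxlen == original.maxlen
assert clone.directory == original.directory     # same backing directory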
def append(self, value):
"""Add `value` to back of deque.
@@ -311,7 +339,12 @@
:param value: value to add to back of deque
"""
self._cache.push(value, retry=True)
with self._cache.transact(retry=True):
self._cache.push(value, retry=True)
if len(self._cache) > self._maxlen:
self._popleft()
_append = append
def appendleft(self, value):
"""Add `value` to front of deque.
@@ -326,7 +359,12 @@
:param value: value to add to front of deque
"""
self._cache.push(value, side='front', retry=True)
with self._cache.transact(retry=True):
self._cache.push(value, side='front', retry=True)
if len(self._cache) > self._maxlen:
self._pop()
_appendleft = appendleft
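A behaviour sketch implied by the transact blocks above: appending past maxlen discards from the opposite end, mirroring collections.deque:

from diskcache import Deque

d = Deque('abc', maxlen=3)
d.append('d')          # trims from the left
assert list(d) == ['b', 'c', 'd']
d.appendleft('a')      # trims from the right
assert list(d) == ['a', 'b', 'c']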
def clear(self):
"""Remove all elements from deque.
@@ -341,6 +379,13 @@
"""
self._cache.clear(retry=True)
_clear = clear
def copy(self):
"""Copy deque with same directory and max length."""
TypeSelf = type(self)
return TypeSelf(directory=self.directory, maxlen=self.maxlen)
def count(self, value):
"""Return number of occurrences of `value` in deque.
@@ -366,7 +411,9 @@
"""
for value in iterable:
self.append(value)
self._append(value)
_extend = extend
def extendleft(self, iterable):
"""Extend front side of deque with value from `iterable`.
@@ -380,7 +427,7 @@
"""
for value in iterable:
self.appendleft(value)
self._appendleft(value)
def peek(self):
"""Peek at value at back of deque.
@@ -460,6 +507,8 @@
raise IndexError('pop from an empty deque')
return value
_pop = pop
def popleft(self):
"""Remove and return value at front of deque.
@@ -484,6 +533,8 @@
raise IndexError('pop from an empty deque')
return value
_popleft = popleft
def remove(self, value):
"""Remove first occurrence of `value` in deque.
@@ -531,15 +582,17 @@
['c', 'b', 'a']
"""
# pylint: disable=protected-access
# GrantJ 2019-03-22 Consider using an algorithm that swaps the values
# at two keys. Like self._cache.swap(key1, key2, retry=True) The swap
# method would exchange the values at two given keys. Then, using a
# forward iterator and a reverse iterator, the reversis method could
# forward iterator and a reverse iterator, the reverse method could
# avoid making copies of the values.
temp = Deque(iterable=reversed(self))
self.clear()
self.extend(temp)
self._clear()
self._extend(temp)
directory = temp.directory
temp._cache.close()
del temp
rmtree(directory)
@@ -574,22 +627,22 @@
for _ in range(steps):
try:
value = self.pop()
value = self._pop()
except IndexError:
return
else:
self.appendleft(value)
self._appendleft(value)
else:
steps *= -1
steps %= len_self
for _ in range(steps):
try:
value = self.popleft()
value = self._popleft()
except IndexError:
return
else:
self.append(value)
self._append(value)
__hash__ = None # type: ignore
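rotate() now routes through the internal _pop/_append aliases, but its semantics still mirror collections.deque; a quick sketch:

from diskcache import Deque

d = Deque(range(5))
d.rotate(2)
assert list(d) == [3, 4, 0, 1, 2]
d.rotate(-2)
assert list(d) == [0, 1, 2, 3, 4]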
@@ -668,7 +721,9 @@
args = args[1:]
directory = None
self._cache = Cache(directory, eviction_policy='none')
self.update(*args, **kwargs)
self._update(*args, **kwargs)
_update = MutableMapping.update
@classmethod
def fromcache(cls, cache, *args, **kwargs):
@@ -694,7 +749,7 @@
# pylint: disable=no-member,protected-access
self = cls.__new__(cls)
self._cache = cache
self.update(*args, **kwargs)
self._update(*args, **kwargs)
return self
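The _update alias keeps __init__ and fromcache from calling a subclass's overridden update(); a sketch of the difference (LoggingIndex is hypothetical):

from diskcache import Index

class LoggingIndex(Index):
    def update(self, *args, **kwargs):
        print('update called')
        super().update(*args, **kwargs)

idx = LoggingIndex([('a', 1)])   # seeded via _update, so nothing is printed
idx.update(b=2)                  # callers still hit the override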
@property

View file

@@ -17,6 +17,9 @@ class Averager:
Sometimes known as "online statistics," the running average maintains the
total and count. The average can then be calculated at any time.
Assumes the key will not be evicted. Set the eviction policy to 'none' on
the cache to guarantee the key is not evicted.
>>> import diskcache
>>> cache = diskcache.FanoutCache()
>>> ave = Averager(cache, 'latency')
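A short sketch of the documented recommendation, creating the backing cache with eviction_policy='none' (values are illustrative):

import diskcache

cache = diskcache.FanoutCache(eviction_policy='none')
ave = diskcache.Averager(cache, 'latency')
ave.add(0.080)
ave.add(0.120)
print(ave.get())   # 0.1, per the class doctest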
@@ -65,6 +68,9 @@
class Lock:
"""Recipe for cross-process and cross-thread lock.
Assumes the key will not be evicted. Set the eviction policy to 'none' on
the cache to guarantee the key is not evicted.
>>> import diskcache
>>> cache = diskcache.Cache()
>>> lock = Lock(cache, 'report-123')
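Likewise for Lock, a minimal sketch using a non-evicting cache (the key name follows the doctest above):

import diskcache

cache = diskcache.Cache(eviction_policy='none')
with diskcache.Lock(cache, 'report-123', expire=30):
    pass   # guarded critical section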
@@ -113,6 +119,9 @@
class RLock:
"""Recipe for cross-process and cross-thread re-entrant lock.
Assumes the key will not be evicted. Set the eviction policy to 'none' on
the cache to guarantee the key is not evicted.
>>> import diskcache
>>> cache = diskcache.Cache()
>>> rlock = RLock(cache, 'user-123')
@@ -181,6 +190,9 @@
class BoundedSemaphore:
"""Recipe for cross-process and cross-thread bounded semaphore.
Assumes the key will not be evicted. Set the eviction policy to 'none' on
the cache to guarantee the key is not evicted.
>>> import diskcache
>>> cache = diskcache.Cache()
>>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2)
@@ -251,6 +263,9 @@
):
"""Decorator to throttle calls to function.
Assumes keys will not be evicted. Set the eviction policy to 'none' on the
cache to guarantee the keys are not evicted.
>>> import diskcache, time
>>> cache = diskcache.Cache()
>>> count = 0
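A hedged usage sketch of the throttle decorator with a non-evicting cache; the count/seconds values are illustrative:

import diskcache

cache = diskcache.Cache(eviction_policy='none')

@diskcache.throttle(cache, count=2, seconds=1)
def ping():
    """At most two calls per second; extra calls sleep until allowed."""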
@@ -305,6 +320,9 @@
Supports different kinds of locks: Lock, RLock, BoundedSemaphore.
Assumes keys will not be evicted. Set the eviction policy to 'none' on the
cache to guarantee the keys are not evicted.
>>> import diskcache, time
>>> cache = diskcache.Cache()
>>> @barrier(cache, Lock)