Changed to a new cache handler that stores its cached data in SQLite db files for persistence

echel0n 2014-03-27 05:06:40 -07:00
parent 6a7906eeb1
commit ff1e6e6dbc
16 changed files with 989 additions and 51 deletions
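For orientation, a minimal sketch (not part of the diff) of the usage pattern this commit switches to: install requests_cache once and plain requests calls are persisted to a SQLite file. The cache name below is illustrative.

import requests
import requests_cache  # assumes requests_cache is importable as a package

# With the default 'sqlite' backend this creates example_cache.sqlite on disk
# and transparently caches matching GET responses there.
requests_cache.install_cache('example_cache')

r1 = requests.get('http://httpbin.org/get')  # real network request, stored in sqlite
r2 = requests.get('http://httpbin.org/get')  # served from example_cache.sqlite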

lib/requests_cache/__init__.py Normal file
View file

@@ -0,0 +1,31 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache
~~~~~~~~~~~~~~
Transparent cache for ``requests`` library with persistence and async support
Just write::

    import requests_cache
    requests_cache.install_cache()

And requests to resources will be cached for faster repeated access::

    import requests
    for i in range(10):
        r = requests.get('http://httpbin.org/delay/5')
    # will take approximately 5 seconds instead of 50
:copyright: (c) 2012 by Roman Haritonov.
:license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext'
__version__ = '0.4.4'
from .core import(
CachedSession, install_cache, uninstall_cache,
disabled, enabled, get_cache, clear, configure
)

lib/requests_cache/backends/__init__.py Normal file
View file

@@ -0,0 +1,50 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends
~~~~~~~~~~~~~~~~~~~~~~~
Classes and functions for cache persistence
"""
from .base import BaseCache
registry = {
'memory': BaseCache,
}
try:
    # Heroku doesn't allow the SQLite3 module to be installed
    from .sqlite import DbCache
    registry['sqlite'] = DbCache
except ImportError:
    DbCache = None

try:
    from .mongo import MongoCache
    registry['mongo'] = registry['mongodb'] = MongoCache
except ImportError:
    MongoCache = None

try:
    from .redis import RedisCache
    registry['redis'] = RedisCache
except ImportError:
    RedisCache = None


def create_backend(backend_name, cache_name, options):
    if backend_name is None:
        backend_name = _get_default_backend_name()
    try:
        return registry[backend_name](cache_name, **options)
    except KeyError:
        raise ValueError('Unsupported backend "%s" try one of: %s' %
                         (backend_name, ', '.join(registry.keys())))


def _get_default_backend_name():
    if 'sqlite' in registry:
        return 'sqlite'
    return 'memory'
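A short usage sketch (illustrative, not part of the file above) of the backend registry and the create_backend factory defined here:

from requests_cache import backends

# Explicit backend choice; the options dict is passed through to the backend class.
sqlite_cache = backends.create_backend('sqlite', 'example_cache', {'fast_save': True})

# backend_name=None falls back to 'sqlite' when it imported successfully, else 'memory'.
default_cache = backends.create_backend(None, 'example_cache', {})

print(sorted(backends.registry))  # e.g. ['memory', 'mongo', ...] depending on installed modules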

lib/requests_cache/backends/base.py Normal file
View file

@@ -0,0 +1,171 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.base
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Contains BaseCache class which can be used as in-memory cache backend or
extended to support persistence.
"""
from datetime import datetime
import hashlib
from copy import copy
import requests
from ..compat import is_py2

class BaseCache(object):
    """ Base class for cache implementations, can be used as in-memory cache.

    To extend it you can provide dictionary-like objects for
    :attr:`keys_map` and :attr:`responses` or override public methods.
    """
    def __init__(self, *args, **kwargs):
        #: `key` -> `key_in_responses` mapping
        self.keys_map = {}
        #: `key_in_cache` -> `response` mapping
        self.responses = {}

    def save_response(self, key, response):
        """ Save response to cache

        :param key: key for this response
        :param response: response to save

        .. note:: Response is reduced before saving (with :meth:`reduce_response`)
                  to make it picklable
        """
        self.responses[key] = self.reduce_response(response), datetime.utcnow()

    def add_key_mapping(self, new_key, key_to_response):
        """
        Adds mapping of `new_key` to `key_to_response` to make it possible to
        associate many keys with a single response

        :param new_key: new key (e.g. url from redirect)
        :param key_to_response: key which can be found in :attr:`responses`
        """
        self.keys_map[new_key] = key_to_response

    def get_response_and_time(self, key, default=(None, None)):
        """ Retrieves response and timestamp for `key` if it's stored in cache,
        otherwise returns `default`

        :param key: key of resource
        :param default: return this if `key` not found in cache
        :returns: tuple (response, datetime)

        .. note:: Response is restored after unpickling with :meth:`restore_response`
        """
        try:
            if key not in self.responses:
                key = self.keys_map[key]
            response, timestamp = self.responses[key]
        except KeyError:
            return default
        return self.restore_response(response), timestamp

    def delete(self, key):
        """ Delete `key` from cache. Also deletes all responses from response history
        """
        try:
            if key in self.responses:
                response, _ = self.responses[key]
                del self.responses[key]
            else:
                response, _ = self.responses[self.keys_map[key]]
                del self.keys_map[key]
            for r in response.history:
                del self.keys_map[self.create_key(r.request)]
        except KeyError:
            pass

    def delete_url(self, url):
        """ Delete response associated with `url` from cache.
        Also deletes all responses from response history. Works only for GET requests
        """
        self.delete(self._url_to_key(url))

    def clear(self):
        """ Clear cache
        """
        self.responses.clear()
        self.keys_map.clear()

    def has_key(self, key):
        """ Returns `True` if cache has `key`, `False` otherwise
        """
        return key in self.responses or key in self.keys_map

    def has_url(self, url):
        """ Returns `True` if cache has `url`, `False` otherwise.
        Works only for GET request urls
        """
        return self.has_key(self._url_to_key(url))

    def _url_to_key(self, url):
        from requests import Request
        return self.create_key(Request('GET', url).prepare())

    _response_attrs = ['_content', 'url', 'status_code', 'cookies',
                       'headers', 'encoding', 'request', 'reason', 'raw']

    _raw_response_attrs = ['_original_response', 'decode_content', 'headers',
                           'reason', 'status', 'strict', 'version']

    def reduce_response(self, response):
        """ Reduce response object to make it compatible with ``pickle``
        """
        result = _Store()
        # prefetch
        response.content
        for field in self._response_attrs:
            setattr(result, field, self._picklable_field(response, field))
        result.history = tuple(self.reduce_response(r) for r in response.history)
        return result

    def _picklable_field(self, response, name):
        value = getattr(response, name)
        if name == 'request':
            value = copy(value)
            value.hooks = []
        elif name == 'raw':
            result = _Store()
            for field in self._raw_response_attrs:
                setattr(result, field, getattr(value, field, None))
            value = result
        return value

    def restore_response(self, response):
        """ Restore response object after unpickling
        """
        result = requests.Response()
        for field in self._response_attrs:
            setattr(result, field, getattr(response, field, None))
        result.history = tuple(self.restore_response(r) for r in response.history)
        return result

    def create_key(self, request):
        key = hashlib.sha256()
        key.update(_to_bytes(request.method.upper()))
        key.update(_to_bytes(request.url))
        if request.body:
            key.update(_to_bytes(request.body))
        return key.hexdigest()

    def __str__(self):
        return 'keys: %s\nresponses: %s' % (self.keys_map, self.responses)


# used for saving response attributes
class _Store(object):
    pass


def _to_bytes(s, encoding='utf-8'):
    if is_py2 or isinstance(s, bytes):
        return s
    return bytes(s, encoding)
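A brief sketch (illustrative) of BaseCache used as the in-memory backend described in its docstring; it round-trips a response through reduce_response/restore_response:

import requests
from requests_cache.backends.base import BaseCache

cache = BaseCache()
session = requests.Session()

prepared = requests.Request('GET', 'http://httpbin.org/get').prepare()
response = session.send(prepared)

key = cache.create_key(prepared)      # sha256 over method, url and body
cache.save_response(key, response)    # stored reduced (picklable) with a utcnow() timestamp
cached, stored_at = cache.get_response_and_time(key)

print(cached.status_code, stored_at)
print(cache.has_url('http://httpbin.org/get'))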

lib/requests_cache/backends/mongo.py Normal file
View file

@@ -0,0 +1,25 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.mongo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``mongo`` cache backend
"""
from .base import BaseCache
from .storage.mongodict import MongoDict, MongoPickleDict

class MongoCache(BaseCache):
    """ ``mongo`` cache backend.
    """
    def __init__(self, db_name='requests-cache', **options):
        """
        :param db_name: database name (default: ``'requests-cache'``)
        :param connection: (optional) ``pymongo.Connection``
        """
        super(MongoCache, self).__init__()
        self.responses = MongoPickleDict(db_name, 'responses',
                                         options.get('connection'))
        self.keys_map = MongoDict(db_name, 'urls', self.responses.connection)

lib/requests_cache/backends/redis.py Normal file
View file

@@ -0,0 +1,24 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.redis
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``redis`` cache backend
"""
from .base import BaseCache
from .storage.redisdict import RedisDict

class RedisCache(BaseCache):
    """ ``redis`` cache backend.
    """
    def __init__(self, namespace='requests-cache', **options):
        """
        :param namespace: redis namespace (default: ``'requests-cache'``)
        :param connection: (optional) ``redis.StrictRedis``
        """
        super(RedisCache, self).__init__()
        self.responses = RedisDict(namespace, 'responses',
                                   options.get('connection'))
        self.keys_map = RedisDict(namespace, 'urls', self.responses.connection)

lib/requests_cache/backends/sqlite.py Normal file
View file

@@ -0,0 +1,30 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.sqlite
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``sqlite3`` cache backend
"""
from .base import BaseCache
from .storage.dbdict import DbDict, DbPickleDict

class DbCache(BaseCache):
    """ sqlite cache backend.

    Reading is fast, saving is a bit slower. It can store a large amount of data
    with low memory usage.
    """
    def __init__(self, location='cache',
                 fast_save=False, extension='.sqlite', **options):
        """
        :param location: database filename prefix (default: ``'cache'``)
        :param fast_save: Speeds up cache saving up to 50 times, but with the possibility of data loss.
                          See :ref:`backends.DbDict <backends_dbdict>` for more info
        :param extension: extension for filename (default: ``'.sqlite'``)
        """
        super(DbCache, self).__init__()
        self.responses = DbPickleDict(location + extension, 'responses', fast_save=fast_save)
        self.keys_map = DbDict(location + extension, 'urls')
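An illustrative sketch (not in the diff) of what DbCache sets up on disk; the location is an example path:

from requests_cache.backends.sqlite import DbCache

cache = DbCache(location='/tmp/demo_cache', fast_save=True)
# /tmp/demo_cache.sqlite now holds two tables:
#   'responses' - pickled (reduced response, timestamp) values, via DbPickleDict
#   'urls'      - redirect key -> response key mapping, via DbDict
print(cache.responses.filename)   # '/tmp/demo_cache.sqlite'
print(cache.keys_map.table_name)  # 'urls'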

lib/requests_cache/backends/storage/dbdict.py Normal file
View file

@@ -0,0 +1,171 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.dbdict
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Dictionary-like objects for saving large data sets to `sqlite` database
"""
from collections import MutableMapping
import sqlite3 as sqlite
from contextlib import contextmanager
try:
    import threading
except ImportError:
    import dummy_threading as threading
try:
    import cPickle as pickle
except ImportError:
    import pickle

from requests_cache.compat import bytes


class DbDict(MutableMapping):
    """ DbDict - a dictionary-like object for saving large datasets to `sqlite` database

    It's possible to create multiple DbDict instances, which will be stored as separate
    tables in one database::

        d1 = DbDict('test', 'table1')
        d2 = DbDict('test', 'table2')
        d3 = DbDict('test', 'table3')

    All data will be stored in the ``test.sqlite`` database, in the
    corresponding tables: ``table1``, ``table2`` and ``table3``
    """

    def __init__(self, filename, table_name='data', fast_save=False, **options):
        """
        :param filename: filename for database (without extension)
        :param table_name: table name
        :param fast_save: If it's True, then sqlite will be configured with
                          `"PRAGMA synchronous = 0;" <http://www.sqlite.org/pragma.html#pragma_synchronous>`_
                          to speed up cache saving, but be careful: it's dangerous.
                          Tests showed that the insertion order of records can be wrong with this option.
        """
        self.filename = filename
        self.table_name = table_name
        self.fast_save = fast_save

        #: Transactions can be committed if this property is set to `True`
        self.can_commit = True

        self._bulk_commit = False
        self._pending_connection = None
        self._lock = threading.RLock()
        with self.connection() as con:
            con.execute("create table if not exists `%s` (key PRIMARY KEY, value)" % self.table_name)

    @contextmanager
    def connection(self, commit_on_success=False):
        with self._lock:
            if self._bulk_commit:
                if self._pending_connection is None:
                    self._pending_connection = sqlite.connect(self.filename)
                con = self._pending_connection
            else:
                con = sqlite.connect(self.filename)
            try:
                if self.fast_save:
                    con.execute("PRAGMA synchronous = 0;")
                yield con
                if commit_on_success and self.can_commit:
                    con.commit()
            finally:
                if not self._bulk_commit:
                    con.close()

    def commit(self, force=False):
        """
        Commits pending transaction if :attr:`can_commit` or `force` is `True`

        :param force: force commit, ignore :attr:`can_commit`
        """
        if force or self.can_commit:
            if self._pending_connection is not None:
                self._pending_connection.commit()

    @contextmanager
    def bulk_commit(self):
        """
        Context manager used to speed up insertion of a large number of records
        ::

            >>> d1 = DbDict('test')
            >>> with d1.bulk_commit():
            ...     for i in range(1000):
            ...         d1[i] = i * 2

        """
        self._bulk_commit = True
        self.can_commit = False
        try:
            yield
            self.commit(True)
        finally:
            self._bulk_commit = False
            self.can_commit = True
            self._pending_connection.close()
            self._pending_connection = None

    def __getitem__(self, key):
        with self.connection() as con:
            row = con.execute("select value from `%s` where key=?" %
                              self.table_name, (key,)).fetchone()
            if not row:
                raise KeyError
            return row[0]

    def __setitem__(self, key, item):
        with self.connection(True) as con:
            if con.execute("select key from `%s` where key=?" %
                           self.table_name, (key,)).fetchone():
                con.execute("update `%s` set value=? where key=?" %
                            self.table_name, (item, key))
            else:
                con.execute("insert into `%s` (key,value) values (?,?)" %
                            self.table_name, (key, item))

    def __delitem__(self, key):
        with self.connection(True) as con:
            if con.execute("select key from `%s` where key=?" %
                           self.table_name, (key,)).fetchone():
                con.execute("delete from `%s` where key=?" %
                            self.table_name, (key,))
            else:
                raise KeyError

    def __iter__(self):
        with self.connection() as con:
            for row in con.execute("select key from `%s`" %
                                   self.table_name):
                yield row[0]

    def __len__(self):
        with self.connection() as con:
            return con.execute("select count(key) from `%s`" %
                               self.table_name).fetchone()[0]

    def clear(self):
        with self.connection(True) as con:
            con.execute("drop table `%s`" % self.table_name)
            con.execute("create table `%s` (key PRIMARY KEY, value)" %
                        self.table_name)

    def __str__(self):
        return str(dict(self.items()))


class DbPickleDict(DbDict):
    """ Same as :class:`DbDict`, but pickles values before saving
    """
    def __setitem__(self, key, item):
        super(DbPickleDict, self).__setitem__(key,
                                              sqlite.Binary(pickle.dumps(item)))

    def __getitem__(self, key):
        return pickle.loads(bytes(super(DbPickleDict, self).__getitem__(key)))
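A small sketch (illustrative) of DbPickleDict, the class DbCache uses for its 'responses' table; values survive a round trip through pickle:

from requests_cache.backends.storage.dbdict import DbPickleDict

d = DbPickleDict('/tmp/demo_cache.sqlite', 'responses')
d['some-key'] = {'status': 200, 'note': 'anything picklable'}  # pickled, stored as a sqlite BLOB
print(d['some-key'])      # unpickled back into a dict
print(len(d), list(d))    # MutableMapping provides len(), iteration, etc.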

lib/requests_cache/backends/storage/mongodict.py Normal file
View file

@@ -0,0 +1,74 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.mongodict
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Dictionary-like objects for saving large data sets to ``mongodb`` database
"""
from collections import MutableMapping
try:
    import cPickle as pickle
except ImportError:
    import pickle

from pymongo import Connection


class MongoDict(MutableMapping):
    """ MongoDict - a dictionary-like interface for ``mongo`` database
    """
    def __init__(self, db_name,
                 collection_name='mongo_dict_data', connection=None):
        """
        :param db_name: database name (be careful with production databases)
        :param collection_name: collection name (default: mongo_dict_data)
        :param connection: ``pymongo.Connection`` instance. If it's ``None``
                           (default) new connection with default options will
                           be created
        """
        if connection is not None:
            self.connection = connection
        else:
            self.connection = Connection()
        self.db = self.connection[db_name]
        self.collection = self.db[collection_name]

    def __getitem__(self, key):
        result = self.collection.find_one({'_id': key})
        if result is None:
            raise KeyError
        return result['data']

    def __setitem__(self, key, item):
        self.collection.save({'_id': key, 'data': item})

    def __delitem__(self, key):
        spec = {'_id': key}
        if self.collection.find_one(spec, fields=['_id']):
            self.collection.remove(spec)
        else:
            raise KeyError

    def __len__(self):
        return self.collection.count()

    def __iter__(self):
        for d in self.collection.find(fields=['_id']):
            yield d['_id']

    def clear(self):
        self.collection.drop()

    def __str__(self):
        return str(dict(self.items()))


class MongoPickleDict(MongoDict):
    """ Same as :class:`MongoDict`, but pickles values before saving
    """
    def __setitem__(self, key, item):
        super(MongoPickleDict, self).__setitem__(key, pickle.dumps(item))

    def __getitem__(self, key):
        return pickle.loads(bytes(super(MongoPickleDict, self).__getitem__(key)))

lib/requests_cache/backends/storage/redisdict.py Normal file
View file

@@ -0,0 +1,68 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.redisdict
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Dictionary-like objects for saving large data sets to ``redis`` key-store
"""
from collections import MutableMapping
try:
    import cPickle as pickle
except ImportError:
    import pickle

from redis import StrictRedis as Redis


class RedisDict(MutableMapping):
    """ RedisDict - a dictionary-like interface for ``redis`` key-stores
    """
    def __init__(self, namespace, collection_name='redis_dict_data',
                 connection=None):
        """
        The actual key name on the redis server will be
        ``namespace``:``collection_name``

        In order to deal with how redis stores data/keys,
        everything, i.e. keys and data, must be pickled.

        :param namespace: namespace to use
        :param collection_name: name of the hash map stored in redis
                                (default: redis_dict_data)
        :param connection: ``redis.StrictRedis`` instance.
                           If it's ``None`` (default), a new connection with
                           default options will be created
        """
        if connection is not None:
            self.connection = connection
        else:
            self.connection = Redis()
        self._self_key = ':'.join([namespace, collection_name])

    def __getitem__(self, key):
        result = self.connection.hget(self._self_key, pickle.dumps(key))
        if result is None:
            raise KeyError
        return pickle.loads(bytes(result))

    def __setitem__(self, key, item):
        self.connection.hset(self._self_key, pickle.dumps(key),
                             pickle.dumps(item))

    def __delitem__(self, key):
        if not self.connection.hdel(self._self_key, pickle.dumps(key)):
            raise KeyError

    def __len__(self):
        return self.connection.hlen(self._self_key)

    def __iter__(self):
        for v in self.connection.hkeys(self._self_key):
            yield pickle.loads(bytes(v))

    def clear(self):
        self.connection.delete(self._self_key)

    def __str__(self):
        return str(dict(self.items()))
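A hedged sketch of RedisDict (requires a reachable redis server; not part of the commit): both keys and values are pickled and kept in a single redis hash named namespace:collection_name.

from requests_cache.backends.storage.redisdict import RedisDict

d = RedisDict('requests-cache', 'demo')  # hash key on the server: 'requests-cache:demo'
d['answer'] = 42                         # key and value are pickled before HSET
print(d['answer'])                       # 42, unpickled on the way back
del d['answer']                          # HDEL; raises KeyError if the field is missing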

lib/requests_cache/compat.py Normal file
View file

@@ -0,0 +1,103 @@
# -*- coding: utf-8 -*-
# taken from requests library: https://github.com/kennethreitz/requests
"""
pythoncompat
"""
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)
#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)
#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)
#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)
#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)
#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)
#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)
#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)
#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
# ---------
# Platforms
# ---------
# Syntax sugar.
_ver = sys.version.lower()
is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)
# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))
# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()
# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
# ---------
# Specifics
# ---------
if is_py2:
    from urllib import quote, unquote, urlencode
    from urlparse import urlparse, urlunparse, urljoin, urlsplit
    from urllib2 import parse_http_list
    import cookielib
    from StringIO import StringIO
    bytes = str
    str = unicode
    basestring = basestring

elif is_py3:
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote
    from urllib.request import parse_http_list
    from http import cookiejar as cookielib
    from http.cookies import SimpleCookie
    from io import StringIO
    str = str
    bytes = bytes
    basestring = (str, bytes)

lib/requests_cache/core.py Normal file (227 lines added)
View file

@@ -0,0 +1,227 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.core
~~~~~~~~~~~~~~~~~~~
Core functions for configuring cache and monkey patching ``requests``
"""
from contextlib import contextmanager
from datetime import datetime, timedelta
import requests
from requests import Session as OriginalSession
from requests.hooks import dispatch_hook
from requests_cache import backends
from requests_cache.compat import str, basestring
try:
    ver = tuple(map(int, requests.__version__.split(".")))
except ValueError:
    pass
else:
    # We don't need to dispatch hook in Requests <= 1.1.0
    if ver < (1, 2, 0):
        dispatch_hook = lambda key, hooks, hook_data, *a, **kw: hook_data
    del ver


class CachedSession(OriginalSession):
    """ Requests ``Sessions`` with caching support.
    """
    def __init__(self, cache_name='cache', backend=None, expire_after=None,
                 allowable_codes=(200,), allowable_methods=('GET',),
                 **backend_options):
        """
        :param cache_name: for ``sqlite`` backend: cache file will start with this prefix,
                           e.g. ``cache.sqlite``

                           for ``mongodb``: it's used as database name

                           for ``redis``: it's used as the namespace. This means all keys
                           are prefixed with ``'cache_name:'``
        :param backend: cache backend name e.g. ``'sqlite'``, ``'mongodb'``, ``'redis'``, ``'memory'``.
                        (see :ref:`persistence`). Or instance of backend implementation.
                        Default value is ``None``, which means use ``'sqlite'`` if available,
                        otherwise fallback to ``'memory'``.
        :param expire_after: number of seconds after which a cached item expires,
                             or `None` (default) to ignore expiration
        :type expire_after: float
        :param allowable_codes: limit caching to responses with these status codes (default: 200)
        :type allowable_codes: tuple
        :param allowable_methods: cache only requests using these methods (default: 'GET')
        :type allowable_methods: tuple
        :kwarg backend_options: options for chosen backend. See corresponding
                                :ref:`sqlite <backends_sqlite>`, :ref:`mongo <backends_mongo>`
                                and :ref:`redis <backends_redis>` backends API documentation
        """
        if backend is None or isinstance(backend, basestring):
            self.cache = backends.create_backend(backend, cache_name,
                                                 backend_options)
        else:
            self.cache = backend
        self._cache_expire_after = expire_after
        self._cache_allowable_codes = allowable_codes
        self._cache_allowable_methods = allowable_methods
        self._is_cache_disabled = False
        super(CachedSession, self).__init__()

    def send(self, request, **kwargs):
        if (self._is_cache_disabled
                or request.method not in self._cache_allowable_methods):
            response = super(CachedSession, self).send(request, **kwargs)
            response.from_cache = False
            return response

        cache_key = self.cache.create_key(request)

        def send_request_and_cache_response():
            response = super(CachedSession, self).send(request, **kwargs)
            if response.status_code in self._cache_allowable_codes:
                self.cache.save_response(cache_key, response)
            response.from_cache = False
            return response

        response, timestamp = self.cache.get_response_and_time(cache_key)
        if response is None:
            return send_request_and_cache_response()

        if self._cache_expire_after is not None:
            difference = datetime.utcnow() - timestamp
            if difference > timedelta(seconds=self._cache_expire_after):
                self.cache.delete(cache_key)
                return send_request_and_cache_response()

        # dispatch hook here, because we've removed it before pickling
        response.from_cache = True
        response = dispatch_hook('response', request.hooks, response, **kwargs)
        return response

    def request(self, method, url, params=None, data=None, headers=None,
                cookies=None, files=None, auth=None, timeout=None,
                allow_redirects=True, proxies=None, hooks=None, stream=None,
                verify=None, cert=None):
        response = super(CachedSession, self).request(method, url, params, data,
                                                      headers, cookies, files,
                                                      auth, timeout,
                                                      allow_redirects, proxies,
                                                      hooks, stream, verify, cert)
        if self._is_cache_disabled:
            return response

        main_key = self.cache.create_key(response.request)
        for r in response.history:
            self.cache.add_key_mapping(
                self.cache.create_key(r.request), main_key
            )
        return response

    @contextmanager
    def cache_disabled(self):
        """
        Context manager for temporarily disabling the cache
        ::

            >>> s = CachedSession()
            >>> with s.cache_disabled():
            ...     s.get('http://httpbin.org/ip')

        """
        self._is_cache_disabled = True
        try:
            yield
        finally:
            self._is_cache_disabled = False


def install_cache(cache_name='cache', backend=None, expire_after=None,
                  allowable_codes=(200,), allowable_methods=('GET',),
                  session_factory=CachedSession, **backend_options):
    """
    Installs cache for all ``Requests`` requests by monkey-patching ``Session``

    Parameters are the same as in :class:`CachedSession`. Additional parameters:

    :param session_factory: Session factory. It should inherit :class:`CachedSession` (default)
    """
    if backend:
        backend = backends.create_backend(backend, cache_name, backend_options)

    _patch_session_factory(
        lambda: session_factory(cache_name=cache_name,
                                backend=backend,
                                expire_after=expire_after,
                                allowable_codes=allowable_codes,
                                allowable_methods=allowable_methods,
                                **backend_options)
    )


# backward compatibility
configure = install_cache


def uninstall_cache():
    """ Restores ``requests.Session`` and disables cache
    """
    _patch_session_factory(OriginalSession)


@contextmanager
def disabled():
    """
    Context manager for temporarily disabling the globally installed cache

    .. warning:: not thread-safe

    ::

        >>> with requests_cache.disabled():
        ...     requests.get('http://httpbin.org/ip')
        ...     requests.get('http://httpbin.org/get')

    """
    previous = requests.Session
    uninstall_cache()
    try:
        yield
    finally:
        _patch_session_factory(previous)


@contextmanager
def enabled(*args, **kwargs):
    """
    Context manager for temporarily installing the global cache.
    Accepts same arguments as :func:`install_cache`

    .. warning:: not thread-safe

    ::

        >>> with requests_cache.enabled('cache_db'):
        ...     requests.get('http://httpbin.org/get')

    """
    install_cache(*args, **kwargs)
    try:
        yield
    finally:
        uninstall_cache()


def get_cache():
    """ Returns internal cache object from globally installed ``CachedSession``
    """
    return requests.Session().cache


def clear():
    """ Clears globally installed cache
    """
    get_cache().clear()


def _patch_session_factory(session_factory=CachedSession):
    requests.Session = requests.sessions.Session = session_factory
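A short usage sketch (illustrative) tying CachedSession to the sqlite backend this commit switches to; the from_cache flag is set by send() above:

from requests_cache import CachedSession

session = CachedSession('demo_cache', backend='sqlite', expire_after=300)

r1 = session.get('http://httpbin.org/get')
r2 = session.get('http://httpbin.org/get')
print(r1.from_cache, r2.from_cache)   # False True - the second response comes from demo_cache.sqlite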

View file

@@ -19,13 +19,11 @@ __version__ = "1.9"
 import os
 import time
-import urllib
 import getpass
 import StringIO
 import tempfile
 import warnings
 import logging
-import datetime
 import zipfile
 try:
@@ -39,6 +37,7 @@ except ImportError:
     gzip = None
 from lib import requests
+from lib import requests_cache
 from tvdb_ui import BaseUI, ConsoleUI
 from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
@@ -430,15 +429,12 @@ class Tvdb:
         if cache is True:
             self.config['cache_enabled'] = True
-            self.config['cache_location'] = self._getTempDir()
+            requests_cache.install_cache(self._getTempDir())
         elif cache is False:
             self.config['cache_enabled'] = False
         elif isinstance(cache, basestring):
             self.config['cache_enabled'] = True
-            self.config['cache_location'] = cache
+            requests_cache.install_cache(cache)
         else:
             raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
@@ -541,11 +537,9 @@ class Tvdb:
             # get response from TVDB
             if self.config['cache_enabled']:
-                s = requests.Session()
-                s.mount('http://', CachingHTTPAdapter())
-                resp = s.get(url, params=params)
+                resp = requests.get(url, params=params)
             else:
-                resp = requests.get(url, params=params)
+                with requests_cache.disabled():
+                    resp = requests.get(url, params=params)
         except requests.HTTPError, e:

View file

@@ -30,7 +30,9 @@ except ImportError:
     import xml.etree.ElementTree as ElementTree
 from lib.dateutil.parser import parse
 from lib import requests
+from lib import requests_cache
 from tvrage_ui import BaseUI
 from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfound,
@@ -270,10 +272,12 @@ class TVRage:
         if cache is True:
             self.config['cache_enabled'] = True
+            requests_cache.install_cache(self._getTempDir())
         elif cache is False:
             self.config['cache_enabled'] = False
         elif isinstance(cache, basestring):
             self.config['cache_enabled'] = True
+            requests_cache.install_cache(cache)
         else:
             raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
@@ -366,11 +370,9 @@ class TVRage:
             # get response from TVRage
             if self.config['cache_enabled']:
-                s = requests.Session()
-                s.mount('http://', CachingHTTPAdapter())
-                resp = s.get(url, params=params)
+                resp = requests.get(url, params=params)
             else:
-                resp = requests.get(url, params=params)
+                with requests_cache.disabled():
+                    resp = requests.get(url, params=params)
         except requests.HTTPError, e:
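In plain terms, the pattern the tvdb_api and tvrage_api hunks above move to looks like this (sketch; imports simplified, url/params illustrative):

import requests
import requests_cache

cache_enabled = True
url, params = 'http://httpbin.org/get', {'q': 'example'}

# At configuration time (Tvdb.__init__ / TVRage.__init__ in the diff):
requests_cache.install_cache('/tmp/indexer_cache')  # creates /tmp/indexer_cache.sqlite

# At request time:
if cache_enabled:
    resp = requests.get(url, params=params)      # goes through the installed cache
else:
    with requests_cache.disabled():              # temporarily bypass the global cache
        resp = requests.get(url, params=params)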

View file

@@ -328,11 +328,8 @@ def searchIndexerForShowID(regShowName, indexer, indexer_id=None):
                 if (seriesname == name) or (indexer_id is not None and part['id'] == indexer_id):
                     return [sickbeard.indexerApi(indexer).config['id'], part['id']]
-            except KeyError, e:
-                break
-            except Exception, e:
-                continue
+            except KeyError:break
+            except Exception:continue

 def sizeof_fmt(num):
     '''

View file

@@ -51,35 +51,6 @@ class ShowUpdater():
             else:
                 return

-        # clean out cache directory, remove everything > 12 hours old
-        if sickbeard.CACHE_DIR:
-            for indexer in sickbeard.indexerApi().indexers:
-                cache_dir = sickbeard.indexerApi(indexer).cache
-                logger.log(u"Trying to clean cache folder " + cache_dir)
-
-                # Does our cache_dir exists
-                if not ek.ek(os.path.isdir, cache_dir):
-                    logger.log(u"Can't clean " + cache_dir + " if it doesn't exist", logger.WARNING)
-                else:
-                    max_age = datetime.timedelta(hours=12)
-                    # Get all our cache files
-                    cache_files = ek.ek(os.listdir, cache_dir)
-                    for cache_file in cache_files:
-                        cache_file_path = ek.ek(os.path.join, cache_dir, cache_file)
-                        if ek.ek(os.path.isfile, cache_file_path):
-                            cache_file_modified = datetime.datetime.fromtimestamp(
-                                ek.ek(os.path.getmtime, cache_file_path))
-                            if update_datetime - cache_file_modified > max_age:
-                                try:
-                                    ek.ek(os.remove, cache_file_path)
-                                except OSError, e:
-                                    logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e),
-                                               logger.WARNING)
-                                    break
-
         # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update
         stale_should_update = []
         stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()