diff --git a/lib/feedcache/__init__.py b/lib/feedcache/__init__.py
new file mode 100644
index 00000000..96ebc102
--- /dev/null
+++ b/lib/feedcache/__init__.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+
+
+#
+# Import local modules
+#
+from cache import Cache
+
+#
+# Module
+#
diff --git a/lib/feedcache/cache.py b/lib/feedcache/cache.py
new file mode 100644
index 00000000..fbaef63a
--- /dev/null
+++ b/lib/feedcache/cache.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+from feedparser import feedparser
+
+import logging
+import time
+
+#
+# Import local modules
+#
+
+
+#
+# Module
+#
+
+logger = logging.getLogger('feedcache.cache')
+
+
+class Cache:
+ """A class to wrap Mark Pilgrim's Universal Feed Parser module
+ (http://www.feedparser.org) so that parameters can be used to
+ cache the feed results locally instead of fetching the feed every
+ time it is requested. Uses both etag and modified times for
+ caching.
+ """
+
+ def __init__(self, storage, timeToLiveSeconds=300, userAgent='feedcache'):
+ """
+ Arguments:
+
+ storage -- Backing store for the cache. It should follow
+ the dictionary API, with URLs used as keys. It should
+ persist data.
+
+ timeToLiveSeconds=300 -- The length of time content should
+ live in the cache before an update is attempted.
+
+ userAgent='feedcache' -- User agent string to be used when
+ fetching feed contents.
+
+ """
+ self.storage = storage
+ self.time_to_live = timeToLiveSeconds
+ self.user_agent = userAgent
+ return
+
+ def purge(self, olderThanSeconds):
+ """Remove cached data from the storage if the data is older than the
+ date given. If olderThanSeconds is None, the entire cache is purged.
+ """
+ if olderThanSeconds is None:
+ logger.debug('purging the entire cache')
+ for key in self.storage.keys():
+ del self.storage[key]
+ else:
+ now = time.time()
+ # Iterate over the keys and load each item one at a time
+ # to avoid having the entire cache loaded into memory
+ # at one time.
+ for url in self.storage.keys():
+ (cached_time, cached_data) = self.storage[url]
+ age = now - cached_time
+ if age >= olderThanSeconds:
+ logger.debug('removing %s with age %d', url, age)
+ del self.storage[url]
+ return
+
+ def fetch(self, url, force_update=False, offline=False):
+ """Return the feed at url.
+
+ url - The URL of the feed.
+
+ force_update=False - When True, update the cache whether the
+ current contents have exceeded their time-to-live or not.
+
+ offline=False - When True, only return data from the local
+ cache and never access the remote URL.
+
+ If there is data for that feed in the cache already, check
+ the expiration date before accessing the server. If the
+ cached data has not expired, return it without accessing the
+ server.
+
+ In cases where the server is accessed, check for updates
+ before deciding what to return. If the server reports a
+ status of 304, the previously cached content is returned.
+
+ The cache is only updated if the server returns a status of
+ 200, to avoid holding redirected data in the cache.
+ """
+ logger.debug('url="%s"' % url)
+
+ # Convert the URL to a value we can use
+ # as a key for the storage backend.
+ key = url
+ if isinstance(key, unicode):
+ key = key.encode('utf-8')
+
+ modified = None
+ etag = None
+ now = time.time()
+
+ cached_time, cached_content = self.storage.get(key, (None, None))
+
+ # Offline mode support (no networked requests)
+ # so return whatever we found in the storage.
+ # If there is nothing in the storage, we'll be returning None.
+ if offline:
+ logger.debug('offline mode')
+ return cached_content
+
+ # Does the storage contain a version of the data
+ # which is older than the time-to-live?
+ logger.debug('cache modified time: %s' % str(cached_time))
+ if cached_time is not None and not force_update:
+ if self.time_to_live:
+ age = now - cached_time
+ if age <= self.time_to_live:
+ logger.debug('cache contents still valid')
+ return cached_content
+ else:
+ logger.debug('cache contents older than TTL')
+ else:
+ logger.debug('no TTL value')
+
+ # The cache is out of date, but we have
+ # something. Try to use the etag and modified_time
+ # values from the cached content.
+ etag = cached_content.get('etag')
+ modified = cached_content.get('modified')
+ logger.debug('cached etag=%s' % etag)
+ logger.debug('cached modified=%s' % str(modified))
+ else:
+ logger.debug('nothing in the cache, or forcing update')
+
+ # We know we need to fetch, so go ahead and do it.
+ logger.debug('fetching...')
+ parsed_result = feedparser.parse(url,
+ agent=self.user_agent,
+ modified=modified,
+ etag=etag,
+ )
+
+ status = parsed_result.get('status', None)
+ logger.debug('HTTP status=%s' % status)
+ if status == 304:
+ # No new data, based on the etag or modified values.
+ # We need to update the modified time in the
+ # storage, though, so we know that what we have
+ # stored is up to date.
+ self.storage[key] = (now, cached_content)
+
+ # Return the data from the cache, since
+ # the parsed data will be empty.
+ parsed_result = cached_content
+ elif status == 200:
+ # There is new content, so store it unless there was an error.
+ error = parsed_result.get('bozo_exception')
+ if not error:
+ logger.debug('Updating stored data for %s' % url)
+ self.storage[key] = (now, parsed_result)
+ else:
+ logger.warning('Not storing data with exception: %s',
+ error)
+ else:
+ logger.warning('Not updating cache with HTTP status %s', status)
+
+ return parsed_result
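+
+
+# A minimal usage sketch (not part of the original module; any mapping can
+# serve as the backing store, and the URLs come from the command line):
+if __name__ == '__main__':
+    import sys
+    storage = {}
+    c = Cache(storage, timeToLiveSeconds=60)
+    for url in sys.argv[1:]:
+        first = c.fetch(url)   # hits the server and populates storage
+        second = c.fetch(url)  # within the TTL, served from storage
+        print first.feed.get('title', url), (first is second)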
diff --git a/lib/feedcache/cachestoragelock.py b/lib/feedcache/cachestoragelock.py
new file mode 100644
index 00000000..05babde6
--- /dev/null
+++ b/lib/feedcache/cachestoragelock.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+from __future__ import with_statement
+
+"""Lock wrapper for cache storage which do not permit multi-threaded access.
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+import threading
+
+#
+# Import local modules
+#
+
+
+#
+# Module
+#
+
+class CacheStorageLock:
+ """Lock wrapper for cache storage which do not permit multi-threaded access.
+ """
+
+ def __init__(self, shelf):
+ self.lock = threading.Lock()
+ self.shelf = shelf
+ return
+
+ def __getitem__(self, key):
+ with self.lock:
+ return self.shelf[key]
+
+ def get(self, key, default=None):
+ with self.lock:
+ try:
+ return self.shelf[key]
+ except KeyError:
+ return default
+
+ def __setitem__(self, key, value):
+ with self.lock:
+ self.shelf[key] = value
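+
+
+# A minimal usage sketch (assumed pairing, mirroring test_cachestoragelock):
+# wrap a shelve shelf so Cache instances in several threads can share it.
+#
+#     import shelve
+#     from cache import Cache
+#
+#     shelf = shelve.open('.feedcache')
+#     storage = CacheStorageLock(shelf)
+#     fc = Cache(storage)  # safe to share across threads
+#     feed = fc.fetch('http://example.com/atom.xml')  # placeholder URL
+#     shelf.close()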
diff --git a/lib/feedcache/example.py b/lib/feedcache/example.py
new file mode 100644
index 00000000..4df7ab68
--- /dev/null
+++ b/lib/feedcache/example.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""Example use of feedcache.Cache.
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+import sys
+import shelve
+
+#
+# Import local modules
+#
+import cache
+
+#
+# Module
+#
+
+def main(urls=[]):
+ print 'Saving feed data to ./.feedcache'
+ storage = shelve.open('.feedcache')
+ try:
+ fc = cache.Cache(storage)
+ for url in urls:
+ parsed_data = fc.fetch(url)
+ print parsed_data.feed.title
+ for entry in parsed_data.entries:
+ print '\t', entry.title
+ finally:
+ storage.close()
+ return
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
+
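+# Example invocation (the URL is a placeholder):
+#
+#     python example.py http://example.com/atom.xml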
diff --git a/lib/feedcache/example_threads.py b/lib/feedcache/example_threads.py
new file mode 100644
index 00000000..2eb56d30
--- /dev/null
+++ b/lib/feedcache/example_threads.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""Example use of feedcache.Cache combined with threads.
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+import Queue
+import sys
+import shove
+import threading
+
+#
+# Import local modules
+#
+import cache
+
+#
+# Module
+#
+
+MAX_THREADS=5
+OUTPUT_DIR='/tmp/feedcache_example'
+
+
+def main(urls=[]):
+
+ if not urls:
+ print 'Specify the URLs to a few RSS or Atom feeds on the command line.'
+ return
+
+ # Decide how many threads to start
+ num_threads = min(len(urls), MAX_THREADS)
+
+ # Add the URLs to a queue
+ url_queue = Queue.Queue()
+ for url in urls:
+ url_queue.put(url)
+
+ # Add poison pills to the url queue to cause
+ # the worker threads to break out of their loops
+ for i in range(num_threads):
+ url_queue.put(None)
+
+ # Track the entries in the feeds being fetched
+ entry_queue = Queue.Queue()
+
+ print 'Saving feed data to', OUTPUT_DIR
+ storage = shove.Shove('file://' + OUTPUT_DIR)
+ try:
+
+ # Start a few worker threads
+ worker_threads = []
+ for i in range(num_threads):
+ t = threading.Thread(target=fetch_urls,
+ args=(storage, url_queue, entry_queue,))
+ worker_threads.append(t)
+ t.setDaemon(True)
+ t.start()
+
+ # Start a thread to print the results
+ printer_thread = threading.Thread(target=print_entries, args=(entry_queue,))
+ printer_thread.setDaemon(True)
+ printer_thread.start()
+
+ # Wait for all of the URLs to be processed
+ url_queue.join()
+
+ # Wait for the worker threads to finish
+ for t in worker_threads:
+ t.join()
+
+ # Poison the print thread and wait for it to exit
+ entry_queue.put((None,None))
+ entry_queue.join()
+ printer_thread.join()
+
+ finally:
+ storage.close()
+ return
+
+
+def fetch_urls(storage, input_queue, output_queue):
+ """Thread target for fetching feed data.
+ """
+ c = cache.Cache(storage)
+
+ while True:
+ next_url = input_queue.get()
+ if next_url is None: # None causes thread to exit
+ input_queue.task_done()
+ break
+
+ feed_data = c.fetch(next_url)
+ for entry in feed_data.entries:
+ output_queue.put( (feed_data.feed, entry) )
+ input_queue.task_done()
+ return
+
+
+def print_entries(input_queue):
+ """Thread target for printing the contents of the feeds.
+ """
+ while True:
+ feed, entry = input_queue.get()
+ if feed is None: # None causes thread to exit
+ input_queue.task_done()
+ break
+
+ print '%s: %s' % (feed.title, entry.title)
+ input_queue.task_done()
+ return
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
+
diff --git a/lib/feedcache/test_cache.py b/lib/feedcache/test_cache.py
new file mode 100644
index 00000000..2c1ac096
--- /dev/null
+++ b/lib/feedcache/test_cache.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""Unittests for feedcache.cache
+
+"""
+
+__module_id__ = "$Id$"
+
+import logging
+logging.basicConfig(level=logging.DEBUG,
+ format='%(asctime)s %(levelname)-8s %(name)s %(message)s',
+ )
+logger = logging.getLogger('feedcache.test_cache')
+
+#
+# Import system modules
+#
+import copy
+import time
+import unittest
+import UserDict
+
+#
+# Import local modules
+#
+import cache
+from test_server import HTTPTestBase, TestHTTPServer
+
+#
+# Module
+#
+
+
+class CacheTestBase(HTTPTestBase):
+
+ CACHE_TTL = 30
+
+ def setUp(self):
+ HTTPTestBase.setUp(self)
+
+ self.storage = self.getStorage()
+ self.cache = cache.Cache(self.storage,
+ timeToLiveSeconds=self.CACHE_TTL,
+ userAgent='feedcache.test',
+ )
+ return
+
+ def getStorage(self):
+ "Return a cache storage for the test."
+ return {}
+
+
+class CacheTest(CacheTestBase):
+
+ CACHE_TTL = 30
+
+ def getServer(self):
+ "These tests do not want to use the ETag or If-Modified-Since headers"
+ return TestHTTPServer(applyModifiedHeaders=False)
+
+ def testRetrieveNotInCache(self):
+ # Retrieve data not already in the cache.
+ feed_data = self.cache.fetch(self.TEST_URL)
+ self.failUnless(feed_data)
+ self.failUnlessEqual(feed_data.feed.title, 'CacheTest test data')
+ return
+
+ def testRetrieveIsInCache(self):
+ # Retrieve data which is already in the cache,
+ # and verify that the second copy is identical
+ # to the first.
+
+ # First fetch
+ feed_data = self.cache.fetch(self.TEST_URL)
+
+ # Second fetch
+ feed_data2 = self.cache.fetch(self.TEST_URL)
+
+ # Since it is the in-memory storage, we should have the
+ # exact same object.
+ self.failUnless(feed_data is feed_data2)
+ return
+
+ def testExpireDataInCache(self):
+ # Retrieve data which is in the cache but which
+ # has expired and verify that the second copy
+ # is different from the first.
+
+ # First fetch
+ feed_data = self.cache.fetch(self.TEST_URL)
+
+ # Change the timeout and sleep to move the clock
+ self.cache.time_to_live = 0
+ time.sleep(1)
+
+ # Second fetch
+ feed_data2 = self.cache.fetch(self.TEST_URL)
+
+ # Since we reparsed, the cache response should be different.
+ self.failIf(feed_data is feed_data2)
+ return
+
+ def testForceUpdate(self):
+ # Force cache to retrieve data which is already in the cache,
+ # and verify that the new data is different.
+
+ # Pre-populate the storage with bad data
+ self.cache.storage[self.TEST_URL] = (time.time() + 100, self.id())
+
+ # Fetch the data
+ feed_data = self.cache.fetch(self.TEST_URL, force_update=True)
+
+ self.failIfEqual(feed_data, self.id())
+ return
+
+ def testOfflineMode(self):
+ # Retrieve data which is already in the cache,
+ # whether it is expired or not.
+
+ # Pre-populate the storage with data
+ self.cache.storage[self.TEST_URL] = (0, self.id())
+
+ # Fetch it
+ feed_data = self.cache.fetch(self.TEST_URL, offline=True)
+
+ self.failUnlessEqual(feed_data, self.id())
+ return
+
+ def testUnicodeURL(self):
+ # Pass in a URL which is unicode
+
+ url = unicode(self.TEST_URL)
+ feed_data = self.cache.fetch(url)
+
+ storage = self.cache.storage
+ key = unicode(self.TEST_URL).encode('UTF-8')
+
+ # Verify that the storage has a key
+ self.failUnless(key in storage)
+
+ # Now pull the data from the storage directly
+ storage_timeout, storage_data = self.cache.storage.get(key)
+ self.failUnlessEqual(feed_data, storage_data)
+ return
+
+
+class SingleWriteMemoryStorage(UserDict.UserDict):
+ """Cache storage which only allows the cache value
+ for a URL to be updated one time.
+ """
+
+ def __setitem__(self, url, data):
+ if url in self.keys():
+ modified, existing = self[url]
+ # Allow the modified time to change,
+ # but not the feed content.
+ if data[1] != existing:
+ raise AssertionError('Trying to update cache for %s to %s' \
+ % (url, data))
+ UserDict.UserDict.__setitem__(self, url, data)
+ return
+
+
+class CacheConditionalGETTest(CacheTestBase):
+
+ CACHE_TTL = 0
+
+ def getStorage(self):
+ return SingleWriteMemoryStorage()
+
+ def testFetchOnceForEtag(self):
+ # Fetch data which has a valid ETag value, and verify
+ # that while we hit the server twice the response
+ # codes cause us to use the same data.
+
+ # First fetch populates the cache
+ response1 = self.cache.fetch(self.TEST_URL)
+ self.failUnlessEqual(response1.feed.title, 'CacheTest test data')
+
+ # Remove the modified setting from the cache so we know
+ # the next time we check the etag will be used
+ # to check for updates. Since we are using an in-memory
+ # cache, modifying response1 updates the cache storage
+ # directly.
+ response1['modified'] = None
+
+ # This should result in a 304 status, and no data from
+ # the server. That means the cache won't try to
+ # update the storage, so our SingleWriteMemoryStorage
+ # should not raise and we should have the same
+ # response object.
+ response2 = self.cache.fetch(self.TEST_URL)
+ self.failUnless(response1 is response2)
+
+ # Should have hit the server twice
+ self.failUnlessEqual(self.server.getNumRequests(), 2)
+ return
+
+ def testFetchOnceForModifiedTime(self):
+ # Fetch data which has a valid Last-Modified value, and verify
+ # that while we hit the server twice the response
+ # codes cause us to use the same data.
+
+ # First fetch populates the cache
+ response1 = self.cache.fetch(self.TEST_URL)
+ self.failUnlessEqual(response1.feed.title, 'CacheTest test data')
+
+ # Remove the etag setting from the cache so we know
+ # the next time we check the modified time will be used
+ # to check for updates. Since we are using an in-memory
+ # cache, modifying response1 updates the cache storage
+ # directly.
+ response1['etag'] = None
+
+ # This should result in a 304 status, and no data from
+ # the server. That means the cache won't try to
+ # update the storage, so our SingleWriteMemoryStorage
+ # should not raise and we should have the same
+ # response object.
+ response2 = self.cache.fetch(self.TEST_URL)
+ self.failUnless(response1 is response2)
+
+ # Should have hit the server twice
+ self.failUnlessEqual(self.server.getNumRequests(), 2)
+ return
+
+
+class CacheRedirectHandlingTest(CacheTestBase):
+
+ def _test(self, response):
+ # Set up the server to redirect requests,
+ # then verify that the cache is not updated
+ # for the original or new URL and that the
+ # redirect status is fed back to us with
+ # the fetched data.
+
+ self.server.setResponse(response, '/redirected')
+
+ response1 = self.cache.fetch(self.TEST_URL)
+
+ # The response should include the status code we set
+ self.failUnlessEqual(response1.get('status'), response)
+
+ # The response should include the new URL, too
+ self.failUnlessEqual(response1.href, self.TEST_URL + 'redirected')
+
+ # The response should not have been cached under either URL
+ self.failIf(self.TEST_URL in self.storage)
+ self.failIf(self.TEST_URL + 'redirected' in self.storage)
+ return
+
+ def test301(self):
+ self._test(301)
+
+ def test302(self):
+ self._test(302)
+
+ def test303(self):
+ self._test(303)
+
+ def test307(self):
+ self._test(307)
+
+
+class CachePurgeTest(CacheTestBase):
+
+ def testPurgeAll(self):
+ # Remove everything from the cache
+
+ self.cache.fetch(self.TEST_URL)
+ self.failUnless(self.storage.keys(),
+ 'Have no data in the cache storage')
+
+ self.cache.purge(None)
+
+ self.failIf(self.storage.keys(),
+ 'Still have data in the cache storage')
+ return
+
+ def testPurgeByAge(self):
+ # Remove old content from the cache
+
+ self.cache.fetch(self.TEST_URL)
+ self.failUnless(self.storage.keys(),
+ 'have no data in the cache storage')
+
+ time.sleep(1)
+
+ remains = (time.time(), copy.deepcopy(self.storage[self.TEST_URL][1]))
+ self.storage['http://this.should.remain/'] = remains
+
+ self.cache.purge(1)
+
+ self.failUnlessEqual(self.storage.keys(),
+ ['http://this.should.remain/'])
+ return
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/feedcache/test_cachestoragelock.py b/lib/feedcache/test_cachestoragelock.py
new file mode 100644
index 00000000..741a39ab
--- /dev/null
+++ b/lib/feedcache/test_cachestoragelock.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""Tests for shelflock.
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+import os
+import shelve
+import tempfile
+import threading
+import unittest
+
+#
+# Import local modules
+#
+from cache import Cache
+from cachestoragelock import CacheStorageLock
+from test_server import HTTPTestBase
+
+#
+# Module
+#
+
+class CacheShelveTest(HTTPTestBase):
+
+ def setUp(self):
+ HTTPTestBase.setUp(self)
+ handle, self.shelve_filename = tempfile.mkstemp('.shelve')
+ os.close(handle) # we just want the file name, so close the open handle
+ os.unlink(self.shelve_filename) # remove the empty file
+ return
+
+ def tearDown(self):
+ try:
+ os.unlink(self.shelve_filename)
+ except AttributeError:
+ pass
+ HTTPTestBase.tearDown(self)
+ return
+
+ def test(self):
+ storage = shelve.open(self.shelve_filename)
+ locking_storage = CacheStorageLock(storage)
+ try:
+ fc = Cache(locking_storage)
+
+ # First fetch the data through the cache
+ parsed_data = fc.fetch(self.TEST_URL)
+ self.failUnlessEqual(parsed_data.feed.title, 'CacheTest test data')
+
+ # Now retrieve the same data directly from the shelf
+ modified, shelved_data = storage[self.TEST_URL]
+
+ # The data should be the same
+ self.failUnlessEqual(parsed_data, shelved_data)
+ finally:
+ storage.close()
+ return
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/feedcache/test_server.py b/lib/feedcache/test_server.py
new file mode 100644
index 00000000..f48be105
--- /dev/null
+++ b/lib/feedcache/test_server.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""Simple HTTP server for testing the feed cache.
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+import BaseHTTPServer
+import logging
+import md5
+import threading
+import time
+import unittest
+import urllib
+
+#
+# Import local modules
+#
+
+
+#
+# Module
+#
+logger = logging.getLogger('feedcache.test_server')
+
+
+def make_etag(data):
+ """Given a string containing data to be returned to the client,
+ compute an ETag value for the data.
+ """
+ _md5 = md5.new()
+ _md5.update(data)
+ return _md5.hexdigest()
+
+
+class TestHTTPHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ "HTTP request handler which serves the same feed data every time."
+
+ FEED_DATA = """
+
+
+ CacheTest test data
+
+
+ http://localhost/feedcache/
+ 2006-10-14T11:00:36Z
+
+ single test entry
+
+ 2006-10-14T11:00:36Z
+
+ author goes here
+ authoremail@example.com
+
+ http://www.example.com/
+ description goes here
+
+
+
+"""
+
+ # The data does not change, so save the ETag and modified times
+ # as class attributes.
+ ETAG = make_etag(FEED_DATA)
+ # Calculated using email.utils.formatdate(usegmt=True)
+ MODIFIED_TIME = 'Sun, 08 Apr 2012 20:16:48 GMT'
+
+ def do_GET(self):
+ "Handle GET requests."
+ logger.debug('GET %s', self.path)
+
+ if self.path == '/shutdown':
+ # Shortcut to handle stopping the server
+ logger.debug('Stopping server')
+ self.server.stop()
+ self.send_response(200)
+
+ else:
+ # Record the request for tests that count them
+ self.server.requests.append(self.path)
+ # Process the request
+ logger.debug('pre-defined response code: %d', self.server.response)
+ handler_method_name = 'do_GET_%d' % self.server.response
+ handler_method = getattr(self, handler_method_name)
+ handler_method()
+ return
+
+ def do_GET_3xx(self):
+ "Handle redirects"
+ if self.path.endswith('/redirected'):
+ logger.debug('already redirected')
+ # We have already redirected, so return the data.
+ return self.do_GET_200()
+ new_path = self.server.new_path
+ logger.debug('redirecting to %s', new_path)
+ self.send_response(self.server.response)
+ self.send_header('Location', new_path)
+ return
+
+ do_GET_301 = do_GET_3xx
+ do_GET_302 = do_GET_3xx
+ do_GET_303 = do_GET_3xx
+ do_GET_307 = do_GET_3xx
+
+ def do_GET_200(self):
+ logger.debug('Etag: %s' % self.ETAG)
+ logger.debug('Last-Modified: %s' % self.MODIFIED_TIME)
+
+ incoming_etag = self.headers.get('If-None-Match', None)
+ logger.debug('Incoming ETag: "%s"' % incoming_etag)
+
+ incoming_modified = self.headers.get('If-Modified-Since', None)
+ logger.debug('Incoming If-Modified-Since: %s' % incoming_modified)
+
+ send_data = True
+
+ # Does the client have the same version of the data we have?
+ if self.server.apply_modified_headers:
+ if incoming_etag == self.ETAG:
+ logger.debug('Response 304, etag')
+ self.send_response(304)
+ send_data = False
+
+ elif incoming_modified == self.MODIFIED_TIME:
+ logger.debug('Response 304, modified time')
+ self.send_response(304)
+ send_data = False
+
+ # Now optionally send the data, if the client needs it
+ if send_data:
+ logger.debug('Response 200')
+ self.send_response(200)
+
+ self.send_header('Content-Type', 'application/atom+xml')
+
+ logger.debug('Outgoing Etag: %s' % self.ETAG)
+ self.send_header('ETag', self.ETAG)
+
+ logger.debug('Outgoing modified time: %s' % self.MODIFIED_TIME)
+ self.send_header('Last-Modified', self.MODIFIED_TIME)
+
+ self.end_headers()
+
+ logger.debug('Sending data')
+ self.wfile.write(self.FEED_DATA)
+ return
+
+
+class TestHTTPServer(BaseHTTPServer.HTTPServer):
+ """HTTP Server which counts the number of requests made
+ and can stop based on client instructions.
+ """
+
+ def __init__(self, applyModifiedHeaders=True, handler=TestHTTPHandler):
+ self.apply_modified_headers = applyModifiedHeaders
+ self.keep_serving = True
+ self.requests = []
+ self.setResponse(200)
+ BaseHTTPServer.HTTPServer.__init__(self, ('', 9999), handler)
+ return
+
+ def setResponse(self, newResponse, newPath=None):
+ """Sets the response code to use for future requests, and a new
+ path to be used as a redirect target, if necessary.
+ """
+ self.response = newResponse
+ self.new_path = newPath
+ return
+
+ def getNumRequests(self):
+ "Return the number of requests which have been made on the server."
+ return len(self.requests)
+
+ def stop(self):
+ "Stop serving requests, after the next request."
+ self.keep_serving = False
+ return
+
+ def serve_forever(self):
+ "Main loop for server"
+ while self.keep_serving:
+ self.handle_request()
+ logger.debug('exiting')
+ return
+
+
+class HTTPTestBase(unittest.TestCase):
+ "Base class for tests that use a TestHTTPServer"
+
+ TEST_URL = 'http://localhost:9999/'
+
+ CACHE_TTL = 0
+
+ def setUp(self):
+ self.server = self.getServer()
+ self.server_thread = threading.Thread(target=self.server.serve_forever)
+ # set daemon flag so the tests don't hang if cleanup fails
+ self.server_thread.setDaemon(True)
+ self.server_thread.start()
+ return
+
+ def getServer(self):
+ "Return a web server for the test."
+ s = TestHTTPServer()
+ s.setResponse(200)
+ return s
+
+ def tearDown(self):
+ # Stop the server thread
+ urllib.urlretrieve(self.TEST_URL + 'shutdown')
+ time.sleep(1)
+ self.server.server_close()
+ self.server_thread.join()
+ return
diff --git a/lib/feedcache/test_shovefilesystem.py b/lib/feedcache/test_shovefilesystem.py
new file mode 100644
index 00000000..1a48dead
--- /dev/null
+++ b/lib/feedcache/test_shovefilesystem.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Doug Hellmann.
+#
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Doug
+# Hellmann not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+
+"""Tests with shove filesystem storage.
+
+"""
+
+__module_id__ = "$Id$"
+
+#
+# Import system modules
+#
+import os
+import shove
+import tempfile
+import threading
+import unittest
+
+#
+# Import local modules
+#
+from cache import Cache
+from test_server import HTTPTestBase
+
+#
+# Module
+#
+
+class CacheShoveTest(HTTPTestBase):
+
+ def setUp(self):
+ HTTPTestBase.setUp(self)
+ self.shove_dirname = tempfile.mkdtemp('shove')
+ return
+
+ def tearDown(self):
+ try:
+ os.system('rm -rf %s' % self.shove_dirname)
+ except AttributeError:
+ pass
+ HTTPTestBase.tearDown(self)
+ return
+
+ def test(self):
+ # First fetch the data through the cache
+ storage = shove.Shove('file://' + self.shove_dirname)
+ try:
+ fc = Cache(storage)
+ parsed_data = fc.fetch(self.TEST_URL)
+ self.failUnlessEqual(parsed_data.feed.title, 'CacheTest test data')
+ finally:
+ storage.close()
+
+ # Now retrieve the same data directly from the store
+ storage = shove.Shove('file://' + self.shove_dirname)
+ try:
+ modified, shelved_data = storage[self.TEST_URL]
+ finally:
+ storage.close()
+
+ # The data should be the same
+ self.failUnlessEqual(parsed_data, shelved_data)
+ return
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/__init__.py b/lib/shove/__init__.py
new file mode 100644
index 00000000..3be119b4
--- /dev/null
+++ b/lib/shove/__init__.py
@@ -0,0 +1,519 @@
+# -*- coding: utf-8 -*-
+'''Common object storage frontend.'''
+
+import os
+import zlib
+import urllib
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+from collections import deque
+
+try:
+ # Import store and cache entry points if setuptools installed
+ import pkg_resources
+ stores = dict((_store.name, _store) for _store in
+ pkg_resources.iter_entry_points('shove.stores'))
+ caches = dict((_cache.name, _cache) for _cache in
+ pkg_resources.iter_entry_points('shove.caches'))
+ # Fall back to the static registries below if nothing was loaded
+ if not stores and not caches:
+ raise ImportError()
+except ImportError:
+ # Static store backend registry
+ stores = dict(
+ bsddb='shove.store.bsdb:BsdStore',
+ cassandra='shove.store.cassandra:CassandraStore',
+ dbm='shove.store.dbm:DbmStore',
+ durus='shove.store.durusdb:DurusStore',
+ file='shove.store.file:FileStore',
+ firebird='shove.store.db:DbStore',
+ ftp='shove.store.ftp:FtpStore',
+ hdf5='shove.store.hdf5:HDF5Store',
+ leveldb='shove.store.leveldbstore:LevelDBStore',
+ memory='shove.store.memory:MemoryStore',
+ mssql='shove.store.db:DbStore',
+ mysql='shove.store.db:DbStore',
+ oracle='shove.store.db:DbStore',
+ postgres='shove.store.db:DbStore',
+ redis='shove.store.redisdb:RedisStore',
+ s3='shove.store.s3:S3Store',
+ simple='shove.store.simple:SimpleStore',
+ sqlite='shove.store.db:DbStore',
+ svn='shove.store.svn:SvnStore',
+ zodb='shove.store.zodb:ZodbStore',
+ )
+ # Static cache backend registry
+ caches = dict(
+ bsddb='shove.cache.bsdb:BsdCache',
+ file='shove.cache.file:FileCache',
+ filelru='shove.cache.filelru:FileLRUCache',
+ firebird='shove.cache.db:DbCache',
+ memcache='shove.cache.memcached:MemCached',
+ memlru='shove.cache.memlru:MemoryLRUCache',
+ memory='shove.cache.memory:MemoryCache',
+ mssql='shove.cache.db:DbCache',
+ mysql='shove.cache.db:DbCache',
+ oracle='shove.cache.db:DbCache',
+ postgres='shove.cache.db:DbCache',
+ redis='shove.cache.redisdb:RedisCache',
+ simple='shove.cache.simple:SimpleCache',
+ simplelru='shove.cache.simplelru:SimpleLRUCache',
+ sqlite='shove.cache.db:DbCache',
+ )
+
+
+def getbackend(uri, engines, **kw):
+ '''
+ Loads the right backend based on a URI.
+
+ @param uri Instance or name string
+ @param engines A dictionary of scheme/class pairs
+ '''
+ if isinstance(uri, basestring):
+ mod = engines[uri.split('://', 1)[0]]
+ # Load module if setuptools not present
+ if isinstance(mod, basestring):
+ # Isolate classname from dot path
+ module, klass = mod.split(':')
+ # Load module
+ mod = getattr(__import__(module, '', '', ['']), klass)
+ # Load appropriate class from setuptools entry point
+ else:
+ mod = mod.load()
+ # Return instance
+ return mod(uri, **kw)
+ # No-op for existing instances
+ return uri
+
+
+def synchronized(func):
+ '''
+ Decorator to lock and unlock a method (Phillip J. Eby).
+
+ @param func Method to decorate
+ '''
+ def wrapper(self, *__args, **__kw):
+ self._lock.acquire()
+ try:
+ return func(self, *__args, **__kw)
+ finally:
+ self._lock.release()
+ wrapper.__name__ = func.__name__
+ wrapper.__dict__ = func.__dict__
+ wrapper.__doc__ = func.__doc__
+ return wrapper
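+
+
+# Illustrative use of the decorator (a sketch; it assumes the instance
+# creates the lock the wrapper expects as self._lock):
+#
+#     class Counter(object):
+#         def __init__(self):
+#             self._lock = threading.Condition()
+#             self.value = 0
+#         @synchronized
+#         def increment(self):
+#             self.value += 1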
+
+
+class Base(object):
+
+ '''Base Mapping class.'''
+
+ def __init__(self, engine, **kw):
+ '''
+ @keyword compress True, False, or an integer compression level (1-9).
+ '''
+ self._compress = kw.get('compress', False)
+ self._protocol = kw.get('protocol', pickle.HIGHEST_PROTOCOL)
+
+ def __getitem__(self, key):
+ raise NotImplementedError()
+
+ def __setitem__(self, key, value):
+ raise NotImplementedError()
+
+ def __delitem__(self, key):
+ raise NotImplementedError()
+
+ def __contains__(self, key):
+ try:
+ value = self[key]
+ except KeyError:
+ return False
+ return True
+
+ def get(self, key, default=None):
+ '''
+ Fetch a given key from the mapping. If the key does not exist,
+ return the default.
+
+ @param key Keyword of item in mapping.
+ @param default Default value (default: None)
+ '''
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def dumps(self, value):
+ '''Optionally serializes and compresses an object.'''
+ # Serialize everything but ASCII strings
+ value = pickle.dumps(value, protocol=self._protocol)
+ if self._compress:
+ level = 9 if self._compress is True else self._compress
+ value = zlib.compress(value, level)
+ return value
+
+ def loads(self, value):
+ '''Deserializes and optionally decompresses an object.'''
+ if self._compress:
+ try:
+ value = zlib.decompress(value)
+ except zlib.error:
+ pass
+ value = pickle.loads(value)
+ return value
+
+
+class BaseStore(Base):
+
+ '''Base Store class (based on UserDict.DictMixin).'''
+
+ def __init__(self, engine, **kw):
+ super(BaseStore, self).__init__(engine, **kw)
+ self._store = None
+
+ def __cmp__(self, other):
+ if other is None:
+ return 1 # a store never compares equal to None
+ if isinstance(other, BaseStore):
+ return cmp(dict(self.iteritems()), dict(other.iteritems()))
+
+ def __del__(self):
+ # __init__ didn't succeed, so don't bother closing
+ if not hasattr(self, '_store'):
+ return
+ self.close()
+
+ def __iter__(self):
+ for k in self.keys():
+ yield k
+
+ def __len__(self):
+ return len(self.keys())
+
+ def __repr__(self):
+ return repr(dict(self.iteritems()))
+
+ def close(self):
+ '''Closes internal store and clears object references.'''
+ try:
+ self._store.close()
+ except AttributeError:
+ pass
+ self._store = None
+
+ def clear(self):
+ '''Removes all keys and values from a store.'''
+ for key in self.keys():
+ del self[key]
+
+ def items(self):
+ '''Returns a list with all key/value pairs in the store.'''
+ return list(self.iteritems())
+
+ def iteritems(self):
+ '''Lazily returns all key/value pairs in a store.'''
+ for k in self:
+ yield (k, self[k])
+
+ def iterkeys(self):
+ '''Lazily returns all keys in a store.'''
+ return self.__iter__()
+
+ def itervalues(self):
+ '''Lazily returns all values in a store.'''
+ for _, v in self.iteritems():
+ yield v
+
+ def keys(self):
+ '''Returns a list with all keys in a store.'''
+ raise NotImplementedError()
+
+ def pop(self, key, *args):
+ '''
+ Removes and returns a value from a store.
+
+ @param args Default to return if key not present.
+ '''
+ if len(args) > 1:
+ raise TypeError('pop expected at most 2 arguments, got ' + repr(
+ 1 + len(args))
+ )
+ try:
+ value = self[key]
+ # Return default if key not in store
+ except KeyError:
+ if args:
+ return args[0]
+ del self[key]
+ return value
+
+ def popitem(self):
+ '''Removes and returns a key, value pair from a store.'''
+ try:
+ k, v = self.iteritems().next()
+ except StopIteration:
+ raise KeyError('Store is empty.')
+ del self[k]
+ return (k, v)
+
+ def setdefault(self, key, default=None):
+ '''
+ Returns the value corresponding to an existing key or sets the
+ key to the default and returns the default.
+
+ @param default Default value (default: None)
+ '''
+ try:
+ return self[key]
+ except KeyError:
+ self[key] = default
+ return default
+
+ def update(self, other=None, **kw):
+ '''
+ Adds to or overwrites the values in this store with values from
+ another store.
+
+ @param other Another store
+ @param kw Additional keys and values to store
+ '''
+ if other is None:
+ pass
+ elif hasattr(other, 'iteritems'):
+ for k, v in other.iteritems():
+ self[k] = v
+ elif hasattr(other, 'keys'):
+ for k in other.keys():
+ self[k] = other[k]
+ else:
+ for k, v in other:
+ self[k] = v
+ if kw:
+ self.update(kw)
+
+ def values(self):
+ '''Returns a list with all values in a store.'''
+ return list(v for _, v in self.iteritems())
+
+
+class Shove(BaseStore):
+
+ '''Common object frontend class.'''
+
+ def __init__(self, store='simple://', cache='simple://', **kw):
+ super(Shove, self).__init__(store, **kw)
+ # Load store
+ self._store = getbackend(store, stores, **kw)
+ # Load cache
+ self._cache = getbackend(cache, caches, **kw)
+ # Buffer for lazy writing and setting for syncing frequency
+ self._buffer, self._sync = dict(), kw.get('sync', 2)
+
+ def __getitem__(self, key):
+ '''Gets a item from shove.'''
+ try:
+ return self._cache[key]
+ except KeyError:
+ # Synchronize cache and store
+ self.sync()
+ value = self._store[key]
+ self._cache[key] = value
+ return value
+
+ def __setitem__(self, key, value):
+ '''Sets an item in shove.'''
+ self._cache[key] = self._buffer[key] = value
+ # When the buffer reaches self._sync entries, write the buffer to the store
+ if len(self._buffer) >= self._sync:
+ self.sync()
+
+ def __delitem__(self, key):
+ '''Deletes an item from shove.'''
+ try:
+ del self._cache[key]
+ except KeyError:
+ pass
+ self.sync()
+ del self._store[key]
+
+ def keys(self):
+ '''Returns a list of keys in shove.'''
+ self.sync()
+ return self._store.keys()
+
+ def sync(self):
+ '''Writes buffer to store.'''
+ for k, v in self._buffer.iteritems():
+ self._store[k] = v
+ self._buffer.clear()
+
+ def close(self):
+ '''Finalizes and closes shove.'''
+ # If close has been called, pass
+ if self._store is not None:
+ try:
+ self.sync()
+ except AttributeError:
+ pass
+ self._store.close()
+ self._store = self._cache = self._buffer = None
+
+
+class FileBase(Base):
+
+ '''Base class for file based storage.'''
+
+ def __init__(self, engine, **kw):
+ super(FileBase, self).__init__(engine, **kw)
+ if engine.startswith('file://'):
+ engine = urllib.url2pathname(engine.split('://')[1])
+ self._dir = engine
+ # Create directory
+ if not os.path.exists(self._dir):
+ self._createdir()
+
+ def __getitem__(self, key):
+ # (per Larry Meyn)
+ try:
+ item = open(self._key_to_file(key), 'rb')
+ data = item.read()
+ item.close()
+ return self.loads(data)
+ except:
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ # (per Larry Meyn)
+ try:
+ item = open(self._key_to_file(key), 'wb')
+ item.write(self.dumps(value))
+ item.close()
+ except (IOError, OSError):
+ raise KeyError(key)
+
+ def __delitem__(self, key):
+ try:
+ os.remove(self._key_to_file(key))
+ except (IOError, OSError):
+ raise KeyError(key)
+
+ def __contains__(self, key):
+ return os.path.exists(self._key_to_file(key))
+
+ def __len__(self):
+ return len(os.listdir(self._dir))
+
+ def _createdir(self):
+ '''Creates the store directory.'''
+ try:
+ os.makedirs(self._dir)
+ except OSError:
+ raise EnvironmentError(
+ 'Cache directory "%s" does not exist and ' \
+ 'could not be created' % self._dir
+ )
+
+ def _key_to_file(self, key):
+ '''Gives the filesystem path for a key.'''
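+        # e.g. key 'http://example.com/feed' -> <dir>/http%3A%2F%2Fexample.com%2Ffeed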
+ return os.path.join(self._dir, urllib.quote_plus(key))
+
+ def keys(self):
+ '''Returns a list of keys in the store.'''
+ return [urllib.unquote_plus(name) for name in os.listdir(self._dir)]
+
+
+class SimpleBase(Base):
+
+ '''Single-process in-memory store base class.'''
+
+ def __init__(self, engine, **kw):
+ super(SimpleBase, self).__init__(engine, **kw)
+ self._store = dict()
+
+ def __getitem__(self, key):
+ try:
+ return self._store[key]
+ except:
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ self._store[key] = value
+
+ def __delitem__(self, key):
+ try:
+ del self._store[key]
+ except:
+ raise KeyError(key)
+
+ def __len__(self):
+ return len(self._store)
+
+ def keys(self):
+ '''Returns a list of keys in the store.'''
+ return self._store.keys()
+
+
+class LRUBase(SimpleBase):
+
+ def __init__(self, engine, **kw):
+ super(LRUBase, self).__init__(engine, **kw)
+ self._max_entries = kw.get('max_entries', 300)
+ self._hits = 0
+ self._misses = 0
+ self._queue = deque()
+ self._refcount = dict()
+
+ def __getitem__(self, key):
+ try:
+ value = super(LRUBase, self).__getitem__(key)
+ self._hits += 1
+ except KeyError:
+ self._misses += 1
+ raise
+ self._housekeep(key)
+ return value
+
+ def __setitem__(self, key, value):
+ super(LRUBase, self).__setitem__(key, value)
+ self._housekeep(key)
+ if len(self._store) > self._max_entries:
+ while len(self._store) > self._max_entries:
+ k = self._queue.popleft()
+ self._refcount[k] -= 1
+ if not self._refcount[k]:
+ super(LRUBase, self).__delitem__(k)
+ del self._refcount[k]
+
+ def _housekeep(self, key):
+ self._queue.append(key)
+ self._refcount[key] = self._refcount.get(key, 0) + 1
+ if len(self._queue) > self._max_entries * 4:
+ self._purge_queue()
+
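+    # Every lookup and store appends the key to self._queue, so a busy key
+    # can appear many times; self._refcount tracks how many copies are in
+    # the queue. Once the queue exceeds four times the entry limit,
+    # _purge_queue compacts it to one (the most recent) copy per key.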
+ def _purge_queue(self):
+ for i in [None] * len(self._queue):
+ k = self._queue.popleft()
+ if self._refcount[k] == 1:
+ self._queue.append(k)
+ else:
+ self._refcount[k] -= 1
+
+
+class DbBase(Base):
+
+ '''Database common base class.'''
+
+ def __init__(self, engine, **kw):
+ super(DbBase, self).__init__(engine, **kw)
+
+ def __delitem__(self, key):
+ self._store.delete(self._store.c.key == key).execute()
+
+ def __len__(self):
+ return self._store.count().execute().fetchone()[0]
+
+
+__all__ = ['Shove']
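+
+
+# A minimal usage sketch (assumed URIs; '/tmp/shove_demo' is a placeholder):
+# Shove pairs a persistent store with a faster cache and writes through a
+# small buffer that is flushed every `sync` sets.
+if __name__ == '__main__':
+    store = Shove('file:///tmp/shove_demo', 'simplelru://', sync=2)
+    store['feed'] = {'title': 'demo'}  # buffered and mirrored in the cache
+    store['hits'] = 1                  # second set reaches sync=2, flushes
+    print store['feed']                # served from the cache
+    store.close()                      # flushes the buffer, closes backends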
diff --git a/lib/shove/cache/__init__.py b/lib/shove/cache/__init__.py
new file mode 100644
index 00000000..40a96afc
--- /dev/null
+++ b/lib/shove/cache/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/lib/shove/cache/db.py b/lib/shove/cache/db.py
new file mode 100644
index 00000000..21fea01f
--- /dev/null
+++ b/lib/shove/cache/db.py
@@ -0,0 +1,117 @@
+# -*- coding: utf-8 -*-
+'''
+Database object cache.
+
+The shove pseudo-URL used for database object caches is the format used by
+SQLAlchemy:
+
+<driver>://<username>:<password>@<host>:<port>/<database>
+
+<driver> is the database engine. The engines currently supported by
+SQLAlchemy are sqlite, mysql, postgres, oracle, mssql, and firebird.
+<username> is the database account user name
+<password> is the database account password
+<host> is the database location
+<port> is the database port
+<database> is the name of the specific database
+
+For more information on specific databases see:
+
+http://www.sqlalchemy.org/docs/dbengine.myt#dbengine_supported
+'''
+
+import time
+import random
+from datetime import datetime
+try:
+ from sqlalchemy import (
+ MetaData, Table, Column, String, Binary, DateTime, select, update,
+ insert, delete,
+ )
+ from shove import DbBase
+except ImportError:
+ raise ImportError('Requires SQLAlchemy >= 0.4')
+
+__all__ = ['DbCache']
+
+
+class DbCache(DbBase):
+
+ '''database cache backend'''
+
+ def __init__(self, engine, **kw):
+ super(DbCache, self).__init__(engine, **kw)
+ # Get table name
+ tablename = kw.get('tablename', 'cache')
+ # Bind metadata
+ self._metadata = MetaData(engine)
+ # Make cache table
+ self._store = Table(tablename, self._metadata,
+ Column('key', String(60), primary_key=True, nullable=False),
+ Column('value', Binary, nullable=False),
+ Column('expires', DateTime, nullable=False),
+ )
+ # Create cache table if it does not exist
+ if not self._store.exists():
+ self._store.create()
+ # Set maximum entries
+ self._max_entries = kw.get('max_entries', 300)
+ # Maximum number of entries to cull per call if cache is full
+ self._maxcull = kw.get('maxcull', 10)
+ # Set timeout
+ self.timeout = kw.get('timeout', 300)
+
+ def __getitem__(self, key):
+ row = select(
+ [self._store.c.value, self._store.c.expires],
+ self._store.c.key == key
+ ).execute().fetchone()
+ if row is not None:
+ # Remove if item expired
+ if row.expires < datetime.now().replace(microsecond=0):
+ del self[key]
+ raise KeyError(key)
+ return self.loads(str(row.value))
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ timeout, value, cache = self.timeout, self.dumps(value), self._store
+ # Cull if too many items
+ if len(self) >= self._max_entries:
+ self._cull()
+ # Generate expiration time
+ expires = datetime.fromtimestamp(
+ time.time() + timeout
+ ).replace(microsecond=0)
+ # Update database if key already present
+ if key in self:
+ update(
+ cache,
+ cache.c.key == key,
+ dict(value=value, expires=expires),
+ ).execute()
+ # Insert new key if key not present
+ else:
+ insert(
+ cache, dict(key=key, value=value, expires=expires)
+ ).execute()
+
+ def _cull(self):
+ '''Remove items in cache to make more room.'''
+ cache, maxcull = self._store, self._maxcull
+ # Remove items that have timed out
+ now = datetime.now().replace(microsecond=0)
+ delete(cache, cache.c.expires < now).execute()
+ # Remove any items over the maximum allowed number in the cache
+ if len(self) >= self._max_entries:
+ # Upper limit for key query
+ ul = maxcull * 2
+ # Get list of keys
+ keys = [
+ i[0] for i in select(
+ [cache.c.key], limit=ul
+ ).execute().fetchall()
+ ]
+ # Get some keys at random
+ delkeys = list(random.choice(keys) for i in xrange(maxcull))
+ delete(cache, cache.c.key.in_(delkeys)).execute()
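+
+
+# Illustrative configuration (assumed URL; requires SQLAlchemy and a DB-API
+# driver): a DbCache is normally reached through the Shove frontend, e.g.
+#
+#     from shove import Shove
+#     store = Shove('simple://', 'sqlite:///:memory:',
+#                   max_entries=100, timeout=60)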
diff --git a/lib/shove/cache/file.py b/lib/shove/cache/file.py
new file mode 100644
index 00000000..7b9a4ae7
--- /dev/null
+++ b/lib/shove/cache/file.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+'''
+File-based cache
+
+shove's pseudo-URL for file caches follows the form:
+
+file://<path>
+
+Where the path is a URL path to a directory on a local filesystem.
+Alternatively, a native pathname to the directory can be passed as the 'engine'
+argument.
+'''
+
+import time
+
+from shove import FileBase
+from shove.cache.simple import SimpleCache
+
+
+class FileCache(FileBase, SimpleCache):
+
+ '''File-based cache backend'''
+
+ def __init__(self, engine, **kw):
+ super(FileCache, self).__init__(engine, **kw)
+
+ def __getitem__(self, key):
+ try:
+ exp, value = super(FileCache, self).__getitem__(key)
+ # Remove item if time has expired.
+ if exp < time.time():
+ del self[key]
+ raise KeyError(key)
+ return value
+ except:
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ if len(self) >= self._max_entries:
+ self._cull()
+ super(FileCache, self).__setitem__(
+ key, (time.time() + self.timeout, value)
+ )
+
+
+__all__ = ['FileCache']
diff --git a/lib/shove/cache/filelru.py b/lib/shove/cache/filelru.py
new file mode 100644
index 00000000..de076613
--- /dev/null
+++ b/lib/shove/cache/filelru.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+'''
+File-based LRU cache
+
+shove's pseudo-URL for file caches follows the form:
+
+file://<path>
+
+Where the path is a URL path to a directory on a local filesystem.
+Alternatively, a native pathname to the directory can be passed as the 'engine'
+argument.
+'''
+
+from shove import FileBase
+from shove.cache.simplelru import SimpleLRUCache
+
+
+class FileLRUCache(FileBase, SimpleLRUCache):
+
+ '''File-based LRU cache backend'''
+
+
+__all__ = ['FileLRUCache']
diff --git a/lib/shove/cache/memcached.py b/lib/shove/cache/memcached.py
new file mode 100644
index 00000000..aedfe282
--- /dev/null
+++ b/lib/shove/cache/memcached.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+'''
+"memcached" cache.
+
+The shove pseudo-URL for a memcache cache is:
+
+memcache://<memcache_server>
+'''
+
+try:
+ import memcache
+except ImportError:
+ raise ImportError("Memcache cache requires the 'memcache' library")
+
+from shove import Base
+
+
+class MemCached(Base):
+
+ '''Memcached cache backend'''
+
+ def __init__(self, engine, **kw):
+ super(MemCached, self).__init__(engine, **kw)
+ if engine.startswith('memcache://'):
+ engine = engine.split('://')[1]
+ self._store = memcache.Client(engine.split(';'))
+ # Set timeout
+ self.timeout = kw.get('timeout', 300)
+
+ def __getitem__(self, key):
+ value = self._store.get(key)
+ if value is None:
+ raise KeyError(key)
+ return self.loads(value)
+
+ def __setitem__(self, key, value):
+ self._store.set(key, self.dumps(value), self.timeout)
+
+ def __delitem__(self, key):
+ self._store.delete(key)
+
+
+__all__ = ['MemCached']
diff --git a/lib/shove/cache/memlru.py b/lib/shove/cache/memlru.py
new file mode 100644
index 00000000..7db61ec5
--- /dev/null
+++ b/lib/shove/cache/memlru.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+'''
+Thread-safe in-memory cache using LRU.
+
+The shove pseudo-URL for a memory cache is:
+
+memlru://
+'''
+
+import copy
+import threading
+
+from shove import synchronized
+from shove.cache.simplelru import SimpleLRUCache
+
+
+class MemoryLRUCache(SimpleLRUCache):
+
+ '''Thread-safe in-memory cache backend using LRU.'''
+
+ def __init__(self, engine, **kw):
+ super(MemoryLRUCache, self).__init__(engine, **kw)
+ self._lock = threading.Condition()
+
+ @synchronized
+ def __setitem__(self, key, value):
+ super(MemoryLRUCache, self).__setitem__(key, value)
+
+ @synchronized
+ def __getitem__(self, key):
+ return copy.deepcopy(super(MemoryLRUCache, self).__getitem__(key))
+
+ @synchronized
+ def __delitem__(self, key):
+ super(MemoryLRUCache, self).__delitem__(key)
+
+
+__all__ = ['MemoryLRUCache']
diff --git a/lib/shove/cache/memory.py b/lib/shove/cache/memory.py
new file mode 100644
index 00000000..e70f9bbb
--- /dev/null
+++ b/lib/shove/cache/memory.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+'''
+Thread-safe in-memory cache.
+
+The shove pseudo-URL for a memory cache is:
+
+memory://
+'''
+
+import copy
+import threading
+
+from shove import synchronized
+from shove.cache.simple import SimpleCache
+
+
+class MemoryCache(SimpleCache):
+
+ '''Thread-safe in-memory cache backend.'''
+
+ def __init__(self, engine, **kw):
+ super(MemoryCache, self).__init__(engine, **kw)
+ self._lock = threading.Condition()
+
+ @synchronized
+ def __setitem__(self, key, value):
+ super(MemoryCache, self).__setitem__(key, value)
+
+ @synchronized
+ def __getitem__(self, key):
+ return copy.deepcopy(super(MemoryCache, self).__getitem__(key))
+
+ @synchronized
+ def __delitem__(self, key):
+ super(MemoryCache, self).__delitem__(key)
+
+
+__all__ = ['MemoryCache']
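
Note the deepcopy on reads: callers receive an independent copy, so mutating
a retrieved value never alters the cached one. A quick illustration:

    from shove.cache.memory import MemoryCache

    cache = MemoryCache('memory://')
    cache['box'] = {'n': 1}
    item = cache['box']       # deep copy, not a reference into the cache
    item['n'] = 99
    print cache['box']['n']   # still 1
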
diff --git a/lib/shove/cache/redisdb.py b/lib/shove/cache/redisdb.py
new file mode 100644
index 00000000..c53536c1
--- /dev/null
+++ b/lib/shove/cache/redisdb.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+'''
+Redis-based object cache
+
+The shove pseudo-URL for a redis cache is:
+
+redis://<host>:<port>/<db>
+'''
+
+import urlparse
+
+try:
+ import redis
+except ImportError:
+    raise ImportError('This cache requires the redis library')
+
+from shove import Base
+
+
+class RedisCache(Base):
+
+ '''Redis cache backend'''
+
+ init = 'redis://'
+
+ def __init__(self, engine, **kw):
+ super(RedisCache, self).__init__(engine, **kw)
+ spliturl = urlparse.urlsplit(engine)
+ host, port = spliturl[1].split(':')
+ db = spliturl[2].replace('/', '')
+ self._store = redis.Redis(host, int(port), db)
+ # Set timeout
+ self.timeout = kw.get('timeout', 300)
+
+ def __getitem__(self, key):
+ return self.loads(self._store[key])
+
+ def __setitem__(self, key, value):
+ self._store.setex(key, self.dumps(value), self.timeout)
+
+ def __delitem__(self, key):
+ self._store.delete(key)
+
+
+__all__ = ['RedisCache']
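
A usage sketch, assuming a Redis server on localhost:6379; writes go through
SETEX, so every entry carries the configured TTL:

    from shove.cache.redisdb import RedisCache

    cache = RedisCache('redis://localhost:6379/0', timeout=30)
    cache['session'] = {'user': 'anon'}   # stored with a 30-second TTL
    print cache['session']
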
diff --git a/lib/shove/cache/simple.py b/lib/shove/cache/simple.py
new file mode 100644
index 00000000..6855603e
--- /dev/null
+++ b/lib/shove/cache/simple.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+'''
+Single-process in-memory cache.
+
+The shove pseudo-URL for a simple cache is:
+
+simple://
+'''
+
+import time
+import random
+
+from shove import SimpleBase
+
+
+class SimpleCache(SimpleBase):
+
+ '''Single-process in-memory cache.'''
+
+ def __init__(self, engine, **kw):
+ super(SimpleCache, self).__init__(engine, **kw)
+ # Get random seed
+ random.seed()
+ # Set maximum number of items to cull if over max
+ self._maxcull = kw.get('maxcull', 10)
+ # Set max entries
+ self._max_entries = kw.get('max_entries', 300)
+ # Set timeout
+ self.timeout = kw.get('timeout', 300)
+
+ def __getitem__(self, key):
+ exp, value = super(SimpleCache, self).__getitem__(key)
+ # Delete if item timed out.
+ if exp < time.time():
+ super(SimpleCache, self).__delitem__(key)
+ raise KeyError(key)
+ return value
+
+ def __setitem__(self, key, value):
+ # Cull values if over max # of entries
+ if len(self) >= self._max_entries:
+ self._cull()
+ # Set expiration time and value
+ exp = time.time() + self.timeout
+ super(SimpleCache, self).__setitem__(key, (exp, value))
+
+ def _cull(self):
+ '''Remove items in cache to make room.'''
+ num, maxcull = 0, self._maxcull
+ # Cull number of items allowed (set by self._maxcull)
+ for key in self.keys():
+ # Remove only maximum # of items allowed by maxcull
+ if num <= maxcull:
+ # Remove items if expired
+ try:
+ self[key]
+ except KeyError:
+ num += 1
+ else:
+ break
+ # Remove any additional items up to max # of items allowed by maxcull
+ while len(self) >= self._max_entries and num <= maxcull:
+ # Cull remainder of allowed quota at random
+ del self[random.choice(self.keys())]
+ num += 1
+
+
+__all__ = ['SimpleCache']
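
Expiry is checked lazily on access, as this sketch (mirroring the test suite)
shows:

    import time
    from shove.cache.simple import SimpleCache

    cache = SimpleCache('simple://', timeout=1, max_entries=10)
    cache['k'] = 'v'
    time.sleep(2)
    try:
        cache['k']            # expired entry is deleted, KeyError raised
    except KeyError:
        print 'expired on access'
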
diff --git a/lib/shove/cache/simplelru.py b/lib/shove/cache/simplelru.py
new file mode 100644
index 00000000..fbb6e446
--- /dev/null
+++ b/lib/shove/cache/simplelru.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+'''
+Single-process in-memory LRU cache.
+
+The shove pseudo-URL for a simple LRU cache is:
+
+simplelru://
+'''
+
+from shove import LRUBase
+
+
+class SimpleLRUCache(LRUBase):
+
+    '''In-memory cache that purges based on the least recently used item.'''
+
+
+__all__ = ['SimpleLRUCache']
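
An eviction sketch, assuming LRUBase accepts the max_entries keyword like the
other backends:

    from shove.cache.simplelru import SimpleLRUCache

    cache = SimpleLRUCache('simplelru://', max_entries=2)
    cache['a'] = 1
    cache['b'] = 2
    cache['a']            # touch 'a'; 'b' is now least recently used
    cache['c'] = 3        # a third key pushes the cache over its limit
    print 'b' in cache    # False once 'b' has been evicted
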
diff --git a/lib/shove/store/__init__.py b/lib/shove/store/__init__.py
new file mode 100644
index 00000000..5d639a07
--- /dev/null
+++ b/lib/shove/store/__init__.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+
+from urllib import url2pathname
+from shove.store.simple import SimpleStore
+
+
+class ClientStore(SimpleStore):
+
+    '''Base class for stores accessed through a client connection; values
+    are pickled before storage.'''
+
+ def __init__(self, engine, **kw):
+ super(ClientStore, self).__init__(engine, **kw)
+ if engine.startswith(self.init):
+ self._engine = url2pathname(engine.split('://')[1])
+
+ def __getitem__(self, key):
+ return self.loads(super(ClientStore, self).__getitem__(key))
+
+ def __setitem__(self, key, value):
+ super(ClientStore, self).__setitem__(key, self.dumps(value))
+
+
+class SyncStore(ClientStore):
+
+ '''Base class for stores where updates have to be committed.'''
+
+ def __setitem__(self, key, value):
+ super(SyncStore, self).__setitem__(key, value)
+ try:
+ self.sync()
+ except AttributeError:
+ pass
+
+ def __delitem__(self, key):
+ super(SyncStore, self).__delitem__(key)
+ try:
+ self.sync()
+ except AttributeError:
+ pass
+
+
+__all__ = [
+ 'bsdb', 'db', 'dbm', 'durusdb', 'file', 'ftp', 'memory', 's3', 'simple',
+ 'svn', 'zodb', 'redisdb', 'hdf5db', 'leveldbstore', 'cassandra',
+]
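
To illustrate the contract, here is a hypothetical minimal backend built on
SyncStore (ToyStore is not part of shove; it only shows where _store, init,
and sync plug in):

    from shove.store import SyncStore

    class ToyStore(SyncStore):

        '''Hypothetical dict-backed store, for illustration only.'''

        init = 'toy://'

        def __init__(self, engine, **kw):
            super(ToyStore, self).__init__(engine, **kw)
            self._store = {}              # backing mapping
            self.sync = lambda: None      # nothing to flush for a dict
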
diff --git a/lib/shove/store/bsdb.py b/lib/shove/store/bsdb.py
new file mode 100644
index 00000000..d1f9c6dc
--- /dev/null
+++ b/lib/shove/store/bsdb.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+'''
+Berkeley Source Database Store.
+
+shove's pseudo-URL for BSDDB stores follows the form:
+
+bsddb://<path>
+
+Where the path is a URL path to a Berkeley database. Alternatively, the native
+pathname to a Berkeley database can be passed as the 'engine' parameter.
+'''
+try:
+ import bsddb
+except ImportError:
+ raise ImportError('requires bsddb library')
+
+import threading
+
+from shove import synchronized
+from shove.store import SyncStore
+
+
+class BsdStore(SyncStore):
+
+ '''Class for Berkeley Source Database Store.'''
+
+ init = 'bsddb://'
+
+ def __init__(self, engine, **kw):
+ super(BsdStore, self).__init__(engine, **kw)
+ self._store = bsddb.hashopen(self._engine)
+ self._lock = threading.Condition()
+ self.sync = self._store.sync
+
+ @synchronized
+ def __getitem__(self, key):
+ return super(BsdStore, self).__getitem__(key)
+
+ @synchronized
+ def __setitem__(self, key, value):
+ super(BsdStore, self).__setitem__(key, value)
+
+ @synchronized
+ def __delitem__(self, key):
+ super(BsdStore, self).__delitem__(key)
+
+
+__all__ = ['BsdStore']
diff --git a/lib/shove/store/cassandra.py b/lib/shove/store/cassandra.py
new file mode 100644
index 00000000..1f6532ee
--- /dev/null
+++ b/lib/shove/store/cassandra.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+'''
+Cassandra-based object store
+
+The shove pseudo-URL for a cassandra-based store is:
+
+cassandra://<host>:<port>/<keyspace>/<columnFamily>
+'''
+
+import urlparse
+
+try:
+ import pycassa
+except ImportError:
+ raise ImportError('This store requires the pycassa library')
+
+from shove import BaseStore
+
+
+class CassandraStore(BaseStore):
+
+ '''Cassandra based store'''
+
+ init = 'cassandra://'
+
+ def __init__(self, engine, **kw):
+ super(CassandraStore, self).__init__(engine, **kw)
+ spliturl = urlparse.urlsplit(engine)
+ _, keyspace, column_family = spliturl[2].split('/')
+ try:
+ self._pool = pycassa.connect(keyspace, [spliturl[1]])
+ self._store = pycassa.ColumnFamily(self._pool, column_family)
+ except pycassa.InvalidRequestException:
+ from pycassa.system_manager import SystemManager
+ system_manager = SystemManager(spliturl[1])
+ system_manager.create_keyspace(
+ keyspace,
+ pycassa.system_manager.SIMPLE_STRATEGY,
+ {'replication_factor': str(kw.get('replication', 1))}
+ )
+ system_manager.create_column_family(keyspace, column_family)
+ self._pool = pycassa.connect(keyspace, [spliturl[1]])
+ self._store = pycassa.ColumnFamily(self._pool, column_family)
+
+ def __getitem__(self, key):
+ try:
+ item = self._store.get(key).get(key)
+ if item is not None:
+ return self.loads(item)
+ raise KeyError(key)
+ except pycassa.NotFoundException:
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ self._store.insert(key, dict(key=self.dumps(value)))
+
+ def __delitem__(self, key):
+ # beware eventual consistency
+ try:
+ self._store.remove(key)
+ except pycassa.NotFoundException:
+ raise KeyError(key)
+
+ def clear(self):
+ # beware eventual consistency
+ self._store.truncate()
+
+ def keys(self):
+ return list(i[0] for i in self._store.get_range())
+
+
+__all__ = ['CassandraStore']
diff --git a/lib/shove/store/db.py b/lib/shove/store/db.py
new file mode 100644
index 00000000..da83e53d
--- /dev/null
+++ b/lib/shove/store/db.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+'''
+Database object store.
+
+The shove pseudo-URL used for database object stores is the format used by
+SQLAlchemy:
+
+<driver>://<username>:<password>@<host>:<port>/<database>
+
+<driver> is the database engine. The engines currently supported by
+SQLAlchemy are sqlite, mysql, postgres, oracle, mssql, and firebird.
+<username> is the database account user name
+<password> is the database account password
+<host> is the database location
+<port> is the database port
+<database> is the name of the specific database
+
+For more information on specific databases see:
+
+http://www.sqlalchemy.org/docs/dbengine.myt#dbengine_supported
+'''
+
+try:
+ from sqlalchemy import MetaData, Table, Column, String, Binary, select
+ from shove import BaseStore, DbBase
+except ImportError:
+ raise ImportError('Requires SQLAlchemy >= 0.4')
+
+
+class DbStore(BaseStore, DbBase):
+
+ '''Database cache backend.'''
+
+ def __init__(self, engine, **kw):
+ super(DbStore, self).__init__(engine, **kw)
+ # Get tablename
+ tablename = kw.get('tablename', 'store')
+ # Bind metadata
+ self._metadata = MetaData(engine)
+ # Make store table
+ self._store = Table(tablename, self._metadata,
+ Column('key', String(255), primary_key=True, nullable=False),
+ Column('value', Binary, nullable=False),
+ )
+ # Create store table if it does not exist
+ if not self._store.exists():
+ self._store.create()
+
+ def __getitem__(self, key):
+ row = select(
+ [self._store.c.value], self._store.c.key == key,
+ ).execute().fetchone()
+ if row is not None:
+ return self.loads(str(row.value))
+ raise KeyError(key)
+
+ def __setitem__(self, k, v):
+ v, store = self.dumps(v), self._store
+ # Update database if key already present
+ if k in self:
+ store.update(store.c.key == k).execute(value=v)
+ # Insert new key if key not present
+ else:
+ store.insert().execute(key=k, value=v)
+
+ def keys(self):
+ '''Returns a list of keys in the store.'''
+ return list(i[0] for i in select(
+ [self._store.c.key]
+ ).execute().fetchall())
+
+
+__all__ = ['DbStore']
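
A usage sketch against an in-memory SQLite database; the tablename keyword is
optional and defaults to 'store':

    from shove.store.db import DbStore

    store = DbStore('sqlite:///', tablename='kv')  # table created on demand
    store['pi'] = 3.14159
    print store.keys()    # ['pi']
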
diff --git a/lib/shove/store/dbm.py b/lib/shove/store/dbm.py
new file mode 100644
index 00000000..323d2484
--- /dev/null
+++ b/lib/shove/store/dbm.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+'''
+DBM Database Store.
+
+shove's pseudo-URL for DBM stores follows the form:
+
+dbm://<path>
+
+Where <path> is a URL path to a DBM database. Alternatively, the native
+pathname to a DBM database can be passed as the 'engine' parameter.
+'''
+
+import anydbm
+
+from shove.store import SyncStore
+
+
+class DbmStore(SyncStore):
+
+ '''Class for variants of the DBM database.'''
+
+ init = 'dbm://'
+
+ def __init__(self, engine, **kw):
+ super(DbmStore, self).__init__(engine, **kw)
+ self._store = anydbm.open(self._engine, 'c')
+ try:
+ self.sync = self._store.sync
+ except AttributeError:
+ pass
+
+
+__all__ = ['DbmStore']
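
A usage sketch; note that some dbm variants append their own suffix to the
filename (the test suite removes 'test.dbm.db' for exactly this reason):

    from shove import Shove

    store = Shove('dbm://test.dbm')
    store['nums'] = [1, 2, 3]
    store.sync()    # harmless where the underlying dbm lacks a sync method
    store.close()
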
diff --git a/lib/shove/store/durusdb.py b/lib/shove/store/durusdb.py
new file mode 100644
index 00000000..8e27670e
--- /dev/null
+++ b/lib/shove/store/durusdb.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+'''
+Durus object database frontend.
+
+shove's pseudo-URL for Durus stores follows the form:
+
+durus://<path>
+
+Where the path is a URL path to a durus FileStorage database. Alternatively, a
+native pathname to a durus database can be passed as the 'engine' parameter.
+'''
+
+try:
+ from durus.connection import Connection
+ from durus.file_storage import FileStorage
+except ImportError:
+ raise ImportError('Requires Durus library')
+
+from shove.store import SyncStore
+
+
+class DurusStore(SyncStore):
+
+ '''Class for Durus object database frontend.'''
+
+ init = 'durus://'
+
+ def __init__(self, engine, **kw):
+ super(DurusStore, self).__init__(engine, **kw)
+ self._db = FileStorage(self._engine)
+ self._connection = Connection(self._db)
+ self.sync = self._connection.commit
+ self._store = self._connection.get_root()
+
+ def close(self):
+ '''Closes all open storage and connections.'''
+ self.sync()
+ self._db.close()
+ super(DurusStore, self).close()
+
+
+__all__ = ['DurusStore']
diff --git a/lib/shove/store/file.py b/lib/shove/store/file.py
new file mode 100644
index 00000000..e66e9c4f
--- /dev/null
+++ b/lib/shove/store/file.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+'''
+Filesystem-based object store
+
+shove's pseudo-URL for filesystem-based stores follows the form:
+
+file://<path>
+
+Where the path is a URL path to a directory on a local filesystem.
+Alternatively, a native pathname to the directory can be passed as the 'engine'
+argument.
+'''
+
+from shove import BaseStore, FileBase
+
+
+class FileStore(FileBase, BaseStore):
+
+ '''File-based store.'''
+
+ def __init__(self, engine, **kw):
+ super(FileStore, self).__init__(engine, **kw)
+
+
+__all__ = ['FileStore']
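
A usage sketch; each key becomes one file under the target directory:

    from shove import Shove

    store = Shove('file://mydata')    # ./mydata holds one file per key
    store['config'] = {'debug': True}
    store.close()
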
diff --git a/lib/shove/store/ftp.py b/lib/shove/store/ftp.py
new file mode 100644
index 00000000..c2d4aec6
--- /dev/null
+++ b/lib/shove/store/ftp.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+'''
+FTP-accessed stores
+
+shove's URL for FTP accessed stores follows the standard form for FTP URLs
+defined in RFC-1738:
+
+ftp://<user>:<password>@<host>:<port>/<path>
+'''
+
+import urlparse
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+from ftplib import FTP, error_perm
+
+from shove import BaseStore
+
+
+class FtpStore(BaseStore):
+
+ def __init__(self, engine, **kw):
+ super(FtpStore, self).__init__(engine, **kw)
+ user = kw.get('user', 'anonymous')
+ password = kw.get('password', '')
+ spliturl = urlparse.urlsplit(engine)
+ # Set URL, path, and strip 'ftp://' off
+ base, path = spliturl[1], spliturl[2] + '/'
+ if '@' in base:
+ auth, base = base.split('@')
+ user, password = auth.split(':')
+ self._store = FTP(base, user, password)
+        # Change to remote path if it exists
+ try:
+ self._store.cwd(path)
+ except error_perm:
+ self._makedir(path)
+ self._base, self._user, self._password = base, user, password
+        self._updated, self._keys = True, None
+
+ def __getitem__(self, key):
+ try:
+ local = StringIO()
+ # Download item
+ self._store.retrbinary('RETR %s' % key, local.write)
+ self._updated = False
+ return self.loads(local.getvalue())
+ except:
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ local = StringIO(self.dumps(value))
+ self._store.storbinary('STOR %s' % key, local)
+ self._updated = True
+
+ def __delitem__(self, key):
+ try:
+ self._store.delete(key)
+ self._updated = True
+ except:
+ raise KeyError(key)
+
+ def _makedir(self, path):
+ '''Makes remote paths on an FTP server.'''
+ paths = list(reversed([i for i in path.split('/') if i != '']))
+ while paths:
+ tpath = paths.pop()
+ self._store.mkd(tpath)
+ self._store.cwd(tpath)
+
+ def keys(self):
+ '''Returns a list of keys in a store.'''
+ if self._updated or self._keys is None:
+ rlist, nlist = list(), list()
+ # Remote directory listing
+ self._store.retrlines('LIST -a', rlist.append)
+ for rlisting in rlist:
+ # Split remote file based on whitespace
+ rfile = rlisting.split()
+ # Append tuple of remote item type & name
+ if rfile[-1] not in ('.', '..') and rfile[0].startswith('-'):
+ nlist.append(rfile[-1])
+ self._keys = nlist
+ return self._keys
+
+
+__all__ = ['FtpStore']
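
A usage sketch with a hypothetical host and credentials (any reachable FTP
account works; credentials may also be passed as 'user' and 'password'
keywords):

    from shove import Shove

    store = Shove('ftp://user:secret@ftp.example.com/shove/', compress=True)
    store['report'] = {'rows': 10}
    print store.keys()
    store.close()
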
diff --git a/lib/shove/store/hdf5.py b/lib/shove/store/hdf5.py
new file mode 100644
index 00000000..a9b618e5
--- /dev/null
+++ b/lib/shove/store/hdf5.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+'''
+HDF5 Database Store.
+
+shove's pseudo-URL for HDF5 stores follows the form:
+
+hdf5://<path>/<group>
+
+Where <path> is a URL path to an HDF5 database. Alternatively, the native
+pathname to an HDF5 database can be passed as the 'engine' parameter.
+<group> is the name of the group within the database.
+'''
+
+try:
+ import h5py
+except ImportError:
+ raise ImportError('This store requires h5py library')
+
+from shove.store import ClientStore
+
+
+class HDF5Store(ClientStore):
+
+    '''HDF5-based store'''
+
+ init = 'hdf5://'
+
+ def __init__(self, engine, **kw):
+ super(HDF5Store, self).__init__(engine, **kw)
+        # split off only the trailing group name (maxsplit=1), so paths
+        # containing '/' still unpack into exactly two parts
+        engine, group = self._engine.rsplit('/', 1)
+ self._store = h5py.File(engine).require_group(group).attrs
+
+
+__all__ = ['HDF5Store']
diff --git a/lib/shove/store/leveldbstore.py b/lib/shove/store/leveldbstore.py
new file mode 100644
index 00000000..ca73a494
--- /dev/null
+++ b/lib/shove/store/leveldbstore.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+'''
+LevelDB Database Store.
+
+shove's pseudo-URL for LevelDB stores follows the form:
+
+leveldb://<path>
+
+Where <path> is a URL path to a LevelDB database. Alternatively, the native
+pathname to a LevelDB database can be passed as the 'engine' parameter.
+'''
+
+try:
+ import leveldb
+except ImportError:
+ raise ImportError('This store requires py-leveldb library')
+
+from shove.store import ClientStore
+
+
+class LevelDBStore(ClientStore):
+
+ '''LevelDB based store'''
+
+ init = 'leveldb://'
+
+ def __init__(self, engine, **kw):
+ super(LevelDBStore, self).__init__(engine, **kw)
+ self._store = leveldb.LevelDB(self._engine)
+
+ def __getitem__(self, key):
+ item = self.loads(self._store.Get(key))
+ if item is not None:
+ return item
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ self._store.Put(key, self.dumps(value))
+
+ def __delitem__(self, key):
+ self._store.Delete(key)
+
+ def keys(self):
+ return list(k for k in self._store.RangeIter(include_value=False))
+
+
+__all__ = ['LevelDBStore']
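
A usage sketch; LevelDB creates the database directory on first open:

    from shove import Shove

    store = Shove('leveldb://test', compress=True)
    store['max'] = 3
    print store['max']
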
diff --git a/lib/shove/store/memory.py b/lib/shove/store/memory.py
new file mode 100644
index 00000000..525ae69e
--- /dev/null
+++ b/lib/shove/store/memory.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+'''
+Thread-safe in-memory store.
+
+The shove pseudo-URL for a memory store is:
+
+memory://
+'''
+
+import copy
+import threading
+
+from shove import synchronized
+from shove.store.simple import SimpleStore
+
+
+class MemoryStore(SimpleStore):
+
+ '''Thread-safe in-memory store.'''
+
+ def __init__(self, engine, **kw):
+ super(MemoryStore, self).__init__(engine, **kw)
+ self._lock = threading.Condition()
+
+ @synchronized
+ def __getitem__(self, key):
+ return copy.deepcopy(super(MemoryStore, self).__getitem__(key))
+
+ @synchronized
+ def __setitem__(self, key, value):
+ super(MemoryStore, self).__setitem__(key, value)
+
+ @synchronized
+ def __delitem__(self, key):
+ super(MemoryStore, self).__delitem__(key)
+
+
+__all__ = ['MemoryStore']
diff --git a/lib/shove/store/redisdb.py b/lib/shove/store/redisdb.py
new file mode 100644
index 00000000..67fa2ebd
--- /dev/null
+++ b/lib/shove/store/redisdb.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+'''
+Redis-based object store
+
+The shove pseudo-URL for a redis-based store is:
+
+redis://<host>:<port>/<db>
+'''
+
+import urlparse
+
+try:
+ import redis
+except ImportError:
+ raise ImportError('This store requires the redis library')
+
+from shove.store import ClientStore
+
+
+class RedisStore(ClientStore):
+
+ '''Redis based store'''
+
+ init = 'redis://'
+
+ def __init__(self, engine, **kw):
+ super(RedisStore, self).__init__(engine, **kw)
+ spliturl = urlparse.urlsplit(engine)
+ host, port = spliturl[1].split(':')
+ db = spliturl[2].replace('/', '')
+ self._store = redis.Redis(host, int(port), db)
+
+ def __contains__(self, key):
+ return self._store.exists(key)
+
+ def clear(self):
+ self._store.flushdb()
+
+ def keys(self):
+ return self._store.keys()
+
+    def setdefault(self, key, default=None):
+        # setnx writes only when the key is absent, matching dict semantics;
+        # getset would overwrite an existing value
+        self._store.setnx(key, self.dumps(default))
+        return self[key]
+
+    def update(self, other=None, **kw):
+        args = other if other is not None else kw
+        # pickle values so later reads through __getitem__ can unpickle them
+        self._store.mset(dict((k, self.dumps(v)) for k, v in args.items()))
+
+
+__all__ = ['RedisStore']
diff --git a/lib/shove/store/s3.py b/lib/shove/store/s3.py
new file mode 100644
index 00000000..dbf12f21
--- /dev/null
+++ b/lib/shove/store/s3.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+'''
+S3-accessed stores
+
+shove's pseudo-URL for stores on Amazon's S3 web service follows this
+form:
+
+s3://<key>:<secret>@<bucket>
+
+<key> is the Access Key issued by Amazon
+<secret> is the Secret Access Key issued by Amazon
+<bucket> is the name of the bucket accessed through the S3 service
+'''
+
+try:
+ from boto.s3.connection import S3Connection
+ from boto.s3.key import Key
+except ImportError:
+ raise ImportError('Requires boto library')
+
+from shove import BaseStore
+
+
+class S3Store(BaseStore):
+
+ def __init__(self, engine=None, **kw):
+ super(S3Store, self).__init__(engine, **kw)
+ # key = Access Key, secret=Secret Access Key, bucket=bucket name
+ key, secret, bucket = kw.get('key'), kw.get('secret'), kw.get('bucket')
+ if engine is not None:
+ auth, bucket = engine.split('://')[1].split('@')
+ key, secret = auth.split(':')
+ # kw 'secure' = (True or False, use HTTPS)
+ self._conn = S3Connection(key, secret, kw.get('secure', False))
+ buckets = self._conn.get_all_buckets()
+ # Use bucket if it exists
+ for b in buckets:
+ if b.name == bucket:
+ self._store = b
+ break
+ # Create bucket if it doesn't exist
+ else:
+ self._store = self._conn.create_bucket(bucket)
+        # Set bucket permission ('private', 'public-read',
+        # 'public-read-write', 'authenticated-read')
+ self._store.set_acl(kw.get('acl', 'private'))
+ # Updated flag used for avoiding network calls
+ self._updated, self._keys = True, None
+
+ def __getitem__(self, key):
+ rkey = self._store.lookup(key)
+ if rkey is None:
+ raise KeyError(key)
+ # Fetch string
+ value = self.loads(rkey.get_contents_as_string())
+ # Flag that the store has not been updated
+ self._updated = False
+ return value
+
+ def __setitem__(self, key, value):
+ rkey = Key(self._store)
+ rkey.key = key
+ rkey.set_contents_from_string(self.dumps(value))
+ # Flag that the store has been updated
+ self._updated = True
+
+ def __delitem__(self, key):
+ try:
+ self._store.delete_key(key)
+ # Flag that the store has been updated
+ self._updated = True
+ except:
+ raise KeyError(key)
+
+ def keys(self):
+ '''Returns a list of keys in the store.'''
+ return list(i[0] for i in self.items())
+
+ def items(self):
+ '''Returns a list of items from the store.'''
+ if self._updated or self._keys is None:
+ self._keys = self._store.get_all_keys()
+ return list((str(k.key), k) for k in self._keys)
+
+ def iteritems(self):
+ '''Lazily returns items from the store.'''
+        # items() yields (name, Key) tuples, so delegate to it directly
+        for item in self.items():
+            yield item
+
+
+__all__ = ['S3Store']
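
A usage sketch with hypothetical credentials; key, secret, and bucket may be
passed as keywords instead of embedded in the pseudo-URL:

    from shove.store.s3 import S3Store

    # acl defaults to 'private'; secure=True switches the connection to HTTPS
    store = S3Store(key='<access-key>', secret='<secret-key>',
                    bucket='my-shove-bucket')
    store['snapshot'] = {'ok': True}
    print store.keys()
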
diff --git a/lib/shove/store/simple.py b/lib/shove/store/simple.py
new file mode 100644
index 00000000..8f7ebb33
--- /dev/null
+++ b/lib/shove/store/simple.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+'''
+Single-process in-memory store.
+
+The shove psuedo-URL for a simple store is:
+
+simple://
+'''
+
+from shove import BaseStore, SimpleBase
+
+
+class SimpleStore(SimpleBase, BaseStore):
+
+ '''Single-process in-memory store.'''
+
+ def __init__(self, engine, **kw):
+ super(SimpleStore, self).__init__(engine, **kw)
+
+
+__all__ = ['SimpleStore']
diff --git a/lib/shove/store/svn.py b/lib/shove/store/svn.py
new file mode 100644
index 00000000..5bb8c33e
--- /dev/null
+++ b/lib/shove/store/svn.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+'''
+Subversion-managed store.
+
+The shove pseudo-URL used for a password-protected subversion store is:
+
+svn://<username>:<password>@<path>?url=<url>
+
+or for non-password protected repositories:
+
+svn://<path>?url=<url>
+
+<path> is the local repository copy
+<url> is the URL of the subversion repository
+'''
+
+import os
+import urllib
+import threading
+
+try:
+ import pysvn
+except ImportError:
+ raise ImportError('Requires Python Subversion library')
+
+from shove import BaseStore, synchronized
+
+
+class SvnStore(BaseStore):
+
+ '''Class for subversion store.'''
+
+ def __init__(self, engine=None, **kw):
+ super(SvnStore, self).__init__(engine, **kw)
+ # Get path, url from keywords if used
+ path, url = kw.get('path'), kw.get('url')
+        # Get username, password from keywords if used
+        user, password = kw.get('user'), kw.get('password')
+        # Process pseudo-URL if used
+ if engine is not None:
+ path, query = engine.split('n://')[1].split('?')
+ url = query.split('=')[1]
+ # Check for username, password
+ if '@' in path:
+ auth, path = path.split('@')
+ user, password = auth.split(':')
+ path = urllib.url2pathname(path)
+ # Create subversion client
+ self._client = pysvn.Client()
+ # Assign username, password
+ if user is not None:
+ self._client.set_username(user)
+ if password is not None:
+ self._client.set_password(password)
+ # Verify that store exists in repository
+ try:
+ self._client.info2(url)
+ # Create store in repository if it doesn't exist
+ except pysvn.ClientError:
+ self._client.mkdir(url, 'Adding directory')
+ # Verify that local copy exists
+ try:
+ if self._client.info(path) is None:
+ self._client.checkout(url, path)
+ # Check it out if it doesn't exist
+ except pysvn.ClientError:
+ self._client.checkout(url, path)
+ self._path, self._url = path, url
+ # Lock
+ self._lock = threading.Condition()
+
+ @synchronized
+ def __getitem__(self, key):
+ try:
+ return self.loads(self._client.cat(self._key_to_file(key)))
+ except:
+ raise KeyError(key)
+
+ @synchronized
+ def __setitem__(self, key, value):
+ fname = self._key_to_file(key)
+ # Write value to file
+ open(fname, 'wb').write(self.dumps(value))
+ # Add to repository
+ if key not in self:
+ self._client.add(fname)
+ self._client.checkin([fname], 'Adding %s' % fname)
+
+ @synchronized
+ def __delitem__(self, key):
+ try:
+ fname = self._key_to_file(key)
+ self._client.remove(fname)
+ # Remove deleted value from repository
+ self._client.checkin([fname], 'Removing %s' % fname)
+ except:
+ raise KeyError(key)
+
+ def _key_to_file(self, key):
+ '''Gives the filesystem path for a key.'''
+ return os.path.join(self._path, urllib.quote_plus(key))
+
+ @synchronized
+ def keys(self):
+ '''Returns a list of keys in the subversion repository.'''
+ return list(str(i.name.split('/')[-1]) for i
+ in self._client.ls(self._path))
+
+
+__all__ = ['SvnStore']
diff --git a/lib/shove/store/zodb.py b/lib/shove/store/zodb.py
new file mode 100644
index 00000000..43768dde
--- /dev/null
+++ b/lib/shove/store/zodb.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+'''
+Zope Object Database store frontend.
+
+shove's pseudo-URL for ZODB stores follows the form:
+
+zodb://<path>
+
+Where the path is a URL path to a ZODB FileStorage database. Alternatively, a
+native pathname to a ZODB database can be passed as the 'engine' argument.
+'''
+
+try:
+ import transaction
+ from ZODB import FileStorage, DB
+except ImportError:
+ raise ImportError('Requires ZODB library')
+
+from shove.store import SyncStore
+
+
+class ZodbStore(SyncStore):
+
+ '''ZODB store front end.'''
+
+ init = 'zodb://'
+
+ def __init__(self, engine, **kw):
+ super(ZodbStore, self).__init__(engine, **kw)
+        # Handle pseudo-URL
+ self._storage = FileStorage.FileStorage(self._engine)
+ self._db = DB(self._storage)
+ self._connection = self._db.open()
+ self._store = self._connection.root()
+        # Keep the DB in sync by committing transactions
+ self.sync = transaction.commit
+
+ def close(self):
+ '''Closes all open storage and connections.'''
+ self.sync()
+ super(ZodbStore, self).close()
+ self._connection.close()
+ self._db.close()
+ self._storage.close()
+
+
+__all__ = ['ZodbStore']
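
A usage sketch; ZODB writes sidecar files (.index, .lock, .tmp) next to the
FileStorage path:

    from shove import Shove

    store = Shove('zodb://test.zodb')
    store['max'] = 3
    store.sync()    # commits the current transaction
    store.close()
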
diff --git a/lib/shove/tests/__init__.py b/lib/shove/tests/__init__.py
new file mode 100644
index 00000000..40a96afc
--- /dev/null
+++ b/lib/shove/tests/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/lib/shove/tests/test_bsddb_store.py b/lib/shove/tests/test_bsddb_store.py
new file mode 100644
index 00000000..3de7896e
--- /dev/null
+++ b/lib/shove/tests/test_bsddb_store.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestBsdbStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('bsddb://test.db', compress=True)
+
+ def tearDown(self):
+ import os
+ self.store.close()
+ os.remove('test.db')
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_cassandra_store.py b/lib/shove/tests/test_cassandra_store.py
new file mode 100644
index 00000000..a5c60f6a
--- /dev/null
+++ b/lib/shove/tests/test_cassandra_store.py
@@ -0,0 +1,137 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestCassandraStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ from pycassa.system_manager import SystemManager
+ system_manager = SystemManager('localhost:9160')
+ try:
+ system_manager.create_column_family('Foo', 'shove')
+ except:
+ pass
+ self.store = Shove('cassandra://localhost:9160/Foo/shove')
+
+ def tearDown(self):
+ self.store.clear()
+ self.store.close()
+ from pycassa.system_manager import SystemManager
+ system_manager = SystemManager('localhost:9160')
+ system_manager.drop_column_family('Foo', 'shove')
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+# def test_clear(self):
+# self.store['max'] = 3
+# self.store['min'] = 6
+# self.store['pow'] = 7
+# self.store.clear()
+# self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+# def test_popitem(self):
+# self.store['max'] = 3
+# self.store['min'] = 6
+# self.store['pow'] = 7
+# item = self.store.popitem()
+# self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+# self.store['pow'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store.setdefault('pow', 8), 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_db_cache.py b/lib/shove/tests/test_db_cache.py
new file mode 100644
index 00000000..9dd27a06
--- /dev/null
+++ b/lib/shove/tests/test_db_cache.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestDbCache(unittest.TestCase):
+
+ initstring = 'sqlite:///'
+
+ def setUp(self):
+ from shove.cache.db import DbCache
+ self.cache = DbCache(self.initstring)
+
+ def tearDown(self):
+ self.cache = None
+
+ def test_getitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_setitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_delitem(self):
+ self.cache['test'] = 'test'
+ del self.cache['test']
+ self.assertEqual('test' in self.cache, False)
+
+ def test_get(self):
+ self.assertEqual(self.cache.get('min'), None)
+
+ def test_timeout(self):
+ import time
+ from shove.cache.db import DbCache
+ cache = DbCache(self.initstring, timeout=1)
+ cache['test'] = 'test'
+ time.sleep(2)
+
+ def tmp():
+ cache['test']
+ self.assertRaises(KeyError, tmp)
+
+ def test_cull(self):
+ from shove.cache.db import DbCache
+ cache = DbCache(self.initstring, max_entries=1)
+ cache['test'] = 'test'
+ cache['test2'] = 'test'
+ cache['test2'] = 'test'
+ self.assertEquals(len(cache), 1)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_db_store.py b/lib/shove/tests/test_db_store.py
new file mode 100644
index 00000000..1d9ad616
--- /dev/null
+++ b/lib/shove/tests/test_db_store.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestDbStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('sqlite://', compress=True)
+
+ def tearDown(self):
+ self.store.close()
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_dbm_store.py b/lib/shove/tests/test_dbm_store.py
new file mode 100644
index 00000000..e64ac9e7
--- /dev/null
+++ b/lib/shove/tests/test_dbm_store.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestDbmStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('dbm://test.dbm', compress=True)
+
+ def tearDown(self):
+ import os
+ self.store.close()
+ try:
+ os.remove('test.dbm.db')
+ except OSError:
+ pass
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.setdefault('how', 8)
+ self.assertEqual(self.store['how'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_durus_store.py b/lib/shove/tests/test_durus_store.py
new file mode 100644
index 00000000..006fcc41
--- /dev/null
+++ b/lib/shove/tests/test_durus_store.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestDurusStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('durus://test.durus', compress=True)
+
+ def tearDown(self):
+ import os
+ self.store.close()
+ os.remove('test.durus')
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_file_cache.py b/lib/shove/tests/test_file_cache.py
new file mode 100644
index 00000000..b288ce82
--- /dev/null
+++ b/lib/shove/tests/test_file_cache.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestFileCache(unittest.TestCase):
+
+ initstring = 'file://test'
+
+ def setUp(self):
+ from shove.cache.file import FileCache
+ self.cache = FileCache(self.initstring)
+
+ def tearDown(self):
+ import os
+ self.cache = None
+ for x in os.listdir('test'):
+ os.remove(os.path.join('test', x))
+ os.rmdir('test')
+
+ def test_getitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_setitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_delitem(self):
+ self.cache['test'] = 'test'
+ del self.cache['test']
+ self.assertEqual('test' in self.cache, False)
+
+ def test_get(self):
+ self.assertEqual(self.cache.get('min'), None)
+
+ def test_timeout(self):
+ import time
+ from shove.cache.file import FileCache
+ cache = FileCache(self.initstring, timeout=1)
+ cache['test'] = 'test'
+ time.sleep(2)
+
+ def tmp():
+ cache['test']
+ self.assertRaises(KeyError, tmp)
+
+ def test_cull(self):
+ from shove.cache.file import FileCache
+ cache = FileCache(self.initstring, max_entries=1)
+ cache['test'] = 'test'
+ cache['test2'] = 'test'
+ num = len(cache)
+ self.assertEquals(num, 1)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_file_store.py b/lib/shove/tests/test_file_store.py
new file mode 100644
index 00000000..35643ced
--- /dev/null
+++ b/lib/shove/tests/test_file_store.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestFileStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('file://test', compress=True)
+
+ def tearDown(self):
+ import os
+ self.store.close()
+ for x in os.listdir('test'):
+ os.remove(os.path.join('test', x))
+ os.rmdir('test')
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.store.sync()
+ tstore.sync()
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_ftp_store.py b/lib/shove/tests/test_ftp_store.py
new file mode 100644
index 00000000..17679a2c
--- /dev/null
+++ b/lib/shove/tests/test_ftp_store.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestFtpStore(unittest.TestCase):
+
+ ftpstring = 'put ftp string here'
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove(self.ftpstring, compress=True)
+
+ def tearDown(self):
+ self.store.clear()
+ self.store.close()
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.store.sync()
+ tstore.sync()
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store.sync()
+ self.assertEqual(len(self.store), 2)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store.sync()
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ item = self.store.popitem()
+ self.store.sync()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.store.sync()
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.sync()
+ self.store.update(tstore)
+ self.store.sync()
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_hdf5_store.py b/lib/shove/tests/test_hdf5_store.py
new file mode 100644
index 00000000..b1342ecf
--- /dev/null
+++ b/lib/shove/tests/test_hdf5_store.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+
+import unittest2
+
+
+class TestHDF5Store(unittest2.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('hdf5://test.hdf5/test')
+
+ def tearDown(self):
+ import os
+ self.store.close()
+ try:
+ os.remove('test.hdf5')
+ except OSError:
+ pass
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.setdefault('bow', 8)
+ self.assertEqual(self.store['bow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/lib/shove/tests/test_leveldb_store.py b/lib/shove/tests/test_leveldb_store.py
new file mode 100644
index 00000000..b3a3d177
--- /dev/null
+++ b/lib/shove/tests/test_leveldb_store.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+
+import unittest2
+
+
+class TestLevelDBStore(unittest2.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('leveldb://test', compress=True)
+
+ def tearDown(self):
+ import shutil
+ shutil.rmtree('test')
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.setdefault('bow', 8)
+ self.assertEqual(self.store['bow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/lib/shove/tests/test_memcached_cache.py b/lib/shove/tests/test_memcached_cache.py
new file mode 100644
index 00000000..98f0b96d
--- /dev/null
+++ b/lib/shove/tests/test_memcached_cache.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestMemcached(unittest.TestCase):
+
+ initstring = 'memcache://localhost:11211'
+
+ def setUp(self):
+ from shove.cache.memcached import MemCached
+ self.cache = MemCached(self.initstring)
+
+ def tearDown(self):
+ self.cache = None
+
+ def test_getitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_setitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_delitem(self):
+ self.cache['test'] = 'test'
+ del self.cache['test']
+ self.assertEqual('test' in self.cache, False)
+
+ def test_get(self):
+ self.assertEqual(self.cache.get('min'), None)
+
+ def test_timeout(self):
+ import time
+ from shove.cache.memcached import MemCached
+ cache = MemCached(self.initstring, timeout=1)
+ cache['test'] = 'test'
+ time.sleep(1)
+
+ def tmp():
+ cache['test']
+ self.assertRaises(KeyError, tmp)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_memory_cache.py b/lib/shove/tests/test_memory_cache.py
new file mode 100644
index 00000000..87749cdb
--- /dev/null
+++ b/lib/shove/tests/test_memory_cache.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestMemoryCache(unittest.TestCase):
+
+ initstring = 'memory://'
+
+ def setUp(self):
+ from shove.cache.memory import MemoryCache
+ self.cache = MemoryCache(self.initstring)
+
+ def tearDown(self):
+ self.cache = None
+
+ def test_getitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_setitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_delitem(self):
+ self.cache['test'] = 'test'
+ del self.cache['test']
+ self.assertEqual('test' in self.cache, False)
+
+ def test_get(self):
+ self.assertEqual(self.cache.get('min'), None)
+
+ def test_timeout(self):
+ import time
+ from shove.cache.memory import MemoryCache
+ cache = MemoryCache(self.initstring, timeout=1)
+ cache['test'] = 'test'
+ time.sleep(1)
+
+ def tmp():
+ cache['test']
+ self.assertRaises(KeyError, tmp)
+
+ def test_cull(self):
+ from shove.cache.memory import MemoryCache
+ cache = MemoryCache(self.initstring, max_entries=1)
+ cache['test'] = 'test'
+ cache['test2'] = 'test'
+ cache['test2'] = 'test'
+        self.assertEqual(len(cache), 1)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_memory_store.py b/lib/shove/tests/test_memory_store.py
new file mode 100644
index 00000000..12e505dd
--- /dev/null
+++ b/lib/shove/tests/test_memory_store.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestMemoryStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('memory://', compress=True)
+
+ def tearDown(self):
+ self.store.close()
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.store.sync()
+ tstore.sync()
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_redis_cache.py b/lib/shove/tests/test_redis_cache.py
new file mode 100644
index 00000000..c8e9b8db
--- /dev/null
+++ b/lib/shove/tests/test_redis_cache.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestRedisCache(unittest.TestCase):
+
+ initstring = 'redis://localhost:6379/0'
+
+ def setUp(self):
+ from shove.cache.redisdb import RedisCache
+ self.cache = RedisCache(self.initstring)
+
+ def tearDown(self):
+ self.cache = None
+
+ def test_getitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_setitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_delitem(self):
+ self.cache['test'] = 'test'
+ del self.cache['test']
+ self.assertEqual('test' in self.cache, False)
+
+ def test_get(self):
+ self.assertEqual(self.cache.get('min'), None)
+
+ def test_timeout(self):
+ import time
+ from shove.cache.redisdb import RedisCache
+ cache = RedisCache(self.initstring, timeout=1)
+ cache['test'] = 'test'
+ time.sleep(3)
+ def tmp(): #@IgnorePep8
+ return cache['test']
+ self.assertRaises(KeyError, tmp)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_redis_store.py b/lib/shove/tests/test_redis_store.py
new file mode 100644
index 00000000..06b1e0e9
--- /dev/null
+++ b/lib/shove/tests/test_redis_store.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestRedisStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('redis://localhost:6379/0')
+
+ def tearDown(self):
+ self.store.clear()
+ self.store.close()
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store.setdefault('pow', 8), 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_s3_store.py b/lib/shove/tests/test_s3_store.py
new file mode 100644
index 00000000..8a0f08d7
--- /dev/null
+++ b/lib/shove/tests/test_s3_store.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestS3Store(unittest.TestCase):
+
+ s3string = 's3 test string here'
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove(self.s3string, compress=True)
+
+ def tearDown(self):
+ self.store.clear()
+ self.store.close()
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.store.sync()
+ tstore.sync()
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store.sync()
+ self.assertEqual(len(self.store), 2)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store.sync()
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ item = self.store.popitem()
+ self.store.sync()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.store.sync()
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.sync()
+ self.store.update(tstore)
+ self.store.sync()
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_simple_cache.py b/lib/shove/tests/test_simple_cache.py
new file mode 100644
index 00000000..8cd1830c
--- /dev/null
+++ b/lib/shove/tests/test_simple_cache.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestSimpleCache(unittest.TestCase):
+
+ initstring = 'simple://'
+
+ def setUp(self):
+ from shove.cache.simple import SimpleCache
+ self.cache = SimpleCache(self.initstring)
+
+ def tearDown(self):
+ self.cache = None
+
+ def test_getitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_setitem(self):
+ self.cache['test'] = 'test'
+ self.assertEqual(self.cache['test'], 'test')
+
+ def test_delitem(self):
+ self.cache['test'] = 'test'
+ del self.cache['test']
+ self.assertEqual('test' in self.cache, False)
+
+ def test_get(self):
+ self.assertEqual(self.cache.get('min'), None)
+
+ def test_timeout(self):
+ import time
+ from shove.cache.simple import SimpleCache
+ cache = SimpleCache(self.initstring, timeout=1)
+ cache['test'] = 'test'
+ time.sleep(1)
+
+ def tmp():
+ cache['test']
+ self.assertRaises(KeyError, tmp)
+
+ def test_cull(self):
+ from shove.cache.simple import SimpleCache
+ cache = SimpleCache(self.initstring, max_entries=1)
+ cache['test'] = 'test'
+ cache['test2'] = 'test'
+ cache['test2'] = 'test'
+        self.assertEqual(len(cache), 1)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_simple_store.py b/lib/shove/tests/test_simple_store.py
new file mode 100644
index 00000000..d2431ec5
--- /dev/null
+++ b/lib/shove/tests/test_simple_store.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestSimpleStore(unittest.TestCase):
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove('simple://', compress=True)
+
+ def tearDown(self):
+ self.store.close()
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.store.sync()
+ tstore.sync()
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_svn_store.py b/lib/shove/tests/test_svn_store.py
new file mode 100644
index 00000000..b3103816
--- /dev/null
+++ b/lib/shove/tests/test_svn_store.py
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestSvnStore(unittest.TestCase):
+
+ svnstring = 'SVN test string here'
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove(self.svnstring, compress=True)
+
+ def tearDown(self):
+ self.store.clear()
+ self.store.close()
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.store.sync()
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.store.sync()
+ tstore.sync()
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store.sync()
+ self.assertEqual(len(self.store), 2)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store.sync()
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ item = self.store.popitem()
+ self.store.sync()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.store.sync()
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.sync()
+ self.store.update(tstore)
+ self.store.sync()
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.sync()
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/shove/tests/test_zodb_store.py b/lib/shove/tests/test_zodb_store.py
new file mode 100644
index 00000000..9d979fea
--- /dev/null
+++ b/lib/shove/tests/test_zodb_store.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+
+class TestZodbStore(unittest.TestCase):
+
+ init = 'zodb://test.db'
+
+ def setUp(self):
+ from shove import Shove
+ self.store = Shove(self.init, compress=True)
+
+ def tearDown(self):
+ self.store.close()
+ import os
+ os.remove('test.db')
+ os.remove('test.db.index')
+ os.remove('test.db.tmp')
+ os.remove('test.db.lock')
+
+ def test__getitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__setitem__(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store['max'], 3)
+
+ def test__delitem__(self):
+ self.store['max'] = 3
+ del self.store['max']
+ self.assertEqual('max' in self.store, False)
+
+ def test_get(self):
+ self.store['max'] = 3
+ self.assertEqual(self.store.get('min'), None)
+
+ def test__cmp__(self):
+ from shove import Shove
+ tstore = Shove()
+ self.store['max'] = 3
+ tstore['max'] = 3
+ self.assertEqual(self.store, tstore)
+
+ def test__len__(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.assertEqual(len(self.store), 2)
+
+ def test_close(self):
+ self.store.close()
+ self.assertEqual(self.store, None)
+
+ def test_clear(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ self.store.clear()
+ self.assertEqual(len(self.store), 0)
+
+ def test_items(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.items())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iteritems(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iteritems())
+ self.assertEqual(('min', 6) in slist, True)
+
+ def test_iterkeys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.iterkeys())
+ self.assertEqual('min' in slist, True)
+
+ def test_itervalues(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = list(self.store.itervalues())
+ self.assertEqual(6 in slist, True)
+
+ def test_pop(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ item = self.store.pop('min')
+ self.assertEqual(item, 6)
+
+ def test_popitem(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ item = self.store.popitem()
+ self.assertEqual(len(item) + len(self.store), 4)
+
+ def test_setdefault(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['powl'] = 7
+ self.store.setdefault('pow', 8)
+ self.assertEqual(self.store['pow'], 8)
+
+ def test_update(self):
+ from shove import Shove
+ tstore = Shove()
+ tstore['max'] = 3
+ tstore['min'] = 6
+ tstore['pow'] = 7
+ self.store['max'] = 2
+ self.store['min'] = 3
+ self.store['pow'] = 7
+ self.store.update(tstore)
+ self.assertEqual(self.store['min'], 6)
+
+ def test_values(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.values()
+ self.assertEqual(6 in slist, True)
+
+ def test_keys(self):
+ self.store['max'] = 3
+ self.store['min'] = 6
+ self.store['pow'] = 7
+ slist = self.store.keys()
+ self.assertEqual('min' in slist, True)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/sickbeard/generic_queue.py b/sickbeard/generic_queue.py
index b188eb6f..0beeb7ae 100644
--- a/sickbeard/generic_queue.py
+++ b/sickbeard/generic_queue.py
@@ -22,6 +22,7 @@ import threading
from sickbeard import logger
+
class QueuePriorities:
LOW = 10
NORMAL = 20
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index cef3ad7e..2bd5908e 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -952,7 +952,7 @@ def get_show_by_name(name):
return
indexerid = sickbeard.name_cache.retrieveNameFromCache(name)
- if indexerid or indexerid == 0:
+ if indexerid:
in_cache = True
showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
@@ -966,10 +966,6 @@ def get_show_by_name(name):
if indexerid:
break
- # add to name cache if we didn't get it from the cache
- if not in_cache:
- sickbeard.name_cache.addNameToCache(name, indexerid if indexerid else 0)
-
if indexerid:
logger.log(u"Found Indexer ID:[" + repr(indexerid) + "], using that for [" + str(name) + "}",logger.DEBUG)
if not showObj:
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index 388d95f8..996a426e 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -378,7 +378,7 @@ class ParseResult(object):
new_episode_numbers = []
new_season_numbers = []
for epNo in self.episode_numbers:
- (s, e) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.season_number, epNo)
+ (s, e) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, self.season_number, epNo)
new_episode_numbers.append(e)
new_season_numbers.append(s)
diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py
index 98653d50..6fcbafb2 100644
--- a/sickbeard/providers/dtt.py
+++ b/sickbeard/providers/dtt.py
@@ -80,7 +80,7 @@ class DTTProvider(generic.TorrentProvider):
logger.log(u"Search string: " + searchURL, logger.DEBUG)
- data = self.getRSSFeed(searchURL)
+ data = self.cache.getRSSFeed(searchURL)
if not data:
return []
@@ -126,7 +126,7 @@ class DTTCache(tvcache.TVCache):
url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params)
logger.log(u"DTT cache update URL: " + url, logger.DEBUG)
- return self.provider.getRSSFeed(url)
+ return self.getRSSFeed(url)
def _parseItem(self, item):
title, url = self.provider._get_title_and_url(item)
diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index 6170b088..58fcdf46 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -117,7 +117,7 @@ class EZRSSProvider(generic.TorrentProvider):
logger.log(u"Search string: " + search_url, logger.DEBUG)
- data = self.getRSSFeed(search_url)
+ data = self.cache.getRSSFeed(search_url)
if not data:
return []
@@ -173,7 +173,7 @@ class EZRSSCache(tvcache.TVCache):
rss_url = self.provider.url + 'feed/'
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
- return self.provider.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url)
def _parseItem(self, item):
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 9820002a..1b1cc68e 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -121,28 +121,6 @@ class GenericProvider:
return data
- def getRSSFeed(self, url, post_data=None):
- parsed = list(urlparse.urlparse(url))
- parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
-
- if post_data:
- url = url + 'api?' + urllib.urlencode(post_data)
-
- f = feedparser.parse(url)
-
- if not f:
- logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR)
- return None
- elif 'error' in f.feed:
- logger.log(u"Newznab ERROR:[%s] CODE:[%s]" % (f.feed['error']['description'], f.feed['error']['code']),
- logger.DEBUG)
- return None
- elif not f.entries:
- logger.log(u"No items found on " + self.name + " using URL: " + url, logger.WARNING)
- return None
-
- return f
-
def downloadResult(self, result):
"""
Save the result to disk.
@@ -252,13 +230,11 @@ class GenericProvider:
self._checkAuth()
self.show = show
- itemList = []
results = {}
+ searchStrings = []
+ itemList = []
for epObj in episodes:
- scene_season = epObj.scene_season
- scene_episode = epObj.scene_episode
-
cacheResult = self.cache.searchCache(epObj, manualSearch)
if len(cacheResult):
return cacheResult
@@ -268,94 +244,111 @@ class GenericProvider:
else:
logger.log(u'Searching "%s" for "%s" as "%s"' % (self.name, epObj.prettyName(), epObj.prettySceneName()))
- # get our season pack search results
+ # get our search strings
if seasonSearch:
- for curString in self._get_season_search_strings(epObj):
- itemList += self._doSearch(curString)
+ searchStrings += self._get_season_search_strings(epObj)
+ searchStrings += self._get_episode_search_strings(epObj)
- # get our season/episode search results
- for curString in self._get_episode_search_strings(epObj):
- itemList += self._doSearch(curString)
+ # remove duplicate search strings
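+            # (keeps the last occurrence of each string and preserves the order of the rest)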
+ searchStrings = [i for n, i in enumerate(searchStrings) if i not in searchStrings[n + 1:]]
- for item in itemList:
+ for curString in sorted(searchStrings):
+ itemList += self._doSearch(curString)
- (title, url) = self._get_title_and_url(item)
+ # remove duplicate items
+ itemList = list(set(itemList))
+ for item in itemList:
- quality = self.getQuality(item)
+ (title, url) = self._get_title_and_url(item)
- # parse the file name
- try:
- myParser = NameParser(False)
- parse_result = myParser.parse(title)
- except InvalidNameException:
- logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
+ quality = self.getQuality(item)
+
+ # parse the file name
+ try:
+ myParser = NameParser(False)
+ parse_result = myParser.parse(title).convert()
+ except InvalidNameException:
+ logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
+ continue
+
+ if not (self.show.air_by_date or self.show.sports):
+ if not parse_result.episode_numbers and (parse_result.season_number != None and parse_result.season_number != season) or (
+ parse_result.season_number == None and season != 1):
+ logger.log(u"The result " + title + " doesn't seem to be a valid season for season " + str(
+ season) + ", ignoring", logger.DEBUG)
+ continue
+ elif len(parse_result.episode_numbers) and (parse_result.season_number != season or parse_result.episode_numbers[0] not in parse_result.episode_numbers):
+ logger.log(u"Episode " + title + " isn't " + str(season) + "x" + str(
+ parse_result.episode_numbers[0]) + ", skipping it", logger.DEBUG)
continue
- if not (self.show.air_by_date or self.show.sports):
- if not parse_result.episode_numbers and (parse_result.season_number != None and parse_result.season_number != season) or (
- parse_result.season_number == None and season != 1):
- logger.log(u"The result " + title + " doesn't seem to be a valid season for season " + str(
- season) + ", ignoring", logger.DEBUG)
- continue
- elif len(parse_result.episode_numbers) and (parse_result.season_number != scene_season or scene_episode not in parse_result.episode_numbers):
- logger.log(u"Episode " + title + " isn't " + str(scene_season) + "x" + str(
- scene_episode) + ", skipping it", logger.DEBUG)
- continue
-
- else:
- if not (parse_result.air_by_date or parse_result.sports):
- logger.log(
- u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
- logger.DEBUG)
- continue
-
- if ((parse_result.air_date != epObj.airdate and parse_result.air_by_date) or (
- parse_result.sports_event_date != epObj.airdate and parse_result.sports)):
- logger.log(u"Episode " + title + " didn't air on " + str(epObj.airdate) + ", skipping it",
- logger.DEBUG)
- continue
-
- # make sure we want the episode
- if not self.show.wantEpisode(epObj.season, epObj.episode, quality, manualSearch=manualSearch):
+ # we just use the existing info for normal searches
+ actual_season = season
+ actual_episodes = parse_result.episode_numbers
+ else:
+ if not (parse_result.air_by_date or parse_result.sports):
logger.log(
- u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
- quality], logger.DEBUG)
+ u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
+ logger.DEBUG)
continue
- logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
+ myDB = db.DBConnection()
+                    airdate = (parse_result.air_date or parse_result.sports_event_date).toordinal()
+                    sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
+                                              [show.indexerid, airdate])
- # make a result object
- epObjs = [epObj]
+ if len(sql_results) != 1:
+ logger.log(
+ u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
+ logger.WARNING)
+ continue
- result = self.getResult(epObjs)
- result.url = url
- result.name = title
- result.quality = quality
- result.provider = self
- result.content = None
+ actual_season = int(sql_results[0]["season"])
+ actual_episodes = [int(sql_results[0]["episode"])]
- if len(epObjs) == 1:
- epNum = epObj.episode
- logger.log(u"Single episode result.", logger.DEBUG)
- elif len(epObjs) > 1:
- epNum = MULTI_EP_RESULT
- logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
- parse_result.episode_numbers), logger.DEBUG)
- elif len(epObjs) == 0:
- epNum = SEASON_RESULT
- result.extraInfo = [self.show]
- logger.log(u"Separating full season result to check for later", logger.DEBUG)
+ # make sure we want the episode
+ wantEp = True
+ for epNo in actual_episodes:
+ if not show.wantEpisode(actual_season, epNo, quality):
+ wantEp = False
+ break
- if epNum in results:
- results[epNum].append(result)
- else:
- results[epNum] = [result]
+ if not wantEp:
+ logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
+ continue
- # remove duplicate results
- results[epNum] = list(set(results[epNum]))
+ logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
- # found the result we wanted
- break
+ # make a result object
+ epObj = []
+ for curEp in actual_episodes:
+ epObj.append(show.getEpisode(actual_season, curEp))
+
+ result = self.getResult(epObj)
+ result.url = url
+ result.name = title
+ result.quality = quality
+ result.provider = self
+ result.content = None
+
+ if len(epObj) == 1:
+ epNum = epObj[0].episode
+ logger.log(u"Single episode result.", logger.DEBUG)
+ elif len(epObj) > 1:
+ epNum = MULTI_EP_RESULT
+ logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
+ parse_result.episode_numbers), logger.DEBUG)
+ elif len(epObj) == 0:
+ epNum = SEASON_RESULT
+ result.extraInfo = [show]
+ logger.log(u"Separating full season result to check for later", logger.DEBUG)
+
+ if epNum in results:
+ results[epNum].append(result)
+ else:
+ results[epNum] = [result]
+
+ # remove duplicate results
+ results[epNum] = list(set(results[epNum]))
return results
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
index 56653437..fb5a0cdb 100644
--- a/sickbeard/providers/newzbin.py
+++ b/sickbeard/providers/newzbin.py
@@ -319,7 +319,7 @@ class NewzbinProvider(generic.NZBProvider):
url = self.url + "search/?%s" % urllib.urlencode(params)
logger.log("Newzbin search URL: " + url, logger.DEBUG)
- return self.getRSSFeed(url)
+ return self.cache.getRSSFeed(url)
def _checkAuth(self):
if sickbeard.NEWZBIN_USERNAME in (None, "") or sickbeard.NEWZBIN_PASSWORD in (None, ""):
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index c4553f25..92d4f437 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -268,7 +268,7 @@ class NewznabCache(tvcache.TVCache):
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
- return self.provider.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url)
def _checkAuth(self, data):
return self.provider._checkAuthFromData(data)
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index 1ae156c4..e9cfd77d 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -133,7 +133,7 @@ class NyaaCache(tvcache.TVCache):
logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
- return self.provider.getRSSFeed(url)
+ return self.getRSSFeed(url)
def _parseItem(self, item):
(title, url) = self.provider._get_title_and_url(item)
diff --git a/sickbeard/providers/nzbs_org_old.py b/sickbeard/providers/nzbs_org_old.py
index c6afc979..95337852 100644
--- a/sickbeard/providers/nzbs_org_old.py
+++ b/sickbeard/providers/nzbs_org_old.py
@@ -76,7 +76,7 @@ class NZBsProvider(generic.NZBProvider):
logger.log(u"Search string: " + searchURL, logger.DEBUG)
- data = self.getRSSFeed(searchURL)
+ data = self.cache.getRSSFeed(searchURL)
# Pause to avoid 503's
time.sleep(5)
diff --git a/sickbeard/providers/nzbsrus.py b/sickbeard/providers/nzbsrus.py
index b3146ebc..3272fbff 100644
--- a/sickbeard/providers/nzbsrus.py
+++ b/sickbeard/providers/nzbsrus.py
@@ -67,7 +67,7 @@ class NZBsRUSProvider(generic.NZBProvider):
searchURL = self.url + 'api.php?' + urllib.urlencode(params)
logger.log(u"NZBS'R'US search url: " + searchURL, logger.DEBUG)
- data = self.getRSSFeed(searchURL)
+ data = self.cache.getRSSFeed(searchURL)
if not data:
return []
@@ -107,7 +107,7 @@ class NZBsRUSCache(tvcache.TVCache):
url += urllib.urlencode(urlArgs)
logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG)
- return self.provider.getRSSFeed(url)
+ return self.getRSSFeed(url)
def _checkAuth(self, data):
return data != 'Invalid Link'
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index 6e358631..e1d473ef 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -164,7 +164,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
- return self.provider.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url)
def _checkAuth(self, data):
return self.provider._checkAuthFromData(data)
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index 52e79575..1c0f2bc5 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -33,7 +33,6 @@ from sickbeard.exceptions import ex
from lib import requests
from lib.requests import exceptions
from lib.bencode import bdecode
-from lib.feedparser import feedparser
class TorrentRssProvider(generic.TorrentProvider):
def __init__(self, name, url):
@@ -156,7 +155,7 @@ class TorrentRssCache(tvcache.TVCache):
def _getRSSData(self):
url = self.provider.url
logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
- return self.provider.getRSSFeed(url)
+ return self.getRSSFeed(url)
def _parseItem(self, item):
diff --git a/sickbeard/providers/tvtorrents.py b/sickbeard/providers/tvtorrents.py
index 63d50b8b..11bd3089 100644
--- a/sickbeard/providers/tvtorrents.py
+++ b/sickbeard/providers/tvtorrents.py
@@ -84,7 +84,7 @@ class TvTorrentsCache(tvcache.TVCache):
rss_url = self.provider.url + 'RssServlet?digest=' + sickbeard.TVTORRENTS_DIGEST + '&hash=' + sickbeard.TVTORRENTS_HASH + '&fname=true&exclude=(' + ignore_regex + ')'
logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
- return self.provider.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url)
def _checkAuth(self, data):
return self.provider._checkAuthFromData(data)
diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py
index 920239da..719c7209 100644
--- a/sickbeard/providers/womble.py
+++ b/sickbeard/providers/womble.py
@@ -42,7 +42,7 @@ class WombleCache(tvcache.TVCache):
def _getRSSData(self):
url = self.provider.url + 'rss/?sec=TV-x264&fr=false'
logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
- return self.provider.getRSSFeed(url)
+ return self.getRSSFeed(url)
def _checkAuth(self, data):
return data != 'Invalid Link'
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 8bd3afe7..9cf38961 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -47,6 +47,8 @@ class SearchQueue(generic_queue.GenericQueue):
for cur_item in self.queue:
if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj:
return True
+ if isinstance(cur_item, BacklogQueueItem) and cur_item.ep_obj == ep_obj:
+ return True
return False
def pause_backlog(self):
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 419af7ab..3d31f41c 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -15,21 +15,28 @@
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
+import os
import time
import datetime
import sqlite3
+import urllib
+import urlparse
+import re
import sickbeard
+from shove import Shove
+from feedcache import cache
+
from sickbeard import db
from sickbeard import logger
from sickbeard.common import Quality
from sickbeard import helpers, show_name_helpers
-from sickbeard import name_cache, scene_exceptions
from sickbeard.exceptions import MultipleShowObjectsException, ex
-from sickbeard.exceptions import ex, AuthException
+from sickbeard.exceptions import AuthException
+from sickbeard import encodingKludge as ek
from name_parser.parser import NameParser, InvalidNameException
@@ -85,6 +92,34 @@ class TVCache():
def _checkItemAuth(self, title, url):
return True
+ def getRSSFeed(self, url, post_data=None):
+        # create provider storage cache
+ storage = Shove('file://' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.providerID))
+ fc = cache.Cache(storage)
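+        # feedcache.Cache wraps feedparser and serves the parsed feed back out of
+        # the Shove file store (keyed by URL) until the cached copy goes stale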
+
+        parsed = list(urlparse.urlparse(url))
+        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
+        url = urlparse.urlunparse(parsed)  # rebuild the url with the normalized path
+
+        if post_data:
+            url = url + 'api?' + urllib.urlencode(post_data)
+
+        f = fc.fetch(url)
+
+        # close the backing store right after the fetch so the early returns
+        # below can't leave the file store open
+        storage.close()
+
+        if not f:
+            logger.log(u"Error loading " + self.providerID + " URL: " + url, logger.ERROR)
+            return None
+        elif 'error' in f.feed:
+            logger.log(u"Newznab ERROR:[%s] CODE:[%s]" % (f.feed['error']['description'], f.feed['error']['code']),
+                logger.DEBUG)
+            return None
+        elif not f.entries:
+            logger.log(u"No items found on " + self.providerID + " using URL: " + url, logger.WARNING)
+            return None
+
+        return f
+
def updateCache(self):
if not self.shouldUpdate():
@@ -174,6 +209,7 @@ class TVCache():
lastUpdate = property(_getLastUpdate)
def shouldUpdate(self):
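+        # force a refresh on every run; the time-based check below is kept but never reached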
+ return True
# if we've updated recently then skip the update
if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
@@ -184,8 +220,11 @@ class TVCache():
def _addCacheEntry(self, name, url, quality=None):
- season = None
- episodes = None
+
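+        # if the release name is already in the name cache it was processed on an
+        # earlier run, so skip it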
+ cacheResult = sickbeard.name_cache.retrieveNameFromCache(name)
+ if cacheResult:
+ logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "}", logger.DEBUG)
+ return
# if we don't have complete info then parse the filename to get it
try:
@@ -208,12 +247,17 @@ class TVCache():
logger.log(u"Could not find a show matching " + parse_result.series_name + " in the database, skipping ...", logger.DEBUG)
return None
+ logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
+ sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
+
+ season = episodes = None
if parse_result.air_by_date:
myDB = db.DBConnection()
airdate = parse_result.air_date.toordinal()
- sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
- [showObj.indexerid, showObj.indexer, airdate])
+ sql_results = myDB.select(
+ "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
+ [showObj.indexerid, showObj.indexer, airdate])
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]
@@ -235,11 +279,12 @@ class TVCache():
if not isinstance(name, unicode):
name = unicode(name, 'utf-8')
-
logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
- return ["INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
+ return [
+ "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
[name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]
+
def searchCache(self, episode, manualSearch=False):
neededEps = self.findNeededEpisodes(episode, manualSearch)
return neededEps
diff --git a/tests/show name/show name - s04e02.mkv b/tests/show name/show name - s04e02.mkv
new file mode 100644
index 00000000..96c90675
--- /dev/null
+++ b/tests/show name/show name - s04e02.mkv
@@ -0,0 +1 @@
+foo bar
\ No newline at end of file