Moved the cache folder cleanup code into a separate function in helpers.py.

Added a call to the cache folder cleanup during SickRage's main init.

Changed an error message in the RSS feed class to a debug message for when a URL returns no data.

Moved indexer API cache files into the 'indexers' subfolder of the cache folder.

Moved RSS feed cache files into the 'rss' subfolder of the cache folder.
echel0n 2014-07-27 04:58:14 -07:00
parent 14c354b551
commit b13e72e0a3
6 changed files with 52 additions and 39 deletions
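A rough sketch of the resulting cache layout after this commit (the CACHE_DIR value and the indexer/feed names below are only examples; the real values come from the SickRage config, the indexer config, and the individual providers):

    import os

    CACHE_DIR = '/opt/sickrage/cache'  # example value only

    # indexer API cache files now live under <cache>/indexers/<indexer name>
    indexer_cache = os.path.join(CACHE_DIR, 'indexers', 'theTVDB')  # indexer name is illustrative

    # RSS feed cache databases now live under <cache>/rss/<db name>.db
    rss_cache_db = os.path.join(CACHE_DIR, 'rss', 'kickasstorrents.db')  # db name is illustrative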

View file

@@ -525,6 +525,10 @@ def initialize(consoleLogging=True):
             logger.log(u"!!! Creating local cache dir failed, using system default", logger.ERROR)
             CACHE_DIR = None

+        # clean cache folders
+        if CACHE_DIR:
+            helpers.clearCache()
+
         GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick')
         ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')

View file

@@ -32,6 +32,7 @@ import urlparse
 import uuid
 import base64
 import zipfile
+import datetime

 import sickbeard
 import subliminal
@@ -1306,4 +1307,40 @@ def download_file(url, filename, session=None):
                        resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
            return False

    return True
+
+
+def clearCache(force=False):
+    update_datetime = datetime.datetime.now()
+
+    # clean out cache directory, remove everything > 12 hours old
+    if sickbeard.CACHE_DIR:
+        logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR)
+
+        # Does our cache_dir exists
+        if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
+            logger.log(u"Can't clean " + sickbeard.CACHE_DIR + " if it doesn't exist", logger.WARNING)
+        else:
+            max_age = datetime.timedelta(hours=12)
+
+            # Get all our cache files
+            for cache_root, cache_dirs, cache_files in os.walk(sickbeard.CACHE_DIR):
+                path = os.path.basename(cache_root)
+
+                # skip rss provider caches
+                if path == 'rss':
+                    continue
+
+                for file in cache_files:
+                    cache_file = ek.ek(os.path.join, cache_root, file)
+
+                    if ek.ek(os.path.isfile, cache_file):
+                        cache_file_modified = datetime.datetime.fromtimestamp(
+                            ek.ek(os.path.getmtime, cache_file))
+
+                        if force or (update_datetime - cache_file_modified > max_age):
+                            try:
+                                ek.ek(os.remove, cache_file)
+                            except OSError, e:
+                                logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e),
+                                           logger.WARNING)
+                                break
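For reference, a minimal usage sketch of the helper added above; apart from initialize() and the show updater changed in this commit, any other call site is hypothetical:

    from sickbeard import helpers

    # normal cleanup: removes cache files older than 12 hours, skipping the 'rss' subfolder
    helpers.clearCache()

    # forced cleanup: ignores the 12-hour age check and removes the cache files regardless of age
    helpers.clearCache(force=True)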

View file

@@ -47,7 +47,7 @@ class indexerApi(object):
     def api_params(self):
         if self.indexerID:
             if sickbeard.CACHE_DIR:
-                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, self.name)
+                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, 'indexers', self.name)
             if sickbeard.PROXY_SETTING:
                 indexerConfig[self.indexerID]['api_params']['proxy'] = sickbeard.PROXY_SETTING

View file

@@ -224,7 +224,6 @@ class KATProvider(generic.TorrentProvider):
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}

-        soup = None

         for mode in search_params.keys():
             for search_string in search_params[mode]:

@@ -379,13 +378,10 @@ class KATCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

         if len(cl) > 0:
             myDB = self._getDB()
             myDB.mass_action(cl)

     def _parseItem(self, item):
         (title, url) = item

View file

@@ -16,7 +16,9 @@ from shove import Shove
 class RSSFeeds:
     def __init__(self, db_name):
-        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name + '.db')
+        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name + '.db')
+        if not os.path.exists(os.path.dirname(self.db_name)):
+            sickbeard.helpers.makeDir(os.path.dirname(self.db_name))

     def clearCache(self, age=None):
         try:

@@ -24,7 +26,7 @@ class RSSFeeds:
             fc = cache.Cache(fs)
             fc.purge(age)
         except Exception as e:
-            logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
+            logger.log(u"RSS error clearing cache: " + ex(e), logger.DEBUG)

     def getFeed(self, url, post_data=None, request_headers=None):
         parsed = list(urlparse.urlparse(url))

@@ -39,7 +41,7 @@ class RSSFeeds:
             feed = fc.fetch(url, False, False, request_headers)

             if not feed or not feed.entries:
-                logger.log(u"RSS cache error loading url: " + url, logger.ERROR)
+                logger.log(u"RSS error loading url: " + url, logger.DEBUG)
                 return
             elif 'error' in feed.feed:
                 err_code = feed.feed['error']['code']

@@ -48,7 +50,7 @@ class RSSFeeds:
                 logger.log(
                     u"RSS ERROR:[%s] CODE:[%s]" % (err_desc, err_code), logger.DEBUG)
                 return
             else:
                 return feed
         except Exception as e:
-            logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
+            logger.log(u"RSS error: " + ex(e), logger.DEBUG)

View file

@@ -47,33 +47,7 @@ class ShowUpdater():
             logger.log(u"Doing full update on all shows")

         # clean out cache directory, remove everything > 12 hours old
-        if sickbeard.CACHE_DIR:
-            for indexer in sickbeard.indexerApi().indexers:
-                cache_dir = sickbeard.indexerApi(indexer).cache
-                logger.log(u"Trying to clean cache folder " + cache_dir)
-
-                # Does our cache_dir exists
-                if not ek.ek(os.path.isdir, cache_dir):
-                    logger.log(u"Can't clean " + cache_dir + " if it doesn't exist", logger.WARNING)
-                else:
-                    max_age = datetime.timedelta(hours=12)
-
-                    # Get all our cache files
-                    cache_files = ek.ek(os.listdir, cache_dir)
-
-                    for cache_file in cache_files:
-                        cache_file_path = ek.ek(os.path.join, cache_dir, cache_file)
-
-                        if ek.ek(os.path.isfile, cache_file_path):
-                            cache_file_modified = datetime.datetime.fromtimestamp(
-                                ek.ek(os.path.getmtime, cache_file_path))
-
-                            if update_datetime - cache_file_modified > max_age:
-                                try:
-                                    ek.ek(os.remove, cache_file_path)
-                                except OSError, e:
-                                    logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e),
-                                               logger.WARNING)
-                                    break
+        sickbeard.helpers.clearCache()

         # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update
         stale_should_update = []