mirror of https://github.com/SickGear/SickGear.git
Moved code for cleaning up the cache folder to a separate function in helpers.py.

Added a call to the cache folder cleanup during SickRage main init. Changed an error message in the RSS feed class to a debug message for when a url returns no data. Moved indexer api cache files under the cache folder subfolder indexers. Moved rss feed cache files under the cache folder subfolder rss.
parent 14c354b551
commit b13e72e0a3

6 changed files with 52 additions and 39 deletions
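In outline, the new helpers.clearCache() walks the cache folder and deletes any file older than 12 hours, skipping the rss subfolder (the RSS cache is purged separately by RSSFeeds.clearCache). A minimal standalone sketch of the same idea, using only the Python standard library: the 12-hour cutoff and the rss exclusion mirror the diff below, while the function name clear_cache and the print-based logging are illustrative only.

    import datetime
    import os

    def clear_cache(cache_dir, max_age=datetime.timedelta(hours=12), force=False):
        # Delete cache files older than max_age; leave the 'rss' subfolder alone.
        now = datetime.datetime.now()
        for root, dirs, files in os.walk(cache_dir):
            if os.path.basename(root) == 'rss':  # rss caches are purged elsewhere
                continue
            for name in files:
                path = os.path.join(root, name)
                if not os.path.isfile(path):
                    continue
                modified = datetime.datetime.fromtimestamp(os.path.getmtime(path))
                if force or now - modified > max_age:
                    try:
                        os.remove(path)
                    except OSError as e:
                        print("Unable to clean %s: %r" % (root, e))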
@@ -525,6 +525,10 @@ def initialize(consoleLogging=True):
             logger.log(u"!!! Creating local cache dir failed, using system default", logger.ERROR)
             CACHE_DIR = None
 
+        # clean cache folders
+        if CACHE_DIR:
+            helpers.clearCache()
+
         GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick')
 
         ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
@@ -32,6 +32,7 @@ import urlparse
 import uuid
 import base64
 import zipfile
+import datetime
 
 import sickbeard
 import subliminal
@@ -1306,4 +1307,40 @@ def download_file(url, filename, session=None):
                            resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
             return False
 
     return True
+
+def clearCache(force=False):
+    update_datetime = datetime.datetime.now()
+
+    # clean out cache directory, remove everything > 12 hours old
+    if sickbeard.CACHE_DIR:
+        logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR)
+
+        # Does our cache_dir exists
+        if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
+            logger.log(u"Can't clean " + sickbeard.CACHE_DIR + " if it doesn't exist", logger.WARNING)
+        else:
+            max_age = datetime.timedelta(hours=12)
+
+            # Get all our cache files
+            for cache_root, cache_dirs, cache_files in os.walk(sickbeard.CACHE_DIR):
+                path = os.path.basename(cache_root)
+
+                # skip rss provider caches
+                if path == 'rss':
+                    continue
+
+                for file in cache_files:
+                    cache_file = ek.ek(os.path.join, cache_root, file)
+
+                    if ek.ek(os.path.isfile, cache_file):
+                        cache_file_modified = datetime.datetime.fromtimestamp(
+                            ek.ek(os.path.getmtime, cache_file))
+
+                        if force or (update_datetime - cache_file_modified > max_age):
+                            try:
+                                ek.ek(os.remove, cache_file)
+                            except OSError, e:
+                                logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e),
+                                           logger.WARNING)
+                                break
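For reference, the helper added above can be driven two ways: age-based (the default, as wired into initialize()) or unconditionally via its force flag. A hypothetical call site, assuming the import layout used elsewhere in the codebase:

    from sickbeard import helpers

    helpers.clearCache()            # age-based: only files older than 12 hours
    helpers.clearCache(force=True)  # remove every cache file regardless of age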
@@ -47,7 +47,7 @@ class indexerApi(object):
     def api_params(self):
         if self.indexerID:
             if sickbeard.CACHE_DIR:
-                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, self.name)
+                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, 'indexers', self.name)
             if sickbeard.PROXY_SETTING:
                 indexerConfig[self.indexerID]['api_params']['proxy'] = sickbeard.PROXY_SETTING
 
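Together with the rss change further down, this gives the cache folder a tidier layout: indexer API caches under an indexers subfolder, RSS feed databases under an rss subfolder. A quick illustration, where the CACHE_DIR value and the indexer and feed names are hypothetical:

    import os

    CACHE_DIR = '/opt/sickbeard/cache'  # hypothetical location

    print(os.path.join(CACHE_DIR, 'indexers', 'theTVDB'))  # indexer api cache dir
    print(os.path.join(CACHE_DIR, 'rss', 'kat.db'))        # rss feed cache db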
@@ -224,7 +224,6 @@ class KATProvider(generic.TorrentProvider):
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
 
-        soup = None
         for mode in search_params.keys():
             for search_string in search_params[mode]:
 
@@ -379,13 +378,10 @@ class KATCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)
 
-
-
         if len(cl) > 0:
             myDB = self._getDB()
             myDB.mass_action(cl)
 
-
     def _parseItem(self, item):
 
         (title, url) = item
@@ -16,7 +16,9 @@ from shove import Shove
 
 class RSSFeeds:
     def __init__(self, db_name):
-        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name + '.db')
+        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name + '.db')
+        if not os.path.exists(os.path.dirname(self.db_name)):
+            sickbeard.helpers.makeDir(os.path.dirname(self.db_name))
 
     def clearCache(self, age=None):
         try:
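Because the feed database now lives one level down, __init__ must create the rss subfolder on first use; that is what the added os.path.exists/makeDir guard does. The pattern reduces to a small ensure-parent-directory helper; a standalone sketch, approximating sickbeard.helpers.makeDir with os.makedirs:

    import os

    def ensure_parent_dir(file_path):
        # Create the parent directory of file_path if it does not exist yet.
        parent = os.path.dirname(file_path)
        if parent and not os.path.exists(parent):
            os.makedirs(parent)

    ensure_parent_dir('/opt/sickbeard/cache/rss/kat.db')  # hypothetical path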
@@ -24,7 +26,7 @@ class RSSFeeds:
             fc = cache.Cache(fs)
             fc.purge(age)
         except Exception as e:
-            logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
+            logger.log(u"RSS error clearing cache: " + ex(e), logger.DEBUG)
 
     def getFeed(self, url, post_data=None, request_headers=None):
         parsed = list(urlparse.urlparse(url))
@@ -39,7 +41,7 @@ class RSSFeeds:
             feed = fc.fetch(url, False, False, request_headers)
 
             if not feed or not feed.entries:
-                logger.log(u"RSS cache error loading url: " + url, logger.ERROR)
+                logger.log(u"RSS error loading url: " + url, logger.DEBUG)
                 return
             elif 'error' in feed.feed:
                 err_code = feed.feed['error']['code']
@@ -48,7 +50,7 @@ class RSSFeeds:
                 logger.log(
                     u"RSS ERROR:[%s] CODE:[%s]" % (err_desc, err_code), logger.DEBUG)
                 return
-
-            return feed
+            else:
+                return feed
         except Exception as e:
-            logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
+            logger.log(u"RSS error: " + ex(e), logger.DEBUG)
@@ -47,33 +47,7 @@ class ShowUpdater():
         logger.log(u"Doing full update on all shows")
 
-        # clean out cache directory, remove everything > 12 hours old
-        if sickbeard.CACHE_DIR:
-            for indexer in sickbeard.indexerApi().indexers:
-                cache_dir = sickbeard.indexerApi(indexer).cache
-                logger.log(u"Trying to clean cache folder " + cache_dir)
-
-                # Does our cache_dir exists
-                if not ek.ek(os.path.isdir, cache_dir):
-                    logger.log(u"Can't clean " + cache_dir + " if it doesn't exist", logger.WARNING)
-                else:
-                    max_age = datetime.timedelta(hours=12)
-                    # Get all our cache files
-                    cache_files = ek.ek(os.listdir, cache_dir)
-
-                    for cache_file in cache_files:
-                        cache_file_path = ek.ek(os.path.join, cache_dir, cache_file)
-
-                        if ek.ek(os.path.isfile, cache_file_path):
-                            cache_file_modified = datetime.datetime.fromtimestamp(
-                                ek.ek(os.path.getmtime, cache_file_path))
-
-                            if update_datetime - cache_file_modified > max_age:
-                                try:
-                                    ek.ek(os.remove, cache_file_path)
-                                except OSError, e:
-                                    logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e),
-                                               logger.WARNING)
-                                    break
+        sickbeard.helpers.clearCache()
+
         # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update
         stale_should_update = []
 
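Design note: the updater previously repeated the 12-hour sweep once per indexer; with the indexer caches relocated under CACHE_DIR/indexers, the single walk inside helpers.clearCache() covers them all, which is why the whole loop collapses to one call.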