Merge pull request #812 from JackDandy/feature/ChangeCachePrune

Change improve performance and reduce start up time.
This commit is contained in:
JackDandy 2016-10-28 15:36:25 +01:00 committed by GitHub
commit c20b847fb7
2 changed files with 25 additions and 29 deletions

View file

@ -188,6 +188,7 @@
* Add if all torrent caches fail, save magnets from RARBG and TPB as files for clients (or plugins) that now support it * Add if all torrent caches fail, save magnets from RARBG and TPB as files for clients (or plugins) that now support it
* Add advice to logs if all caches fail to switch to direct client connect instead of the basic blackhole method * Add advice to logs if all caches fail to switch to direct client connect instead of the basic blackhole method
* Add search setting "Disable auto full backlog" * Add search setting "Disable auto full backlog"
* Change improve performance and reduce start up time
[develop changelog] [develop changelog]
* Change send nzb data to NZBGet for Anizb instead of url * Change send nzb data to NZBGet for Anizb instead of url

View file

@ -1285,37 +1285,28 @@ def download_file(url, filename, session=None):
def clearCache(force=False):
    """Prune the application cache folder.

    Deletes cached files older than 12 hours (or every file when *force*
    is True), then best-effort removes prunable directories that have
    become empty, leaving a small set of protected folder names alone.

    :param force: when True, delete files regardless of age
    """
    if not sickbeard.CACHE_DIR:
        return

    logger.log(u'Trying to clean cache folder %s' % sickbeard.CACHE_DIR)

    # nothing to clean if the configured cache folder does not exist
    if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
        logger.log(u'Skipping clean of non-existing folder: %s' % sickbeard.CACHE_DIR, logger.WARNING)
        return

    skip_dirs = ['rss', 'images', 'zoneinfo']
    # files last modified before this timestamp (12 hours ago) are stale
    oldest_allowed = time.mktime((datetime.datetime.now() - datetime.timedelta(hours=12)).timetuple())

    for entry in scantree(sickbeard.CACHE_DIR, skip_dirs, follow_symlinks=True):
        stale_file = entry.is_file(follow_symlinks=False) and (
            force or oldest_allowed > entry.stat(follow_symlinks=False).st_mtime)
        if stale_file:
            try:
                ek.ek(os.remove, entry.path)
            except OSError as e:
                logger.log('Unable to delete %s: %r / %s' % (entry.path, e, str(e)), logger.WARNING)
        elif entry.is_dir(follow_symlinks=False) and entry.name not in ['cheetah', 'sessions', 'indexers']:
            # best effort: os.rmdir only succeeds on empty directories
            try:
                ek.ek(os.rmdir, entry.path)
            except OSError:
                pass
def human(size): def human(size):
@ -1474,11 +1465,14 @@ def cpu_sleep():
time.sleep(cpu_presets[sickbeard.CPU_PRESET]) time.sleep(cpu_presets[sickbeard.CPU_PRESET])
def scantree(path, exclude=None, follow_symlinks=False):
    """Recursively yield DirEntry objects for given directory.

    :param path: root directory to scan
    :param exclude: directory name, or list of names, to skip at any depth
    :param follow_symlinks: whether symlinked directories are descended into
    :return: generator of DirEntry objects; an excluded directory and its
        contents are never yielded, a non-excluded directory is yielded
        after its contents
    """
    # normalise exclude to a list: None -> [], scalar -> [scalar]
    if not isinstance(exclude, list):
        exclude = [] if None is exclude else [exclude]
    for entry in ek.ek(scandir, path):
        if entry.is_dir(follow_symlinks=follow_symlinks):
            if entry.name not in exclude:
                # Fix: propagate exclude/follow_symlinks into the recursion;
                # previously nested excluded folders were still scanned and
                # symlink handling reverted to the default below the top level
                for subentry in scantree(entry.path, exclude, follow_symlinks):
                    yield subentry
                yield entry
        else:
            yield entry
@ -1497,7 +1491,8 @@ def delete_not_changed_in(paths, days=30, minutes=0):
Delete files under paths not changed in n days and/or n minutes. Delete files under paths not changed in n days and/or n minutes.
If a file was modified later than days/and or minutes, then don't delete it. If a file was modified later than days/and or minutes, then don't delete it.
:param paths: List of paths to scan for files to delete :param paths: Path(s) to scan for files to delete
:type paths: String or List of strings
:param days: Purge files not modified in this number of days (default: 30 days) :param days: Purge files not modified in this number of days (default: 30 days)
:param minutes: Purge files not modified in this number of minutes (default: 0 minutes) :param minutes: Purge files not modified in this number of minutes (default: 0 minutes)
:return: tuple; number of files that qualify for deletion, number of qualifying files that failed to be deleted :return: tuple; number of files that qualify for deletion, number of qualifying files that failed to be deleted
@ -1505,7 +1500,7 @@ def delete_not_changed_in(paths, days=30, minutes=0):
del_time = time.mktime((datetime.datetime.now() - datetime.timedelta(days=days, minutes=minutes)).timetuple()) del_time = time.mktime((datetime.datetime.now() - datetime.timedelta(days=days, minutes=minutes)).timetuple())
errors = 0 errors = 0
qualified = 0 qualified = 0
for c in paths: for c in (paths, [paths])[not isinstance(paths, list)]:
try: try:
for f in scantree(c): for f in scantree(c):
if f.is_file(follow_symlinks=False) and del_time > f.stat(follow_symlinks=False).st_mtime: if f.is_file(follow_symlinks=False) and del_time > f.stat(follow_symlinks=False).st_mtime: