Merge pull request #644 from JackDandy/feature/ChangeZoneinfoDir

Change move dateutil-zoneinfo.tar.gz file to data files /cache.
JackDandy 2016-02-08 19:48:39 +00:00
commit 9e540ffa91
5 changed files with 23 additions and 9 deletions
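
In outline, the change stops loading dateutil-zoneinfo.tar.gz as package data from lib/dateutil/zoneinfo and instead keeps it in a 'zoneinfo' folder under the configured cache directory, created at startup. A minimal sketch of the new lookup, using plain os/io calls in place of SickGear's ek() wrapper (the paths and names below are illustrative, not the project's API):

    import io
    import os

    ZONEFILENAME = 'dateutil-zoneinfo.tar.gz'
    # assumption: the app's cache dir with a 'zoneinfo' subfolder, as created in initialize()
    ZONEINFO_DIR = os.path.join('/path/to/cache', 'zoneinfo')

    def zoneinfo_stream():
        # read the tarball from the cache dir instead of pkgutil.get_data(__name__, ...)
        try:
            with open(os.path.join(ZONEINFO_DIR, ZONEFILENAME), 'rb') as f:
                return io.BytesIO(f.read())
        except IOError:
            return None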

CHANGES.md

@@ -25,6 +25,7 @@
 * Change emails to Unicode aware
 * Add force episode recent search to API
 * Change process episodes with utf8 dir and nzb names, handle failed episodes without a dir, add log output streaming
+* Change move dateutil-zoneinfo.tar.gz file to data files /cache

 ### 0.11.5 (2016-02-01 19:40:00 UTC)

HACKS.txt

@@ -1,10 +1,11 @@
 Libs with customisations...

 /lib/cachecontrol/caches/file_cache.py
+/lib/dateutil/zoneinfo/__init__.py
 /lib/hachoir_core/config.py
 /lib/pynma/pynma.py
 /lib/requests/packages/urllib3/connectionpool.py
 /lib/requests/packages/urllib3/util/ssl_.py
-/tornado
+/lib/tornado
+/lib/tvdb/tvdb_api.py
 /lib/unrar2/unix.py
-/lib/tvdb/tvdb_api.py

lib/dateutil/zoneinfo/__init__.py

@@ -14,6 +14,9 @@ from contextlib import closing
 from dateutil.tz import tzfile

+from sickbeard import encodingKludge as ek
+import sickbeard

 __all__ = ["gettz", "gettz_db_metadata", "rebuild"]

 ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
@@ -34,7 +37,9 @@ class tzfile(tzfile):
 def getzoneinfofile_stream():
     try:
-        return BytesIO(get_data(__name__, ZONEFILENAME))
+        # return BytesIO(get_data(__name__, ZONEFILENAME))
+        with open(ek.ek(os.path.join, sickbeard.ZONEINFO_DIR, ZONEFILENAME), 'rb') as f:
+            return BytesIO(f.read())
     except IOError as e:  # TODO  switch to FileNotFoundError?
         warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror))
         return None
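
The stream returned here is consumed by dateutil's tarfile-based zone lookup: each timezone is a member of the archive and is parsed with dateutil.tz.tzfile. A self-contained sketch of that idea against the relocated tarball (this is not the project's code; member names are assumed to equal zone names, and an unknown name raises KeyError):

    import io
    import os
    from tarfile import TarFile

    from dateutil.tz import tzfile  # python-dateutil

    ZONEFILENAME = 'dateutil-zoneinfo.tar.gz'

    def gettz_from(cache_dir, name):
        # open the relocated tarball and build a tzinfo for a single zone name
        path = os.path.join(cache_dir, ZONEFILENAME)
        if not os.path.isfile(path):
            return None
        with open(path, 'rb') as f:
            with TarFile.open(fileobj=io.BytesIO(f.read())) as tf:
                return tzfile(tf.extractfile(tf.getmember(name)))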

sickbeard/__init__.py

@@ -43,6 +43,7 @@ from indexers.indexer_api import indexerApi
 from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, \
     indexer_episodenotfound, indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts
 from sickbeard.providers.generic import GenericProvider
+from sickbeard import encodingKludge as ek
 from lib.configobj import ConfigObj
 from lib.libtrakt import TraktAPI
 import trakt_helpers
@@ -133,6 +134,7 @@ HTTPS_KEY = None
 LAUNCH_BROWSER = False
 CACHE_DIR = None
 ACTUAL_CACHE_DIR = None
+ZONEINFO_DIR = None
 ROOT_DIRS = None
 TRASH_REMOVE_SHOW = False
 TRASH_ROTATE_LOGS = False
@@ -503,7 +505,7 @@ def initialize(consoleLogging=True):
         USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_ACCESS_TOKEN, PUSHBULLET_DEVICE_IDEN, \
         versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, NOTIFY_ON_UPDATE, PROCESS_AUTOMATICALLY, UNPACK, CPU_PRESET, \
         KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_RECENTSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \
-        showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TIMEZONE_DISPLAY, \
+        showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, ZONEINFO_DIR, TIMEZONE_DISPLAY, \
         NAMING_PATTERN, NAMING_MULTI_EP, NAMING_ANIME_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_ANIME_PATTERN, NAMING_CUSTOM_ANIME, NAMING_STRIP_YEAR, \
         RENAME_EPISODES, AIRDATE_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
         providerList, newznabProviderList, torrentRssProviderList, \
@@ -581,6 +583,9 @@ def initialize(consoleLogging=True):
         # clean cache folders
         if CACHE_DIR:
             helpers.clearCache()
+            ZONEINFO_DIR = ek.ek(os.path.join, CACHE_DIR, 'zoneinfo')
+            if not ek.ek(os.path.isdir, ZONEINFO_DIR) and not helpers.make_dirs(ZONEINFO_DIR):
+                logger.log(u'!!! Creating local zoneinfo dir failed', logger.ERROR)

         THEME_NAME = check_setting_str(CFG, 'GUI', 'theme_name', 'dark')
         GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick')
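
helpers.make_dirs is referenced but not shown in this diff; as a rough stand-in, the guard added above amounts to the following sketch (not SickGear's helper, and the logging call is a plain-stdlib substitute for logger.log):

    import logging
    import os

    log = logging.getLogger(__name__)

    def ensure_zoneinfo_dir(cache_dir):
        # create <cache_dir>/zoneinfo if needed; return its path, or None on failure
        zoneinfo_dir = os.path.join(cache_dir, 'zoneinfo')
        if not os.path.isdir(zoneinfo_dir):
            try:
                os.makedirs(zoneinfo_dir)
            except OSError:
                log.error('!!! Creating local zoneinfo dir failed')
                return None
        return zoneinfo_dir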

sickbeard/network_timezones.py

@@ -24,9 +24,11 @@ from sickbeard import helpers
 from sickbeard import logger
 from sickbeard import encodingKludge as ek
 from os.path import basename, join, isfile
+from itertools import chain
 import os
 import re
 import datetime
+import sickbeard

 # regex to parse time (12/24 hour format)
 time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)
@@ -53,10 +55,10 @@ def _remove_old_zoneinfo():
         return
     cur_zoneinfo = ek.ek(basename, zonefilename)
-    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
+    cur_file = helpers.real_path(ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo))

-    for (path, dirs, files) in ek.ek(os.walk,
-                                     helpers.real_path(ek.ek(os.path.dirname, zoneinfo.__file__))):
+    for (path, dirs, files) in chain.from_iterable(ek.ek(os.walk,
+                                                         helpers.real_path(di)) for di in (sickbeard.ZONEINFO_DIR, ek.ek(os.path.dirname, zoneinfo.__file__))):
         for filename in files:
             if filename.endswith('.tar.gz'):
                 file_w_path = ek.ek(join, path, filename)
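
The switch to chain.from_iterable lets the cleanup walk both the new cache location and the old package directory in a single loop, so a tarball left behind by an earlier install is still found and removed. A self-contained sketch of the same pattern (directory names are placeholders):

    import os
    from itertools import chain

    def find_old_tarballs(dirs, keep_name):
        # yield *.tar.gz paths under any of the given dirs, except the file to keep
        walker = chain.from_iterable(os.walk(os.path.realpath(d)) for d in dirs)
        for path, _subdirs, files in walker:
            for filename in files:
                if filename.endswith('.tar.gz') and filename != keep_name:
                    yield os.path.join(path, filename)

    # e.g. list(find_old_tarballs(['/cache/zoneinfo', '/app/lib/dateutil/zoneinfo'],
    #                             'dateutil-zoneinfo.tar.gz'))
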
@@ -87,7 +89,7 @@ def _update_zoneinfo():
     cur_zoneinfo = zonefilename
     if None is not cur_zoneinfo:
         cur_zoneinfo = ek.ek(basename, zonefilename)
-    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
+    zonefile = helpers.real_path(ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo))
     zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None
     (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')
     newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
@@ -125,7 +127,7 @@ def _update_zoneinfo():
             # remove the old zoneinfo file
             if cur_zoneinfo is not None:
                 old_file = helpers.real_path(
-                    ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
+                    ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo))
                 if ek.ek(os.path.exists, old_file):
                     ek.ek(os.remove, old_file)
             # rename downloaded file