Merge pull request #423 from JackDandy/feature/ChangeMetadataZoneinfo

Change to use metadata for zoneinfo files and remove the dateutil lib hack.
JackDandy 2015-06-15 15:30:04 +01:00
commit 81de04efa1
4 changed files with 33 additions and 36 deletions
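
The gist of the change: dateutil 2.4.2 ships its zone data as a fixed-name tarball (dateutil-zoneinfo.tar.gz) that bundles a JSON METADATA member, exposed via zoneinfo.gettz_db_metadata(), so the tz database version can be tracked from metadata instead of patching dateutil to encode the version into the filename. A minimal sketch of reading that metadata directly, assuming the dateutil 2.4.2 tarball layout (read_tz_metadata is a hypothetical helper, not project code):

import json
from tarfile import TarFile

def read_tz_metadata(tarball_path):
    # The 2.4.2 tarball bundles a JSON file named METADATA next to the
    # compiled zone files; dateutil's gettz_db_metadata() reads the same data.
    tf = TarFile.open(tarball_path, mode='r')
    try:
        raw = tf.extractfile('METADATA').read()
    finally:
        tf.close()
    return json.loads(raw.decode('utf-8'))

# read_tz_metadata('dateutil-zoneinfo.tar.gz')  ->  {'tzversion': '2015d', ...}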

View file

@@ -55,6 +55,7 @@
 * Change reload_module call to explicit import lib/six.moves
 * Change queue, httplib, cookielib and xmlrpclib to use explicit import lib/six.moves
 * Change zoneinfo update/loader to be compatible with dateutil 2.4.2
+* Change to use metadata for zoneinfo files and remove the dateutil lib hack
 * Change param item "features" passed to Beautiful Soup to prevent false +ve warning in r353
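
For context on the neighbouring six.moves changelog entries: the pattern is to import the compatibility names explicitly rather than relying on six's import hooks. A sketch of that style, assuming six is vendored as lib/six as elsewhere in this diff:

# Explicit imports of the Py2/Py3 compatibility names from six.moves.
from lib.six.moves import queue                    # Py2 Queue / Py3 queue
from lib.six.moves import http_client              # Py2 httplib
from lib.six.moves import http_cookiejar           # Py2 cookielib
from lib.six.moves import xmlrpc_client            # Py2 xmlrpclib
from lib.six.moves import reload_module            # Py2 builtin reload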

View file

@@ -3,4 +3,3 @@ Libs with customisations...
 /tornado
 /lib/requests/packages/urllib3/connectionpool.py
 /lib/requests/packages/urllib3/util/ssl_.py
-/lib/dateutil/zoneinfo/__init__.py

View file

@@ -16,17 +16,7 @@ from dateutil.tz import tzfile

 __all__ = ["gettz", "gettz_db_metadata", "rebuild"]

-def getzoneinfofile():
-    filenames = sorted(os.listdir(os.path.join(os.path.dirname(__file__))))
-    filenames.reverse()
-    for entry in filenames:
-        if entry.startswith("zoneinfo") and ".tar." in entry:
-            return os.path.join(os.path.dirname(__file__), entry)
-    return None
-
-_ZONEFILENAME = getzoneinfofile()
-# _ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
+_ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
+_METADATA_FN = 'METADATA'

 # python2.6 compatability. Note that TarFile.__exit__ != TarFile.close, but
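
With the hack removed, the archive name is a constant again and the version comes from the bundled metadata. A usage sketch against the restored module (the '2015d' value is an illustrative example):

from lib.dateutil import zoneinfo

# The archive name is now fixed; the tz database version is read from the
# bundled metadata instead of being encoded into the filename.
meta = zoneinfo.gettz_db_metadata()
if meta is not None:
    print(meta.get('tzversion'))  # e.g. '2015d'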

View file

@@ -16,8 +16,9 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-from lib.dateutil import tz
-import lib.dateutil.zoneinfo
+from lib.six import iteritems
+from lib.dateutil import tz, zoneinfo

 from sickbeard import db
 from sickbeard import helpers
 from sickbeard import logger
@@ -26,12 +27,11 @@ from os.path import basename, join, isfile
 import os
 import re
 import datetime
-from lib.six import iteritems, moves

 # regex to parse time (12/24 hour format)
-time_regex = re.compile(r'(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b', flags=re.IGNORECASE)
-am_regex = re.compile(r'(A[. ]? ?M)', flags=re.IGNORECASE)
-pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.IGNORECASE)
+time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)
+am_regex = re.compile(r'(A[. ]? ?M)', flags=re.I)
+pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I)

 network_dict = None
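
A quick sketch exercising the tightened pattern (the redundant {2,2} bound becomes {2}; behaviour is unchanged). Group 1 is the hour; the alternation covers "8:00 PM" style 12-hour times and "20:30" style 24-hour times:

import re

time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)

for sample in ('8:00 PM', '20:30', '9 am'):
    match = time_regex.search(sample)
    print(sample, '->', match and match.groups())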
@@ -48,15 +48,15 @@ def _remove_zoneinfo_failed(filename):
 # helper to remove old unneeded zoneinfo files
 def _remove_old_zoneinfo():
-    zonefilename = lib.dateutil.zoneinfo._ZONEFILENAME
+    zonefilename = zoneinfo._ZONEFILENAME
     if None is zonefilename:
         return

     cur_zoneinfo = ek.ek(basename, zonefilename)
-    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
+    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))

     for (path, dirs, files) in ek.ek(os.walk,
-                                     helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
+                                     helpers.real_path(ek.ek(os.path.dirname, zoneinfo.__file__))):
         for filename in files:
             if filename.endswith('.tar.gz'):
                 file_w_path = ek.ek(join, path, filename)
@@ -83,19 +83,24 @@ def _update_zoneinfo():
                    logger.WARNING)
         return

-    zonefilename = lib.dateutil.zoneinfo._ZONEFILENAME
+    zonefilename = zoneinfo._ZONEFILENAME
     cur_zoneinfo = zonefilename
     if None is not cur_zoneinfo:
         cur_zoneinfo = ek.ek(basename, zonefilename)
+    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
+    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None

     (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')
+    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
+    if not newtz_regex or len(newtz_regex.groups()) != 1:
+        return
+    newtzversion = newtz_regex.group(1)

-    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
+    if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata and zonemetadata['tzversion'] == newtzversion:
         return

     # now load the new zoneinfo
     url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo
-    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
     zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

     if ek.ek(os.path.exists, zonefile_tmp):
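
The hunk above replaces the old filename comparison with a metadata comparison: the version token parsed out of the remote filename is checked against the 'tzversion' recorded in the local tarball's metadata. A condensed sketch of that logic (is_current is a hypothetical helper, and 'zoneinfo-2015d.tar.gz' an assumed example of the remote naming):

import re

def is_current(new_zoneinfo, zonemetadata):
    # Parse a version token such as '2015d' out of the remote filename.
    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
    if not newtz_regex or len(newtz_regex.groups()) != 1:
        return False
    newtzversion = newtz_regex.group(1)
    # Compare against the version recorded in the local tarball's metadata.
    return zonemetadata is not None and zonemetadata.get('tzversion') == newtzversion

print(is_current('zoneinfo-2015d.tar.gz', {'tzversion': '2015d'}))  # True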
@@ -120,13 +125,15 @@ def _update_zoneinfo():
         # remove the old zoneinfo file
         if cur_zoneinfo is not None:
             old_file = helpers.real_path(
-                ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
+                ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
             if ek.ek(os.path.exists, old_file):
                 ek.ek(os.remove, old_file)

         # rename downloaded file
         ek.ek(os.rename, zonefile_tmp, zonefile)

         # load the new zoneinfo
-        moves.reload_module(lib.dateutil.zoneinfo)
+        from dateutil.zoneinfo import gettz
+        if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
+            gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())

         sb_timezone = tz.tzlocal()
     except:
         _remove_zoneinfo_failed(zonefile_tmp)
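
Why resetting that list works instead of a full module reload: dateutil.zoneinfo memoises the parsed tarball in a module-level list named _CLASS_ZONE_INSTANCE, so emptying it makes the next gettz() call re-read the archive. A Python 2 sketch of the same idea (func_globals is the Python 2 spelling of a function's __globals__; the zone name is an arbitrary example):

from dateutil.zoneinfo import gettz

cache = gettz.func_globals.get('_CLASS_ZONE_INSTANCE')
if cache:
    del cache[:]                   # same effect as installing a fresh list()
local_tz = gettz('Europe/London')  # repopulates the cache from the new tarball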
@@ -172,7 +179,7 @@ def update_network_dict():
     # list of sql commands to update the network_timezones table
     cl = []
     for cur_d, cur_t in iteritems(d):
-        h_k = old_d.has_key(cur_d)
+        h_k = cur_d in old_d
         if h_k and cur_t != old_d[cur_d]:
             # update old record
             cl.append(
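
A two-line illustration of the substitution above, with an arbitrary sample dict:

# has_key() was removed in Python 3; membership via `in` works on both.
old_d = {'ABC': 'US/Eastern'}
assert 'ABC' in old_d            # replaces old_d.has_key('ABC')
assert 'BBC One' not in old_d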
@@ -215,7 +222,7 @@ def get_network_timezone(network, network_dict):
         return sb_timezone

     try:
-        if lib.dateutil.zoneinfo._ZONEFILENAME is not None:
+        if zoneinfo._ZONEFILENAME is not None:
             try:
                 n_t = tz.gettz(network_dict[network])
             except:
@@ -283,11 +290,11 @@ def test_timeformat(t):
 def standardize_network(network, country):
-    myDB = db.DBConnection('cache.db')
-    sqlResults = myDB.select('SELECT * FROM network_conversions WHERE tvrage_network = ? and tvrage_country = ?',
-                             [network, country])
-    if len(sqlResults) == 1:
-        return sqlResults[0]['tvdb_network']
+    my_db = db.DBConnection('cache.db')
+    sql_results = my_db.select('SELECT * FROM network_conversions WHERE tvrage_network = ? and tvrage_country = ?',
+                               [network, country])
+    if len(sql_results) == 1:
+        return sql_results[0]['tvdb_network']
     else:
         return network
@@ -324,7 +331,7 @@ def load_network_conversions():
     for n_w in conversions:
         cl.append(['INSERT OR REPLACE INTO network_conversions (tvdb_network, tvrage_network, tvrage_country)'
-                  'VALUES (?,?,?)', [n_w['tvdb_network'], n_w['tvrage_network'], n_w['tvrage_country']]])
+                   'VALUES (?,?,?)', [n_w['tvdb_network'], n_w['tvrage_network'], n_w['tvrage_country']]])

         try:
             del old_d[n_w['tvdb_network']]
         except:
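
The cl list queues [query, params] pairs so the table is updated in one batch. A self-contained sketch of that batching pattern in plain sqlite3, assuming SickGear's mass_action() runs the queued pairs inside a single transaction (the helper below is a stand-in, not the project's implementation; note it puts an explicit space before VALUES, where the original relies on SQLite accepting the ')VALUES' concatenation):

import sqlite3

def mass_action(conn, cl):
    # One transaction for the whole batch; the with-block commits on
    # success and rolls back on error.
    with conn:
        for sql, args in cl:
            conn.execute(sql, args)

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE network_conversions '
             '(tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)')
cl = [['INSERT OR REPLACE INTO network_conversions (tvdb_network, tvrage_network, tvrage_country)'
       ' VALUES (?,?,?)', ['BBC One', 'BBC1', 'GB']]]
mass_action(conn, cl)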
@@ -338,4 +345,4 @@ def load_network_conversions():
     # change all network conversion info at once (much faster)
     if len(cl) > 0:
-       my_db.mass_action(cl)
+        my_db.mass_action(cl)