Merge pull request #423 from JackDandy/feature/ChangeMetadataZoneinfo
Change use metadata for zoneinfo files and remove hack of dateutil lib.
Commit 81de04efa1
4 changed files with 33 additions and 36 deletions
@@ -55,6 +55,7 @@
 * Change reload_module call to explicit import lib/six.moves
 * Change queue, httplib, cookielib and xmlrpclib to use explicit import lib/six.moves
 * Change zoneinfo update/loader to be compatible with dateutil 2.4.2
+* Change use metadata for zoneinfo files and remove hack of dateutil lib
 * Change param item "features" passed to Beautiful Soup to prevent false +ve warning in r353
@@ -3,4 +3,3 @@ Libs with customisations...
 /tornado
 /lib/requests/packages/urllib3/connectionpool.py
 /lib/requests/packages/urllib3/util/ssl_.py
-/lib/dateutil/zoneinfo/__init__.py
@@ -16,17 +16,7 @@ from dateutil.tz import tzfile
 
 __all__ = ["gettz", "gettz_db_metadata", "rebuild"]
 
-def getzoneinfofile():
-    filenames = sorted(os.listdir(os.path.join(os.path.dirname(__file__))))
-    filenames.reverse()
-    for entry in filenames:
-        if entry.startswith("zoneinfo") and ".tar." in entry:
-            return os.path.join(os.path.dirname(__file__), entry)
-    return None
-
-_ZONEFILENAME = getzoneinfofile()
-# _ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
+_ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
 _METADATA_FN = 'METADATA'
 
 # python2.6 compatability. Note that TarFile.__exit__ != TarFile.close, but
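With the hack gone, the dateutil zoneinfo loader (the /lib/dateutil/zoneinfo/__init__.py dropped from the customisations list above) is used as shipped: the tarball keeps the fixed name dateutil-zoneinfo.tar.gz and version information comes from the METADATA file bundled inside the archive, exposed through gettz_db_metadata(). A minimal sketch of that idea, assuming METADATA is a JSON member carrying a tzversion field; read_zoneinfo_metadata is an illustrative helper, not code from dateutil or SickGear:

    # Illustrative only: read the METADATA member bundled in a zoneinfo tarball
    # and return it as a dict, roughly what zoneinfo.gettz_db_metadata() reports
    # for the installed dateutil-zoneinfo.tar.gz.
    import json
    import os
    import tarfile

    def read_zoneinfo_metadata(tarball_path, metadata_fn='METADATA'):
        if not os.path.isfile(tarball_path):
            return None
        with tarfile.open(tarball_path, 'r:gz') as tf:
            try:
                member = tf.getmember(metadata_fn)
            except KeyError:
                return None  # older tarballs may not bundle METADATA
            return json.loads(tf.extractfile(member).read().decode('utf-8'))

    # e.g. read_zoneinfo_metadata('dateutil-zoneinfo.tar.gz')
    # -> {'tzversion': '2015d', ...} (hypothetical values)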
@@ -16,8 +16,9 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
-from lib.dateutil import tz
-import lib.dateutil.zoneinfo
+from lib.six import iteritems
+from lib.dateutil import tz, zoneinfo
 from sickbeard import db
 from sickbeard import helpers
 from sickbeard import logger
@@ -26,12 +27,11 @@ from os.path import basename, join, isfile
 import os
 import re
 import datetime
-from lib.six import iteritems, moves
 
 # regex to parse time (12/24 hour format)
-time_regex = re.compile(r'(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b', flags=re.IGNORECASE)
-am_regex = re.compile(r'(A[. ]? ?M)', flags=re.IGNORECASE)
-pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.IGNORECASE)
+time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)
+am_regex = re.compile(r'(A[. ]? ?M)', flags=re.I)
+pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I)
 
 network_dict = None
 
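For reference, the tightened pattern above behaves as before (\d{2,2} and \d{2} are equivalent, as are re.IGNORECASE and re.I). A quick check of how it parses a 12-hour time; the input string is made up and the group numbers are from my reading of the regex, not from the source:

    import re

    time_regex = re.compile(r'(\d{1,2})(([:.](\d{2}))? ?([PA][. ]? ?M)|[:.](\d{2}))\b', flags=re.I)

    m = time_regex.search('Mondays 8:30 PM')  # illustrative input
    # m.group(1) == '8'   hour
    # m.group(4) == '30'  minutes via the 12-hour branch
    # m.group(5) == 'PM'  AM/PM marker; a bare 24-hour '20:30' lands in group(6) instead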
@@ -48,15 +48,15 @@ def _remove_zoneinfo_failed(filename):
 
 # helper to remove old unneeded zoneinfo files
 def _remove_old_zoneinfo():
-    zonefilename = lib.dateutil.zoneinfo._ZONEFILENAME
+    zonefilename = zoneinfo._ZONEFILENAME
     if None is zonefilename:
         return
     cur_zoneinfo = ek.ek(basename, zonefilename)
 
-    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
+    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
 
     for (path, dirs, files) in ek.ek(os.walk,
-                                     helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
+                                     helpers.real_path(ek.ek(os.path.dirname, zoneinfo.__file__))):
         for filename in files:
             if filename.endswith('.tar.gz'):
                 file_w_path = ek.ek(join, path, filename)
@@ -83,19 +83,24 @@ def _update_zoneinfo():
                    logger.WARNING)
         return
 
-    zonefilename = lib.dateutil.zoneinfo._ZONEFILENAME
+    zonefilename = zoneinfo._ZONEFILENAME
     cur_zoneinfo = zonefilename
     if None is not cur_zoneinfo:
         cur_zoneinfo = ek.ek(basename, zonefilename)
+    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
+    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None
     (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')
+    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
+    if not newtz_regex or len(newtz_regex.groups()) != 1:
+        return
+    newtzversion = newtz_regex.group(1)
 
-    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
+    if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata and zonemetadata['tzversion'] == newtzversion:
         return
 
     # now load the new zoneinfo
     url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo
 
-    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
     zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)
 
     if ek.ek(os.path.exists, zonefile_tmp):
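The hunk above is the heart of the change: instead of comparing tarball file names, the updater now parses the tz database version out of the remote file name and compares it with the 'tzversion' recorded in the local tarball's metadata. A condensed sketch of that decision, using a hypothetical remote name and assuming gettz_db_metadata() returns a dict with a 'tzversion' key as above; zoneinfo_needs_update is an illustrative helper, not the project's code:

    import re

    def zoneinfo_needs_update(new_zoneinfo, zonemetadata):
        # new_zoneinfo: remote file name, e.g. 'zoneinfo-2015d.tar.gz' (hypothetical)
        # zonemetadata: dict from zoneinfo.gettz_db_metadata(), or None when no local tarball
        newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)  # pull '2015d' out of the name
        if not newtz_regex or len(newtz_regex.groups()) != 1:
            return False  # can't tell; the real code bails out here as well
        newtzversion = newtz_regex.group(1)
        if zonemetadata is not None and zonemetadata.get('tzversion') == newtzversion:
            return False  # local metadata already reports this version
        return True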
@@ -120,13 +125,15 @@ def _update_zoneinfo():
             # remove the old zoneinfo file
             if cur_zoneinfo is not None:
                 old_file = helpers.real_path(
-                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
+                    ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
                 if ek.ek(os.path.exists, old_file):
                     ek.ek(os.remove, old_file)
             # rename downloaded file
             ek.ek(os.rename, zonefile_tmp, zonefile)
-            # load the new zoneinfo
-            moves.reload_module(lib.dateutil.zoneinfo)
+            from dateutil.zoneinfo import gettz
+            if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
+                gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())
 
             sb_timezone = tz.tzlocal()
         except:
             _remove_zoneinfo_failed(zonefile_tmp)
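After the tarball is swapped in, dateutil's gettz() would keep serving zones from a module-level cache, which is why the old code reloaded the whole zoneinfo module. The new lines reset that cache instead; restated below with comments (func_globals is the Python 2 spelling, __globals__ on Python 3):

    from dateutil.zoneinfo import gettz

    # gettz() caches ZoneInfoFile instances in its module globals; emptying the
    # cache makes the next lookup re-read the freshly installed tarball without
    # a module reload.
    if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
        gettz.func_globals['_CLASS_ZONE_INSTANCE'] = list()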
@@ -172,7 +179,7 @@ def update_network_dict():
     # list of sql commands to update the network_timezones table
     cl = []
     for cur_d, cur_t in iteritems(d):
-        h_k = old_d.has_key(cur_d)
+        h_k = cur_d in old_d
         if h_k and cur_t != old_d[cur_d]:
             # update old record
             cl.append(
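dict.has_key() only exists on Python 2, so the membership test above is the portable spelling. A trivial illustration with made-up data:

    old_d = {'CBS': 'US/Eastern'}  # illustrative data only
    print('CBS' in old_d)          # True on Python 2 and 3
    # old_d.has_key('CBS')         # Python 2 only; AttributeError on Python 3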
@@ -215,7 +222,7 @@ def get_network_timezone(network, network_dict):
         return sb_timezone
 
     try:
-        if lib.dateutil.zoneinfo._ZONEFILENAME is not None:
+        if zoneinfo._ZONEFILENAME is not None:
             try:
                 n_t = tz.gettz(network_dict[network])
             except:
@@ -283,11 +290,11 @@ def test_timeformat(t):
 
 
 def standardize_network(network, country):
-    myDB = db.DBConnection('cache.db')
-    sqlResults = myDB.select('SELECT * FROM network_conversions WHERE tvrage_network = ? and tvrage_country = ?',
+    my_db = db.DBConnection('cache.db')
+    sql_results = my_db.select('SELECT * FROM network_conversions WHERE tvrage_network = ? and tvrage_country = ?',
                              [network, country])
-    if len(sqlResults) == 1:
-        return sqlResults[0]['tvdb_network']
+    if len(sql_results) == 1:
+        return sql_results[0]['tvdb_network']
     else:
         return network
 
@@ -324,7 +331,7 @@ def load_network_conversions():
 
     for n_w in conversions:
         cl.append(['INSERT OR REPLACE INTO network_conversions (tvdb_network, tvrage_network, tvrage_country)'
                   'VALUES (?,?,?)', [n_w['tvdb_network'], n_w['tvrage_network'], n_w['tvrage_country']]])
        try:
            del old_d[n_w['tvdb_network']]
        except:
@@ -338,4 +345,4 @@ def load_network_conversions():
 
     # change all network conversion info at once (much faster)
     if len(cl) > 0:
         my_db.mass_action(cl)
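The closing hunk keeps the existing pattern: statements are queued in cl as [query, params] entries and handed to my_db.mass_action(cl) in one batch, which is what the "much faster" comment refers to. As a rough equivalent using the standard sqlite3 module rather than SickGear's db wrapper, assuming the network_conversions table already exists; save_conversions is an illustrative name:

    import sqlite3

    def save_conversions(db_path, conversions):
        # Write every row in one transaction; batching is what makes this
        # noticeably faster than issuing the statements one by one.
        with sqlite3.connect(db_path) as conn:  # commits once on success
            conn.executemany(
                'INSERT OR REPLACE INTO network_conversions'
                ' (tvdb_network, tvrage_network, tvrage_country) VALUES (?,?,?)',
                [(n_w['tvdb_network'], n_w['tvrage_network'], n_w['tvrage_country'])
                 for n_w in conversions])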