# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

import sickbeard
from lib.dateutil import tz
import lib.dateutil.zoneinfo
from sickbeard import db
from sickbeard import helpers
from sickbeard import logger
from sickbeard import encodingKludge as ek
from os.path import basename, join, isfile
import os
import re
import time
import datetime

# regex to parse time (12/24 hour format)
time_regex = re.compile(r"(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b", flags=re.IGNORECASE)
am_regex = re.compile(r"(A[. ]? ?M)", flags=re.IGNORECASE)
pm_regex = re.compile(r"(P[. ]? ?M)", flags=re.IGNORECASE)
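
# Capture groups used by parse_date_time() below: group(1) is the hour,
# group(4) holds the minutes (if any) when an AM/PM marker matched as
# group(5), and group(6) holds the minutes for plain 24-hour times.
# For example, "8:00 PM" yields '8'/'00'/'PM' for groups 1/4/5, while
# "20:00" yields '20' and '00' for groups 1/6.

# network_dict maps a network name to a timezone name understood by dateutil's
# tz.gettz(); it is filled from cache.db by load_network_dict(). sb_timezone is
# the local timezone and serves as the fallback whenever no network timezone
# can be resolved.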
network_dict = None

sb_timezone = tz.tzlocal()


# helper to remove failed temp download
def _remove_zoneinfo_failed(filename):
    try:
        ek.ek(os.remove, filename)
    except:
        pass


# helper to remove old unneeded zoneinfo files
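# (It walks the dateutil/zoneinfo package directory and deletes every *.tar.gz
# file except the tarball dateutil currently reports via ZONEINFOFILE.)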
def _remove_old_zoneinfo():
    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        return

    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))

    for (path, dirs, files) in ek.ek(os.walk,
                                     helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek.ek(join, path, filename)
                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                    try:
                        ek.ek(os.remove, file_w_path)
                        logger.log(u"Deleted unneeded old zoneinfo file: " + file_w_path)
                    except:
                        logger.log(u"Unable to delete: " + file_w_path, logger.ERROR)


# update the dateutil zoneinfo
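# (Fetches a small version file from the sb_network_timezones repo and, when
# the published tarball name differs from the one dateutil currently uses,
# downloads the new zoneinfo tarball to a .tmp file, checks its MD5, swaps it
# in place of the old one, and reloads lib.dateutil.zoneinfo.)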
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)

    if url_data is None:
        # When url_data is None, trouble connecting to github
        logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None
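    # zoneinfo.txt is expected to hold a single line of the form
    # "<tarball filename> <md5 hash>"; the split below relies on that.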
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/' + new_zoneinfo

    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u"MD5 hash doesn't match: expected " + zoneinfo_md5.upper() + ", got " + new_hash.upper(),
                   logger.ERROR)
        return


# update the network timezone table
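# (Refreshes the bundled zoneinfo, downloads the current network -> timezone
# list, and then syncs the network_timezones table in cache.db so it matches
# the downloaded data.)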
def update_network_dict():
    _remove_old_zoneinfo()
    _update_zoneinfo()

    d = {}

    # network timezones are stored on github pages
    url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt'

    url_data = helpers.getURL(url)

    if url_data is None:
        # When url_data is None, trouble connecting to github
        logger.log(u"Loading network timezones update failed. Unable to get URL: " + url, logger.ERROR)
        load_network_dict()
        return

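    # Each line of network_timezones.txt is expected to look like
    # "<network name>:<tz database zone>", e.g. a (hypothetical) entry
    # "NBC:America/New_York"; rsplit on the last ':' keeps network names
    # that themselves contain colons intact.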
    try:
        for line in url_data.splitlines():
            (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            d[key] = val
    except (IOError, OSError, ValueError):
        # ValueError covers a malformed line without a ':' separator
        pass

    myDB = db.DBConnection('cache.db')
    # load current network timezones
    old_d = dict(myDB.select("SELECT * FROM network_timezones"))

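    # Build a minimal set of statements: update rows whose timezone changed,
    # insert networks that are new, and finally delete rows for networks that
    # no longer appear upstream.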
    # list of sql commands to update the network_timezones table
    ql = []
    for cur_d, cur_t in d.iteritems():
        h_k = cur_d in old_d
        if h_k and cur_t != old_d[cur_d]:
            # update old record
            ql.append(
                ["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
        elif not h_k:
            # add new record
            ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
        if h_k:
            del old_d[cur_d]
    # remove deleted records
    if len(old_d) > 0:
        L = list(va for va in old_d)
        ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])
    # change all network timezone infos at once (much faster)
    if ql:
        myDB.mass_action(ql)
        load_network_dict()


# load network timezones from db into dict
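# (Fills the module-level network_dict; if the cache table is still empty it
# triggers update_network_dict() first and then re-reads the table.)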
def load_network_dict():
    d = {}
    try:
        myDB = db.DBConnection('cache.db')
        cur_network_list = myDB.select("SELECT * FROM network_timezones")
        if cur_network_list is None or len(cur_network_list) < 1:
            update_network_dict()
            cur_network_list = myDB.select("SELECT * FROM network_timezones")
        d = dict(cur_network_list)
    except:
        d = {}
    global network_dict
    network_dict = d


# get timezone of a network or return default timezone
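# (Note: the network_dict parameter shadows the module-level global of the same
# name; callers such as parse_date_time() pass the global in explicitly.)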
def get_network_timezone(network, network_dict):
    if network is None:
        return sb_timezone

    try:
        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
            try:
                n_t = tz.gettz(network_dict[network])
            except:
                return sb_timezone

            if n_t is not None:
                return n_t
            else:
                return sb_timezone
        else:
            return sb_timezone
    except:
        return sb_timezone


# parse date and time string into local time
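# d is the air date as a proleptic Gregorian ordinal (see datetime.fromordinal),
# t is the listing time as text (e.g. "8:00 PM" or "20:00"), and network is the
# show's network name. Illustrative call with made-up values:
#   parse_date_time(735599, "8:00 PM", "NBC")
# When sickbeard.TIMEZONE_DISPLAY is 'local' the time is interpreted in the
# network's timezone and converted to the local one; otherwise it is returned
# with the local timezone attached. On any error a naive datetime is returned.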
def parse_date_time(d, t, network):
    if network_dict is None:
        load_network_dict()
    mo = time_regex.search(t)
    if mo is not None and len(mo.groups()) >= 5:
        if mo.group(5) is not None:
            try:
                hr = helpers.tryInt(mo.group(1))
                m = helpers.tryInt(mo.group(4))
                ap = mo.group(5)
                # convert am/pm to 24 hour clock
                if ap is not None:
                    if pm_regex.search(ap) is not None and hr != 12:
                        hr += 12
                    elif am_regex.search(ap) is not None and hr == 12:
                        hr -= 12
            except:
                hr = 0
                m = 0
        else:
            try:
                hr = helpers.tryInt(mo.group(1))
                m = helpers.tryInt(mo.group(6))
            except:
                hr = 0
                m = 0
    else:
        hr = 0
        m = 0
    if hr < 0 or hr > 23 or m < 0 or m > 59:
        hr = 0
        m = 0

    te = datetime.datetime.fromordinal(helpers.tryInt(d))

    try:
        if sickbeard.TIMEZONE_DISPLAY == 'local':
            foreign_timezone = get_network_timezone(network, network_dict)
            foreign_naive = datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=foreign_timezone)
            return foreign_naive.astimezone(sb_timezone)
        else:
            return datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=sb_timezone)
    except:
        return datetime.datetime(te.year, te.month, te.day, hr, m)


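# quick sanity check that a time string is something time_regex can parse; for
# instance "8:00 PM", "8 pm", "8.30pm" and "20:00" pass, while a bare "8" does not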
def test_timeformat(t):
    mo = time_regex.search(t)
    if mo is None or len(mo.groups()) < 2:
        return False
    else:
        return True