Fix infinite loop loading network_timezones.
* Change add optional "stack_size" setting as integer to config.ini under "General" stanza.
* Change prevent too many retries when loading network timezones, conversions, and zoneinfo in a short time.
* Update to UnRar 5.50 beta 6.
parent 9c3a158a5d
commit 1bd9cf863e
5 changed files with 65 additions and 4 deletions
CHANGES.md (10 changed lines)
@@ -1,4 +1,12 @@
-### 0.12.21 (2017-06-19 23:35:00 UTC)
+### 0.12.26 (2017-08-20 13:05:00 UTC)
+
+* Fix infinite loop loading network_timezones
+* Change add optional "stack_size" setting as integer to config.ini under "General" stanza
+* Change prevent too many retries when loading network timezones, conversions, and zoneinfo in a short time
+* Update to UnRar 5.50 beta 6
+
+
+### 0.12.25 (2017-06-19 23:35:00 UTC)
 
 * Remove provider SceneAccess
 
SickBeard.py (11 changed lines)
@@ -302,6 +302,17 @@ class SickGear(object):
             print(u'Unable to find "%s", all settings will be default!' % sickbeard.CONFIG_FILE)
 
         sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)
+        stack_size = None
+        try:
+            stack_size = int(sickbeard.CFG['General']['stack_size'])
+        except:
+            stack_size = None
+
+        if stack_size:
+            try:
+                threading.stack_size(stack_size)
+            except (StandardError, Exception) as e:
+                print('Stack Size %s not set: %s' % (stack_size, e.message))
 
         # check all db versions
         for d, min_v, max_v, mo in [
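The block added above reads an optional integer "stack_size" from the [General] section of config.ini and, when set, applies it with threading.stack_size() before startup creates any worker threads. Below is a minimal standalone sketch of that call; the 2 MiB value is only an illustrative assumption, not a SickGear default.

```python
# Minimal sketch of threading.stack_size(); the 2 MiB value is an arbitrary
# example. The call only affects threads created after it is made, which is
# why the startup code applies it before anything else spins up.
import threading


def worker():
    # stack_size() with no argument reports the size configured for new threads
    print('configured thread stack size: %s bytes' % threading.stack_size())


if __name__ == '__main__':
    try:
        # typically must be 0 (platform default) or at least 32768 bytes
        threading.stack_size(2 * 1024 * 1024)
    except Exception as e:
        print('Stack size not set: %s' % e)

    t = threading.Thread(target=worker)
    t.start()
    t.join()
```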
Binary file not shown. (Presumably the bundled UnRar executable updated to 5.50 beta 6, as noted in the commit message.)
sickbeard/__init__.py (likely):

@@ -1427,6 +1427,9 @@ def save_config():
     # For passwords you must include the word `password` in the item_name and
     # add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
     new_config['General'] = {}
+    s_z = check_setting_int(CFG, 'General', 'stack_size', 0)
+    if s_z:
+        new_config['General']['stack_size'] = s_z
     new_config['General']['config_version'] = CONFIG_VERSION
     new_config['General']['branch'] = BRANCH
     new_config['General']['git_remote'] = GIT_REMOTE
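save_config() only writes "stack_size" back out when it is non-zero, so the key never appears in config.ini unless a user adds it. Here is a small sketch of that round trip, assuming the configobj package that SickBeard.py relies on is importable as configobj; the file name 'example.ini' and the 2 MiB value are illustrative only.

```python
# Sketch of the optional setting's round trip via configobj; 'example.ini'
# and the 2 MiB value are illustrative assumptions.
from configobj import ConfigObj

cfg = ConfigObj('example.ini')
if 'General' not in cfg:
    cfg['General'] = {}
cfg['General']['stack_size'] = 2 * 1024 * 1024
cfg.write()

# Read it back the way the startup code does, falling back to None when the
# key is missing or not an integer.
try:
    stack_size = int(cfg['General']['stack_size'])
except (KeyError, ValueError, TypeError):
    stack_size = None
print(stack_size)
```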
sickbeard/network_timezones.py:

@@ -38,6 +38,9 @@ pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.I)
 
 network_dict = None
 network_dupes = None
+last_failure = {'datetime': datetime.datetime.fromordinal(1), 'count': 0}
+max_retry_time = 900
+max_retry_count = 3
 
 country_timezones = {
     'AU': 'Australia/Sydney', 'AR': 'America/Buenos_Aires', 'AUSTRALIA': 'Australia/Sydney', 'BR': 'America/Sao_Paulo',
@@ -49,6 +52,24 @@ country_timezones = {
     'TW': 'Asia/Taipei', 'UK': 'Europe/London', 'US': 'US/Eastern', 'ZA': 'Africa/Johannesburg'}
 
 
+def reset_last_retry():
+    global last_failure
+    last_failure = {'datetime': datetime.datetime.fromordinal(1), 'count': 0}
+
+
+def update_last_retry():
+    global last_failure
+    last_failure = {'datetime': datetime.datetime.now(), 'count': last_failure.get('count', 0) + 1}
+
+
+def should_try_loading():
+    global last_failure
+    if last_failure.get('count', 0) >= max_retry_count and \
+            (datetime.datetime.now() - last_failure.get('datetime', datetime.datetime.fromordinal(1))).seconds < max_retry_time:
+        return False
+    return True
+
+
 def tz_fallback(t):
     return t if isinstance(t, datetime.tzinfo) else tz.tzlocal()
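The three helpers above form a simple failure throttle: once max_retry_count (3) consecutive load failures have been recorded, further attempts are skipped until max_retry_time (900 seconds) has elapsed, and any success resets the counter. A self-contained sketch of the same pattern is below; fetch() is a stand-in for the helpers.getURL() call used in the real loaders and is wired to always fail so the back-off can be seen engaging.

```python
# Self-contained sketch of the throttle added above; fetch() stands in for
# helpers.getURL() and always fails here so the guard visibly kicks in.
import datetime

MAX_RETRY_TIME = 900   # seconds to wait after repeated failures (max_retry_time)
MAX_RETRY_COUNT = 3    # consecutive failures tolerated (max_retry_count)

last_failure = {'datetime': datetime.datetime.fromordinal(1), 'count': 0}


def should_try_loading():
    age = (datetime.datetime.now() - last_failure['datetime']).total_seconds()
    return not (last_failure['count'] >= MAX_RETRY_COUNT and age < MAX_RETRY_TIME)


def fetch():
    return None  # simulate github being unreachable


def load():
    global last_failure
    if not should_try_loading():
        print('skipped: backing off after %s failures' % last_failure['count'])
        return
    if fetch() is None:
        last_failure = {'datetime': datetime.datetime.now(),
                        'count': last_failure['count'] + 1}
        print('load failed (count=%s)' % last_failure['count'])
        return
    # success: reset the failure record
    last_failure = {'datetime': datetime.datetime.fromordinal(1), 'count': 0}


if __name__ == '__main__':
    for _ in range(5):
        load()   # attempts 4 and 5 are skipped by the guard
```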
@@ -99,6 +120,9 @@ def _remove_old_zoneinfo():
 
 # update the dateutil zoneinfo
 def _update_zoneinfo():
+    if not should_try_loading():
+        return
+
     global sb_timezone
     sb_timezone = get_tz()
 
@@ -107,10 +131,13 @@ def _update_zoneinfo():
 
     url_data = helpers.getURL(url_zv)
     if url_data is None:
+        update_last_retry()
         # When urlData is None, trouble connecting to github
         logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
                    logger.WARNING)
         return
+    else:
+        reset_last_retry()
 
     zonefilename = zoneinfo.ZONEFILENAME
     cur_zoneinfo = zonefilename
@@ -175,6 +202,9 @@ def _update_zoneinfo():
 
 # update the network timezone table
 def update_network_dict():
+    if not should_try_loading():
+        return
+
     _remove_old_zoneinfo()
     _update_zoneinfo()
     load_network_conversions()
@@ -186,10 +216,13 @@ def update_network_dict():
 
     url_data = helpers.getURL(url)
     if url_data is None:
+        update_last_retry()
         # When urlData is None, trouble connecting to github
         logger.log(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url, logger.WARNING)
-        load_network_dict()
+        load_network_dict(load=False)
         return
+    else:
+        reset_last_retry()
 
     try:
         for line in url_data.splitlines():
@@ -231,7 +264,7 @@ def update_network_dict():
 
 
 # load network timezones from db into dict
-def load_network_dict():
+def load_network_dict(load=True):
     global network_dict, network_dupes
 
     my_db = db.DBConnection('cache.db')
@@ -240,7 +273,7 @@ def load_network_dict():
     sql = 'SELECT %s AS network_name, timezone FROM [network_timezones] ' % sql_name + \
           'GROUP BY %s HAVING COUNT(*) = 1 ORDER BY %s;' % (sql_name, sql_name)
     cur_network_list = my_db.select(sql)
-    if cur_network_list is None or len(cur_network_list) < 1:
+    if load and (cur_network_list is None or len(cur_network_list) < 1):
         update_network_dict()
         cur_network_list = my_db.select(sql)
     network_dict = dict(cur_network_list)
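The signature and cache-guard changes above, together with load_network_dict(load=False) on the failure path of update_network_dict(), are the core of the infinite-loop fix: previously an empty [network_timezones] cache table made load_network_dict() call update_network_dict(), and a failed download made update_network_dict() call load_network_dict() again, so an unreachable github kept the two functions calling each other. The load flag breaks that cycle. A stripped-down sketch of the call structure follows; the names are reused from the diff, while read_cache() and the hard-coded failure are placeholders for the cache db and helpers.getURL().

```python
# Stripped-down sketch of the mutual recursion and how the new `load` flag
# breaks it; read_cache() and the forced failure stand in for the real parts.
network_dict = None


def read_cache():
    return {}  # simulate an empty [network_timezones] cache table


def update_network_dict():
    url_data = None  # simulate helpers.getURL() returning None (github unreachable)
    if url_data is None:
        # old code called load_network_dict() here; with an empty cache that
        # called update_network_dict() again, and so on forever
        load_network_dict(load=False)  # new code: read the cache, never re-trigger an update
        return
    # ... otherwise parse url_data and refresh the cache table here ...


def load_network_dict(load=True):
    global network_dict
    cached = read_cache()
    if load and not cached:
        update_network_dict()   # may repopulate the cache ...
        cached = read_cache()   # ... then read it again
    network_dict = dict(cached)


if __name__ == '__main__':
    load_network_dict()   # terminates now; without the `load` flag this recursed forever
    print(network_dict)   # -> {}
```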
@@ -360,6 +393,9 @@ def standardize_network(network, country):
 
 def load_network_conversions():
 
+    if not should_try_loading():
+        return
+
     conversions = []
 
     # network conversions are stored on github pages
@@ -367,9 +403,12 @@ def load_network_conversions():
 
     url_data = helpers.getURL(url)
     if url_data is None:
+        update_last_retry()
         # When urlData is None, trouble connecting to github
         logger.log(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url, logger.WARNING)
         return
+    else:
+        reset_last_retry()
 
     try:
         for line in url_data.splitlines():