2014-03-10 05:18:05 +00:00
|
|
|
# Author: Nic Wolfe <nic@wolfeden.ca>
|
|
|
|
# URL: http://code.google.com/p/sickbeard/
|
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# This file is part of SickGear.
|
2014-03-10 05:18:05 +00:00
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# SickGear is free software: you can redistribute it and/or modify
|
2014-03-10 05:18:05 +00:00
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
2014-11-12 16:43:14 +00:00
|
|
|
# SickGear is distributed in the hope that it will be useful,
|
2014-03-10 05:18:05 +00:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2014-11-12 16:43:14 +00:00
|
|
|
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
import re
|
2014-06-01 06:44:44 +00:00
|
|
|
import time
|
2014-06-30 13:18:02 +00:00
|
|
|
import threading
|
2014-07-19 11:52:55 +00:00
|
|
|
import datetime
|
2014-03-25 05:57:24 +00:00
|
|
|
import sickbeard
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2015-12-05 04:09:23 +00:00
|
|
|
from collections import defaultdict
|
2014-05-26 06:29:22 +00:00
|
|
|
from lib import adba
|
2014-03-10 05:18:05 +00:00
|
|
|
from sickbeard import helpers
|
|
|
|
from sickbeard import name_cache
|
|
|
|
from sickbeard import logger
|
|
|
|
from sickbeard import db
|
2015-12-05 04:09:23 +00:00
|
|
|
from sickbeard.classes import OrderedDefaultdict
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2014-07-15 04:10:57 +00:00
|
|
|
# Per-source staging dicts: populated by retrieve_exceptions() and the
# _anidb/_xem fetcher helpers, then cleared after being written to the DB.
exception_dict = {}
anidb_exception_dict = {}
xem_exception_dict = {}
# indexer id -> list of show ids that have a XEM scene mapping
# (filled by get_xem_ids()).
xem_ids_list = defaultdict(list)

# In-memory caches over the scene_exceptions table:
# exceptionsCache: indexer_id -> season -> list of exception names
# exceptionsSeasonCache: indexer_id -> list of seasons with exceptions
exceptionsCache = {}
exceptionsSeasonCache = {}

# NOTE(review): not acquired anywhere in this module — presumably held by
# callers around refresh/cache operations; confirm against call sites.
exceptionLock = threading.Lock()
|
2014-06-30 13:18:02 +00:00
|
|
|
|
2015-07-25 09:19:46 +00:00
|
|
|
|
2014-06-30 06:59:27 +00:00
|
|
|
def shouldRefresh(list):
    """Return True when the named exception list is due for a refresh.

    A list is stale once its recorded refresh timestamp is more than one
    day old, or when no refresh has ever been recorded for it.
    """
    refresh_interval = 86400  # one day, in seconds

    rows = db.DBConnection().select(
        'SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?', [list])
    if not rows:
        # never refreshed before
        return True

    stale_after = int(rows[0]['last_refreshed']) + refresh_interval
    now = int(time.mktime(datetime.datetime.today().timetuple()))
    return now > stale_after
|
|
|
|
|
2015-07-25 09:19:46 +00:00
|
|
|
|
2014-06-30 06:59:27 +00:00
|
|
|
def setLastRefresh(list):
    """Record the current time as the last refresh moment for *list*."""
    now = int(time.mktime(datetime.datetime.today().timetuple()))
    db.DBConnection().upsert('scene_exceptions_refresh',
                             {'last_refreshed': now},
                             {'list': list})
|
|
|
|
|
2014-06-30 06:59:27 +00:00
|
|
|
|
2014-05-26 06:29:22 +00:00
|
|
|
def get_scene_exceptions(indexer_id, season=-1):
    """
    Given a indexer_id, return a list of all the scene exceptions.

    Results are cached per (indexer_id, season); the database is only hit
    on a cache miss.  For season 1 the generic (season -1) exceptions are
    appended to the result.
    """
    global exceptionsCache
    exceptions_list = []

    if indexer_id not in exceptionsCache or season not in exceptionsCache[indexer_id]:
        my_db = db.DBConnection()
        exceptions = my_db.select('SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?',
                                  [indexer_id, season])
        if exceptions:
            # de-duplicate names for this season
            exceptions_list = list(set([cur_exception['show_name'] for cur_exception in exceptions]))

        if indexer_id not in exceptionsCache:
            exceptionsCache[indexer_id] = {}
        exceptionsCache[indexer_id][season] = exceptions_list
    else:
        exceptions_list = exceptionsCache[indexer_id][season]

    if 1 == season:  # if we where looking for season 1 we can add generic names
        # Concatenate into a NEW list: the previous in-place '+=' extended the
        # cached season-1 list object itself, so the cache entry accumulated
        # duplicate season -1 generics on every call.
        exceptions_list = exceptions_list + get_scene_exceptions(indexer_id, season=-1)

    return exceptions_list
|
2014-06-30 06:59:27 +00:00
|
|
|
|
2014-07-15 02:00:53 +00:00
|
|
|
|
2014-05-26 06:29:22 +00:00
|
|
|
def get_all_scene_exceptions(indexer_id):
    """Return every scene exception for a show, grouped by season.

    The result maps season number -> list of exception names, with seasons
    in ascending order (ORDER BY in the query, preserved by the ordered dict).
    """
    grouped = OrderedDefaultdict(list)

    rows = db.DBConnection().select(
        'SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ? ORDER BY season', [indexer_id])
    for row in rows or []:
        grouped[row['season']].append(row['show_name'])

    return grouped
|
2014-05-26 06:29:22 +00:00
|
|
|
|
2014-06-30 06:59:27 +00:00
|
|
|
|
2014-05-26 06:29:22 +00:00
|
|
|
def get_scene_seasons(indexer_id):
    """
    Return a list of season numbers that have scene exceptions.

    Results are cached per indexer_id in exceptionsSeasonCache; the database
    is only queried on a cache miss.
    """
    global exceptionsSeasonCache
    exception_season_list = []

    if indexer_id not in exceptionsSeasonCache:
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?',
                                   [indexer_id])
        if sql_results:
            exception_season_list = list(set([int(x['season']) for x in sql_results]))

        # Cache the (possibly empty) result.  The previous redundant
        # re-check of the cache key and the throwaway '{}' assignment that
        # was immediately overwritten were dead code and have been removed.
        exceptionsSeasonCache[indexer_id] = exception_season_list
    else:
        exception_season_list = exceptionsSeasonCache[indexer_id]

    return exception_season_list
|
2014-05-26 18:07:10 +00:00
|
|
|
|
2014-07-15 02:00:53 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
def get_scene_exception_by_name(show_name):
    """Return the first match from the multi-result name lookup."""
    matches = get_scene_exception_by_name_multiple(show_name)
    return matches[0]
|
|
|
|
|
2014-07-15 02:00:53 +00:00
|
|
|
|
2014-05-26 06:29:22 +00:00
|
|
|
def get_scene_exception_by_name_multiple(show_name):
    """
    Given a show name, return the indexerid of the exception, None if no exception
    is present.
    """
    try:
        exception_result = name_cache.nameCache[helpers.full_sanitizeSceneName(show_name)]
        return [exception_result]
    except Exception:
        # Best-effort lookup: a missing cache entry (KeyError) or any failure
        # while sanitising the name just means no exception is known.  The
        # previous bare 'except:' would also have swallowed SystemExit and
        # KeyboardInterrupt.
        return [[None, None]]
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2014-05-26 18:07:10 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them into the
    scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    global exception_dict, anidb_exception_dict, xem_exception_dict

    # exceptions are stored on github pages
    for indexer in sickbeard.indexerApi().indexers:
        if shouldRefresh(sickbeard.indexerApi(indexer).name):
            logger.log(u'Checking for scene exception updates for %s' % sickbeard.indexerApi(indexer).name)

            url = sickbeard.indexerApi(indexer).config['scene_url']

            url_data = helpers.getURL(url)
            if None is url_data:
                # When None is urlData, trouble connecting to github
                logger.log(u'Check scene exceptions update failed. Unable to get URL: %s' % url, logger.ERROR)
                continue

            else:
                # only mark the list refreshed when the fetch succeeded
                setLastRefresh(sickbeard.indexerApi(indexer).name)

                # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
                for cur_line in url_data.splitlines():
                    # NOTE: bytes .decode() — this path is Python 2 specific
                    cur_line = cur_line.decode('utf-8')
                    indexer_id, sep, aliases = cur_line.partition(':')  # @UnusedVariable

                    if not aliases:
                        continue

                    indexer_id = int(indexer_id)

                    # regex out the list of shows, taking \' into account
                    # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                    # each alias is stored as {name: season}; -1 means "generic/any season"
                    alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                    exception_dict[indexer_id] = alias_list
                    del alias_list
                del url_data

    # XEM scene exceptions
    _xem_exceptions_fetcher()
    for xem_ex in xem_exception_dict:
        if xem_ex in exception_dict:
            exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exception_dict[xem_ex]
        else:
            exception_dict[xem_ex] = xem_exception_dict[xem_ex]

    # AniDB scene exceptions
    _anidb_exceptions_fetcher()
    for anidb_ex in anidb_exception_dict:
        if anidb_ex in exception_dict:
            exception_dict[anidb_ex] = exception_dict[anidb_ex] + anidb_exception_dict[anidb_ex]
        else:
            exception_dict[anidb_ex] = anidb_exception_dict[anidb_ex]

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    my_db = db.DBConnection()
    cl = []
    for cur_indexer_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [x['show_name'] for x in
                               my_db.select('SELECT * FROM scene_exceptions WHERE indexer_id = ?', [cur_indexer_id])]

        # NOTE(review): unreachable — cur_indexer_id comes from iterating
        # exception_dict itself, so this membership test is always True
        if cur_indexer_id not in exception_dict:
            continue

        for cur_exception_dict in exception_dict[cur_indexer_id]:
            # single-pair dict: (show name, season); .items()[0] is Python 2 only
            cur_exception, cur_season = cur_exception_dict.items()[0]

            # if this exception isn't already in the DB then add it
            if cur_exception not in existing_exceptions:

                if not isinstance(cur_exception, unicode):
                    cur_exception = unicode(cur_exception, 'utf-8', 'replace')

                cl.append(['INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)',
                           [cur_indexer_id, cur_exception, cur_season]])
                changed_exceptions = True

    # batch all inserts into one transaction
    my_db.mass_action(cl)

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        logger.log(u'Updated scene exceptions')
    else:
        logger.log(u'No scene exceptions update needed')

    # cleanup
    exception_dict.clear()
    anidb_exception_dict.clear()
    xem_exception_dict.clear()
|
2014-07-15 02:00:53 +00:00
|
|
|
|
2015-07-25 09:19:46 +00:00
|
|
|
|
2015-12-05 04:09:23 +00:00
|
|
|
def update_scene_exceptions(indexer_id, scene_exceptions):
    """
    Replace all scene exceptions for one show with the supplied list.

    Each entry of scene_exceptions is a 'season|name' string.  The per-show
    cache entry is rebuilt so the change becomes visible immediately.
    """
    global exceptionsCache
    my_db = db.DBConnection()
    my_db.action('DELETE FROM scene_exceptions WHERE indexer_id=?', [indexer_id])

    # drop this show's cached exceptions before repopulating them below
    exceptionsCache[indexer_id] = defaultdict(list)

    logger.log(u'Updating scene exceptions', logger.MESSAGE)
    for cur_entry in scene_exceptions:
        cur_season, cur_name = cur_entry.split('|', 1)

        exceptionsCache[indexer_id][cur_season].append(cur_name)

        if not isinstance(cur_name, unicode):
            cur_name = unicode(cur_name, 'utf-8', 'replace')

        my_db.action('INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)',
                     [indexer_id, cur_name, cur_season])
|
2015-07-25 09:19:46 +00:00
|
|
|
|
2014-05-26 06:29:22 +00:00
|
|
|
|
2014-07-15 02:00:53 +00:00
|
|
|
def _anidb_exceptions_fetcher():
    """
    Refresh anidb_exception_dict with corrected titles for anime shows.

    For every anime show on indexer 1, asks AniDB for its auto-corrected
    name and records it as a generic (season -1) exception when it differs
    from the local show name.  Returns the module-level dict.
    """
    global anidb_exception_dict

    if shouldRefresh('anidb'):
        logger.log(u'Checking for scene exception updates for AniDB')
        for show in sickbeard.showList:
            if show.is_anime and 1 == show.indexer:
                try:
                    anime = adba.Anime(None, name=show.name, tvdbid=show.indexerid, autoCorrectName=True)
                except Exception:
                    # best effort: skip shows AniDB cannot resolve.  The
                    # previous bare 'except:' also trapped SystemExit and
                    # KeyboardInterrupt.
                    continue
                else:
                    if anime.name and anime.name != show.name:
                        anidb_exception_dict[show.indexerid] = [{anime.name: -1}]

        setLastRefresh('anidb')
    return anidb_exception_dict
|
2014-05-26 06:29:22 +00:00
|
|
|
|
2014-05-26 18:07:10 +00:00
|
|
|
|
2014-06-30 11:09:55 +00:00
|
|
|
def _xem_exceptions_fetcher():
    """
    Refresh xem_exception_dict with alternate names from thexem.de.

    Uses the full 'xem' list when any unpaused anime show exists, otherwise
    the US-language-only 'xem_us' list.  Returns the module-level dict.
    """
    global xem_exception_dict

    xem_list = 'xem_us'
    for show in sickbeard.showList:
        if show.is_anime and not show.paused:
            xem_list = 'xem'
            break

    if shouldRefresh(xem_list):
        for indexer in [i for i in sickbeard.indexerApi().indexers if 'xem_origin' in sickbeard.indexerApi(i).config]:
            logger.log(u'Checking for XEM scene exception updates for %s' % sickbeard.indexerApi(indexer).name)

            url = 'http://thexem.de/map/allNames?origin=%s%s&seasonNumbers=1'\
                  % (sickbeard.indexerApi(indexer).config['xem_origin'], ('&language=us', '')['xem' == xem_list])

            parsed_json = helpers.getURL(url, json=True, timeout=90)
            if not parsed_json:
                logger.log(u'Check scene exceptions update failed for %s, Unable to get URL: %s'
                           % (sickbeard.indexerApi(indexer).name, url), logger.ERROR)
                continue

            if 'failure' == parsed_json['result']:
                continue

            for indexerid, names in parsed_json['data'].items():
                try:
                    xem_exception_dict[int(indexerid)] = names
                except (ValueError, TypeError):
                    # skip entries whose key is not a usable integer id; only
                    # int() can raise here, so the old bare 'except:' was
                    # needlessly broad
                    continue

        setLastRefresh(xem_list)

    return xem_exception_dict
|
|
|
|
|
2014-05-26 06:29:22 +00:00
|
|
|
|
2015-07-25 09:19:46 +00:00
|
|
|
def _xem_get_ids(indexer_name, xem_origin):
    """
    Fetch the show ids that have a XEM scene mapping for one origin.

    :param indexer_name: indexer display name, used only for log messages
    :param xem_origin: value for thexem.de 'origin' query parameter
    :return: list of unique integer ids (empty on any failure)
    """
    xem_ids = []

    url = 'http://thexem.de/map/havemap?origin=%s' % xem_origin

    task = 'Fetching show ids with%s xem scene mapping%s for origin'
    logger.log(u'%s %s' % (task % ('', 's'), indexer_name))
    parsed_json = helpers.getURL(url, json=True, timeout=90)
    if not parsed_json:
        logger.log(u'Failed %s %s, Unable to get URL: %s'
                   % (task.lower() % ('', 's'), indexer_name, url), logger.ERROR)
    else:
        if 'result' in parsed_json and 'success' == parsed_json['result'] and 'data' in parsed_json:
            try:
                for indexerid in parsed_json['data']:
                    xem_id = helpers.tryInt(indexerid)
                    if xem_id and xem_id not in xem_ids:
                        xem_ids.append(xem_id)
            except Exception:
                # best effort: a malformed payload yields whatever ids were
                # collected so far.  The previous bare 'except:' also trapped
                # SystemExit and KeyboardInterrupt.
                pass
        if 0 == len(xem_ids):
            logger.log(u'Failed %s %s, no data items parsed from URL: %s'
                       % (task.lower() % ('', 's'), indexer_name, url), logger.WARNING)

    logger.log(u'Finished %s %s' % (task.lower() % (' %s' % len(xem_ids), helpers.maybe_plural(len(xem_ids))),
                                    indexer_name))
    return xem_ids
|
|
|
|
|
|
|
|
|
|
|
|
def get_xem_ids():
    """Populate xem_ids_list with XEM-mapped show ids per supported indexer."""
    global xem_ids_list

    supported = sickbeard.indexerApi().xem_supported_indexers
    for iid, name in supported.iteritems():
        origin = sickbeard.indexerApi(iid).config['xem_origin']
        fetched = _xem_get_ids(name, origin)
        if fetched:
            xem_ids_list[iid] = fetched
|
2016-08-21 20:31:18 +00:00
|
|
|
|
|
|
|
|
|
|
|
def has_abs_episodes(ep_obj=None, name=None):
    """Return True when the show name begins with a known absolute-numbered title.

    *name* takes precedence; otherwise the name is read from ep_obj.show.
    Matching is case-insensitive prefix comparison.
    """
    candidate = (name or ep_obj.show.name or '').lower()
    for known in ('The Eighties', 'The Making of the Mob', 'The Night Of', 'Roots 2016', 'Trepalium'):
        if candidate.startswith(known.lower()):
            return True
    return False
|