Mirror of https://github.com/SickGear/SickGear.git
Merge pull request #965 from JackDandy/feature/ChangeNewznabCats
Change improve newznab autoselect categories.
Commit d5a581b107
10 changed files with 163 additions and 105 deletions
@@ -76,6 +76,8 @@
 * Change extend WEB PROPER release group check to ignore SD releases
 * Change increase performance by reducing TVDb API requests with a global token
 * Change make indexer lookup optional in NameParser, and deactivate during searches
+* Change improve newznab autoselect categories
+* Change add nzb.org BoxSD and BoxHD categories


 [develop changelog]
@@ -428,3 +428,50 @@ class Overview:
 countryList = {'Australia': 'AU',
                'Canada': 'CA',
                'USA': 'US'}
+
+
+class neededQualities:
+    def __init__(self, need_anime=False, need_sports=False, need_sd=False, need_hd=False, need_uhd=False,
+                 need_webdl=False, need_all_qualities=False, need_all_types=False, need_all=False):
+        self.need_anime = need_anime or need_all_types or need_all
+        self.need_sports = need_sports or need_all_types or need_all
+        self.need_sd = need_sd or need_all_qualities or need_all
+        self.need_hd = need_hd or need_all_qualities or need_all
+        self.need_uhd = need_uhd or need_all_qualities or need_all
+        self.need_webdl = need_webdl or need_all_qualities or need_all
+
+    max_sd = Quality.SDDVD
+    hd_qualities = [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY]
+    webdl_qualities = [Quality.SDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB]
+    max_hd = Quality.FULLHDBLURAY
+
+    @property
+    def all_needed(self):
+        return self.all_qualities_needed and self.all_types_needed
+
+    @property
+    def all_types_needed(self):
+        return self.need_anime and self.need_sports
+
+    @property
+    def all_qualities_needed(self):
+        return self.need_sd and self.need_hd and self.need_uhd and self.need_webdl
+
+    def check_needed_types(self, show):
+        if show.is_anime:
+            self.need_anime = True
+        if show.is_sports:
+            self.need_sports = True
+
+    def check_needed_qualities(self, wantedQualities):
+        if Quality.UNKNOWN in wantedQualities:
+            self.need_sd = self.need_hd = self.need_uhd = self.need_webdl = True
+        else:
+            if not self.need_sd and min(wantedQualities) <= neededQualities.max_sd:
+                self.need_sd = True
+            if not self.need_hd and any(i in neededQualities.hd_qualities for i in wantedQualities):
+                self.need_hd = True
+            if not self.need_webdl and any(i in neededQualities.webdl_qualities for i in wantedQualities):
+                self.need_webdl = True
+            if not self.need_uhd and max(wantedQualities) > neededQualities.max_hd:
+                self.need_uhd = True
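Illustrative usage of the new neededQualities helper (not part of the commit): a minimal sketch that assumes a SickGear checkout where sickbeard.common is importable; the wanted-quality lists are made-up examples.

# Sketch only: exercises the accumulation logic of the class added above.
from sickbeard.common import Quality, neededQualities

needed = neededQualities()
needed.check_needed_qualities([Quality.SDTV, Quality.SDDVD])
assert needed.need_sd and not needed.need_hd and not needed.need_uhd

# UNKNOWN in a wanted list switches every quality flag on at once.
needed.check_needed_qualities([Quality.UNKNOWN])
assert needed.all_qualities_needed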
@@ -65,7 +65,7 @@ class AnizbCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, this_provider)
         self.update_freq = 6

-    def _cache_data(self):
+    def _cache_data(self, **kwargs):
         return self.provider.cache_data()


@@ -384,7 +384,7 @@ class BTNCache(tvcache.TVCache):

         self.update_freq = 15

-    def _cache_data(self):
+    def _cache_data(self, **kwargs):

         return self.provider.cache_data(age=self._getLastUpdate().timetuple(), min_time=self.update_freq)

@@ -28,7 +28,7 @@ from math import ceil
 from sickbeard.sbdatetime import sbdatetime
 from . import generic
 from sickbeard import helpers, logger, scene_exceptions, tvcache, classes, db
-from sickbeard.common import Quality
+from sickbeard.common import neededQualities
 from sickbeard.exceptions import AuthException, MultipleShowObjectsException
 from sickbeard.indexers.indexer_config import *
 from io import BytesIO
@@ -37,7 +37,7 @@ from sickbeard.network_timezones import sb_timezone
 from sickbeard.helpers import tryInt

 try:
-    from lxml import etree
+    from lxml import etree
 except ImportError:
     try:
         import xml.etree.cElementTree as etree
@@ -56,14 +56,18 @@ class NewznabConstants:
     CAT_HEVC = -203
     CAT_ANIME = -204
     CAT_SPORT = -205
+    CAT_WEBDL = -206

     catSearchStrings = {r'^Anime$': CAT_ANIME,
                         r'^Sport$': CAT_SPORT,
                         r'^SD$': CAT_SD,
+                        r'^BoxSD$': CAT_SD,
                         r'^HD$': CAT_HD,
+                        r'^BoxHD$': CAT_HD,
                         r'^UHD$': CAT_UHD,
                         r'^4K$': CAT_UHD,
-                        r'^HEVC$': CAT_HEVC}
+                        # r'^HEVC$': CAT_HEVC,
+                        r'^WEB.?DL$': CAT_WEBDL}

     providerToIndexerMapping = {'tvdbid': INDEXER_TVDB,
                                 'rageid': INDEXER_TVRAGE,
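For context, a standalone sketch (not SickGear code) of how a category label reported in a provider's caps listing can be matched against the catSearchStrings patterns above; the map_category helper and the string labels are assumptions for the example.

# Sketch only: anchored patterns from catSearchStrings mapped to string labels;
# the real code maps matches to the CAT_* constants shown above.
import re

catSearchStrings = {r'^Anime$': 'CAT_ANIME', r'^Sport$': 'CAT_SPORT',
                    r'^SD$': 'CAT_SD', r'^BoxSD$': 'CAT_SD',
                    r'^HD$': 'CAT_HD', r'^BoxHD$': 'CAT_HD',
                    r'^UHD$': 'CAT_UHD', r'^4K$': 'CAT_UHD',
                    r'^WEB.?DL$': 'CAT_WEBDL'}

def map_category(name):
    # return the first label whose pattern matches the caps category name
    for pattern, label in catSearchStrings.items():
        if re.search(pattern, name, re.IGNORECASE):
            return label
    return None

print(map_category('BoxHD'))   # CAT_HD
print(map_category('WEB-DL'))  # CAT_WEBDL
print(map_category('Movies'))  # None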
@@ -96,6 +100,7 @@ class NewznabProvider(generic.NZBProvider):

         self.url = url
         self.key = key
+        self._exclude = set()
         self.cat_ids = cat_ids or ''
         self._cat_ids = None
         self.search_mode = search_mode or 'eponly'
@@ -132,6 +137,11 @@ class NewznabProvider(generic.NZBProvider):
         self.check_cap_update()
         return self._caps_cats

+    @property
+    def excludes(self):
+        self.check_cap_update()
+        return self._exclude
+
     @property
     def all_cats(self):
         self.check_cap_update()
@@ -188,6 +198,15 @@ class NewznabProvider(generic.NZBProvider):
             self._caps_need_apikey = {'need': True, 'date': datetime.date.today()}
         return xml_caps

+    def _check_excludes(self, cats):
+        if isinstance(cats, dict):
+            c = []
+            for v in cats.itervalues():
+                c.extend(v)
+            self._exclude = set(c)
+        else:
+            self._exclude = set(v for v in cats)
+
     def get_caps(self):
         caps = {}
         cats = {}
@@ -231,6 +250,7 @@ class NewznabProvider(generic.NZBProvider):
                 logger.log('Error parsing result for [%s]' % self.name, logger.DEBUG)

         if not caps and self._caps and not all_cats and self._caps_all_cats and not cats and self._caps_cats:
+            self._check_excludes(cats)
             return

         if self.enabled:
@@ -248,27 +268,26 @@ class NewznabProvider(generic.NZBProvider):
                 caps[INDEXER_TVRAGE] = 'rid'

         if NewznabConstants.CAT_HD not in cats or not cats.get(NewznabConstants.CAT_HD):
-            cats[NewznabConstants.CAT_HD] = ['5040']
+            cats[NewznabConstants.CAT_HD] = (['5040'], ['5040', '5090'])['nzbs_org' == self.get_id()]
         if NewznabConstants.CAT_SD not in cats or not cats.get(NewznabConstants.CAT_SD):
-            cats[NewznabConstants.CAT_SD] = ['5030']
+            cats[NewznabConstants.CAT_SD] = (['5030'], ['5030', '5070'])['nzbs_org' == self.get_id()]
         if NewznabConstants.CAT_ANIME not in cats or not cats.get(NewznabConstants.CAT_ANIME):
-            cats[NewznabConstants.CAT_ANIME] = (['5070'], ['6070,7040'])['nzbs_org' == self.get_id()]
+            cats[NewznabConstants.CAT_ANIME] = (['5070'], ['6070', '7040'])['nzbs_org' == self.get_id()]
         if NewznabConstants.CAT_SPORT not in cats or not cats.get(NewznabConstants.CAT_SPORT):
             cats[NewznabConstants.CAT_SPORT] = ['5060']

+        self._check_excludes(cats)
         self._caps = caps
         self._caps_cats = cats
         self._caps_all_cats = all_cats

-    @staticmethod
-    def clean_newznab_categories(cats):
+    def clean_newznab_categories(self, cats):
         """
-        Removes the anime (5070), sports (5060), HD (5040), UHD (5045), SD (5030) categories from the list
+        Removes automatically mapped categories from the list
         """
-        exclude = {'5070', '5060', '5040', '5045', '5030'}
         if isinstance(cats, list):
-            return [x for x in cats if x['id'] not in exclude]
-        return ','.join(set(cats.split(',')) - exclude)
+            return [x for x in cats if x['id'] not in self.excludes]
+        return ','.join(set(cats.split(',')) - self.excludes)

     def check_auth_from_data(self, data):

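A standalone sketch of the new exclusion behaviour (not SickGear code): the category ids and the excludes set are made-up stand-ins for what _check_excludes builds from a provider's caps response; sorted() is only there to make the printed output deterministic.

# Sketch only: the provider's auto-mapped categories are removed from whatever
# the user ticked, instead of a fixed '5030/5040/5045/5060/5070' set.
excludes = {'5030', '5040', '5045', '5060', '5070'}   # built by _check_excludes()
user_cats = '5030,5040,7020,8010'                     # ids selected in the UI

# comma-string form, as stored in provider settings
print(','.join(sorted(set(user_cats.split(',')) - excludes)))  # 7020,8010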
@@ -441,36 +460,26 @@ class NewznabProvider(generic.NZBProvider):

     def choose_search_mode(self, episodes, ep_obj, hits_per_page=100):
         if not hasattr(ep_obj, 'eps_aired_in_season'):
-            return None, True, True, True, hits_per_page
+            return None, neededQualities(need_all_qualities=True), hits_per_page
         searches = [e for e in episodes if (not ep_obj.show.is_scene and e.season == ep_obj.season) or
                     (ep_obj.show.is_scene and e.scene_season == ep_obj.scene_season)]
-        need_sd = need_hd = need_uhd = False
-        max_sd = Quality.SDDVD
-        hd_qualities = [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
-                        Quality.HDBLURAY, Quality.FULLHDBLURAY]
-        max_hd = Quality.FULLHDBLURAY
+
+        needed = neededQualities()
         for s in searches:
-            if need_sd and need_hd and need_uhd:
+            if needed.all_qualities_needed:
                 break
             if not s.show.is_anime and not s.show.is_sports:
-                if Quality.UNKNOWN in s.wantedQuality:
-                    need_sd = need_hd = need_uhd = True
-                else:
-                    if not need_sd and min(s.wantedQuality) <= max_sd:
-                        need_sd = True
-                    if not need_hd and any(i in hd_qualities for i in s.wantedQuality):
-                        need_hd = True
-                    if not need_uhd and max(s.wantedQuality) > max_hd:
-                        need_uhd = True
+                needed.check_needed_qualities(s.wantedQuality)
+
         per_ep, limit_per_ep = 0, 0
-        if need_sd and not need_hd:
+        if needed.need_sd and not needed.need_hd:
             per_ep, limit_per_ep = 10, 25
-        if need_hd:
-            if not need_sd:
+        if needed.need_hd:
+            if not needed.need_sd:
                 per_ep, limit_per_ep = 30, 90
             else:
                 per_ep, limit_per_ep = 40, 120
-        if need_uhd or (need_hd and not self.cats.get(NewznabConstants.CAT_UHD)):
+        if needed.need_uhd or (needed.need_hd and not self.cats.get(NewznabConstants.CAT_UHD)):
             per_ep += 4
             limit_per_ep += 10
         if ep_obj.show.is_anime or ep_obj.show.is_sports or ep_obj.show.air_by_date:
@@ -483,18 +492,10 @@ class NewznabProvider(generic.NZBProvider):
                                   ep_obj.eps_aired_in_season * limit_per_ep) / hits_per_page))
         season_search = rel < (len(searches) * 100 // hits_per_page)
         if not season_search:
-            need_sd = need_hd = need_uhd = False
+            needed = neededQualities()
             if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
-                if Quality.UNKNOWN in ep_obj.wantedQuality:
-                    need_sd = need_hd = need_uhd = True
-                else:
-                    if min(ep_obj.wantedQuality) <= max_sd:
-                        need_sd = True
-                    if any(i in hd_qualities for i in ep_obj.wantedQuality):
-                        need_hd = True
-                    if max(ep_obj.wantedQuality) > max_hd:
-                        need_uhd = True
-        return (season_search, need_sd, need_hd, need_uhd,
+                needed.check_needed_qualities(ep_obj.wantedQuality)
+        return (season_search, needed,
                 (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])

     def find_search_results(self, show, episodes, search_mode, manual_search=False, try_other_searches=False, **kwargs):
@@ -523,8 +524,8 @@ class NewznabProvider(generic.NZBProvider):
                 # found result, search next episode
                 continue

-            s_mode, need_sd, need_hd, need_uhd, max_items = self.choose_search_mode(
-                episodes, ep_obj, hits_per_page=self.limits)
+            s_mode, needed, max_items = self.choose_search_mode(episodes, ep_obj, hits_per_page=self.limits)
+            needed.check_needed_types(self.show)

             if 'sponly' == search_mode:
                 searched_scene_season = ep_obj.scene_season
@@ -541,9 +542,8 @@ class NewznabProvider(generic.NZBProvider):

             for cur_param in search_params:
                 items, n_space = self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes),
-                                                       need_anime=self.show.is_anime, need_sports=self.show.is_sports,
-                                                       need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd,
-                                                       max_items=max_items, try_all_searches=try_other_searches)
+                                                       needed=needed, max_items=max_items,
+                                                       try_all_searches=try_other_searches)
                 item_list += items
                 name_space.update(n_space)

@@ -568,8 +568,8 @@ class NewznabProvider(generic.NZBProvider):

         return parsed_date

-    def _search_provider(self, search_params, need_anime=True, need_sports=True, need_sd=True, need_hd=True,
-                         need_uhd=True, max_items=400, try_all_searches=False, **kwargs):
+    def _search_provider(self, search_params, needed=neededQualities(need_all=True), max_items=400,
+                         try_all_searches=False, **kwargs):

         api_key = self._check_auth()

@@ -591,6 +591,7 @@ class NewznabProvider(generic.NZBProvider):
         cat_hd = self.cats.get(NewznabConstants.CAT_HD, ['5040'])
         cat_sd = self.cats.get(NewznabConstants.CAT_SD, ['5030'])
         cat_uhd = self.cats.get(NewznabConstants.CAT_UHD)
+        cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL)

         for mode in search_params.keys():
             for i, params in enumerate(search_params[mode]):
@@ -604,17 +605,19 @@ class NewznabProvider(generic.NZBProvider):
                     logger.log('Show is missing either an id or search term for search')
                     continue

-                if need_anime:
+                if needed.need_anime:
                     cat.extend(cat_anime)
-                if need_sports:
+                if needed.need_sports:
                     cat.extend(cat_sport)

-                if need_hd:
+                if needed.need_hd:
                     cat.extend(cat_hd)
-                if need_sd:
+                if needed.need_sd:
                     cat.extend(cat_sd)
-                if need_uhd and cat_uhd is not None:
+                if needed.need_uhd and cat_uhd is not None:
                     cat.extend(cat_uhd)
+                if needed.need_webdl and cat_webdl is not None:
+                    cat.extend(cat_webdl)

                 if self.cat_ids or len(cat):
                     base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',') if self.cat_ids else []) + cat)))
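A standalone sketch of how the per-quality category lists end up in the request's cat parameter; the ids below are typical newznab TV categories used only as an example.

# Sketch only: the selected per-quality lists are merged with the user-configured
# cat_ids, de-duplicated and sorted, as in the base_params['cat'] line above.
cat_ids = '5030,5040'            # user-configured categories (example values)
cat = ['5040', '5090', '5070']   # lists appended above for the needed flags

print(','.join(sorted(set((cat_ids.split(',') if cat_ids else []) + cat))))
# -> 5030,5040,5070,5090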
@@ -816,7 +819,7 @@ class NewznabCache(tvcache.TVCache):
                 root = elem
         return root, ns

-    def updateCache(self, need_anime=True, need_sports=True, need_sd=True, need_hd=True, need_uhd=True, **kwargs):
+    def updateCache(self, needed=neededQualities(need_all=True), **kwargs):

         result = []

@@ -824,8 +827,7 @@ class NewznabCache(tvcache.TVCache):
         n_spaces = {}
         try:
             self._checkAuth()
-            (items, n_spaces) = self.provider.cache_data(need_anime=need_anime, need_sports=need_sports,
-                                                         need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd)
+            (items, n_spaces) = self.provider.cache_data(needed=needed)
         except (StandardError, Exception):
             items = None

@@ -29,6 +29,7 @@ from sickbeard import classes, logger, show_name_helpers, tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.exceptions import AuthException
 from sickbeard.rssfeeds import RSSFeeds
+from sickbeard.common import neededQualities


 class OmgwtfnzbsProvider(generic.NZBProvider):
@@ -51,6 +52,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         self.username, self.api_key, self.cookies = 3 * [None]
         self.cache = OmgwtfnzbsCache(self)

+    cat_sd = ['19']
+    cat_hd = ['20']
+    cat_uhd = ['30']
+
     def _check_auth_from_data(self, parsed_data, is_xml=True):

         if parsed_data is None:
@@ -121,16 +126,27 @@ class OmgwtfnzbsProvider(generic.NZBProvider):

         return result

-    def cache_data(self):
+    def _get_cats(self, needed):
+        cats = []
+        if needed.need_sd:
+            cats.extend(OmgwtfnzbsProvider.cat_sd)
+        if needed.need_hd:
+            cats.extend(OmgwtfnzbsProvider.cat_hd)
+        if needed.need_uhd:
+            cats.extend(OmgwtfnzbsProvider.cat_uhd)
+        return cats
+
+    def cache_data(self, needed=neededQualities(need_all=True), **kwargs):

         api_key = self._init_api()
         if False is api_key:
-            return self.search_html()
+            return self.search_html(needed=needed, **kwargs)
+        cats = self._get_cats(needed=needed)
         if None is not api_key:
             params = {'user': self.username,
                       'api': api_key,
                       'eng': 1,
-                      'catid': '19,20'}  # SD,HD
+                      'catid': ','.join(cats)}  # SD,HD

             rss_url = self.urls['cache'] % urllib.urlencode(params)

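A standalone sketch of the catid selection introduced above; get_cats is a stand-in for OmgwtfnzbsProvider._get_cats and the boolean flags stand in for a neededQualities instance.

# Sketch only: ids 19/20/30 are the SD/HD/UHD categories hard-coded on the provider.
cat_sd, cat_hd, cat_uhd = ['19'], ['20'], ['30']

def get_cats(need_sd, need_hd, need_uhd):
    cats = []
    if need_sd:
        cats.extend(cat_sd)
    if need_hd:
        cats.extend(cat_hd)
    if need_uhd:
        cats.extend(cat_uhd)
    return cats

print(','.join(get_cats(need_sd=False, need_hd=True, need_uhd=True)))  # 20,30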
@@ -141,18 +157,20 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 return data.entries
         return []

-    def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0, **kwargs):
+    def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0,
+                         needed=neededQualities(need_all=True), **kwargs):

         api_key = self._init_api()
         if False is api_key:
-            return self.search_html(search, search_mode)
+            return self.search_html(search, search_mode, needed=needed, **kwargs)
         results = []
+        cats = self._get_cats(needed=needed)
         if None is not api_key:
             params = {'user': self.username,
                       'api': api_key,
                       'eng': 1,
                       'nukes': 1,
-                      'catid': '19,20',  # SD,HD
+                      'catid': ','.join(cats),  # SD,HD
                       'retention': (sickbeard.USENET_RETENTION, retention)[retention or not sickbeard.USENET_RETENTION],
                       'search': search}

@@ -168,14 +186,16 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                     results.append(item)
         return results

-    def search_html(self, search='', search_mode=''):
+    def search_html(self, search='', search_mode='', needed=neededQualities(need_all=True), **kwargs):

         results = []
         if None is self.cookies:
             return results

+        cats = self._get_cats(needed=needed)
+
         rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': r'send\?', 'nuked': r'\bnuked',
-                                                             'cat': 'cat=(?:19|20)'}.items())
+                                                             'cat': 'cat=(?:%s)' % '|'.join(cats)}.items())
         mode = ('search', 'cache')['' == search]
         search_url = self.urls[mode + '_html'] % search
         html = self.get_url(search_url)
@@ -268,9 +288,9 @@ class OmgwtfnzbsCache(tvcache.TVCache):

         self.update_freq = 20

-    def _cache_data(self):
+    def _cache_data(self, **kwargs):

-        return self.provider.cache_data()
+        return self.provider.cache_data(**kwargs)


 provider = OmgwtfnzbsProvider()
@@ -99,7 +99,7 @@ class TokyoToshokanCache(tvcache.TVCache):

         self.update_freq = 15

-    def _cache_data(self):
+    def _cache_data(self, **kwargs):

         mode = 'Cache'
         search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'}))
@@ -191,39 +191,27 @@ class RecentSearchQueueItem(generic_queue.QueueItem):

         show_list = sickbeard.showList
         from_date = datetime.date.fromordinal(1)
-        need_anime = need_sports = need_sd = need_hd = need_uhd = False
-        max_sd = Quality.SDDVD
-        hd_qualities = [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
-                        Quality.HDBLURAY, Quality.FULLHDBLURAY]
-        max_hd = Quality.FULLHDBLURAY
+        needed = common.neededQualities()
        for curShow in show_list:
             if curShow.paused:
                 continue

             wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)
-            if wanted_eps:
-                if not need_anime and curShow.is_anime:
-                    need_anime = True
-                if not need_sports and curShow.is_sports:
-                    need_sports = True
-                if not need_sd or not need_hd or not need_uhd:
-                    for w in wanted_eps:
-                        if need_sd and need_hd and need_uhd:
-                            break
-                        if not w.show.is_anime and not w.show.is_sports:
-                            if Quality.UNKNOWN in w.wantedQuality:
-                                need_sd = need_hd = need_uhd = True
-                            else:
-                                if not need_sd and max_sd >= min(w.wantedQuality):
-                                    need_sd = True
-                                if not need_hd and any(i in hd_qualities for i in w.wantedQuality):
-                                    need_hd = True
-                                if not need_uhd and max_hd < max(w.wantedQuality):
-                                    need_uhd = True
-            self.episodes.extend(wanted_eps)

-        self.update_providers(need_anime=need_anime, need_sports=need_sports,
-                              need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd)
+            if wanted_eps:
+                if not needed.all_needed:
+                    if not needed.all_types_needed:
+                        needed.check_needed_types(curShow)
+                    if not needed.all_qualities_needed:
+                        for w in wanted_eps:
+                            if needed.all_qualities_needed:
+                                break
+                            if not w.show.is_anime and not w.show.is_sports:
+                                needed.check_needed_qualities(w.wantedQuality)
+
+                self.episodes.extend(wanted_eps)
+
+        self.update_providers(needed=needed)

         if not self.episodes:
             logger.log(u'No search of cache for episodes required')
@@ -319,7 +307,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
             logger.log(u'Found new episodes marked wanted')

     @staticmethod
-    def update_providers(need_anime=True, need_sports=True, need_sd=True, need_hd=True, need_uhd=True):
+    def update_providers(needed=common.neededQualities(need_all=True)):
         orig_thread_name = threading.currentThread().name
         threads = []

@@ -333,8 +321,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):

                 # spawn a thread for each provider to save time waiting for slow response providers
                 threads.append(threading.Thread(target=cur_provider.cache.updateCache,
-                                                kwargs={'need_anime': need_anime, 'need_sports': need_sports,
-                                                        'need_sd': need_sd, 'need_hd': need_hd, 'need_uhd': need_uhd},
+                                                kwargs={'needed': needed},
                                                 name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
                 # start the thread we just created
                 threads[-1].start()
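A generic, standalone sketch of the thread-per-provider pattern used above; update() and the provider names are stand-ins, only the kwargs={'needed': ...} plumbing mirrors the change.

# Sketch only: one thread per provider, the single needed object passed via kwargs.
import threading

def update(needed=None):
    print('updating with %r' % (needed,))

providers = ['providerA', 'providerB']
needed = {'need_hd': True}  # stands in for common.neededQualities(need_all=True)

threads = []
for cur_provider in providers:
    threads.append(threading.Thread(target=update, kwargs={'needed': needed},
                                    name='RECENT-SEARCH :: [%s]' % cur_provider))
    threads[-1].start()
for t in threads:
    t.join()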
@@ -66,7 +66,7 @@ class TVCache:
         # override this in the provider if recent search has a different data layout to backlog searches
         return self.provider._title_and_url(item)

-    def _cache_data(self):
+    def _cache_data(self, **kwargs):
         data = None
         return data

@@ -84,7 +84,7 @@ class TVCache:
             return []

         if self.should_update():
-            data = self._cache_data()
+            data = self._cache_data(**kwargs)

             # clear cache
             if data:
@@ -5194,7 +5194,7 @@ class ConfigProviders(Config):

         if name in [n.name for n in sickbeard.newznabProviderList if n.url == url]:
             provider = [n for n in sickbeard.newznabProviderList if n.name == name][0]
-            tv_categories = newznab.NewznabProvider.clean_newznab_categories(provider.all_cats)
+            tv_categories = provider.clean_newznab_categories(provider.all_cats)
             state = provider.is_enabled()
         else:
             providers = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
@@ -5202,7 +5202,7 @@ class ConfigProviders(Config):
             if None is not key and starify(key, True):
                 temp_provider.key = providers[temp_provider.get_id()].key

-            tv_categories = newznab.NewznabProvider.clean_newznab_categories(temp_provider.all_cats)
+            tv_categories = temp_provider.clean_newznab_categories(temp_provider.all_cats)
             state = False

         return json.dumps({'success': True, 'tv_categories': tv_categories, 'state': state, 'error': ''})