Merge branch 'master' into develop
This commit is contained in:
commit 67f0b660ba

21 changed files with 586 additions and 144 deletions
12 CHANGES.md

@@ -1,4 +1,14 @@
+### 0.13.7 (2017-12-27 03:00:00 UTC)
+
+* Add log message for not found on indexer when adding a new show
+* Fix upgrade once ARCHIVED setting by postProcessor
+* Fix determination of is_first_best_match
+* Fix BTScene and Lime
+* Add ETTV torrent provider
+* Add PotUK torrent provider
+
+
 ### 0.13.6 (2017-12-13 01:50:00 UTC)

 * Change improve multi episode release search
 * Change improve usage of the optional regex library
BIN gui/slick/images/providers/ettv.png (new file, binary not shown, 268 B)
BIN gui/slick/images/providers/potuk.png (new file, binary not shown, 237 B)
@@ -340,7 +340,7 @@
 #if $show.paused
     <span class="label label-paused">Paused</span>
 #end if
-#if ($anyQualities + $bestQualities) and int($show.archive_firstmatch)
+#if ($anyQualities + $bestQualities) and int($show.upgrade_once)
     <span class="label">Upgrade once</span>
 #end if
 #if $show.exceptions
@@ -152,10 +152,10 @@
 #if $anyQualities + $bestQualities
 <div class="field-pair show-if-quality-custom" style="display:none">
-    <label for="archive_firstmatch">
+    <label for="upgrade-once">
         <span class="component-title">Upgrade once</span>
         <span class="component-desc">
-            <input type="checkbox" name="archive_firstmatch" id="archive_firstmatch"#echo ('', $html_checked)[$show.archive_firstmatch]#>
+            <input type="checkbox" name="upgrade_once" id="upgrade-once"#echo ('', $html_checked)[$show.upgrade_once]#>
             <p>stop upgrading after matching the first best <em>Upgrade to</em> quality</p>
         </span>
     </label>
@@ -88,11 +88,11 @@
 #set $isSelected = ' selected="selected"'
 #set $isEnabled = $isSelected
 #set $isDisabled = $isSelected
-#if $archive_firstmatch_value##set $isDisabled = ''##else##set $isEnabled = ''##end if#
+#if $upgrade_once_value##set $isDisabled = ''##else##set $isEnabled = ''##end if#
 <div class="optionWrapper clearfix">
     <span class="selectTitle">Upgrade once</span>
     <div class="selectChoices">
-        <select id="edit_archive_firstmatch" name="archive_firstmatch" class="form-control form-control-inline input-sm">
+        <select id="edit_upgrade-once" name="upgrade_once" class="form-control form-control-inline input-sm">
            <option value="keep">&lt; keep &gt;</option>
            <option value="enable"${isEnabled}>enable</option>
            <option value="disable"${isDisabled}>disable</option>
@@ -507,6 +507,8 @@ IGNORE_WORDS = 'core2hd, hevc, MrLss, reenc, x265, danish, deutsch, dutch, flemi
                'german, italian, nordic, norwegian, portuguese, spanish, swedish, turkish'
 REQUIRE_WORDS = ''

+WANTEDLIST_CACHE = None
+
 CALENDAR_UNPROTECTED = False

 TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'

@@ -555,7 +557,7 @@ def initialize(console_logging=True):
     global __INITIALIZED__, showList, providerList, newznabProviderList, torrentRssProviderList, \
         WEB_HOST, WEB_ROOT, ACTUAL_CACHE_DIR, CACHE_DIR, ZONEINFO_DIR, ADD_SHOWS_WO_DIR, CREATE_MISSING_SHOW_DIRS, \
         RECENTSEARCH_STARTUP, NAMING_FORCE_FOLDERS, SOCKET_TIMEOUT, DEBUG, INDEXER_DEFAULT, CONFIG_FILE, \
-        REMOVE_FILENAME_CHARS, IMPORT_DEFAULT_CHECKED_SHOWS
+        REMOVE_FILENAME_CHARS, IMPORT_DEFAULT_CHECKED_SHOWS, WANTEDLIST_CACHE
     # Schedulers
     # global traktCheckerScheduler
     global recentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \

@@ -677,6 +679,8 @@ def initialize(console_logging=True):

     update_config = False

+    WANTEDLIST_CACHE = common.wantedQualities()
+
     # wanted branch
     BRANCH = check_setting_str(CFG, 'General', 'branch', '')
@@ -377,6 +377,62 @@ class Quality:
     FAILED = None


+class wantedQualities(dict):
+    wantedlist = 1
+    bothlists = 2
+    upgradelist = 3
+
+    def __init__(self, **kwargs):
+        super(wantedQualities, self).__init__(**kwargs)
+
+    def _generate_wantedlist(self, qualities):
+        initial_qualities, upgrade_qualities = Quality.splitQuality(qualities)
+        max_initial_quality = max(initial_qualities or [Quality.NONE])
+        self[qualities] = {0: {self.bothlists: False, self.wantedlist: initial_qualities, self.upgradelist: False}}
+        for q in Quality.qualityStrings:
+            if 0 >= q:
+                continue
+            if q not in upgrade_qualities and q in initial_qualities:
+                # quality is only in initial_qualities
+                self[qualities][q] = {self.bothlists: False,
+                                      self.wantedlist: [i for i in upgrade_qualities if q < i], self.upgradelist: False}
+            elif q in upgrade_qualities and q in initial_qualities:
+                # quality is in initial_qualities and upgrade_qualities
+                self[qualities][q] = {self.bothlists: True,
+                                      self.wantedlist: [i for i in upgrade_qualities if q < i], self.upgradelist: True}
+            elif q in upgrade_qualities:
+                # quality is only in upgrade_qualities
+                self[qualities][q] = {self.bothlists: False,
+                                      self.wantedlist: [i for i in upgrade_qualities if q < i], self.upgradelist: True}
+            else:
+                # quality is not in any selected quality for the show
+                only_upgrade = q >= max_initial_quality
+                self[qualities][q] = {self.bothlists: False,
+                                      self.wantedlist:
+                                          [i for i in (initial_qualities, upgrade_qualities)[only_upgrade] if q < i],
+                                      self.upgradelist: only_upgrade}
+
+    def __getitem__(self, k):
+        if k not in self:
+            self._generate_wantedlist(k)
+        return super(wantedQualities, self).__getitem__(k)
+
+    def get(self, k, *args, **kwargs):
+        if k not in self:
+            self._generate_wantedlist(k)
+        return super(wantedQualities, self).get(k, *args, **kwargs)
+
+    def get_wantedlist(self, qualities, upgradeonce, quality, status, unaired=False, manual=False):
+        if not manual and status in [ARCHIVED, IGNORED, SKIPPED] + ([UNAIRED], [])[unaired]:
+            return []
+        if upgradeonce:
+            if status == SNATCHED_BEST or \
+                    (not self[qualities][quality][self.bothlists] and self[qualities][quality][self.upgradelist] and
+                     status in (DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER)):
+                return []
+        return self[qualities][quality][self.wantedlist]
+
+
 Quality.SNATCHED = [Quality.compositeStatus(SNATCHED, x) for x in Quality.qualityStrings.keys()]
 Quality.SNATCHED_PROPER = [Quality.compositeStatus(SNATCHED_PROPER, x) for x in Quality.qualityStrings.keys()]
 Quality.SNATCHED_BEST = [Quality.compositeStatus(SNATCHED_BEST, x) for x in Quality.qualityStrings.keys()]
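Note: the new wantedQualities cache is keyed by a show's composite quality value and fills itself lazily on first access. A usage sketch; the combineQualities arguments below are illustrative, not from this commit:

    from sickbeard import common

    cache = common.wantedQualities()  # entries are built lazily per composite quality
    quality = common.Quality.combineQualities(
        [common.Quality.SDTV, common.Quality.HDTV],  # initial qualities
        [common.Quality.FULLHDBLURAY])               # upgrade-to qualities
    # An HDTV download on an upgrade-once show that is not yet SNATCHED_BEST
    # still wants the qualities above it:
    wanted = cache.get_wantedlist(quality, True, common.Quality.HDTV, common.DOWNLOADED)
    # -> [common.Quality.FULLHDBLURAY]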
@@ -939,12 +939,17 @@ class PostProcessor(object):
             cur_ep.release_name = self.release_name or ''

+            any_qualities, best_qualities = common.Quality.splitQuality(cur_ep.show.quality)
+            cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_ep.status)
+
             cur_ep.status = common.Quality.compositeStatus(
                 **({'status': common.DOWNLOADED, 'quality': new_ep_quality},
                    {'status': common.ARCHIVED, 'quality': new_ep_quality})
                 [ep_obj.status in common.Quality.SNATCHED_BEST or
-                 (cur_ep.show.archive_firstmatch and new_ep_quality in best_qualities)])
+                 (cur_ep.show.upgrade_once and
+                  (new_ep_quality in best_qualities and
+                   (new_ep_quality not in any_qualities or (cur_status in
+                    (common.SNATCHED, common.SNATCHED_BEST, common.SNATCHED_PROPER, common.DOWNLOADED) and
+                    cur_quality != new_ep_quality))))])

             cur_ep.release_group = self.release_group or ''
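Note: the archive decision above is a dense tuple-index expression; this is the same logic as plain branching, for reading only (the function name is invented, constants are from sickbeard.common):

    from sickbeard.common import (ARCHIVED, DOWNLOADED, SNATCHED,
                                  SNATCHED_BEST, SNATCHED_PROPER)

    def final_status(snatched_best, upgrade_once, new_q, any_q, best_q, cur_status, cur_q):
        # archive on first best match: the episode was snatched as best, or
        # upgrade-once is set and this download is a best quality that either
        # is not an initial quality or genuinely replaces a different quality
        archive = snatched_best or (
            upgrade_once and new_q in best_q and (
                new_q not in any_q or (
                    cur_status in (SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, DOWNLOADED)
                    and cur_q != new_q)))
        return ARCHIVED if archive else DOWNLOADED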
@@ -26,9 +26,9 @@ from sickbeard import logger, encodingKludge as ek
 # usenet
 from . import newznab, omgwtfnzbs
 # torrent
-from . import alpharatio, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, \
+from . import alpharatio, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, ettv, \
     fano, filelist, funfile, gftracker, grabtheinfo, hd4free, hdbits, hdspace, hdtorrents, \
-    iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, pretome, privatehd, ptf, \
+    iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, potuk, pretome, privatehd, ptf, \
     rarbg, revtt, scenehd, scenetime, shazbat, skytorrents, speedcd, \
     thepiratebay, torlock, torrentbytes, torrentday, torrenting, torrentleech, \
     torrentvault, torrentz2, tvchaosuk, wop, zooqle

@@ -51,6 +51,7 @@ __all__ = ['omgwtfnzbs',
            'btscene',
            'custom01',
            'dh',
+           'ettv',
            'fano',
            'filelist',
            'funfile',

@@ -68,6 +69,7 @@ __all__ = ['omgwtfnzbs',
            'ncore',
            'nyaa',
            'pisexy',
+           'potuk',
            'pretome',
            'privatehd',
            'ptf',
@@ -15,6 +15,7 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

+import base64
 import re
 import traceback
 import urllib

@@ -31,8 +32,13 @@ class BTSceneProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, 'BTScene')

-        self.url_home = ['http://btsone.cc/', 'http://diriri.xyz/', 'http://mytorrentz.tv/']
-
+        self.url_home = ['http://btsone.cc/', 'http://diriri.xyz/'] + \
+            ['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
+                [re.sub('[L\sT]+', '', x[::-1]) for x in [
+                    'zTRnTY', 'uVT 2Y', '15LSTZ', 's JmLb', 'rTNL2b', 'uQW LZ', '=LLMmd']],
+                [re.sub('[j\sq]+', '', x[::-1]) for x in [
+                    'zRn qY', 'l52j b', '1j5S M', 'sq Jmb', 'r Nq2b', 'ujQWqZ', 's9jGqb']],
+            ]]]
         self.url_vars = {'search': '?q=%s&category=series&order=1', 'browse': 'lastdaycat/type/Series/',
                          'get': 'torrentdownload.php?id=%s'}
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(vars)s',

@@ -58,6 +64,9 @@ class BTSceneProvider(generic.TorrentProvider):

         url = self.url
         response = self.get_url(url)
+        if not response:
+            return results

         form = re.findall('(?is)(<form[^>]+)', response)
         response = any(form) and form[0] or response
         action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
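Note: the mirror hosts above are lightly obfuscated: each fragment is reversed, the junk characters named in the regex character class are stripped, the fragments are joined, and the result is base64-decoded. Decoding the first list as a check:

    import base64
    import re

    parts = ['zTRnTY', 'uVT 2Y', '15LSTZ', 's JmLb', 'rTNL2b', 'uQW LZ', '=LLMmd']
    host = base64.b64decode(''.join(re.sub('[L\sT]+', '', x[::-1]) for x in parts))
    print(host)  # btscene.unblocked.vc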
129 sickbeard/providers/ettv.py (new file)
@@ -0,0 +1,129 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

import re
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class ETTVProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'ETTV')

        self.url_home = ['https://ettv.tv/']
        self.url_vars = {'search': 'torrents-search.php?%s&search=%s&sort=id&order=desc'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s'}

        self.categories = {'Season': [7], 'Episode': [41, 5, 50]}
        self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']

        self.minseed, self.minleech = 2 * [None]

    @staticmethod
    def _has_signature(data=None):
        return data and re.search(r'(?i)(?:ettv)', data)

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self.url:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'torrent/'}.iteritems())

        for mode in search_params.keys():
            for search_string in search_params[mode]:

                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['search'] % (
                    self._categories_string(mode), ('%2B ', '')['Cache' == mode] + '.'.join(search_string.split()))

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException
                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', class_='table')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if not len(torrent_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in torrent_rows[1:]:
                            cells = tr.find_all('td')
                            if 6 > len(cells):
                                continue
                            try:
                                head = head if None is not head else self._header_row(
                                    tr, {'seed': r'seed', 'leech': r'leech', 'size': r'^size'})
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                download_url = self._link(info.get('href'))
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def get_data(self, url):
        result = None
        html = self.get_url(url, timeout=90)
        try:
            result = re.findall('(?i)"(magnet:[^"]+?)">', html)[0]
        except IndexError:
            logger.log('Failed no magnet in response', logger.DEBUG)
        return result

    def get_result(self, episodes, url):
        result = None

        if url:
            result = super(ETTVProvider, self).get_result(episodes, url)
            result.get_data_func = self.get_data

        return result


provider = ETTVProvider()
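Note on the cell parsing in _search_provider above: tryInt(n, n) keeps the original string whenever a cell is not numeric, so size strings pass through untouched to _bytesizer. A self-contained sketch; try_int is a stand-in assumed to behave like sickbeard.helpers.tryInt:

    def try_int(s, default=None):
        # stand-in for sickbeard.helpers.tryInt (assumed equivalent behaviour)
        try:
            return int(s)
        except (TypeError, ValueError):
            return default

    cells = ['13', '2', '268.2 MB']
    seeders, leechers, size = [try_int(n, n) for n in cells]
    # seeders == 13, leechers == 2, size stays '268.2 MB' for _bytesizer to parse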
@@ -34,18 +34,8 @@ class LimeTorrentsProvider(generic.TorrentProvider):

         self.url_home = ['https://www.limetorrents.cc/'] + \
             ['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
                 [re.sub('[f\sX]+', '', x[::-1]) for x in [
                     'tlXGfb', '1X5SfZ', 'sfJfmb', 'rN 2Xb', 'u QfWZ', 's9G b']],
                 [re.sub('[ \ss]+', '', x[::-1]) for x in [
                     'Ztl Gsb', 'nc svRX', 'Rs nblJ', '5 JmLz', 'czsFsGc', 'nLskVs2', '0s N']],
-                [re.sub('[1\sF]+', '', x[::-1]) for x in [
-                    'X Zt1lGb', 'l1Jn1cvR', 'mL11zRnb', 'uVXbtFFl', 'Hdp NWFa', '=1FQ3cuk']],
-                [re.sub('[y\sW]+', '', x[::-1]) for x in [
-                    'XWZtlyGb', 'lJnyWcvR', 'nyLzRn b', 'vxmYWuWV', 'CWZlt2yY', '== Adyz5']],
-                [re.sub('[j\sy]+', '', x[::-1]) for x in [
-                    'XyZtlG b', 'lJjnjcvR', 'njLz Rnb', 'vjxmYyuV', 'Gbyhjt2Y', 'n jJ3buw']],
-                [re.sub('[o\sg]+', '', x[::-1]) for x in [
-                    'XZt lgGb', 'loJn cvR', 'ngLz Rnb', 'v xgmYuV', 'Gbh t2gY', '6l Heu w']],
-                [re.sub('[ \sF]+', '', x[::-1]) for x in [
-                    'X ZtlFGb', 'lJnc vR', 'n LzR nb', 'vxmYuF V', 'CFZltF2Y', '==wYF2F5']],
             ]]]

         self.url_vars = {'search': 'search/tv/%s/', 'browse': 'browse-torrents/TV-shows/'}
@@ -35,6 +35,7 @@ from io import BytesIO
 from lib.dateutil import parser
 from sickbeard.network_timezones import sb_timezone
 from sickbeard.helpers import tryInt
+from sickbeard.search import get_wanted_qualities, get_aired_in_season

 try:
     from lxml import etree

@@ -473,18 +474,30 @@ class NewznabProvider(generic.NZBProvider):
         return show_obj

     def choose_search_mode(self, episodes, ep_obj, hits_per_page=100):
-        if not hasattr(ep_obj, 'eps_aired_in_season'):
-            return None, neededQualities(need_all_qualities=True), hits_per_page
         searches = [e for e in episodes if (not ep_obj.show.is_scene and e.season == ep_obj.season) or
                     (ep_obj.show.is_scene and e.scene_season == ep_obj.scene_season)]

         needed = neededQualities()
         needed.check_needed_types(ep_obj.show)
         for s in searches:
             if needed.all_qualities_needed:
                 break
             if not s.show.is_anime and not s.show.is_sports:
+                if not getattr(s, 'wantedQuality', None):
+                    # this should not happen, the creation is missing for the search in this case
+                    logger.log('wantedQuality property was missing for search, creating it', logger.WARNING)
+                    ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+                    s.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
                 needed.check_needed_qualities(s.wantedQuality)

+        if not hasattr(ep_obj, 'eps_aired_in_season'):
+            # this should not happen, the creation is missing for the search in this case
+            logger.log('eps_aired_in_season property was missing for search, creating it', logger.WARNING)
+            ep_count, ep_count_scene = get_aired_in_season(ep_obj.show)
+            ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
+            ep_obj.eps_aired_in_scene_season = ep_count_scene.get(ep_obj.scene_season, 0) if ep_obj.show.is_scene else \
+                ep_obj.eps_aired_in_season
+
         per_ep, limit_per_ep = 0, 0
         if needed.need_sd and not needed.need_hd:
             per_ep, limit_per_ep = 10, 25

@@ -500,15 +513,26 @@ class NewznabProvider(generic.NZBProvider):
             rel_per_ep, limit_per_ep = 5, 10
         else:
             rel_per_ep = per_ep
-        rel = int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
-                        ep_obj.eps_aired_in_season * rel_per_ep) / hits_per_page))
-        rel_limit = int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
-                              ep_obj.eps_aired_in_season * limit_per_ep) / hits_per_page))
+        rel = max(1, int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
+                               ep_obj.eps_aired_in_season * rel_per_ep) / hits_per_page)))
+        rel_limit = max(1, int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
+                                     ep_obj.eps_aired_in_season * limit_per_ep) / hits_per_page)))
         season_search = rel < (len(searches) * 100 // hits_per_page)
+        if not season_search:
+            needed = neededQualities()
+            needed.check_needed_types(ep_obj.show)
+            if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
+                if not getattr(ep_obj, 'wantedQuality', None):
+                    ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+                    ep_obj.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=True)
+                needed.check_needed_qualities(ep_obj.wantedQuality)
+        else:
+            if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
+                for ep in episodes:
+                    if not getattr(ep, 'wantedQuality', None):
+                        ep_status, ep_quality = Quality.splitCompositeStatus(ep.status)
+                        ep.wantedQuality = get_wanted_qualities(ep, ep_status, ep_quality, unaired=True)
+                    needed.check_needed_qualities(ep.wantedQuality)
         return (season_search, needed,
                 (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])
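Note: a worked example of the season-vs-episode decision above, with illustrative numbers and Python 2 integer division exactly as in the code:

    from math import ceil

    # 22 aired episodes, 10 expected releases per episode, 100 hits per page,
    # 3 episodes falling inside the searched season (all values assumed)
    eps_aired, rel_per_ep, hits_per_page, n_searches = 22, 10, 100, 3
    rel = max(1, int(ceil(eps_aired * rel_per_ep / hits_per_page)))
    season_search = rel < (n_searches * 100 // hits_per_page)
    # Python 2: 220 / 100 == 2, so rel == 2 and season_search is 2 < 3 == True:
    # one season search replaces three per-episode searches.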
157 sickbeard/providers/potuk.py (new file)
@@ -0,0 +1,157 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

import re
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class PotUKProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'PotUK')

        self.url_base = 'http://www.potuk.com/newforum/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'search.php',
                     'browse': self.url_base + 'search.php?do=getdaily&exclude=%s',
                     'get_data': self.url_base + 'misc.php?do=showattachments&t=%s'}

        self.url = self.urls['config_provider_home_uri']

        self.digest, self.resp = 2 * [None]

    def logged_in(self, resp):
        try:
            self.resp = re.findall('(?sim)<form .*?search.php.*?</form>', resp)[0]
        except (IndexError, TypeError):
            return False
        return self.has_all_cookies('bbsessionhash')

    def _authorised(self, **kwargs):

        return super(PotUKProvider, self)._authorised(
            logged_in=(lambda y=None: self.logged_in(y)),
            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        opts = re.findall('(?sim)forumchoice\[\][^<]+(.*?)</select>', self.resp)[0]
        cat_opts = re.findall(r'(?mis)<option[^>]*?value=[\'"](\d+)[^>]*>(.*?)</option>', opts)
        include = []
        tv = False
        for c in cat_opts:
            if not tv and 'TV Shows' in c[1]:
                tv = True
            elif tv:
                if 3 > len(re.findall('&nbsp;', c[1])):
                    break
                elif not filter(lambda v: v in c[1], ('Requests', 'Offer', 'Discussion')):
                    include += [c[0]]
        exclude = ','.join(list(filter(lambda v: v not in include, map(lambda x: x[0], cat_opts))))

        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                params = {}
                if 'Cache' == mode:
                    search_url = self.urls['browse'] % exclude
                else:
                    search_url = self._link(re.findall('(?i)action="([^"]+?)"', self.resp)[0])
                    params = {'query': search_string, 'showposts': 0, 'titleonly': 1, 'prefixchoice': '',
                              'replyless': 0, 'searchdate': 0, 'beforeafter': 'after', 'sortby': 'threadstart',
                              'order': 'descending', 'starteronly': 0, 'forumchoice': include}
                    tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', self.resp)
                    attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, c) or [''])[0]
                              for attr in ['type', 'name', 'value']] for c in tags]
                    for itype, name, value in attrs:
                        params.setdefault(name, value)
                    del params['doprefs']
                html = self.get_url(search_url, post_data=params)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', id='threadslist')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[1:]:
                            if 6 > len(tr.find_all('td')) or not tr.select('img[alt*="ttach"]'):
                                continue
                            try:
                                link = tr.select('td[id^="td_threadtitle"]')[0].select('a[id*="title"]')[0]
                                title = link.get_text().strip()
                                download_url = self.urls['get_data'] % re.findall('t=(\d+)', link['href'])[0]
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, '', ''))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(
                    mode, len(items[mode]) - cnt, ('search_param: ' + search_string, search_url)['Cache' == mode])

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def get_data(self, url):
        result = None
        html = self.get_url(url, timeout=90)
        try:
            result = self._link(re.findall('(?i)"(attachment\.php[^"]+?)"', html)[0])
        except IndexError:
            logger.log('Failed no torrent in response', logger.DEBUG)
        return result

    def get_result(self, episodes, url):
        result = None

        if url:
            result = super(PotUKProvider, self).get_result(episodes, url)
            result.get_data_func = self.get_data

        return result

    def ui_string(self, key):
        return ('%s_digest' % self.get_id()) == key and 'use... \'bbuserid=xx; bbpassword=yy\'' or ''


provider = PotUKProvider()
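Note: PotUK posts back the site's own vBulletin search form, so the provider harvests every named input it finds and carries the values into the POST. The same step in isolation; the sample HTML below is invented:

    import re

    resp = '<form action="search.php?do=process">' \
           '<input type="hidden" name="securitytoken" value="abc123">' \
           '<input type="hidden" name="doprefs" value="1"></form>'
    tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', resp)
    attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, c) or [''])[0]
              for attr in ['type', 'name', 'value']] for c in tags]
    params = {'query': 'show name'}
    for itype, name, value in attrs:
        params.setdefault(name, value)  # carry hidden fields into the POST
    del params['doprefs']               # the provider drops this field before posting
    print(params)  # {'query': 'show name', 'securitytoken': 'abc123'}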
@@ -26,7 +26,7 @@ import traceback

 import sickbeard

-from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT
+from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, Quality, SEASON_RESULT, MULTI_EP_RESULT

 from sickbeard import logger, db, show_name_helpers, exceptions, helpers
 from sickbeard import sab

@@ -41,6 +41,7 @@ from sickbeard import failed_history
 from sickbeard.exceptions import ex
 from sickbeard.providers.generic import GenericProvider
 from sickbeard import common
+from sickbeard.tv import TVEpisode


 def _download_result(result):
@@ -130,6 +131,11 @@ def snatch_episode(result, end_status=SNATCHED):

     # TORRENTs can be sent to clients or saved to disk
     elif 'torrent' == result.resultType:
+        if not result.url.startswith('magnet') and None is not result.get_data_func:
+            result.url = result.get_data_func(result.url)
+            result.get_data_func = None  # consume only once
+            if not result.url:
+                return False
         # torrents are saved to disk when blackhole mode
         if 'blackhole' == sickbeard.TORRENT_METHOD:
             dl_result = _download_result(result)

@@ -165,7 +171,7 @@ def snatch_episode(result, end_status=SNATCHED):
     update_imdb_data = True
     for cur_ep_obj in result.episodes:
         with cur_ep_obj.lock:
-            if is_first_best_match(result):
+            if is_first_best_match(cur_ep_obj.status, result):
                 cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
             else:
                 cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)
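Note: the get_data_func plumbing above pairs with the new ETTV and PotUK providers: a result carries a callable that resolves the final download URL only at snatch time, then clears it so the fetch happens at most once. In outline; Result and fetch_magnet below are stand-ins, not from this commit:

    class Result(object):
        def __init__(self, url, get_data_func=None):
            self.url, self.get_data_func = url, get_data_func

    def fetch_magnet(page_url):
        # stand-in for ETTVProvider.get_data: scrape the details page for a magnet
        return 'magnet:?xt=urn:btih:...'

    result = Result('https://example.provider/torrent/12345', fetch_magnet)
    if not result.url.startswith('magnet') and None is not result.get_data_func:
        result.url = result.get_data_func(result.url)  # one HTTP fetch
        result.get_data_func = None                    # consume only once
    assert result.url.startswith('magnet')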
@@ -289,7 +295,7 @@ def is_final_result(result):
     return False


-def is_first_best_match(result):
+def is_first_best_match(ep_status, result):
     """
     Checks if the given result is a best quality match and if we want to archive the episode on first match.
     """
@@ -298,21 +304,41 @@ def is_first_best_match(ep_status, result):
               result.name, logger.DEBUG)

     show_obj = result.episodes[0].show
+    cur_status, cur_quality = Quality.splitCompositeStatus(ep_status)

     any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

     # if there is a redownload that's a match to one of our best qualities and
     # we want to archive the episode then we are done
-    if best_qualities and show_obj.archive_firstmatch and result.quality in best_qualities:
+    if best_qualities and show_obj.upgrade_once and \
+            (result.quality in best_qualities and
+             (cur_status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED) or
+              result.quality not in any_qualities)):
         return True

     return False


-def wanted_episodes(show, from_date, make_dict=False, unaired=False):
-    initial_qualities, upgrade_qualities = common.Quality.splitQuality(show.quality)
-    all_qualities = list(set(initial_qualities + upgrade_qualities))
+def set_wanted_aired(ep_obj, unaired, ep_count, ep_count_scene, manual=False):
+    ep_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+    ep_obj.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=unaired, manual=manual)
+    ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
+    ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
+        ep_obj.scene_season, 0) if ep_obj.scene_season else ep_obj.eps_aired_in_season
+
+
+def get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=False, manual=False):
+    if isinstance(ep_obj, TVEpisode):
+        return sickbeard.WANTEDLIST_CACHE.get_wantedlist(ep_obj.show.quality, ep_obj.show.upgrade_once,
+                                                         cur_quality, cur_status, unaired, manual)
+
+    return []
+
+
+def get_aired_in_season(show, return_sql=False):
+    ep_count = {}
+    ep_count_scene = {}
+    tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
+    my_db = db.DBConnection()

     if show.air_by_date:
@@ -326,9 +352,6 @@
                      'WHERE showid = ? AND indexer = ? AND season > 0'

     sql_results = my_db.select(sql_string, [show.indexerid, show.indexer])
-    ep_count = {}
-    ep_count_scene = {}
-    tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
     for result in sql_results:
         if 1 < helpers.tryInt(result['airdate']) <= tomorrow:
             cur_season = helpers.tryInt(result['season'])
@@ -337,76 +360,57 @@
             if -1 != cur_scene_season:
                 ep_count_scene[cur_scene_season] = ep_count.setdefault(cur_scene_season, 0) + 1

+    if return_sql:
+        return ep_count, ep_count_scene, sql_results
+
+    return ep_count, ep_count_scene
+
+
+def wanted_episodes(show, from_date, make_dict=False, unaired=False):
+
+    ep_count, ep_count_scene, sql_results_org = get_aired_in_season(show, return_sql=True)
+
+    from_date_ord = from_date.toordinal()
     if unaired:
         status_list = [common.WANTED, common.FAILED, common.UNAIRED]
-        sql_string += ' AND ( airdate > ? OR airdate = 1 )'
+        sql_results = [s for s in sql_results_org if s['airdate'] > from_date_ord or s['airdate'] == 1]
     else:
         status_list = [common.WANTED, common.FAILED]
-        sql_string += ' AND airdate > ?'
+        sql_results = [s for s in sql_results_org if s['airdate'] > from_date_ord]

-    sql_results = my_db.select(sql_string, [show.indexerid, show.indexer, from_date.toordinal()])
-
-    # check through the list of statuses to see if we want any
     if make_dict:
         wanted = {}
     else:
         wanted = []
     total_wanted = total_replacing = total_unaired = 0
-    downloaded_status_list = common.SNATCHED_ANY + [common.DOWNLOADED]
-    for result in sql_results:
-        not_downloaded = True
-        cur_composite_status = int(result['status'])
-        cur_status, cur_quality = common.Quality.splitCompositeStatus(cur_composite_status)
-        cur_snatched = cur_status in downloaded_status_list
-
-        if show.archive_firstmatch and cur_snatched and cur_quality in upgrade_qualities:
+
+    if 0 < len(sql_results) and 2 < len(sql_results) - len(show.episodes):
+        myDB = db.DBConnection()
+        show_ep_sql = myDB.select('SELECT * FROM tv_episodes WHERE showid = ? AND indexer = ?',
+                                  [show.indexerid, show.indexer])
+    else:
+        show_ep_sql = None
+
+    for result in sql_results:
+        ep_obj = show.getEpisode(int(result['season']), int(result['episode']), ep_sql=show_ep_sql)
+        cur_status, cur_quality = common.Quality.splitCompositeStatus(ep_obj.status)
+        ep_obj.wantedQuality = get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=unaired)
+        if not ep_obj.wantedQuality:
             continue

-        # special case: already downloaded quality is not in any of the upgrade to Qualities
-        other_quality_downloaded = False
-        if len(upgrade_qualities) and cur_snatched and cur_quality not in all_qualities:
-            other_quality_downloaded = True
-            wanted_qualities = all_qualities
-        else:
-            wanted_qualities = upgrade_qualities
-
-        if upgrade_qualities:
-            highest_wanted_quality = max(wanted_qualities)
-        else:
-            if other_quality_downloaded:
-                highest_wanted_quality = max(initial_qualities)
-            else:
-                highest_wanted_quality = 0
-
-        # if we need a better one then say yes
-        if (cur_snatched and cur_quality < highest_wanted_quality) \
-                or cur_status in status_list \
-                or (sickbeard.SEARCH_UNAIRED and 1 == result['airdate']
-                    and cur_status in (common.SKIPPED, common.IGNORED, common.UNAIRED, common.UNKNOWN, common.FAILED)):
-
-            if cur_status in (common.WANTED, common.FAILED):
-                total_wanted += 1
-            elif cur_status in (common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN):
-                total_unaired += 1
-            else:
-                total_replacing += 1
-                not_downloaded = False
-
-            ep_obj = show.getEpisode(int(result['season']), int(result['episode']))
-            ep_obj.wantedQuality = [i for i in (wanted_qualities, initial_qualities)[not_downloaded]
-                                    if cur_quality < i]
-            # in case we don't want any quality for this episode, skip the episode
-            if 0 == len(ep_obj.wantedQuality):
-                logger.log('Dropped episode, no wanted quality for %sx%s: [%s]' % (
-                    ep_obj.season, ep_obj.episode, ep_obj.show.name), logger.ERROR)
-                continue
-            ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
-            ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
-                helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season
-            if make_dict:
-                wanted.setdefault(ep_obj.scene_season if ep_obj.show.is_scene else ep_obj.season, []).append(ep_obj)
-            else:
-                wanted.append(ep_obj)
+        ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
+        ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
+            helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season
+        if make_dict:
+            wanted.setdefault(ep_obj.scene_season if ep_obj.show.is_scene else ep_obj.season, []).append(ep_obj)
+        else:
+            wanted.append(ep_obj)
+
+        if cur_status in (common.WANTED, common.FAILED):
+            total_wanted += 1
+        elif cur_status in (common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN):
+            total_unaired += 1
+        else:
+            total_replacing += 1

     if 0 < total_wanted + total_replacing + total_unaired:
         actions = []
@@ -739,6 +743,11 @@ def search_providers(show, episodes, manual_search=False, torrent_only=False, tr

         # filter out possible bad torrents from providers
         if 'torrent' == best_result.resultType:
+            if not best_result.url.startswith('magnet') and None is not best_result.get_data_func:
+                best_result.url = best_result.get_data_func(best_result.url)
+                best_result.get_data_func = None  # consume only once
+                if not best_result.url:
+                    continue
             if best_result.url.startswith('magnet'):
                 if 'blackhole' != sickbeard.TORRENT_METHOD:
                     best_result.content = None
@@ -25,8 +25,7 @@ import datetime
 import sickbeard
 from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
     failed_history, history, ui, properFinder
-from sickbeard.search import wanted_episodes
-from sickbeard.common import Quality
+from sickbeard.search import wanted_episodes, get_aired_in_season, set_wanted_aired


 search_queue_lock = threading.Lock()

@@ -369,6 +368,9 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
             logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
             self.started = True

+            ep_count, ep_count_scene = get_aired_in_season(self.show)
+            set_wanted_aired(self.segment, True, ep_count, ep_count_scene, manual=True)
+
             search_result = search.search_providers(self.show, [self.segment], True, try_other_searches=True)

             if search_result:

@@ -415,6 +417,11 @@ class BacklogQueueItem(generic_queue.QueueItem):

         is_error = False
         try:
+            if not self.standard_backlog:
+                ep_count, ep_count_scene = get_aired_in_season(self.show)
+                for ep_obj in self.segment:
+                    set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)
+
             logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
             search_result = search.search_providers(
                 self.show, self.segment, False,

@@ -454,12 +461,11 @@ class FailedQueueItem(generic_queue.QueueItem):
         self.started = True

         try:
+            ep_count, ep_count_scene = get_aired_in_season(self.show)
             for ep_obj in self.segment:

                 logger.log(u'Marking episode as bad: [%s]' % ep_obj.prettyName())

-                cur_status = ep_obj.status
-
                 failed_history.set_episode_failed(ep_obj)
                 (release, provider) = failed_history.find_release(ep_obj)
                 failed_history.revert_episode(ep_obj)

@@ -469,8 +475,9 @@ class FailedQueueItem(generic_queue.QueueItem):

                 logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())

-            search_result = search.search_providers(
-                self.show, self.segment, True, try_other_searches=True, old_status=cur_status)
+                set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)
+
+            search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)

             if search_result:
                 for result in search_result:
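Note: the queue changes share one shape: compute the per-show aired counts once, then do only cheap per-episode work inside the loop. Reduced to a runnable miniature (aired_counts is a stand-in for get_aired_in_season):

    def aired_counts(rows):
        # stand-in for get_aired_in_season: tally aired episodes per season, once per show
        counts = {}
        for r in rows:
            counts[r['season']] = counts.get(r['season'], 0) + 1
        return counts

    rows = [{'season': 1}, {'season': 1}, {'season': 2}]
    ep_count = aired_counts(rows)              # one pass, outside the loop
    for ep in ({'season': 1}, {'season': 2}):
        aired = ep_count.get(ep['season'], 0)  # per-episode work is now a dict lookup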
@@ -177,10 +177,10 @@ class ShowQueue(generic_queue.GenericQueue):

     def addShow(self, indexer, indexer_id, showDir, default_status=None, quality=None, flatten_folders=None,
                 lang='en', subtitles=None, anime=None, scene=None, paused=None, blacklist=None, whitelist=None,
-                wanted_begin=None, wanted_latest=None, tag=None, new_show=False):
+                wanted_begin=None, wanted_latest=None, tag=None, new_show=False, show_name=None):
         queueItemObj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang,
                                     subtitles, anime, scene, paused, blacklist, whitelist,
-                                    wanted_begin, wanted_latest, tag, new_show=new_show)
+                                    wanted_begin, wanted_latest, tag, new_show=new_show, show_name=show_name)

         self.add_item(queueItemObj)

@@ -238,7 +238,7 @@ class ShowQueueItem(generic_queue.QueueItem):
 class QueueItemAdd(ShowQueueItem):
     def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles, anime,
                  scene, paused, blacklist, whitelist, default_wanted_begin, default_wanted_latest, tag,
-                 scheduled_update=False, new_show=False):
+                 scheduled_update=False, new_show=False, show_name=None):

         self.indexer = indexer
         self.indexer_id = indexer_id

@@ -257,6 +257,7 @@ class QueueItemAdd(ShowQueueItem):
         self.whitelist = whitelist
         self.tag = tag
         self.new_show = new_show
+        self.showname = show_name

         self.show = None

@@ -270,7 +271,9 @@ class QueueItemAdd(ShowQueueItem):
         Returns the show name if there is a show object created, if not returns
         the dir that the show is being added to.
         """
-        if self.show == None:
+        if None is not self.showname:
+            return self.showname
+        if None is self.show:
             return self.showDir
         return self.show.name

@@ -304,6 +307,12 @@ class QueueItemAdd(ShowQueueItem):
             t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
             s = t[self.indexer_id, False]

+            if getattr(t, 'show_not_found', False):
+                logger.log('Show %s was not found on %s, maybe show was deleted' %
+                           (self.show_name, sickbeard.indexerApi(self.indexer).name), logger.ERROR)
+                self._finishEarly()
+                return
+
             # this usually only happens if they have an NFO in their show dir which gave us a Indexer ID that has no proper english version of the show
             if getattr(s, 'seriesname', None) is None:
                 logger.log('Show in %s has no name on %s, probably the wrong language used to search with.' %
@@ -109,7 +109,7 @@ class TVShow(object):
         self._air_by_date = 0
         self._subtitles = int(sickbeard.SUBTITLES_DEFAULT if sickbeard.SUBTITLES_DEFAULT else 0)
         self._dvdorder = 0
-        self._archive_firstmatch = 0
+        self._upgrade_once = 0
         self._lang = lang
         self._last_update_indexer = 1
         self._sports = 0

@@ -156,7 +156,7 @@ class TVShow(object):
     air_by_date = property(lambda self: self._air_by_date, dirty_setter('_air_by_date'))
     subtitles = property(lambda self: self._subtitles, dirty_setter('_subtitles'))
     dvdorder = property(lambda self: self._dvdorder, dirty_setter('_dvdorder'))
-    archive_firstmatch = property(lambda self: self._archive_firstmatch, dirty_setter('_archive_firstmatch'))
+    upgrade_once = property(lambda self: self._upgrade_once, dirty_setter('_upgrade_once'))
     lang = property(lambda self: self._lang, dirty_setter('_lang'))
     last_update_indexer = property(lambda self: self._last_update_indexer, dirty_setter('_last_update_indexer'))
     sports = property(lambda self: self._sports, dirty_setter('_sports'))

@@ -358,7 +358,8 @@ class TVShow(object):

         return ep_list

-    def getEpisode(self, season=None, episode=None, file=None, noCreate=False, absolute_number=None, forceUpdate=False):
+    def getEpisode(self, season=None, episode=None, file=None, noCreate=False, absolute_number=None, forceUpdate=False,
+                   ep_sql=None):

         # if we get an anime get the real season and episode
         if self.is_anime and absolute_number and not season and not episode:

@@ -392,9 +393,9 @@ class TVShow(object):
                        (self.indexerid, season, episode), logger.DEBUG)

             if file:
-                ep = TVEpisode(self, season, episode, file)
+                ep = TVEpisode(self, season, episode, file, show_sql=ep_sql)
             else:
-                ep = TVEpisode(self, season, episode)
+                ep = TVEpisode(self, season, episode, show_sql=ep_sql)

             if ep != None:
                 self.episodes[season][episode] = ep

@@ -932,9 +933,9 @@ class TVShow(object):
         if not self.dvdorder:
             self.dvdorder = 0

-        self.archive_firstmatch = sqlResults[0]['archive_firstmatch']
-        if not self.archive_firstmatch:
-            self.archive_firstmatch = 0
+        self.upgrade_once = sqlResults[0]['archive_firstmatch']
+        if not self.upgrade_once:
+            self.upgrade_once = 0

         self.quality = int(sqlResults[0]['quality'])
         self.flatten_folders = int(sqlResults[0]['flatten_folders'])

@@ -1407,7 +1408,7 @@ class TVShow(object):
                         'sports': self.sports,
                         'subtitles': self.subtitles,
                         'dvdorder': self.dvdorder,
-                        'archive_firstmatch': self.archive_firstmatch,
+                        'archive_firstmatch': self.upgrade_once,
                         'startyear': self.startyear,
                         'lang': self.lang,
                         'imdb_id': self.imdbid,

@@ -1451,6 +1452,30 @@ class TVShow(object):
         logger.log('Checking if found %sepisode %sx%s is wanted at quality %s' %
                    (('', 'multi-part ')[multi_ep], season, episode, Quality.qualityStrings[quality]), logger.DEBUG)

+        if not multi_ep:
+            try:
+                wq = getattr(self.episodes.get(season, {}).get(episode, {}), 'wantedQuality', None)
+                if None is not wq:
+                    if quality in wq:
+                        curStatus, curQuality = Quality.splitCompositeStatus(self.episodes[season][episode].status)
+                        if curStatus in (WANTED, UNAIRED, SKIPPED, FAILED):
+                            logger.log('Existing episode status is wanted/unaired/skipped/failed, getting found episode',
+                                       logger.DEBUG)
+                            return True
+                        elif manualSearch:
+                            logger.log('Usually ignoring found episode, but forced search allows the quality, getting found'
+                                       ' episode', logger.DEBUG)
+                            return True
+                        elif quality > curQuality:
+                            logger.log(
+                                'Episode already exists but the found episode has better quality, getting found episode',
+                                logger.DEBUG)
+                            return True
+                    logger.log('None of the conditions were met, ignoring found episode', logger.DEBUG)
+                    return False
+            except (StandardError, Exception):
+                pass
+
         # if the quality isn't one we want under any circumstances then just say no
         initialQualities, archiveQualities = Quality.splitQuality(self.quality)
         allQualities = list(set(initialQualities + archiveQualities))

@@ -1542,7 +1567,7 @@ class TVShow(object):

         min_best, max_best = min(best_qualities), max(best_qualities)
         if quality >= max_best \
-                or (self.archive_firstmatch and
+                or (self.upgrade_once and
                     (quality in best_qualities or (None is not min_best and quality > min_best))):
             return Overview.GOOD
         return Overview.QUAL

@@ -1558,7 +1583,7 @@ class TVShow(object):


 class TVEpisode(object):
-    def __init__(self, show, season, episode, file=''):
+    def __init__(self, show, season, episode, file='', show_sql=None):
         self._name = ''
         self._season = season
         self._episode = episode

@@ -1593,7 +1618,7 @@ class TVEpisode(object):

         self.lock = threading.Lock()

-        self.specifyEpisode(self.season, self.episode)
+        self.specifyEpisode(self.season, self.episode, show_sql)

         self.relatedEps = []

@@ -1738,9 +1763,9 @@ class TVEpisode(object):
         # if either setting has changed return true, if not return false
         return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn

-    def specifyEpisode(self, season, episode):
+    def specifyEpisode(self, season, episode, show_sql=None):

-        sqlResult = self.loadFromDB(season, episode)
+        sqlResult = self.loadFromDB(season, episode, show_sql)

         if not sqlResult:
             # only load from NFO if we didn't load from DB

@@ -1764,13 +1789,17 @@ class TVEpisode(object):
             raise exceptions.EpisodeNotFoundException(
                 'Couldn\'t find episode %sx%s' % (season, episode))

-    def loadFromDB(self, season, episode):
+    def loadFromDB(self, season, episode, show_sql=None):
         logger.log('%s: Loading episode details from DB for episode %sx%s' % (self.show.indexerid, season, episode),
                    logger.DEBUG)

-        myDB = db.DBConnection()
-        sql_results = myDB.select('SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
-                                  [self.show.indexerid, season, episode])
+        sql_results = None
+        if show_sql:
+            sql_results = [s for s in show_sql if episode == s['episode'] and season == s['season']]
+        if not sql_results:
+            myDB = db.DBConnection()
+            sql_results = myDB.select('SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
+                                      [self.show.indexerid, season, episode])

         if len(sql_results) > 1:
             raise exceptions.MultipleDBEpisodesException('Your DB has two records for the same show somehow.')
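Note: the show_sql path added above trades many per-episode SELECTs for one bulk query per show, filtered in memory. In outline; the rows below are illustrative:

    rows = [{'season': 1, 'episode': 1}, {'season': 1, 'episode': 2}]  # one SELECT per show
    season, episode = 1, 2
    # loadFromDB now filters prefetched rows, falling back to the DB only on a miss:
    sql_results = [s for s in rows if episode == s['episode'] and season == s['season']]
    assert 1 == len(sql_results)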
@@ -1678,7 +1678,7 @@ class Home(MainHandler):

     def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[],
                  flatten_folders=None, paused=None, directCall=False, air_by_date=None, sports=None, dvdorder=None,
-                 indexerLang=None, subtitles=None, archive_firstmatch=None, rls_ignore_words=None,
+                 indexerLang=None, subtitles=None, upgrade_once=None, rls_ignore_words=None,
                  rls_require_words=None, anime=None, blacklist=None, whitelist=None,
                  scene=None, tag=None, quality_preset=None, reset_fanart=None, **kwargs):

@@ -1757,7 +1757,7 @@ class Home(MainHandler):

         flatten_folders = config.checkbox_to_value(flatten_folders)
         dvdorder = config.checkbox_to_value(dvdorder)
-        archive_firstmatch = config.checkbox_to_value(archive_firstmatch)
+        upgrade_once = config.checkbox_to_value(upgrade_once)
         paused = config.checkbox_to_value(paused)
         air_by_date = config.checkbox_to_value(air_by_date)
         scene = config.checkbox_to_value(scene)

@@ -1820,7 +1820,7 @@ class Home(MainHandler):
         with showObj.lock:
             newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))
             showObj.quality = newQuality
-            showObj.archive_firstmatch = archive_firstmatch
+            showObj.upgrade_once = upgrade_once

             # reversed for now
             if bool(showObj.flatten_folders) != bool(flatten_folders):

@@ -3717,7 +3717,8 @@ class NewHomeAddShows(Home):
         sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, int(defaultStatus), newQuality,
                                                     flatten_folders, indexerLang, subtitles, anime,
                                                     scene, None, blacklist, whitelist,
-                                                    wanted_begin, wanted_latest, tag, new_show=new_show)  # @UndefinedVariable
+                                                    wanted_begin, wanted_latest, tag, new_show=new_show,
+                                                    show_name=show_name)  # @UndefinedVariable
         # ui.notifications.message('Show added', 'Adding the specified show into ' + show_dir)

         return finishAddShow()

@@ -3788,7 +3789,8 @@ class NewHomeAddShows(Home):
                     flatten_folders=sickbeard.FLATTEN_FOLDERS_DEFAULT,
                     subtitles=sickbeard.SUBTITLES_DEFAULT,
                     anime=sickbeard.ANIME_DEFAULT,
-                    scene=sickbeard.SCENE_DEFAULT)
+                    scene=sickbeard.SCENE_DEFAULT,
+                    show_name=show_name)
                 num_added += 1

         if num_added:

@@ -4131,8 +4133,8 @@ class Manage(MainHandler):
             if showObj:
                 showList.append(showObj)

-        archive_firstmatch_all_same = True
-        last_archive_firstmatch = None
+        upgrade_once_all_same = True
+        last_upgrade_once = None

         flatten_folders_all_same = True
         last_flatten_folders = None

@@ -4169,12 +4171,12 @@ class Manage(MainHandler):
                 if cur_root_dir not in root_dir_list:
                     root_dir_list.append(cur_root_dir)

-                if archive_firstmatch_all_same:
+                if upgrade_once_all_same:
                     # if we had a value already and this value is different then they're not all the same
-                    if last_archive_firstmatch not in (None, curShow.archive_firstmatch):
-                        archive_firstmatch_all_same = False
+                    if last_upgrade_once not in (None, curShow.upgrade_once):
+                        upgrade_once_all_same = False
                     else:
-                        last_archive_firstmatch = curShow.archive_firstmatch
+                        last_upgrade_once = curShow.upgrade_once

                 # if we know they're not all the same then no point even bothering
                 if paused_all_same:

@@ -4235,7 +4237,7 @@ class Manage(MainHandler):
                     last_air_by_date = curShow.air_by_date

         t.showList = toEdit
-        t.archive_firstmatch_value = last_archive_firstmatch if archive_firstmatch_all_same else None
+        t.upgrade_once_value = last_upgrade_once if upgrade_once_all_same else None
         t.paused_value = last_paused if paused_all_same else None
         t.tag_value = last_tag if tag_all_same else None
         t.anime_value = last_anime if anime_all_same else None

@@ -4249,7 +4251,7 @@ class Manage(MainHandler):

         return t.respond()

-    def massEditSubmit(self, archive_firstmatch=None, paused=None, anime=None, sports=None, scene=None,
+    def massEditSubmit(self, upgrade_once=None, paused=None, anime=None, sports=None, scene=None,
                        flatten_folders=None, quality_preset=False, subtitles=None, air_by_date=None, anyQualities=[],
                        bestQualities=[], toEdit=None, tag=None, *args, **kwargs):

@@ -4285,11 +4287,11 @@ class Manage(MainHandler):
             else:
                 new_show_dir = showObj._location

-            if archive_firstmatch == 'keep':
-                new_archive_firstmatch = showObj.archive_firstmatch
+            if upgrade_once == 'keep':
+                new_upgrade_once = showObj.upgrade_once
             else:
-                new_archive_firstmatch = True if archive_firstmatch == 'enable' else False
-            new_archive_firstmatch = 'on' if new_archive_firstmatch else 'off'
+                new_upgrade_once = True if 'enable' == upgrade_once else False
+            new_upgrade_once = 'on' if new_upgrade_once else 'off'

             if paused == 'keep':
                 new_paused = showObj.paused

@@ -4348,7 +4350,7 @@ class Manage(MainHandler):

             curErrors += Home(self.application, self.request).editShow(curShow, new_show_dir, anyQualities,
                                                                        bestQualities, exceptions_list,
-                                                                       archive_firstmatch=new_archive_firstmatch,
+                                                                       upgrade_once=new_upgrade_once,
                                                                        flatten_folders=new_flatten_folders,
                                                                        paused=new_paused, sports=new_sports,
                                                                        subtitles=new_subtitles, anime=new_anime,
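Note: massEditSubmit above maps the three-way form value through the show's current setting; the same mapping as a tiny helper (the helper name is invented):

    def resolve_tristate(choice, current):
        # 'keep' leaves the show's current setting; 'enable'/'disable' override it
        value = current if 'keep' == choice else ('enable' == choice)
        return 'on' if value else 'off'

    assert 'on' == resolve_tristate('keep', True)
    assert 'off' == resolve_tristate('disable', True)
    assert 'on' == resolve_tristate('enable', False)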
@@ -110,7 +110,7 @@ mainDB.sickbeard.save_config = _dummy_saveConfig


 # the real one tries to contact tvdb just stop it from getting more info on the ep
-def _fake_specifyEP(self, season, episode):
+def _fake_specifyEP(self, season, episode, show_sql=None):
     pass

 sickbeard.tv.TVEpisode.specifyEpisode = _fake_specifyEP