diff --git a/CHANGES.md b/CHANGES.md
index 886e5992..46e9555a 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -89,6 +89,11 @@
* Add PTF torrent provider
* Add ILT torrent provider
* Add Fano torrent provider
+* Add BTScene torrent provider
+* Add Extratorrent provider
+* Add Limetorrents provider
+* Add nCore torrent provider
+* Remove Usenet-Crawler provider
* Change CPU throttling on General Config/Advanced to "Disabled" by default for new installs
* Change provider OMGWTFNZBS api url and auto reject nuked releases
* Change Search Provider page to load torrent settings only when Search torrents is enabled in Search Settings
@@ -120,6 +125,9 @@
* Change post process to join incrementally named (i.e. file.001 to file.nnn) split files
* Change replace unrar2 lib with rarfile 3.0 and UnRAR.exe 5.40 freeware
* Change post process "Copy" to delete redundant files after use
+* Add indicator for public access search providers
+* Change improve probability of selecting the most seeded release
+* Change add the TorrentDay x265 category to search
[develop changelog]
* Change send nzb data to NZBGet for Anizb instead of url
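
Note: the "most seeded release" entry above maps to the new _sort_seeding helper added to generic.py later in this patch. A minimal sketch of its effect, assuming the (title, url, seeders, size) result tuples this patch standardises on (sample data is hypothetical):

    # results as (title, download_url, seeders, size) tuples, the shape used across this patch
    results = [
        ('Show.S01E01.720p.HDTV.x264', 'http://example.net/a.torrent', 3, 734003200),
        ('Show.S01E01.720p.HDTV.x264', 'http://example.net/b.torrent', 41, 734003200),
    ]

    def sort_seeding(mode, items):
        # de-duplicate, then order by the seeders field (index 2), highest first,
        # so the healthiest swarm is snatched before marginal ones
        if mode in ['Season', 'Episode']:
            return sorted(set(items), key=lambda tup: tup[2], reverse=True)
        return items

    print(sort_seeding('Episode', results)[0][1])  # http://example.net/b.torrent
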
diff --git a/gui/slick/images/providers/btscene.png b/gui/slick/images/providers/btscene.png
new file mode 100644
index 00000000..142436e0
Binary files /dev/null and b/gui/slick/images/providers/btscene.png differ
diff --git a/gui/slick/images/providers/extratorrent.png b/gui/slick/images/providers/extratorrent.png
new file mode 100644
index 00000000..f4e49273
Binary files /dev/null and b/gui/slick/images/providers/extratorrent.png differ
diff --git a/gui/slick/images/providers/limetorrents.png b/gui/slick/images/providers/limetorrents.png
new file mode 100644
index 00000000..47ea1d55
Binary files /dev/null and b/gui/slick/images/providers/limetorrents.png differ
diff --git a/gui/slick/images/providers/ncore.png b/gui/slick/images/providers/ncore.png
new file mode 100644
index 00000000..40af14e0
Binary files /dev/null and b/gui/slick/images/providers/ncore.png differ
diff --git a/gui/slick/interfaces/default/config_providers.tmpl b/gui/slick/interfaces/default/config_providers.tmpl
index f5e0596f..fdf61759 100644
--- a/gui/slick/interfaces/default/config_providers.tmpl
+++ b/gui/slick/interfaces/default/config_providers.tmpl
@@ -87,7 +87,7 @@
Provider Priorities
Check off and drag the providers into the order you want them to be used.
-At least one provider is required but two are recommended.
+At least one provider is required, two are recommended.
#if $methods_notused
<%= '/'.join(x for x in methods_notused) %> providers can be enabled in Search Settings
@@ -109,7 +109,10 @@
/>
$cur_provider.name$state
-<%= '*' if not cur_provider.supports_backlog else '' %>
+ #if $cur_provider.is_public_access()#
+ (PA)
+ #end if#
+ #if not $cur_provider.supports_backlog#*#end if#
#end for
@@ -117,10 +120,12 @@
-* Provider does not support backlog searches at this time
-#if $sickbeard.USE_TORRENTS
-** Provider supports limited backlog searches, some episodes/qualities may not be available
-#end if
+(PA) Public access, no account required
+ Searches current and past releases
+* Searches current but not past releases
+## #if $sickbeard.USE_TORRENTS
+## ** Supports limited backlog searches, some episodes/qualities may not be available
+## #end if
## ! Provider is NOT WORKING
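
For context on the new (PA) badge: the template calls $cur_provider.is_public_access(), which is added to generic.py at the end of this patch. A simplified sketch of the decision it encodes (condensed from the patch; the full version also consults _check_auth and catches AuthException):

    import re

    def is_public_access(provider):
        # a provider is flagged public if it is a known open index, or defines
        # neither a login hook ('_authorised') nor a cookie digest
        try:
            return bool(re.search('(?i)rarbg|sick|womble|anizb', provider.name)) \
                or not ('_authorised' in provider.__class__.__dict__
                        or hasattr(provider, 'digest'))
        except Exception:
            return False

    class Rarbg(object):  # hypothetical stand-in for a provider instance
        name = 'Rarbg'
    print(is_public_access(Rarbg()))  # True
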
diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py
index 2d14d147..c358b69e 100644
--- a/sickbeard/properFinder.py
+++ b/sickbeard/properFinder.py
@@ -78,6 +78,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
# for each provider get a list of the
orig_thread_name = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
+ np = NameParser(False, try_scene_exceptions=True)
for cur_provider in providers:
if not recent_anime and cur_provider.anime_only:
continue
@@ -99,7 +100,6 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
# if they haven't been added by a different provider than add the proper to the list
count = 0
- np = NameParser(False, try_scene_exceptions=True)
for x in found_propers:
name = _generic_name(x.name)
if name not in propers:
@@ -125,6 +125,8 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
for cur_proper in sorted_propers:
+ parse_result = np.parse(cur_proper.name)
+
# set the indexerid in the db to the show's indexerid
cur_proper.indexerid = parse_result.show.indexerid
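
The properFinder.py change above hoists the NameParser construction out of the per-provider loop and parses each proper once, in the final sorted loop. A toy illustration of the reuse pattern (NameParser is stubbed here, since the real class lives in sickbeard's name parser package):

    class NameParser(object):
        # stand-in for sickbeard's NameParser, just to count parse calls
        def __init__(self, file_name, try_scene_exceptions=False):
            self.calls = 0
        def parse(self, name):
            self.calls += 1
            return name

    np = NameParser(False, try_scene_exceptions=True)  # built once, before any loop

    sorted_propers = ['Show.S01E01.PROPER.720p', 'Show.S01E02.REPACK.720p']
    for cur_proper in sorted_propers:
        parse_result = np.parse(cur_proper)  # one parse per proper, after sorting
    print(np.calls)  # 2
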
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index a5a6852b..d60a0015 100755
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -26,8 +26,10 @@ from sickbeard import logger, encodingKludge as ek
# usenet
from . import newznab, omgwtfnzbs, womble
# torrent
-from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, dh, fano, filelist, freshontv, funfile, gftracker, grabtheinfo, \
- hd4free, hdbits, hdspace, ilt, iptorrents, morethan, pisexy, pretome, privatehd, ptf, rarbg, revtt, scc, scenetime, shazbat, speedcd, \
+from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, btscene, dh, extratorrent, \
+ fano, filelist, freshontv, funfile, gftracker, grabtheinfo, hd4free, hdbits, hdspace, \
+ ilt, iptorrents, limetorrents, morethan, ncore, pisexy, pretome, privatehd, ptf, \
+ rarbg, revtt, scc, scenetime, shazbat, speedcd, \
thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, torrentshack, transmithe_net, tvchaosuk, zooqle
# anime
from . import anizb, nyaatorrents, tokyotoshokan
@@ -45,8 +47,10 @@ __all__ = ['omgwtfnzbs',
'bithdtv',
'bitmetv',
'btn',
+ 'btscene',
'custom01',
'dh',
+ 'extratorrent',
'fano',
'filelist',
'freshontv',
@@ -58,7 +62,9 @@ __all__ = ['omgwtfnzbs',
'hdspace',
'ilt',
'iptorrents',
+ 'limetorrents',
'morethan',
+ 'ncore',
'pisexy',
'pretome',
'privatehd',
@@ -227,7 +233,7 @@ def getDefaultNewznabProviders():
return '!!!'.join(['Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0',
'NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0',
'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0',
- 'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'])
+ ])
def getProviderModule(name):
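
A note on the defaults string trimmed above: each '!!!'-separated entry is a '|'-delimited provider definition. A sketch of how it decomposes; only name/url/key/categories are evident from the entries themselves, the meanings of the trailing numeric fields are an assumption:

    defaults = '!!!'.join(['Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0',
                           'NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0'])

    for entry in defaults.split('!!!'):
        name, url, key, cats = entry.split('|')[:4]
        print('%s -> %s (categories: %s)' % (name, url, cats))
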
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
index 13478d7f..c6fae1fd 100644
--- a/sickbeard/providers/alpharatio.py
+++ b/sickbeard/providers/alpharatio.py
@@ -35,7 +35,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
self.url_base = 'https://alpharatio.cc/'
self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'login.php',
+ 'login_action': self.url_base + 'login.php',
'search': self.url_base + 'torrents.php?searchstr=%s%s&' + '&'.join(
['tags_type=1', 'order_by=time', 'order_way=desc'] +
['filter_cat[%s]=1' % c for c in 1, 2, 3, 4, 5] +
@@ -48,8 +48,8 @@ class AlphaRatioProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
- return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
- post_params={'keeplogged': '1', 'login': 'Login'})
+ return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')),
+ post_params={'keeplogged': '1', 'form_tmpl': True})
def _search_provider(self, search_params, **kwargs):
@@ -73,7 +73,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
raise generic.HaltParseException
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
- torrent_table = soup.find('table', attrs={'id': 'torrent_table'})
+ torrent_table = soup.find(id='torrent_table')
torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
if 2 > len(torrent_rows):
@@ -82,14 +82,12 @@ class AlphaRatioProvider(generic.TorrentProvider):
for tr in torrent_rows[1:]:
try:
seeders, leechers, size = [tryInt(n, n) for n in [
- tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+ tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
if self._peers_fail(mode, seeders, leechers):
continue
title = tr.find('a', title=rc['info']).get_text().strip()
-
-                        link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
- download_url = self.urls['get'] % link
+ download_url = self._link(tr.find('a', title=rc['get'])['href'])
except (AttributeError, TypeError, ValueError):
continue
@@ -98,13 +96,11 @@ class AlphaRatioProvider(generic.TorrentProvider):
except generic.HaltParseException:
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
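
alpharatio.py is the first of many providers here switched to the new _link helper (added in generic.py below) in place of hand-rolled self.urls['get'] % href handling. A sketch of what _link normalises, with its body lifted from the patch and the url template passed explicitly:

    import re

    def link(url, url_tmpl='%s'):
        # unescape &amp;, strip, pass absolute urls through unchanged, and
        # complete relative hrefs against the provider's 'get' template
        url = url and str(url).strip().replace('&amp;', '&') or ''
        return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))

    print(link('/torrents.php?action=download&amp;id=1', 'https://alpharatio.cc/%s'))
    # https://alpharatio.cc/torrents.php?action=download&id=1
    print(link('https://cdn.example.net/file.torrent'))  # passed through unchanged
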
diff --git a/sickbeard/providers/beyondhd.py b/sickbeard/providers/beyondhd.py
index cf764c01..d18bcfe8 100644
--- a/sickbeard/providers/beyondhd.py
+++ b/sickbeard/providers/beyondhd.py
@@ -71,7 +71,7 @@ class BeyondHDProvider(generic.TorrentProvider):
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_url = self.urls['browse'] % (self.passkey, self.categories[mode_cats])
if 'Cache' != mode:
- search_url += self.urls['search'] % re.sub('[\.\s]+', ' ', search_string)
+ search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string)
data_json = self.get_url(search_url, json=True)
@@ -82,16 +82,14 @@ class BeyondHDProvider(generic.TorrentProvider):
seeders, leechers = item.get('seeders', 0), item.get('leechers', 0)
if self._peers_fail(mode, seeders, leechers):
continue
- title, download_url = item.get('file'), item.get('get')
+ title, download_url = item.get('file'), self._link(item.get('get'))
if title and download_url:
items[mode].append((title, download_url, seeders, self._bytesizer(item.get('size'))))
time.sleep(1.1)
self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py
index 5fabd99a..86621cbe 100644
--- a/sickbeard/providers/bithdtv.py
+++ b/sickbeard/providers/bithdtv.py
@@ -44,7 +44,7 @@ class BitHDTVProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
return super(BitHDTVProvider, self)._authorised(
- logged_in=(lambda x=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls
+ logged_in=(lambda y=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls
@staticmethod
def _has_signature(data=None):
@@ -82,15 +82,15 @@ class BitHDTVProvider(generic.TorrentProvider):
for tr in torrent_rows[1:]:
try:
seeders, leechers, size = [tryInt(n, n) for n in [
- tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]]
+ tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
if self.freeleech and not tr.attrs.get('bgcolor').endswith('FF99') or \
self._peers_fail(mode, seeders, leechers):
continue
info = tr.find('a', href=rc['info'])
- title = (info.attrs.get('title') or info.contents[0].get_text()).strip()
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
- except (AttributeError, TypeError, ValueError):
+ title = (info.attrs.get('title') or info.get_text()).strip()
+ download_url = self._link(tr.find('a', href=rc['get'])['href'])
+ except (AttributeError, TypeError, ValueError, KeyError):
continue
if title and download_url:
@@ -98,14 +98,12 @@ class BitHDTVProvider(generic.TorrentProvider):
except generic.HaltParseException:
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
diff --git a/sickbeard/providers/bitmetv.py b/sickbeard/providers/bitmetv.py
index 550addd6..f00eaff3 100644
--- a/sickbeard/providers/bitmetv.py
+++ b/sickbeard/providers/bitmetv.py
@@ -46,9 +46,9 @@ class BitmetvProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
return super(BitmetvProvider, self)._authorised(
- logged_in=(lambda x=None: (None is x or 'Other Links' in x) and self.has_all_cookies() and
+ logged_in=(lambda y=None: (None is y or 'Other Links' in y) and self.has_all_cookies() and
self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
- failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+ failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
def _search_provider(self, search_params, **kwargs):
@@ -81,13 +81,13 @@ class BitmetvProvider(generic.TorrentProvider):
for tr in torrent_rows[1:]:
try:
seeders, leechers, size = [tryInt(n, n) for n in [
- (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+ (tr.find_all('td')[x].get_text().strip()) for x in -3, -2, -5]]
if self._peers_fail(mode, seeders, leechers):
continue
info = tr.find('a', href=rc['info'])
- title = info.attrs.get('title') or info.get_text().strip()
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+ title = (info.attrs.get('title') or info.get_text()).strip()
+ download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (AttributeError, TypeError, ValueError):
continue
@@ -96,14 +96,12 @@ class BitmetvProvider(generic.TorrentProvider):
except generic.HaltParseException:
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 58949db9..80aad729 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -75,15 +75,18 @@ class BTNProvider(generic.TorrentProvider):
try:
response = helpers.getURL(self.url_api, post_data=json_rpc(params), session=self.session, json=True)
error_text = response['error']['message']
- logger.log(('Call Limit' in error_text and u'Action aborted because the %(prov)s 150 calls/hr limit was reached' or
- u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
+ logger.log(
+ ('Call Limit' in error_text
+ and u'Action aborted because the %(prov)s 150 calls/hr limit was reached'
+ or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
+ {'prov': self.name, 'desc': error_text}, logger.WARNING)
return results
- except:
+ except (KeyError, Exception):
data_json = response and 'result' in response and response['result'] or {}
if data_json:
- found_torrents = {} if 'torrents' not in data_json else data_json['torrents']
+ found_torrents = 'torrents' in data_json and data_json['torrents'] or {}
# We got something, we know the API sends max 1000 results at a time.
# See if there are more than 1000 results for our query, if not we
@@ -101,37 +104,45 @@ class BTNProvider(generic.TorrentProvider):
for page in range(1, pages_needed + 1):
try:
- response = helpers.getURL(self.url_api, json=True, session=self.session,
- post_data=json_rpc(params, results_per_page, page * results_per_page))
+ response = helpers.getURL(
+ self.url_api, json=True, session=self.session,
+ post_data=json_rpc(params, results_per_page, page * results_per_page))
error_text = response['error']['message']
- logger.log(('Call Limit' in error_text and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached' or
- u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
+ logger.log(
+ ('Call Limit' in error_text
+ and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached'
+ or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
+ {'prov': self.name, 'desc': error_text}, logger.WARNING)
return results
- except:
+ except (KeyError, Exception):
data_json = response and 'result' in response and response['result'] or {}
- # Note that this these are individual requests and might time out individually. This would result in 'gaps'
- # in the results. There is no way to fix this though.
+            # Note that these are individual requests and might time out individually.
+            # This would result in 'gaps' in the results. There is no way to fix this though.
if 'torrents' in data_json:
found_torrents.update(data_json['torrents'])
cnt = len(results)
for torrentid, torrent_info in found_torrents.iteritems():
- seeders, leechers = [tryInt(n) for n in torrent_info.get('Seeders'), torrent_info.get('Leechers')]
+ seeders, leechers, size = (tryInt(n, n) for n in [torrent_info.get(x) for x in
+ 'Seeders', 'Leechers', 'Size'])
if self._peers_fail(mode, seeders, leechers) or \
self.reject_m2ts and re.match(r'(?i)m2?ts', torrent_info.get('Container', '')):
continue
- title, url = self._title_and_url(torrent_info)
+ title, url = self._get_title_and_url(torrent_info)
if title and url:
- results.append(torrent_info)
+ results.append((title, url, seeders, self._bytesizer(size)))
self._log_search(mode, len(results) - cnt,
('search_param: ' + str(search_param), self.name)['Cache' == mode])
+ results = self._sort_seeding(mode, results)
+
return results
- def _title_and_url(self, data_json):
+ @staticmethod
+ def _get_title_and_url(data_json):
# The BTN API gives a lot of information in response,
# however SickGear is built mostly around Scene or
@@ -189,7 +200,7 @@ class BTNProvider(generic.TorrentProvider):
series_param.update(base_params)
search_params.append(series_param)
- return [dict({'Season': search_params})]
+ return [dict(Season=search_params)]
def _episode_strings(self, ep_obj, **kwargs):
@@ -231,7 +242,7 @@ class BTNProvider(generic.TorrentProvider):
series_param.update(base_params)
search_params.append(series_param)
- return [dict({'Episode': search_params})]
+ return [dict(Episode=search_params)]
def cache_data(self, **kwargs):
@@ -246,11 +257,11 @@ class BTNProvider(generic.TorrentProvider):
# Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search,
# older items will be done through backlog
if 86400 < seconds_since_last_update:
- logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on %s was over 24 hours'
- % self.name, logger.WARNING)
+ logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on ' +
+ '%s was over 24 hours' % self.name, logger.WARNING)
seconds_since_last_update = 86400
- return self._search_provider(dict({'Cache': ['']}), age=seconds_since_last_update)
+ return self._search_provider(dict(Cache=['']), age=seconds_since_last_update)
class BTNCache(tvcache.TVCache):
@@ -258,7 +269,7 @@ class BTNCache(tvcache.TVCache):
def __init__(self, this_provider):
tvcache.TVCache.__init__(self, this_provider)
- self.update_freq = 15 # cache update frequency
+ self.update_freq = 15
def _cache_data(self):
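
btn.py now emits the same standardized (title, url, seeders, size) tuples as the HTML scrapers instead of raw API dicts, which is what lets the shared _sort_seeding ordering apply to it too. A sketch of the conversion step, with hypothetical field names for the API item:

    torrent_info = {'ReleaseName': 'Show.S01E01.720p', 'DownloadURL': 'https://example.net/dl/1',
                    'Seeders': '12', 'Leechers': '3', 'Size': '734003200'}  # hypothetical item

    def try_int(value, default=None):
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    seeders, leechers, size = (try_int(torrent_info.get(x), 0)
                               for x in ('Seeders', 'Leechers', 'Size'))
    result = (torrent_info['ReleaseName'], torrent_info['DownloadURL'], seeders, size)
    print(result)  # ('Show.S01E01.720p', 'https://example.net/dl/1', 12, 734003200)
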
diff --git a/sickbeard/providers/btscene.py b/sickbeard/providers/btscene.py
new file mode 100644
index 00000000..fddd2cb5
--- /dev/null
+++ b/sickbeard/providers/btscene.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+import urllib
+
+from . import generic
+from sickbeard import logger
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
+from lib.unidecode import unidecode
+
+
+class BTSceneProvider(generic.TorrentProvider):
+
+ def __init__(self):
+ generic.TorrentProvider.__init__(self, 'BTScene')
+
+ self.url_home = ['http://www.btstorrent.cc/', 'http://bittorrentstart.com/',
+ 'http://diriri.xyz/', 'http://mytorrentz.tv/']
+
+ self.url_vars = {'search': 'results.php?q=%s&category=series&order=1', 'browse': 'lastdaycat/type/Series/',
+ 'get': 'torrentdownload.php?id=%s'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
+ 'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
+
+ self.minseed, self.minleech = 2 * [None]
+ self.confirmed = False
+
+ @staticmethod
+ def _has_signature(data=None):
+ return data and re.search(r'(?i)(?:btscene|bts[-]official|full\sindex)', data)
+
+ def _search_provider(self, search_params, **kwargs):
+
+ results = []
+ if not self.url:
+ return results
+
+ items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+ 'info': '\w+?(\d+)[.]html', 'verified': 'Verified'}.iteritems())
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+
+ search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+
+ search_url = self.urls['browse'] if 'Cache' == mode \
+ else self.urls['search'] % (urllib.quote_plus(search_string))
+
+ html = self.get_url(search_url)
+
+ cnt = len(items[mode])
+ try:
+ if not html or self._has_no_results(html):
+ raise generic.HaltParseException
+ with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+ torrent_rows = soup.select('tr[class$="_tr"]')
+
+ if not len(torrent_rows):
+ raise generic.HaltParseException
+
+ for tr in torrent_rows:
+ try:
+ seeders, leechers, size = [tryInt(n, n) for n in [
+ tr.find_all('td')[x].get_text().strip() for x in -4, -3, -5]]
+ if self._peers_fail(mode, seeders, leechers) or \
+ self.confirmed and not (tr.find('img', src=rc['verified'])
+ or tr.find('img', title=rc['verified'])):
+ continue
+
+ info = tr.find('a', href=rc['info'])
+ title = info and info.get_text().strip()
+ tid_href = info and rc['info'].findall(info['href'])
+ tid_href = tid_href and tryInt(tid_href[0], 0) or 0
+ tid_tr = tryInt(tr['id'].strip('_'), 0)
+ tid = (tid_tr, tid_href)[tid_href > tid_tr]
+
+ download_url = info and (self.urls['get'] % tid)
+ except (AttributeError, TypeError, ValueError, IndexError):
+ continue
+
+ if title and download_url:
+ items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+
+ except generic.HaltParseException:
+ pass
+ except (StandardError, Exception):
+ logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+
+ self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+ results = self._sort_seeding(mode, results + items[mode])
+
+ return results
+
+ def _episode_strings(self, ep_obj, **kwargs):
+ return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)
+
+
+provider = BTSceneProvider()
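
btscene.py declares mirror domains in url_home and assembles its working urls from url_vars plus url_tmpl; picking a live mirror via _has_signature happens in the base class, outside this patch. A rough sketch of the template expansion, assuming plain '%(home)s%(vars)s' substitution:

    url_home = ['http://www.btstorrent.cc/', 'http://bittorrentstart.com/']
    url_vars = {'search': 'results.php?q=%s&category=series&order=1',
                'get': 'torrentdownload.php?id=%s'}
    url_tmpl = {'config_provider_home_uri': '%(home)s',
                'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}

    home = url_home[0]  # the base class would probe each mirror with _has_signature
    urls = dict((k, v % dict(home=home, vars=url_vars.get(k, ''))) for k, v in url_tmpl.items())
    print(urls['search'] % 'Show+S01E01')
    # http://www.btstorrent.cc/results.php?q=Show+S01E01&category=series&order=1
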
diff --git a/sickbeard/providers/dh.py b/sickbeard/providers/dh.py
index 3fcc7dc9..fb3a29c1 100644
--- a/sickbeard/providers/dh.py
+++ b/sickbeard/providers/dh.py
@@ -46,9 +46,9 @@ class DHProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
return super(DHProvider, self)._authorised(
- logged_in=(lambda x=None: (None is x or re.search('(?i)rss\slink', x)) and self.has_all_cookies() and
+ logged_in=(lambda y=None: (None is y or re.search('(?i)rss\slink', y)) and self.has_all_cookies() and
self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
- failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+ failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
def _search_provider(self, search_params, **kwargs):
@@ -82,14 +82,12 @@ class DHProvider(generic.TorrentProvider):
for tr in torrent_rows[1:]:
try:
seeders, leechers, size = [tryInt(n, n) for n in [
- (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+ tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
continue
title = tr.find('a', href=rc['info']).get_text().strip()
-
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+ download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (AttributeError, TypeError, ValueError, IndexError):
continue
@@ -98,14 +96,12 @@ class DHProvider(generic.TorrentProvider):
except generic.HaltParseException:
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py
new file mode 100644
index 00000000..23154d59
--- /dev/null
+++ b/sickbeard/providers/extratorrent.py
@@ -0,0 +1,108 @@
+# coding=utf-8
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+import urllib
+
+from . import generic
+from sickbeard import logger
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
+from lib.unidecode import unidecode
+
+
+class ExtraTorrentProvider(generic.TorrentProvider):
+
+ def __init__(self):
+ generic.TorrentProvider.__init__(self, 'ExtraTorrent')
+
+ self.url_home = ['https://www.extratorrent%s/' % u for u in '.works', 'live.com', 'online.com', '.cc'] + \
+ ['https://etmirror.com/', 'https://etproxy.com/', 'https://extratorrent.usbypass.xyz/']
+
+ self.url_vars = {'search': 'search/?new=1&search=%s&s_cat=8', 'browse': 'view/today/TV.html',
+ 'get': '%s'}
+ self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
+ 'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
+
+ self.minseed, self.minleech = 2 * [None]
+
+ @staticmethod
+ def _has_signature(data=None):
+ return data and re.search(r'(?i)ExtraTorrent', data[33:1024:])
+
+ def _search_provider(self, search_params, **kwargs):
+
+ results = []
+ if not self.url:
+ return results
+
+ items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+ 'get': 'download', 'title': '(?:^download|torrent$)', 'get_url': '^/(torrent_)?'}.iteritems())
+
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+
+ search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+
+ search_url = self.urls['browse'] if 'Cache' == mode \
+ else self.urls['search'] % (urllib.quote_plus(search_string))
+
+ html = self.get_url(search_url)
+
+ cnt = len(items[mode])
+ try:
+ if not html or self._has_no_results(html):
+ raise generic.HaltParseException
+ with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+ torrent_table = soup.find('table', class_='tl')
+ torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
+
+ if 2 > len(torrent_rows):
+ raise generic.HaltParseException
+
+ for tr in torrent_rows[1:]:
+ try:
+ seeders, leechers, size = [tryInt(n.replace('---', '0'), n) for n in [
+ tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]]
+ if self._peers_fail(mode, seeders, leechers):
+ continue
+
+ info = tr.find('a', title=rc['get']) or {}
+ title = rc['title'].sub('', info.get('title') or '').strip()
+ download_url = self._link(rc['get_url'].sub('', info['href']))
+ except (AttributeError, TypeError, ValueError, IndexError):
+ continue
+
+ if title and download_url:
+ items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+
+ except generic.HaltParseException:
+ pass
+ except (StandardError, Exception):
+ logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+
+ self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+ results = self._sort_seeding(mode, results + items[mode])
+
+ return results
+
+
+provider = ExtraTorrentProvider()
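
extratorrent.py uses the same multi-mirror pattern; _has_signature lets the base class verify that a candidate domain is actually serving the expected site rather than a parked page. A sketch of such a probe against canned page bodies (the probing loop itself is assumed base-class behaviour, not shown in this patch):

    import re

    def has_signature(data=None):
        # skip the first 33 bytes of boilerplate and scan only the early page head
        return bool(data and re.search(r'(?i)ExtraTorrent', data[33:1024]))

    pages = {'https://www.extratorrent.works/':
                 '<!DOCTYPE html><html><head><title>ExtraTorrent.works</title>',
             'https://etmirror.com/':
                 '<!DOCTYPE html><html><head><title>This domain is for sale</title>'}

    print([u for u, body in sorted(pages.items()) if has_signature(body)])
    # ['https://www.extratorrent.works/']
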
diff --git a/sickbeard/providers/fano.py b/sickbeard/providers/fano.py
index 1f26d61e..0494fa73 100644
--- a/sickbeard/providers/fano.py
+++ b/sickbeard/providers/fano.py
@@ -45,7 +45,7 @@ class FanoProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
- return super(FanoProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))
+ return super(FanoProvider, self)._authorised()
def _search_provider(self, search_params, **kwargs):
@@ -82,14 +82,12 @@ class FanoProvider(generic.TorrentProvider):
for tr in torrent_rows[1:]:
try:
seeders, leechers, size = [tryInt(n, n) for n in [
- (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+ tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
continue
title = tr.find('a', href=rc['info']).get_text().strip()
-
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+ download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (AttributeError, TypeError, ValueError, IndexError):
continue
@@ -98,14 +96,12 @@ class FanoProvider(generic.TorrentProvider):
except generic.HaltParseException:
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
diff --git a/sickbeard/providers/filelist.py b/sickbeard/providers/filelist.py
index 8655d3e5..545d2e11 100644
--- a/sickbeard/providers/filelist.py
+++ b/sickbeard/providers/filelist.py
@@ -78,14 +78,12 @@ class FLProvider(generic.TorrentProvider):
for tr in torrent_rows:
try:
seeders, leechers, size = [tryInt(n, n) for n in [
- (tr.select('span[style*="cell"]')[x].get_text().strip()) for x in (-3, -2, -5)]]
+ tr.select('span[style*="cell"]')[x].get_text().strip() for x in -3, -2, -5]]
if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
continue
title = tr.find('a', href=rc['info']).get_text().strip()
-
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+ download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (AttributeError, TypeError, ValueError, IndexError):
continue
@@ -94,14 +92,12 @@ class FLProvider(generic.TorrentProvider):
except generic.HaltParseException:
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 34c8ba3a..8f30ee34 100644
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -32,7 +32,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
self.url_base = 'https://freshon.tv/'
self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'login.php?action=makelogin',
+ 'login_action': self.url_base + 'login.php',
'search': self.url_base + 'browse.php?incldead=%s&words=0&%s&search=%s',
'get': self.url_base + '%s'}
@@ -45,8 +45,8 @@ class FreshOnTVProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
return super(FreshOnTVProvider, self)._authorised(
- post_params={'login': 'Do it!'},
- failed_msg=(lambda x=None: 'DDoS protection by CloudFlare' in x and
+ post_params={'form_tmpl': True},
+ failed_msg=(lambda y=None: 'DDoS protection by CloudFlare' in y and
u'Unable to login to %s due to CloudFlare DDoS javascript check' or
'Username does not exist' in y and
u'Invalid username or password for %s. Check settings' or
@@ -80,7 +80,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
raise generic.HaltParseException
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
- torrent_table = soup.find('table', attrs={'class': 'frame'})
+ torrent_table = soup.find('table', class_='frame')
torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
if 2 > len(torrent_rows):
@@ -92,14 +92,13 @@ class FreshOnTVProvider(generic.TorrentProvider):
continue
seeders, leechers, size = [tryInt(n, n) for n in [
- (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+ tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
if self._peers_fail(mode, seeders, leechers):
continue
- info = tr.find('a', href=rc['info'], attrs={'class': rc['name']})
- title = info.attrs.get('title') or info.get_text().strip()
-
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+ info = tr.find('a', href=rc['info'], class_=rc['name'])
+ title = (info.attrs.get('title') or info.get_text()).strip()
+ download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (AttributeError, TypeError, ValueError):
continue
@@ -108,13 +107,11 @@ class FreshOnTVProvider(generic.TorrentProvider):
except generic.HaltParseException:
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
diff --git a/sickbeard/providers/funfile.py b/sickbeard/providers/funfile.py
index a5ab41ee..a5d18646 100644
--- a/sickbeard/providers/funfile.py
+++ b/sickbeard/providers/funfile.py
@@ -32,7 +32,7 @@ class FunFileProvider(generic.TorrentProvider):
self.url_base = 'https://www.funfile.org/'
self.urls = {'config_provider_home_uri': self.url_base,
- 'login': self.url_base + 'takelogin.php',
+ 'login_action': self.url_base + 'login.php',
'search': self.url_base + 'browse.php?%s&search=%s&incldead=0&showspam=1&',
'get': self.url_base + '%s'}
@@ -45,9 +45,9 @@ class FunFileProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
return super(FunFileProvider, self)._authorised(
- logged_in=(lambda x=None: None is not self.session.cookies.get('uid', domain='.funfile.org') and
- None is not self.session.cookies.get('pass', domain='.funfile.org')),
- post_params={'login': 'Login', 'returnto': '/'}, timeout=self.url_timeout)
+ logged_in=(lambda y=None: all(
+ [None is not self.session.cookies.get(x, domain='.funfile.org') for x in 'uid', 'pass'])),
+ post_params={'form_tmpl': True}, timeout=self.url_timeout)
def _search_provider(self, search_params, **kwargs):
@@ -72,7 +72,7 @@ class FunFileProvider(generic.TorrentProvider):
raise generic.HaltParseException
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
- torrent_table = soup.find('td', attrs={'class': 'colhead'}).find_parent('table')
+ torrent_table = soup.find('td', class_='colhead').find_parent('table')
torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
if 2 > len(torrent_rows):
@@ -85,13 +85,12 @@ class FunFileProvider(generic.TorrentProvider):
continue
seeders, leechers, size = [tryInt(n, n) for n in [
- (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+ tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
continue
- title = info.attrs.get('title') or info.get_text().strip()
- download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+ title = (info.attrs.get('title') or info.get_text()).strip()
+ download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (AttributeError, TypeError, ValueError):
continue
@@ -100,14 +99,12 @@ class FunFileProvider(generic.TorrentProvider):
except (generic.HaltParseException, AttributeError):
pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results
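
Several providers above (alpharatio, freshontv, funfile) switch from hard-coded login posts to post_params={'form_tmpl': True}; the generic.py hunk at the end of this patch then scrapes the login form and its action from the fetched login page. A condensed sketch of that flow against a canned page, using the patch's form regex and a simplified action regex:

    import re

    response = ('<html><body><form method="post" action="/takelogin.php" id="login">'
                '<input name="username"/><input name="password"/></form></body></html>')

    # isolate the login <form> block, as _authorised does when form_tmpl is True
    form = re.findall('(?is)(<form[^>]+%s.*?</form>)' % 'login', response)
    response = form and form[0] or response

    # then extract the action url that credentials should be posted to
    action = re.findall('(?i)action=[\'"]([^\'"]+)', response)[0]
    print(action)  # /takelogin.php
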
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 3c236249..cb73e0e3 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -33,6 +33,7 @@ import sickbeard
import requests
import requests.cookies
from hachoir_parser import guessParser
+from hachoir_core.error import HachoirError
from hachoir_core.stream import FileInputStream
from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
@@ -77,7 +78,8 @@ class GenericProvider:
self.headers = {
# Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
# otherwise session might be broken and download fail, asking again for authentication
- # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
+ # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
+ # 'Chrome/32.0.1700.107 Safari/537.36'}
'User-Agent': USER_AGENT}
def get_id(self):
@@ -99,9 +101,17 @@ class GenericProvider:
def _authorised(self):
return True
- def _check_auth(self):
+ def _check_auth(self, is_required=None):
return True
+ def is_public_access(self):
+ try:
+ return bool(re.search('(?i)rarbg|sick|womble|anizb', self.name)) \
+ or False is bool(('_authorised' in self.__class__.__dict__ or hasattr(self, 'digest')
+ or self._check_auth(is_required=True)))
+ except AuthException:
+ return False
+
def is_active(self):
if GenericProvider.NZB == self.providerType and sickbeard.USE_NZBS:
return self.is_enabled()
@@ -176,7 +186,7 @@ class GenericProvider:
urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,))
for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'),
('s', 'torrentproject.se'), ('', 'thetorrent.org'))]
- except:
+ except (StandardError, Exception):
link_type = 'torrent'
urls = [result.url]
@@ -204,7 +214,7 @@ class GenericProvider:
try:
helpers.moveFile(cache_file, final_file)
msg = 'moved'
- except:
+ except (OSError, Exception):
msg = 'copied cached file'
logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
saved = True
@@ -234,13 +244,13 @@ class GenericProvider:
try:
stream = FileInputStream(file_name)
parser = guessParser(stream)
- except:
+ except (HachoirError, Exception):
pass
result = parser and 'application/x-bittorrent' == parser.mime_type
try:
stream._input.close()
- except:
+ except (HachoirError, Exception):
pass
return result
@@ -282,7 +292,7 @@ class GenericProvider:
try:
title, url = isinstance(item, tuple) and (item[0], item[1]) or \
(item.get('title', None), item.get('link', None))
- except Exception:
+ except (StandardError, Exception):
pass
title = title and re.sub(r'\s+', '.', u'%s' % title)
@@ -290,6 +300,15 @@ class GenericProvider:
return title, url
+ def _link(self, url, url_tmpl=None):
+
+        url = url and str(url).strip().replace('&amp;', '&') or ''
+ try:
+ url_tmpl = url_tmpl or self.urls['get']
+ except (StandardError, Exception):
+ url_tmpl = '%s'
+ return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))
+
def find_search_results(self, show, episodes, search_mode, manual_search=False):
self._check_auth()
@@ -391,8 +410,9 @@ class GenericProvider:
logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
u' to snatch, ignoring', logger.DEBUG)
add_cache_entry = True
- elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
- ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
+ elif len(parse_result.episode_numbers) and not [
+ ep for ep in episodes if ep.season == parse_result.season_number and
+ ep.episode in parse_result.episode_numbers]:
logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
u' to snatch, ignoring', logger.DEBUG)
add_cache_entry = True
@@ -409,8 +429,8 @@ class GenericProvider:
else:
airdate = parse_result.air_date.toordinal()
my_db = db.DBConnection()
- sql_results = my_db.select('SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
- [show_obj.indexerid, airdate])
+ sql_results = my_db.select('SELECT season, episode FROM tv_episodes ' +
+ 'WHERE showid = ? AND airdate = ?', [show_obj.indexerid, airdate])
if 1 != len(sql_results):
logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' +
@@ -507,6 +527,7 @@ class GenericProvider:
def log_result(self, mode='Cache', count=0, url='url missing'):
"""
Simple function to log the result of any search
+ :param mode: string that this log relates to
:param count: count of successfully processed items
:param url: source url of item(s)
"""
@@ -541,8 +562,8 @@ class GenericProvider:
def has_all_cookies(self, cookies=None, pre=''):
- cookies = cookies or ['uid', 'pass']
- return False not in ['%s%s' % (pre, item) in self.session.cookies for item in ([cookies], cookies)[isinstance(cookies, list)]]
+ cookies = cookies and ([cookies], cookies)[isinstance(cookies, list)] or ['uid', 'pass']
+ return all(['%s%s' % (pre, item) in self.session.cookies for item in cookies])
def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'):
@@ -558,7 +579,7 @@ class GenericProvider:
def _bytesizer(size_dim=''):
try:
- value = float('.'.join(re.findall('(?i)(\d+)(?:[\.,](\d+))?', size_dim)[0]))
+ value = float('.'.join(re.findall('(?i)(\d+)(?:[.,](\d+))?', size_dim)[0]))
except TypeError:
return size_dim
except IndexError:
@@ -587,7 +608,7 @@ class NZBProvider(object, GenericProvider):
return (getattr(self, 'key', '') and self.key) or (getattr(self, 'api_key', '') and self.api_key) or None
return False
- def _check_auth(self):
+ def _check_auth(self, is_required=None):
has_key = self.maybe_apikey()
if has_key:
@@ -703,9 +724,16 @@ class TorrentProvider(object, GenericProvider):
@staticmethod
def _sort_seeders(mode, items):
-
+ """ legacy function used by a custom provider, do not remove """
mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True)
+ @staticmethod
+ def _sort_seeding(mode, items):
+
+ if mode in ['Season', 'Episode']:
+ return sorted(set(items), key=lambda tup: tup[2], reverse=True)
+ return items
+
def _peers_fail(self, mode, seeders=0, leechers=0):
return 'Cache' != mode and (seeders < getattr(self, 'minseed', 0) or leechers < getattr(self, 'minleech', 0))
@@ -744,7 +772,7 @@ class TorrentProvider(object, GenericProvider):
ep_dict = self._ep_dict(ep_obj)
sp_detail = (show.air_by_date or show.is_sports) and str(ep_obj.airdate).split('-')[0] or \
(show.is_anime and ep_obj.scene_absolute_number or
- 'S%(seasonnumber)02d' % ep_dict if 'sp_detail' not in kwargs.keys() else kwargs['sp_detail'](ep_dict))
+ ('sp_detail' in kwargs.keys() and kwargs['sp_detail'](ep_dict)) or 'S%(seasonnumber)02d' % ep_dict)
sp_detail = ([sp_detail], sp_detail)[isinstance(sp_detail, list)]
detail = ({}, {'Season_only': sp_detail})[detail_only and not self.show.is_sports and not self.show.is_anime]
return [dict({'Season': self._build_search_strings(sp_detail, scene, prefix)}.items() + detail.items())]
@@ -792,7 +820,7 @@ class TorrentProvider(object, GenericProvider):
prefix = ([prefix], prefix)[isinstance(prefix, list)]
search_params = []
- crop = re.compile(r'([\.\s])(?:\1)+')
+ crop = re.compile(r'([.\s])(?:\1)+')
for name in set(allPossibleShowNames(self.show)):
if process_name:
name = helpers.sanitizeSceneName(name)
@@ -861,11 +889,14 @@ class TorrentProvider(object, GenericProvider):
def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):
- maxed_out = (lambda x: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', x))
+ maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' +
+ '(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y))
logged_in, failed_msg = [None is not a and a or b for (a, b) in (
- (logged_in, (lambda x=None: self.has_all_cookies())),
- (failed_msg, (lambda x='': maxed_out(x) and u'Urgent abort, running low on login attempts. Password flushed to prevent service disruption to %s.' or
- (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', x) and
+ (logged_in, (lambda y=None: self.has_all_cookies())),
+ (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' +
+ u'Password flushed to prevent service disruption to %s.' or
+ (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' +
+ '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
u'Invalid username or password for %s. Check settings' or
u'Failed to authenticate or parse a response from %s, abort provider')))
)]
@@ -896,17 +927,25 @@ class TorrentProvider(object, GenericProvider):
if url:
response = helpers.getURL(url, session=self.session)
try:
- action = re.findall('[<]form[\w\W]+?action=[\'\"]([^\'\"]+)', response)[0]
+ post_params = isinstance(post_params, type({})) and post_params or {}
+ form = 'form_tmpl' in post_params and post_params.pop('form_tmpl')
+ if form:
+ form = re.findall(
+                    '(?is)(<form[^>]+%s.*?</form>)' % (True is form and 'login' or form), response)
+ response = form and form[0] or response
+
+ action = re.findall('