Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-20 16:43:43 +00:00)

Add indicator for public access search providers.

Change improve probability selecting most seeded release. Change add the TorrentDay x265 category to search. Change torrent provider code PEP8 and refactoring. Add BTScene torrent provider. Add Extratorrent provider. Add Limetorrents provider. Add nCore torrent provider. Remove Usenet Crawler provider.

This commit is contained in:
  parent aa89a3fedf
  commit 0d50a4b345

56 changed files with 991 additions and 533 deletions
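
The headline change here, improved odds of selecting the most seeded release, boils down to the new _sort_seeding helper added to sickbeard/providers/generic.py further down this diff. A minimal sketch of that ordering contract, not the shipped code:

# Providers accumulate (title, download_url, seeders, size_in_bytes) tuples;
# backlog results are de-duplicated, then ordered by seeder count, highest first.
def sort_seeding(mode, items):
    if mode in ('Season', 'Episode'):
        return sorted(set(items), key=lambda tup: tup[2], reverse=True)
    return items

releases = [('Show.S01E01.720p', 'http://example/a', 12, 734003200),
            ('Show.S01E01.720p.x265', 'http://example/b', 48, 734003200)]
print(sort_seeding('Episode', releases)[0][1])  # http://example/b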
@@ -89,6 +89,11 @@
* Add PTF torrent provider
* Add ILT torrent provider
* Add Fano torrent provider
* Add BTScene torrent provider
* Add Extratorrent provider
* Add Limetorrents provider
* Add nCore torrent provider
* Remove Usenet-Crawler provider
* Change CPU throttling on General Config/Advanced to "Disabled" by default for new installs
* Change provider OMGWTFNZBS api url and auto reject nuked releases
* Change Search Provider page to load torrent settings only when Search torrents is enabled in Search Settings
@@ -120,6 +125,9 @@
* Change post process to join incrementally named (i.e. file.001 to file.nnn) split files
* Change replace unrar2 lib with rarfile 3.0 and UnRAR.exe 5.40 freeware
* Change post process "Copy" to delete redundant files after use
* Add indicator for public access search providers
* Change improve probability selecting most seeded release
* Change add the TorrentDay x265 category to search

[develop changelog]
* Change send nzb data to NZBGet for Anizb instead of url
BIN gui/slick/images/providers/btscene.png (new file, 548 B)
BIN gui/slick/images/providers/extratorrent.png (new file, 497 B)
BIN gui/slick/images/providers/limetorrents.png (new file, 682 B)
BIN gui/slick/images/providers/ncore.png (new file, 482 B)
@@ -87,7 +87,7 @@
<div class="component-group-desc">
<h3>Provider Priorities</h3>
<p>Check off and drag the providers into the order you want them to be used.</p>
<p>At least one provider is required but two are recommended.</p>
<p>At least one provider is required, two are recommended.</p>

#if $methods_notused
<blockquote style="margin:20px 0"><%= '/'.join(x for x in methods_notused) %> providers can be enabled in <a href="$sbRoot/config/search/">Search Settings</a></blockquote>

@@ -109,7 +109,10 @@
<input type="checkbox" id="enable_$cur_name" class="provider_enabler" <%= html_checked if cur_provider.is_enabled() else '' %>/>
<a href="<%= anon_url(cur_url) %>" class="imgLink" rel="noreferrer" onclick="window.open(this.href,'_blank');return false;"><img src="$sbRoot/images/providers/$cur_provider.image_name()" alt="$tip" title="$tip" width="16" height="16" style="vertical-align:middle" /></a>
<span style="vertical-align:middle">$cur_provider.name$state</span>
<%= '*' if not cur_provider.supports_backlog else '' %>
#if $cur_provider.is_public_access()#
<span style="font-size:10px;vertical-align:top;font-weight:normal">(PA)</span>
#end if#
#if not $cur_provider.supports_backlog#*#end if#
<span class="ui-icon ui-icon-arrowthick-2-n-s pull-right" style="margin-top:3px"></span>
</li>
#end for

@@ -117,10 +120,12 @@

<div id="provider_key">
<h4 class="note">*</h4><p class="note">Provider does not support backlog searches at this time</p>
#if $sickbeard.USE_TORRENTS
<h4 class="note">**</h4><p class="note">Provider supports <b>limited</b> backlog searches, some episodes/qualities may not be available</p>
#end if
<span style="float:left;font-size:10px;vertical-align:top;font-weight:normal">(PA)</span><p class="note">Public access, no account required</p>
<h4 class="note"></h4><p class="note">Searches current and past releases</p>
<h4 class="note">*</h4><p class="note">Searches current but not past releases</p>
## #if $sickbeard.USE_TORRENTS
## <h4 class="note">**</h4><p class="note">Supports <b>limited</b> backlog searches, some episodes/qualities may not be available</p>
## #end if
##<h4 class="note">!</h4><p class="note">Provider is <b>NOT WORKING</b></p>
</div>
@@ -78,6 +78,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    # for each provider get a list of the
    orig_thread_name = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
    np = NameParser(False, try_scene_exceptions=True)
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue

@@ -99,7 +100,6 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    # if they haven't been added by a different provider than add the proper to the list
    count = 0
    np = NameParser(False, try_scene_exceptions=True)
    for x in found_propers:
        name = _generic_name(x.name)
        if name not in propers:

@@ -125,6 +125,8 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
    for cur_proper in sorted_propers:

        parse_result = np.parse(cur_proper.name)

        # set the indexerid in the db to the show's indexerid
        cur_proper.indexerid = parse_result.show.indexerid
@@ -26,8 +26,10 @@ from sickbeard import logger, encodingKludge as ek
# usenet
from . import newznab, omgwtfnzbs, womble
# torrent
from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, dh, fano, filelist, freshontv, funfile, gftracker, grabtheinfo, \
    hd4free, hdbits, hdspace, ilt, iptorrents, morethan, pisexy, pretome, privatehd, ptf, rarbg, revtt, scc, scenetime, shazbat, speedcd, \
from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, btscene, dh, extratorrent, \
    fano, filelist, freshontv, funfile, gftracker, grabtheinfo, hd4free, hdbits, hdspace, \
    ilt, iptorrents, limetorrents, morethan, ncore, pisexy, pretome, privatehd, ptf, \
    rarbg, revtt, scc, scenetime, shazbat, speedcd, \
    thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, torrentshack, transmithe_net, tvchaosuk, zooqle
# anime
from . import anizb, nyaatorrents, tokyotoshokan

@@ -45,8 +47,10 @@ __all__ = ['omgwtfnzbs',
           'bithdtv',
           'bitmetv',
           'btn',
           'btscene',
           'custom01',
           'dh',
           'extratorrent',
           'fano',
           'filelist',
           'freshontv',

@@ -58,7 +62,9 @@ __all__ = ['omgwtfnzbs',
           'hdspace',
           'ilt',
           'iptorrents',
           'limetorrents',
           'morethan',
           'ncore',
           'pisexy',
           'pretome',
           'privatehd',

@@ -227,7 +233,7 @@ def getDefaultNewznabProviders():
    return '!!!'.join(['Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0',
                       'NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0',
                       'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0',
                       'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'])
                       ])


def getProviderModule(name):
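
Registering a provider now means three touches, all visible above: a module in sickbeard/providers/, a line in the torrent import block, and a name in __all__. As a rough sketch of how such a registry resolves a name to its module (illustrative only; the real getProviderModule body sits outside this hunk):

import importlib

def get_provider_module(name, registry=('btscene', 'extratorrent', 'limetorrents', 'ncore')):
    # resolve a lower-cased provider name from the registry to its module
    name = name.lower()
    if name in registry:
        return importlib.import_module('sickbeard.providers.' + name)
    raise Exception('Cannot find %r in provider list' % name)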

@@ -35,7 +35,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
        self.url_base = 'https://alpharatio.cc/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'login.php',
                     'login_action': self.url_base + 'login.php',
                     'search': self.url_base + 'torrents.php?searchstr=%s%s&' + '&'.join(
                         ['tags_type=1', 'order_by=time', 'order_way=desc'] +
                         ['filter_cat[%s]=1' % c for c in 1, 2, 3, 4, 5] +

@@ -48,8 +48,8 @@ class AlphaRatioProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
                                                           post_params={'keeplogged': '1', 'login': 'Login'})
        return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')),
                                                           post_params={'keeplogged': '1', 'form_tmpl': True})

    def _search_provider(self, search_params, **kwargs):

@@ -73,7 +73,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', attrs={'id': 'torrent_table'})
                        torrent_table = soup.find(id='torrent_table')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):

@@ -82,14 +82,12 @@ class AlphaRatioProvider(generic.TorrentProvider):
                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                title = tr.find('a', title=rc['info']).get_text().strip()
                                link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
                                download_url = self.urls['get'] % link
                                download_url = self._link(tr.find('a', title=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

@@ -98,13 +96,11 @@ class AlphaRatioProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
            self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -71,7 +71,7 @@ class BeyondHDProvider(generic.TorrentProvider):
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_url = self.urls['browse'] % (self.passkey, self.categories[mode_cats])
                if 'Cache' != mode:
                    search_url += self.urls['search'] % re.sub('[\.\s]+', ' ', search_string)
                    search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string)

                data_json = self.get_url(search_url, json=True)

@@ -82,16 +82,14 @@ class BeyondHDProvider(generic.TorrentProvider):
                        seeders, leechers = item.get('seeders', 0), item.get('leechers', 0)
                        if self._peers_fail(mode, seeders, leechers):
                            continue
                        title, download_url = item.get('file'), item.get('get')
                        title, download_url = item.get('file'), self._link(item.get('get'))
                        if title and download_url:
                            items[mode].append((title, download_url, seeders, self._bytesizer(item.get('size'))))

                time.sleep(1.1)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -44,7 +44,7 @@ class BitHDTVProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(BitHDTVProvider, self)._authorised(
            logged_in=(lambda x=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls
            logged_in=(lambda y=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls

    @staticmethod
    def _has_signature(data=None):

@@ -82,15 +82,15 @@ class BitHDTVProvider(generic.TorrentProvider):
                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]]
                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                                if self.freeleech and not tr.attrs.get('bgcolor').endswith('FF99') or \
                                        self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = (info.attrs.get('title') or info.contents[0].get_text()).strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                            except (AttributeError, TypeError, ValueError):
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError, KeyError):
                                continue

                            if title and download_url:

@@ -98,14 +98,12 @@ class BitHDTVProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -46,9 +46,9 @@ class BitmetvProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(BitmetvProvider, self)._authorised(
            logged_in=(lambda x=None: (None is x or 'Other Links' in x) and self.has_all_cookies() and
            logged_in=(lambda y=None: (None is y or 'Other Links' in y) and self.has_all_cookies() and
                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

    def _search_provider(self, search_params, **kwargs):

@@ -81,13 +81,13 @@ class BitmetvProvider(generic.TorrentProvider):
                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
                                    (tr.find_all('td')[x].get_text().strip()) for x in -3, -2, -5]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = info.attrs.get('title') or info.get_text().strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

@@ -96,14 +96,12 @@ class BitmetvProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -75,15 +75,18 @@ class BTNProvider(generic.TorrentProvider):
        try:
            response = helpers.getURL(self.url_api, post_data=json_rpc(params), session=self.session, json=True)
            error_text = response['error']['message']
            logger.log(('Call Limit' in error_text and u'Action aborted because the %(prov)s 150 calls/hr limit was reached' or
                        u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
            logger.log(
                ('Call Limit' in error_text
                 and u'Action aborted because the %(prov)s 150 calls/hr limit was reached'
                 or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
                {'prov': self.name, 'desc': error_text}, logger.WARNING)
            return results
        except:
        except (KeyError, Exception):
            data_json = response and 'result' in response and response['result'] or {}

        if data_json:

            found_torrents = {} if 'torrents' not in data_json else data_json['torrents']
            found_torrents = 'torrents' in data_json and data_json['torrents'] or {}

            # We got something, we know the API sends max 1000 results at a time.
            # See if there are more than 1000 results for our query, if not we

@@ -101,37 +104,45 @@ class BTNProvider(generic.TorrentProvider):
            for page in range(1, pages_needed + 1):

                try:
                    response = helpers.getURL(self.url_api, json=True, session=self.session,
                                              post_data=json_rpc(params, results_per_page, page * results_per_page))
                    response = helpers.getURL(
                        self.url_api, json=True, session=self.session,
                        post_data=json_rpc(params, results_per_page, page * results_per_page))
                    error_text = response['error']['message']
                    logger.log(('Call Limit' in error_text and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached' or
                                u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
                    logger.log(
                        ('Call Limit' in error_text
                         and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached'
                         or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
                        {'prov': self.name, 'desc': error_text}, logger.WARNING)
                    return results
                except:
                except (KeyError, Exception):
                    data_json = response and 'result' in response and response['result'] or {}

                # Note that this these are individual requests and might time out individually. This would result in 'gaps'
                # in the results. There is no way to fix this though.
                # Note that this these are individual requests and might time out individually.
                # This would result in 'gaps' in the results. There is no way to fix this though.
                if 'torrents' in data_json:
                    found_torrents.update(data_json['torrents'])

        cnt = len(results)
        for torrentid, torrent_info in found_torrents.iteritems():
            seeders, leechers = [tryInt(n) for n in torrent_info.get('Seeders'), torrent_info.get('Leechers')]
            seeders, leechers, size = (tryInt(n, n) for n in [torrent_info.get(x) for x in
                                                              'Seeders', 'Leechers', 'Size'])
            if self._peers_fail(mode, seeders, leechers) or \
                    self.reject_m2ts and re.match(r'(?i)m2?ts', torrent_info.get('Container', '')):
                continue

            title, url = self._title_and_url(torrent_info)
            title, url = self._get_title_and_url(torrent_info)
            if title and url:
                results.append(torrent_info)
                results.append((title, url, seeders, self._bytesizer(size)))

        self._log_search(mode, len(results) - cnt,
                         ('search_param: ' + str(search_param), self.name)['Cache' == mode])

        results = self._sort_seeding(mode, results)

        return results

    def _title_and_url(self, data_json):
    @staticmethod
    def _get_title_and_url(data_json):

        # The BTN API gives a lot of information in response,
        # however SickGear is built mostly around Scene or

@@ -189,7 +200,7 @@ class BTNProvider(generic.TorrentProvider):
            series_param.update(base_params)
            search_params.append(series_param)

        return [dict({'Season': search_params})]
        return [dict(Season=search_params)]

    def _episode_strings(self, ep_obj, **kwargs):

@@ -231,7 +242,7 @@ class BTNProvider(generic.TorrentProvider):
            series_param.update(base_params)
            search_params.append(series_param)

        return [dict({'Episode': search_params})]
        return [dict(Episode=search_params)]

    def cache_data(self, **kwargs):

@@ -246,11 +257,11 @@ class BTNProvider(generic.TorrentProvider):
        # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search,
        # older items will be done through backlog
        if 86400 < seconds_since_last_update:
            logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on %s was over 24 hours'
                       % self.name, logger.WARNING)
            logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on ' +
                       '%s was over 24 hours' % self.name, logger.WARNING)
            seconds_since_last_update = 86400

        return self._search_provider(dict({'Cache': ['']}), age=seconds_since_last_update)
        return self._search_provider(dict(Cache=['']), age=seconds_since_last_update)


class BTNCache(tvcache.TVCache):

@@ -258,7 +269,7 @@ class BTNCache(tvcache.TVCache):
    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)

        self.update_freq = 15  # cache update frequency
        self.update_freq = 15

    def _cache_data(self):

sickbeard/providers/btscene.py (new file, 117 lines)

@@ -0,0 +1,117 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback
import urllib

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class BTSceneProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'BTScene')

        self.url_home = ['http://www.btstorrent.cc/', 'http://bittorrentstart.com/',
                         'http://diriri.xyz/', 'http://mytorrentz.tv/']

        self.url_vars = {'search': 'results.php?q=%s&category=series&order=1', 'browse': 'lastdaycat/type/Series/',
                         'get': 'torrentdownload.php?id=%s'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
                         'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}

        self.minseed, self.minleech = 2 * [None]
        self.confirmed = False

    @staticmethod
    def _has_signature(data=None):
        return data and re.search(r'(?i)(?:btscene|bts[-]official|full\sindex)', data)

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self.url:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'info': '\w+?(\d+)[.]html', 'verified': 'Verified'}.iteritems())
        for mode in search_params.keys():
            for search_string in search_params[mode]:

                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['browse'] if 'Cache' == mode \
                    else self.urls['search'] % (urllib.quote_plus(search_string))

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException
                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_rows = soup.select('tr[class$="_tr"]')

                        if not len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    tr.find_all('td')[x].get_text().strip() for x in -4, -3, -5]]
                                if self._peers_fail(mode, seeders, leechers) or \
                                        self.confirmed and not (tr.find('img', src=rc['verified'])
                                                                or tr.find('img', title=rc['verified'])):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = info and info.get_text().strip()
                                tid_href = info and rc['info'].findall(info['href'])
                                tid_href = tid_href and tryInt(tid_href[0], 0) or 0
                                tid_tr = tryInt(tr['id'].strip('_'), 0)
                                tid = (tid_tr, tid_href)[tid_href > tid_tr]

                                download_url = info and (self.urls['get'] % tid)
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _episode_strings(self, ep_obj, **kwargs):
        return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)


provider = BTSceneProvider()
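
New public providers such as this one carry several url_home mirrors plus a _has_signature test. Conceptually, host selection is "first mirror whose homepage carries the provider signature"; a toy rendering of that idea with the BTScene regex (fetch stands in for the real get_url, and the shipped mirror selection in the generic provider is more involved):

import re

def pick_host(hosts, fetch):
    # return the first mirror whose page matches the provider signature
    for url in hosts:
        data = fetch(url)
        if data and re.search(r'(?i)(?:btscene|bts[-]official|full\sindex)', data):
            return url

pages = {'http://mirror-a.example/': '<html>parked domain</html>',
         'http://mirror-b.example/': '<html>BTScene - full index</html>'}
print(pick_host(sorted(pages), pages.get))  # http://mirror-b.example/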

@@ -46,9 +46,9 @@ class DHProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(DHProvider, self)._authorised(
            logged_in=(lambda x=None: (None is x or re.search('(?i)rss\slink', x)) and self.has_all_cookies() and
            logged_in=(lambda y=None: (None is y or re.search('(?i)rss\slink', y)) and self.has_all_cookies() and
                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

    def _search_provider(self, search_params, **kwargs):

@@ -82,14 +82,12 @@ class DHProvider(generic.TorrentProvider):
                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                                if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                    continue

                                title = tr.find('a', href=rc['info']).get_text().strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

@@ -98,14 +96,12 @@ class DHProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

sickbeard/providers/extratorrent.py (new file, 108 lines)

@@ -0,0 +1,108 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback
import urllib

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class ExtraTorrentProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'ExtraTorrent')

        self.url_home = ['https://www.extratorrent%s/' % u for u in '.works', 'live.com', 'online.com', '.cc'] + \
                        ['https://etmirror.com/', 'https://etproxy.com/', 'https://extratorrent.usbypass.xyz/']

        self.url_vars = {'search': 'search/?new=1&search=%s&s_cat=8', 'browse': 'view/today/TV.html',
                         'get': '%s'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
                         'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}

        self.minseed, self.minleech = 2 * [None]

    @staticmethod
    def _has_signature(data=None):
        return data and re.search(r'(?i)ExtraTorrent', data[33:1024:])

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self.url:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'get': 'download', 'title': '(?:^download|torrent$)', 'get_url': '^/(torrent_)?'}.iteritems())

        for mode in search_params.keys():
            for search_string in search_params[mode]:

                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['browse'] if 'Cache' == mode \
                    else self.urls['search'] % (urllib.quote_plus(search_string))

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException
                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', class_='tl')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n.replace('---', '0'), n) for n in [
                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', title=rc['get']) or {}
                                title = rc['title'].sub('', info.get('title') or '').strip()
                                download_url = self._link(rc['get_url'].sub('', info['href']))
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results


provider = ExtraTorrentProvider()
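
One wrinkle in the parser above: ExtraTorrent prints '---' where a count is zero, hence n.replace('---', '0') before the integer conversion. With a local stand-in for sickbeard.helpers.tryInt, the normalisation behaves like this:

def try_int(s, default=None):
    # stand-in for sickbeard.helpers.tryInt(value, default)
    try:
        return int(s)
    except (TypeError, ValueError):
        return default

cells = ['12', '---', '1.2 GB']  # seeders, leechers, size as scraped
seeders, leechers, size = [try_int(n.replace('---', '0'), n) for n in cells]
print('%s %s %s' % (seeders, leechers, size))  # 12 0 1.2 GB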

@@ -45,7 +45,7 @@ class FanoProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(FanoProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))
        return super(FanoProvider, self)._authorised()

    def _search_provider(self, search_params, **kwargs):

@@ -82,14 +82,12 @@ class FanoProvider(generic.TorrentProvider):
                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                    continue

                                title = tr.find('a', href=rc['info']).get_text().strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

@@ -98,14 +96,12 @@ class FanoProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -78,14 +78,12 @@ class FLProvider(generic.TorrentProvider):
                        for tr in torrent_rows:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    (tr.select('span[style*="cell"]')[x].get_text().strip()) for x in (-3, -2, -5)]]
                                    tr.select('span[style*="cell"]')[x].get_text().strip() for x in -3, -2, -5]]
                                if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                    continue

                                title = tr.find('a', href=rc['info']).get_text().strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

@@ -94,14 +92,12 @@ class FLProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -32,7 +32,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
        self.url_base = 'https://freshon.tv/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'login.php?action=makelogin',
                     'login_action': self.url_base + 'login.php',
                     'search': self.url_base + 'browse.php?incldead=%s&words=0&%s&search=%s',
                     'get': self.url_base + '%s'}

@@ -45,8 +45,8 @@ class FreshOnTVProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(FreshOnTVProvider, self)._authorised(
            post_params={'login': 'Do it!'},
            failed_msg=(lambda x=None: 'DDoS protection by CloudFlare' in x and
            post_params={'form_tmpl': True},
            failed_msg=(lambda y=None: 'DDoS protection by CloudFlare' in y and
                        u'Unable to login to %s due to CloudFlare DDoS javascript check' or
                        'Username does not exist' in x and
                        u'Invalid username or password for %s. Check settings' or

@@ -80,7 +80,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', attrs={'class': 'frame'})
                        torrent_table = soup.find('table', class_='frame')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):

@@ -92,14 +92,13 @@ class FreshOnTVProvider(generic.TorrentProvider):
                                    continue

                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', href=rc['info'], attrs={'class': rc['name']})
                                title = info.attrs.get('title') or info.get_text().strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                                info = tr.find('a', href=rc['info'], class_=rc['name'])
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

@@ -108,13 +107,11 @@ class FreshOnTVProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
            self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -32,7 +32,7 @@ class FunFileProvider(generic.TorrentProvider):
        self.url_base = 'https://www.funfile.org/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'takelogin.php',
                     'login_action': self.url_base + 'login.php',
                     'search': self.url_base + 'browse.php?%s&search=%s&incldead=0&showspam=1&',
                     'get': self.url_base + '%s'}

@@ -45,9 +45,9 @@ class FunFileProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(FunFileProvider, self)._authorised(
            logged_in=(lambda x=None: None is not self.session.cookies.get('uid', domain='.funfile.org') and
                       None is not self.session.cookies.get('pass', domain='.funfile.org')),
            post_params={'login': 'Login', 'returnto': '/'}, timeout=self.url_timeout)
            logged_in=(lambda y=None: all(
                [None is not self.session.cookies.get(x, domain='.funfile.org') for x in 'uid', 'pass'])),
            post_params={'form_tmpl': True}, timeout=self.url_timeout)

    def _search_provider(self, search_params, **kwargs):

@@ -72,7 +72,7 @@ class FunFileProvider(generic.TorrentProvider):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('td', attrs={'class': 'colhead'}).find_parent('table')
                        torrent_table = soup.find('td', class_='colhead').find_parent('table')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):

@@ -85,13 +85,12 @@ class FunFileProvider(generic.TorrentProvider):
                                    continue

                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
                                    continue

                                title = info.attrs.get('title') or info.get_text().strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

@@ -100,14 +99,12 @@ class FunFileProvider(generic.TorrentProvider):
            except (generic.HaltParseException, AttributeError):
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results

@@ -33,6 +33,7 @@ import sickbeard
import requests
import requests.cookies
from hachoir_parser import guessParser
from hachoir_core.error import HachoirError
from hachoir_core.stream import FileInputStream

from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek

@@ -77,7 +78,8 @@ class GenericProvider:
        self.headers = {
            # Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
            # otherwise session might be broken and download fail, asking again for authentication
            # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
            # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
            # 'Chrome/32.0.1700.107 Safari/537.36'}
            'User-Agent': USER_AGENT}

    def get_id(self):

@@ -99,9 +101,17 @@ class GenericProvider:
    def _authorised(self):
        return True

    def _check_auth(self):
    def _check_auth(self, is_required=None):
        return True

    def is_public_access(self):
        try:
            return bool(re.search('(?i)rarbg|sick|womble|anizb', self.name)) \
                or False is bool(('_authorised' in self.__class__.__dict__ or hasattr(self, 'digest')
                                  or self._check_auth(is_required=True)))
        except AuthException:
            return False

    def is_active(self):
        if GenericProvider.NZB == self.providerType and sickbeard.USE_NZBS:
            return self.is_enabled()
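
The (PA) indicator on the providers page is driven by this new is_public_access test. A condensed standalone rendering of the rule (the Base class below collapses the generic/torrent _check_auth pair into one method, and the class names are illustrative):

import re

class AuthException(Exception):
    pass

class Base(object):
    name = 'example'

    def _check_auth(self, is_required=None):
        # no credentials configured, so nothing can be required of the user
        return not is_required

    def is_public_access(self):
        try:
            return bool(re.search('(?i)rarbg|sick|womble|anizb', self.name)) or \
                False is bool('_authorised' in self.__class__.__dict__ or
                              hasattr(self, 'digest') or
                              self._check_auth(is_required=True))
        except AuthException:
            return False

class Public(Base):
    name = 'BTScene'

class Private(Base):
    name = 'PrivateHD'

    def _authorised(self):
        return True

print(Public().is_public_access())   # True
print(Private().is_public_access())  # False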

@@ -176,7 +186,7 @@ class GenericProvider:
            urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,))
                    for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'),
                              ('s', 'torrentproject.se'), ('', 'thetorrent.org'))]
        except:
        except (StandardError, Exception):
            link_type = 'torrent'
            urls = [result.url]
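
For magnet-style results, the candidate .torrent cache URLs above expand from the info-hash like so (torrent_hash is a made-up example value):

torrent_hash = '0123456789abcdef0123456789abcdef01234567'  # example only
urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,))
        for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'),
                  ('s', 'torrentproject.se'), ('', 'thetorrent.org'))]
print(urls[0])  # https://itorrents.org/torrent/0123...4567.torrent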

@@ -204,7 +214,7 @@ class GenericProvider:
                try:
                    helpers.moveFile(cache_file, final_file)
                    msg = 'moved'
                except:
                except (OSError, Exception):
                    msg = 'copied cached file'
                logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                saved = True

@@ -234,13 +244,13 @@ class GenericProvider:
        try:
            stream = FileInputStream(file_name)
            parser = guessParser(stream)
        except:
        except (HachoirError, Exception):
            pass
        result = parser and 'application/x-bittorrent' == parser.mime_type

        try:
            stream._input.close()
        except:
        except (HachoirError, Exception):
            pass

        return result

@@ -282,7 +292,7 @@ class GenericProvider:
        try:
            title, url = isinstance(item, tuple) and (item[0], item[1]) or \
                (item.get('title', None), item.get('link', None))
        except Exception:
        except (StandardError, Exception):
            pass

        title = title and re.sub(r'\s+', '.', u'%s' % title)

@@ -290,6 +300,15 @@ class GenericProvider:

        return title, url

    def _link(self, url, url_tmpl=None):

        url = url and str(url).strip().replace('&amp;', '&') or ''
        try:
            url_tmpl = url_tmpl or self.urls['get']
        except (StandardError, Exception):
            url_tmpl = '%s'
        return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))

    def find_search_results(self, show, episodes, search_mode, manual_search=False):

        self._check_auth()
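
Nearly every provider hunk in this commit swaps a hand-rolled self.urls['get'] % ... join for the _link helper above. A standalone equivalent showing what it normalises (example.tracker is a placeholder):

import re

def link(url, url_tmpl='%s'):
    # normalise a scraped href: unescape '&amp;', keep absolute URLs as-is,
    # and run relative paths through the provider's 'get' template
    url = url and str(url).strip().replace('&amp;', '&') or ''
    return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))

print(link('/download.php?id=1&amp;f=x', 'https://example.tracker/%s'))
# https://example.tracker/download.php?id=1&f=x
print(link('https://cdn.example/file.torrent'))
# https://cdn.example/file.torrent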

@@ -391,8 +410,9 @@ class GenericProvider:
                logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
                           u' to snatch, ignoring', logger.DEBUG)
                add_cache_entry = True
            elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
                ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
            elif len(parse_result.episode_numbers) and not [
                    ep for ep in episodes if ep.season == parse_result.season_number and
                    ep.episode in parse_result.episode_numbers]:
                logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
                           u' to snatch, ignoring', logger.DEBUG)
                add_cache_entry = True

@@ -409,8 +429,8 @@ class GenericProvider:
                else:
                    airdate = parse_result.air_date.toordinal()
                    my_db = db.DBConnection()
                    sql_results = my_db.select('SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                                               [show_obj.indexerid, airdate])
                    sql_results = my_db.select('SELECT season, episode FROM tv_episodes ' +
                                               'WHERE showid = ? AND airdate = ?', [show_obj.indexerid, airdate])

                    if 1 != len(sql_results):
                        logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' +

@@ -507,6 +527,7 @@ class GenericProvider:
    def log_result(self, mode='Cache', count=0, url='url missing'):
        """
        Simple function to log the result of any search
        :param mode: string that this log relates to
        :param count: count of successfully processed items
        :param url: source url of item(s)
        """

@@ -541,8 +562,8 @@ class GenericProvider:
    def has_all_cookies(self, cookies=None, pre=''):

        cookies = cookies or ['uid', 'pass']
        return False not in ['%s%s' % (pre, item) in self.session.cookies for item in ([cookies], cookies)[isinstance(cookies, list)]]
        cookies = cookies and ([cookies], cookies)[isinstance(cookies, list)] or ['uid', 'pass']
        return all(['%s%s' % (pre, item) in self.session.cookies for item in cookies])

    def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'):
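
The rewritten cookie check reads as a plain all() over the jar and accepts either a single name or a list. A sketch with a dict standing in for self.session.cookies:

def has_all_cookies(jar, cookies=None, pre=''):
    # mirrors the reworked GenericProvider.has_all_cookies()
    cookies = cookies and ([cookies], cookies)[isinstance(cookies, list)] or ['uid', 'pass']
    return all(['%s%s' % (pre, item) in jar for item in cookies])

jar = {'gft_uid': '1', 'gft_pass': 'abc'}
print(has_all_cookies(jar, pre='gft_'))  # True, default uid/pass pair with prefix
print(has_all_cookies(jar, 'h_sl'))      # False, single cookie name accepted too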

@@ -558,7 +579,7 @@ class GenericProvider:
    def _bytesizer(size_dim=''):

        try:
            value = float('.'.join(re.findall('(?i)(\d+)(?:[\.,](\d+))?', size_dim)[0]))
            value = float('.'.join(re.findall('(?i)(\d+)(?:[.,](\d+))?', size_dim)[0]))
        except TypeError:
            return size_dim
        except IndexError:
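
Only the numeric regex changes in _bytesizer, but for context this is roughly what the helper does end to end. A simplified standalone take; the unit table is an assumption for illustration since the shipped unit handling falls outside this hunk:

import re

def bytesizer(size_dim=''):
    try:
        value = float('.'.join(re.findall('(?i)(\d+)(?:[.,](\d+))?', size_dim)[0]))
    except (IndexError, TypeError):
        return size_dim
    units = {'b': 1, 'kb': 2 ** 10, 'mb': 2 ** 20, 'gb': 2 ** 30, 'tb': 2 ** 40}
    unit = re.search('(?i)([kmgt]?b)', size_dim)
    return int(value * units[unit.group(1).lower()]) if unit else value

print(bytesizer('1.46 GB'))  # ~1.46 * 2**30 bytes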

@@ -587,7 +608,7 @@ class NZBProvider(object, GenericProvider):
        return (getattr(self, 'key', '') and self.key) or (getattr(self, 'api_key', '') and self.api_key) or None
        return False

    def _check_auth(self):
    def _check_auth(self, is_required=None):

        has_key = self.maybe_apikey()
        if has_key:

@@ -703,9 +724,16 @@ class TorrentProvider(object, GenericProvider):
    @staticmethod
    def _sort_seeders(mode, items):

        """ legacy function used by a custom provider, do not remove """
        mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True)

    @staticmethod
    def _sort_seeding(mode, items):

        if mode in ['Season', 'Episode']:
            return sorted(set(items), key=lambda tup: tup[2], reverse=True)
        return items

    def _peers_fail(self, mode, seeders=0, leechers=0):

        return 'Cache' != mode and (seeders < getattr(self, 'minseed', 0) or leechers < getattr(self, 'minleech', 0))

@@ -744,7 +772,7 @@ class TorrentProvider(object, GenericProvider):
        ep_dict = self._ep_dict(ep_obj)
        sp_detail = (show.air_by_date or show.is_sports) and str(ep_obj.airdate).split('-')[0] or \
            (show.is_anime and ep_obj.scene_absolute_number or
             'S%(seasonnumber)02d' % ep_dict if 'sp_detail' not in kwargs.keys() else kwargs['sp_detail'](ep_dict))
             ('sp_detail' in kwargs.keys() and kwargs['sp_detail'](ep_dict)) or 'S%(seasonnumber)02d' % ep_dict)
        sp_detail = ([sp_detail], sp_detail)[isinstance(sp_detail, list)]
        detail = ({}, {'Season_only': sp_detail})[detail_only and not self.show.is_sports and not self.show.is_anime]
        return [dict({'Season': self._build_search_strings(sp_detail, scene, prefix)}.items() + detail.items())]

@@ -792,7 +820,7 @@ class TorrentProvider(object, GenericProvider):
        prefix = ([prefix], prefix)[isinstance(prefix, list)]

        search_params = []
        crop = re.compile(r'([\.\s])(?:\1)+')
        crop = re.compile(r'([.\s])(?:\1)+')
        for name in set(allPossibleShowNames(self.show)):
            if process_name:
                name = helpers.sanitizeSceneName(name)

@@ -861,11 +889,14 @@ class TorrentProvider(object, GenericProvider):
    def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):

        maxed_out = (lambda x: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', x))
        maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' +
                                         '(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y))
        logged_in, failed_msg = [None is not a and a or b for (a, b) in (
            (logged_in, (lambda x=None: self.has_all_cookies())),
            (failed_msg, (lambda x='': maxed_out(x) and u'Urgent abort, running low on login attempts. Password flushed to prevent service disruption to %s.' or
                          (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', x) and
            (logged_in, (lambda y=None: self.has_all_cookies())),
            (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' +
                          u'Password flushed to prevent service disruption to %s.' or
                          (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' +
                                     '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
                           u'Invalid username or password for %s. Check settings' or
                           u'Failed to authenticate or parse a response from %s, abort provider')))
        )]

@@ -896,17 +927,25 @@ class TorrentProvider(object, GenericProvider):
        if url:
            response = helpers.getURL(url, session=self.session)
            try:
                action = re.findall('[<]form[\w\W]+?action=[\'\"]([^\'\"]+)', response)[0]
                post_params = isinstance(post_params, type({})) and post_params or {}
                form = 'form_tmpl' in post_params and post_params.pop('form_tmpl')
                if form:
                    form = re.findall(
                        '(?is)(<form[^>]+%s.*?</form>)' % (True is form and 'login' or form), response)
                    response = form and form[0] or response

                action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
                url = action if action.startswith('http') else \
                    url if not action else \
                    (url + action) if action.startswith('?') else \
                    (self.urls.get('login_base') or self.urls['config_provider_home_uri']) + action.lstrip('/')

                tags = re.findall(r'(?is)(<input.*?name=[\'\"][^\'\"]+[\'\"].*?>)', response)
                tags = re.findall(r'(?is)(<input.*?name=[\'"][^\'"]+[^>]*)', response)
                nv = [(tup[0]) for tup in [
                    re.findall(r'(?is)name=[\'\"]([^\'\"]+)[\'\"](?:.*?value=[\'\"]([^\'\"]+)[\'\"])?', x)
                    re.findall(r'(?is)name=[\'"]([^\'"]+)(?:[^>]*?value=[\'"]([^\'"]+))?', x)
                    for x in tags]]
                for name, value in nv:
                    if name not in ('username', 'password'):
                        post_params = isinstance(post_params, type({})) and post_params or {}
                        post_params.setdefault(name, value)
            except KeyError:
                return super(TorrentProvider, self)._authorised()
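
The new form_tmpl flow above lets providers stop hard-coding post_params such as {'login': 'Login'}: the login form is scraped for its action URL and its named inputs instead. The same regexes run against sample markup:

import re

html = '''<form method="post" action="/takelogin.php" id="loginform">
  <input type="hidden" name="returnto" value="/">
  <input name="username"><input name="password" type="password">
  <input type="submit" name="login" value="Login">
</form>'''

form = re.findall('(?is)(<form[^>]+%s.*?</form>)' % 'login', html)
response = form and form[0] or html
action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
tags = re.findall(r'(?is)(<input.*?name=[\'"][^\'"]+[^>]*)', response)
nv = [tup[0] for tup in
      [re.findall(r'(?is)name=[\'"]([^\'"]+)(?:[^>]*?value=[\'"]([^\'"]+))?', x) for x in tags]]
post_params = {}
for name, value in nv:
    if name not in ('username', 'password'):
        post_params.setdefault(name, value)

print(action)       # /takelogin.php
print(post_params)  # {'returnto': '/', 'login': 'Login'} (key order may vary)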

@@ -936,7 +975,7 @@ class TorrentProvider(object, GenericProvider):

        return False

    def _check_auth(self):
    def _check_auth(self, is_required=False):

        if hasattr(self, 'username') and hasattr(self, 'password'):
            if self.username and self.password:

@@ -963,7 +1002,7 @@ class TorrentProvider(object, GenericProvider):
                return True
            setting = 'Passkey'
        else:
            return GenericProvider._check_auth(self)
            return not is_required and GenericProvider._check_auth(self)

        raise AuthException('%s for %s is empty in config provider options' % (setting, self.name))

@@ -982,7 +1021,7 @@ class TorrentProvider(object, GenericProvider):

        items = self._search_provider({'Propers': search_terms})

        clean_term = re.compile(r'(?i)[^a-z1-9\|\.]+')
        clean_term = re.compile(r'(?i)[^a-z1-9|.]+')
        for proper_term in search_terms:

            proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))

@@ -995,10 +1034,10 @@ class TorrentProvider(object, GenericProvider):
    @staticmethod
    def _has_no_results(*html):
        return re.search(r'(?i)<(?:b|div|h\d|p|span|strong)[^>]*>(?:' +
                         'your\ssearch\sdid\snot\smatch|' +
                         'nothing\sfound|' +
                         '(sorry,\s)?no\storrents\s(found|match)|' +
        return re.search(r'(?i)<(?:b|div|h\d|p|span|strong)[^>]*>\s*(?:' +
                         'your\ssearch.*?did\snot\smatch|' +
                         '(?:nothing|0</b>\s+torrents)\sfound|' +
                         '(sorry,\s)?no\s(?:results|torrents)\s(found|match)|' +
                         '.*?there\sare\sno\sresults|' +
                         '.*?no\shits\.\sTry\sadding' +
                         ')', html[0])

@@ -47,7 +47,7 @@ class GFTrackerProvider(generic.TorrentProvider):
    def _authorised(self, **kwargs):

        return super(GFTrackerProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='gft_')),
        return super(GFTrackerProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='gft_')),
                                                          url=[self.urls['login_init']])

    def _search_provider(self, search_params, **kwargs):

@@ -90,10 +90,9 @@ class GFTrackerProvider(generic.TorrentProvider):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
                                title = (info.attrs.get('title') or info.get_text()).strip()
                                size = tr.find_all('td')[-2].get_text().strip()
                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

@@ -102,13 +101,11 @@ class GFTrackerProvider(generic.TorrentProvider):
            except generic.HaltParseException:
                pass
            except Exception:
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
            self._log_search(mode, len(items[mode]) - cnt, search_url)

            self._sort_seeders(mode, items)

            results = list(set(results + items[mode]))
            results = self._sort_seeding(mode, results + items[mode])

        return results
|
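A cleanup repeated across these providers swaps the verbose `'title' in info.attrs and info['title']` test for attrs.get(), and moves .strip() outside the alternation so the fallback branch is trimmed too. A small sketch against a hypothetical result anchor (requires the beautifulsoup4 package):

    from bs4 import BeautifulSoup

    row = BeautifulSoup('<a href="details.php?id=1" title=" Show.S01E01 ">text</a>',
                        'html.parser')
    info = row.find('a')
    # Prefer the title attribute, fall back to the link text; strip either result.
    title = (info.attrs.get('title') or info.get_text()).strip()
    print(title)  # Show.S01E01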
@@ -81,35 +81,27 @@ class GrabTheInfoProvider(generic.TorrentProvider):

                         for tr in torrent_rows[1 + shows_found:]:
                             try:
-                                info = tr.find('a', href=rc['info'])
-                                if None is info:
-                                    continue
-                                title = (('title' in info.attrs.keys() and info['title']) or info.get_text()).strip()
-
-                                download_url = tr.find('a', href=rc['get'])
-                                if None is download_url:
-                                    continue
-
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -3)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

+                                info = tr.find('a', href=rc['info'])
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, KeyError):
                                 continue

-                            if title:
-                                items[mode].append((title, self.urls['get'] % str(download_url['href'].lstrip('/')),
-                                                    seeders, self._bytesizer(size)))
+                            if title and download_url:
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -52,10 +52,10 @@ class HD4FreeProvider(generic.TorrentProvider):
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 params['search'] = '+'.join(search_string.split())
-                data_json = self.get_url(self.urls['search'], params=params, json=True)
+                json_resp = self.get_url(self.urls['search'], params=params, json=True)

                 cnt = len(items[mode])
-                for k, item in data_json.items():
+                for k, item in json_resp.items():
                     if 'error' == k or not item.get('total_results'):
                         break
                     seeders, leechers, size = [tryInt(n, n) for n in [

@@ -63,17 +63,15 @@ class HD4FreeProvider(generic.TorrentProvider):
                     if self._peers_fail(mode, seeders, leechers):
                         continue
                     title = item.get('release_name')
-                    download_url = (self.urls['get'] % (item.get('torrentid'), item.get('torrentpass')), None)[
-                        not (item.get('torrentid') and item.get('torrentpass'))]
+                    tid, tpass = [item.get('torrent' + x) for x in 'id', 'pass']
+                    download_url = all([tid, tpass]) and (self.urls['get'] % (tid, tpass))
                     if title and download_url:
                         items[mode].append((title, download_url, seeders, self._bytesizer('%smb' % size)))

                 self._log_search(mode, len(items[mode]) - cnt, self.session.response['url'])
                 time.sleep(1.1)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
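The reworked HD4Free link builder drops the tuple-index trick for a plain all() guard. A sketch with hypothetical values (the URL template is a placeholder, not the provider's real one):

    item = {'torrentid': '12345', 'torrentpass': 'abcdef'}       # hypothetical API row
    url_tmpl = 'https://example.tld/download.php?id=%s&pass=%s'  # placeholder template

    tid, tpass = [item.get('torrent' + x) for x in ('id', 'pass')]
    # all() short-circuits: a missing id or pass leaves download_url falsey,
    # which the later `if title and download_url` test then rejects.
    download_url = all([tid, tpass]) and (url_tmpl % (tid, tpass))
    print(download_url)  # https://example.tld/download.php?id=12345&pass=abcdef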
@@ -51,7 +51,7 @@ class HDBitsProvider(generic.TorrentProvider):
     def check_auth_from_data(self, parsed_json):

         if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json:
-            logger.log(u'Incorrect username or password for %s : %s' % (self.name, parsed_json['message']), logger.DEBUG)
+            logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG)
             raise AuthException('Your username or password for %s is incorrect, check your config.' % self.name)

         return True

@@ -120,13 +120,14 @@ class HDBitsProvider(generic.TorrentProvider):
             cnt = len(items[mode])
             for item in json_resp['data']:
                 try:
-                    seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seeders', 'leechers', 'size']]
+                    seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in
+                                                                      'seeders', 'leechers', 'size']]
                     if self._peers_fail(mode, seeders, leechers)\
                             or self.freeleech and re.search('(?i)no', item.get('freeleech', 'no')):
                         continue

                     title = item['name']
                     download_url = self.urls['get'] % urllib.urlencode({'id': item['id'], 'passkey': self.passkey})

                 except (AttributeError, TypeError, ValueError):
                     continue

@@ -136,12 +137,10 @@ class HDBitsProvider(generic.TorrentProvider):
             self._log_search(mode, len(items[mode]) - cnt,
                              ('search_param: ' + str(search_param), self.name)['Cache' == mode])

-            self._sort_seeders(mode, items)
+            results = self._sort_seeding(mode, results + items[mode])

-            if id_search and len(items[mode]):
-                return items[mode]
-
-            results = list(set(results + items[mode]))
+            if id_search and len(results):
+                return results

         return results
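HDBits builds its download link by url-encoding the id/passkey pair into the 'get' template. A runnable sketch of that step; the template shape here is assumed for illustration:

    try:
        from urllib import urlencode        # Python 2, as in this codebase
    except ImportError:
        from urllib.parse import urlencode  # Python 3 fallback for this sketch

    get_tmpl = 'https://hdbits.org/download.php?%s'  # assumed shape of urls['get']
    print(get_tmpl % urlencode({'id': 12345, 'passkey': 'secr3t'}))
    # https://hdbits.org/download.php?id=12345&passkey=secr3t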
@@ -21,6 +21,7 @@ import traceback

 from . import generic
 from sickbeard import logger
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode

@@ -31,8 +32,9 @@ class HDSpaceProvider(generic.TorrentProvider):

         self.url_base = 'https://hd-space.org/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'index.php?page=login',
-                     'browse': self.url_base + 'index.php?page=torrents&' + '&'.join(['options=0', 'active=1', 'category=']),
+                     'login_action': self.url_base + 'index.php?page=login',
+                     'browse': self.url_base + 'index.php?page=torrents&' + '&'.join(
+                         ['options=0', 'active=1', 'category=']),
                      'search': '&search=%s',
                      'get': self.url_base + '%s'}

@@ -44,7 +46,8 @@ class HDSpaceProvider(generic.TorrentProvider):

     def _authorised(self, **kwargs):

-        return super(HDSpaceProvider, self)._authorised(post_params={'uid': self.username, 'pwd': self.password})
+        return super(HDSpaceProvider, self)._authorised(
+            post_params={'uid': self.username, 'pwd': self.password, 'form_tmpl': 'name=[\'"]login[\'"]'})

     def _search_provider(self, search_params, **kwargs):

@@ -71,8 +74,9 @@ class HDSpaceProvider(generic.TorrentProvider):
                     if not html or self._has_no_results(html):
                         raise generic.HaltParseException

-                    with BS4Parser(html, features=['html5lib', 'permissive'], attr='width="100%"\Wclass="lista"') as soup:
-                        torrent_table = soup.find_all('table', attrs={'class': 'lista'})[-1]
+                    with BS4Parser(html, features=['html5lib', 'permissive'],
+                                   attr='width="100%"\Wclass="lista"') as soup:
+                        torrent_table = soup.find_all('table', class_='lista')[-1]
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):

@@ -85,16 +89,16 @@ class HDSpaceProvider(generic.TorrentProvider):
                             if None is downlink:
                                 continue
                             try:
-                                seeders, leechers = [int(x.get_text().strip()) for x in tr.find_all('a', href=rc['peers'])]
+                                seeders, leechers = [tryInt(x.get_text().strip())
+                                                     for x in tr.find_all('a', href=rc['peers'])]
                                 if self._peers_fail(mode, seeders, leechers)\
                                         or self.freeleech and None is tr.find('img', title=rc['fl']):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 size = tr.find_all('td')[-5].get_text().strip()

-                                download_url = self.urls['get'] % str(downlink['href']).lstrip('/')
+                                download_url = self._link(downlink['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -103,13 +107,11 @@ class HDSpaceProvider(generic.TorrentProvider):

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -45,7 +45,7 @@ class ILTProvider(generic.TorrentProvider):

     def _authorised(self, **kwargs):

-        return super(ILTProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))
+        return super(ILTProvider, self)._authorised()

     def _search_provider(self, search_params, **kwargs):

@@ -79,14 +79,12 @@ class ILTProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                                 if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                     continue

                                 title = tr.find('a', href=rc['info']).get_text().strip()
-
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue

@@ -95,14 +93,12 @@ class ILTProvider(generic.TorrentProvider):

             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

-            self._sort_seeders(mode, items)
-
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -21,6 +21,7 @@ import traceback

 from . import generic
 from sickbeard import logger
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode

@@ -45,9 +46,10 @@ class IPTorrentsProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(IPTorrentsProvider, self)._authorised(
-            logged_in=(lambda x='': ('RSS Link' in x) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y='': all(
+                ['RSS Link' in y, self.has_all_cookies()] +
+                [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

     @staticmethod
     def _has_signature(data=None):

@@ -78,8 +80,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'torrents'}) or \
-                            soup.find('table', attrs={'class': 'torrents'})
+                        torrent_table = soup.find(id='torrents') or soup.find('table', class_='torrents')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):

@@ -87,16 +88,15 @@ class IPTorrentsProvider(generic.TorrentProvider):

                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
-                                                     for x in ('t_seeders', 't_leechers')]
+                                seeders, leechers = [tryInt(tr.find('td', class_='t_' + x).get_text().strip())
+                                                     for x in 'seeders', 'leechers']
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 size = tr.find_all('td')[-4].get_text().strip()

-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -105,13 +105,11 @@ class IPTorrentsProvider(generic.TorrentProvider):

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
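The new IPTorrents logged_in test guards the cookie lookup with a sentinel: a missing cookie would return None, and since an empty string is "in" any digest, the 'sg!no!pw' stand-in is what makes the membership test fail safely. A sketch with hypothetical cookie and digest values:

    cookies = {'uid': '1001', 'pass': 'deadbeef'}  # hypothetical session cookies
    digest = 'uid=1001; pass=deadbeef'             # hypothetical configured digest

    ok = all([(cookies.get(x) or 'sg!no!pw') in digest for x in ('uid', 'pass')])
    print(ok)  # True; delete a cookie and the sentinel forces False instead of a KeyError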
109 sickbeard/providers/limetorrents.py Normal file
@@ -0,0 +1,109 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback
import urllib

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class LimeTorrentsProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'LimeTorrents')

        self.url_home = ['https://www.limetorrents.cc/', 'https://limetorrents.usbypass.xyz/']

        self.url_vars = {'search': 'search/tv/%s/', 'browse': 'browse-torrents/TV-shows/'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
                         'browse': '%(home)s%(vars)s'}

        self.minseed, self.minleech = 2 * [None]

    @staticmethod
    def _has_signature(data=None):
        return data and re.search(r'(?i)LimeTorrents', data[33:1024:])

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self.url:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'dl'}.iteritems())

        for mode in search_params.keys():
            for search_string in search_params[mode]:

                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['browse'] if 'Cache' == mode \
                    else self.urls['search'] % (urllib.quote_plus(search_string))

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException
                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find_all('table', class_='table2')
                        torrent_rows = [] if not torrent_table else [
                            t.select('tr[bgcolor]') for t in torrent_table if
                            all([x in ' '.join(x.get_text() for x in t.find_all('th')).lower() for x in
                                 ['torrent', 'size']])]

                        if not len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[0]:  # 0 = all rows
                            try:
                                seeders, leechers, size = [tryInt(n.replace(',', ''), n) for n in [
                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                anchors = tr.td.find_all('a')
                                stats = anchors and [len(a.get_text()) for a in anchors]
                                title = stats and anchors[stats.index(max(stats))].get_text().strip()
                                download_url = self._link((tr.td.find('a', class_=rc['get']) or {}).get('href'))
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

                results = self._sort_seeding(mode, results + items[mode])

        return results


provider = LimeTorrentsProvider()
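LimeTorrents strips thousands separators before the tryInt conversion so a cell like '1,234' parses as a number, while non-numeric cells fall back to their raw text. A stand-alone sketch (try_int here is an illustrative stand-in for sickbeard.helpers.tryInt):

    def try_int(value, default=0):
        # Return int(value) when possible, otherwise the supplied default.
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    cells = ['1,234', '56', '712.5 MB']  # hypothetical seeders/leechers/size cells
    seeders, leechers, size = [try_int(n.replace(',', ''), n) for n in cells]
    print(seeders, leechers, size)  # 1234 56 712.5 MB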
@@ -34,7 +34,7 @@ class MoreThanProvider(generic.TorrentProvider):

         self.url_base = 'https://www.morethan.tv/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'login.php',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'torrents.php?searchstr=%s&' + '&'.join([
                          'tags_type=1', 'order_by=time', 'order_way=desc',
                          'filter_cat[2]=1', 'action=basic', 'searchsubmit=1']),

@@ -46,8 +46,8 @@ class MoreThanProvider(generic.TorrentProvider):

     def _authorised(self, **kwargs):

-        return super(MoreThanProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
-                                                         post_params={'keeplogged': '1', 'login': 'Log in'})
+        return super(MoreThanProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')),
+                                                         post_params={'keeplogged': '1', 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -72,7 +72,7 @@ class MoreThanProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'torrent_table'})
+                        torrent_table = soup.find('table', class_='torrent_table')
                         torrent_rows = []
                         if torrent_table:
                             torrent_rows = torrent_table.find_all('tr')

@@ -86,17 +86,15 @@ class MoreThanProvider(generic.TorrentProvider):

                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 title = tr.find('a', title=rc['info']).get_text().strip()
                                 if title.lower().startswith('season '):
-                                    title = '%s %s' % (tr.find('div', attrs={'class': rc['name']}).get_text().strip(),
-                                                       title)
+                                    title = '%s %s' % (tr.find('div', class_=rc['name']).get_text().strip(), title)

-                                link = str(tr.find('a', href=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
-                                download_url = self.urls['get'] % link
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -105,14 +103,12 @@ class MoreThanProvider(generic.TorrentProvider):

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
112 sickbeard/providers/ncore.py Normal file
@@ -0,0 +1,112 @@
# coding=utf-8
#
# Author: SickGear
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class NcoreProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'nCore')

        self.url_base = 'https://ncore.cc/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login_action': self.url_base + 'login.php',
                     'search': self.url_base + 'torrents.php?mire=%s&' + '&'.join([
                         'miszerint=fid', 'hogyan=DESC', 'tipus=kivalasztottak_kozott',
                         'kivalasztott_tipus=xvidser,dvdser,hdser', 'miben=name']),
                     'get': self.url_base + '%s'}

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.minseed, self.minleech = 4 * [None]
        self.chk_td = True

    def _authorised(self, **kwargs):

        return super(NcoreProvider, self)._authorised(
            logged_in=(lambda y='': all([bool(y), 'action="login' not in y, self.has_all_cookies('PHPSESSID')])),
            post_params={'nev': self.username, 'pass': self.password, 'form_tmpl': 'name=[\'"]login[\'"]'})

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'list': '.*?torrent_all', 'info': 'details'}.iteritems())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_url = self.urls['search'] % search_string

                # fetches 15 results by default, and up to 100 if allowed in user profile
                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('div', class_=rc['list'])
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('div', class_='box_torrent')

                        if not len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    tr.find('div', class_=x).get_text().strip()
                                    for x in 'box_s2', 'box_l2', 'box_meret2']]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                anchor = tr.find('a', href=rc['info'])
                                title = (anchor.get('title') or anchor.get_text()).strip()
                                download_url = self._link(anchor.get('href').replace('details', 'download'))
                            except (AttributeError, TypeError, ValueError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

                results = self._sort_seeding(mode, results + items[mode])

        return results


provider = NcoreProvider()
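nCore's logged_in lambda treats a response as authenticated only when the page is non-empty, contains no login form, and the session cookie is present. A simplified sketch with hypothetical page data (has_all_cookies is reduced to a dict lookup here):

    cookies = {'PHPSESSID': 'abc123'}              # hypothetical cookie jar
    page = '<html><body>profil ...</body></html>'  # hypothetical response body

    has_all_cookies = lambda name: name in cookies
    logged_in = lambda y='': all([bool(y), 'action="login' not in y,
                                  has_all_cookies('PHPSESSID')])
    print(logged_in(page))  # True: non-empty, no login form, session cookie set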
@@ -359,7 +359,7 @@ class NewznabCache(tvcache.TVCache):
     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)

-        self.update_freq = 5  # cache update frequency
+        self.update_freq = 5

     def updateCache(self):
@@ -1,5 +1,3 @@
-# Author: Mr_Orange
-# URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickGear.
 #

@@ -16,10 +14,12 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

+import re
 import urllib

 from . import generic
 from sickbeard import logger, show_name_helpers, tvcache
+from sickbeard.helpers import tryInt


 class NyaaProvider(generic.TorrentProvider):

@@ -27,43 +27,55 @@ class NyaaProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, 'NyaaTorrents', anime_only=True)

-        self.url_base = self.url = 'http://www.nyaa.se/'
+        self.url_base = self.url = 'https://www.nyaa.se/'
+
+        self.minseed, self.minleech = 2 * [None]

         self.cache = NyaaCache(self)

-    def _search_provider(self, search_string, **kwargs):
+    def _search_provider(self, search_string, search_mode='eponly', **kwargs):

-        results = []
         if self.show and not self.show.is_anime:
-            return results
+            return []

-        params = {'term': search_string.encode('utf-8'),
-                  'cats': '1_37',  # Limit to English-translated Anime (for now)
-                  # 'sort': '2',  # Sort Descending By Seeders
-                  }
+        params = urllib.urlencode({'term': search_string.encode('utf-8'),
+                                   'cats': '1_37',  # Limit to English-translated Anime (for now)
+                                   # 'sort': '2',  # Sort Descending By Seeders
+                                   })

-        search_url = self.url + '?page=rss&' + urllib.urlencode(params)
-
-        logger.log(u'Search string: ' + search_url, logger.DEBUG)
-
-        data = self.cache.getRSSFeed(search_url)
-        if data and 'entries' in data:
-            items = data.entries
-            for curItem in items:
-
-                title, url = self._title_and_url(curItem)
-
-                if title and url:
-                    results.append(curItem)
-                else:
-                    logger.log(u'The data returned from ' + self.name + ' is incomplete, this result is unusable',
-                               logger.DEBUG)
-
-        return results
-
-    def find_search_results(self, show, episodes, search_mode, manual_search=False):
-
-        return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)
+        return self.get_data(getrss_func=self.cache.getRSSFeed,
+                             search_url='%s?page=rss&%s' % (self.url, params),
+                             mode=('Episode', 'Season')['sponly' == search_mode])
+
+    def get_data(self, getrss_func, search_url, mode='cache'):
+
+        data = getrss_func(search_url)
+
+        results = []
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+            'stats': '(\d+)\W+seed[^\d]+(\d+)\W+leech[^\d]+\d+\W+down[^\d]+([\d.,]+\s\w+)'}.iteritems())
+        for cur_item in data.get('entries', []):
+            try:
+                seeders, leechers, size = 0, 0, 0
+                stats = rc['stats'].findall(cur_item.get('summary_detail', {'value': ''}).get('value', ''))
+                if len(stats):
+                    seeders, leechers, size = (tryInt(n, n) for n in stats[0])
+                if self._peers_fail(mode, seeders, leechers):
+                    continue
+                title, download_url = self._title_and_url(cur_item)
+                download_url = self._link(download_url)
+            except (AttributeError, TypeError, ValueError, IndexError):
+                continue
+
+            if title and download_url:
+                results.append((title, download_url, seeders, self._bytesizer(size)))
+
+        self._log_search(mode, len(results), search_url)
+
+        return self._sort_seeding(mode, results)

     def _season_strings(self, ep_obj, **kwargs):

@@ -79,20 +91,17 @@ class NyaaCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.update_freq = 15  # cache update frequency
+        self.update_freq = 15

     def _cache_data(self):
-        params = {'page': 'rss',  # Use RSS page
-                  'order': '1',  # Sort Descending By Date
-                  'cats': '1_37'}  # Limit to English-translated Anime (for now)
-
-        url = self.provider.url + '?' + urllib.urlencode(params)
-        logger.log(u'NyaaTorrents cache update URL: ' + url, logger.DEBUG)
+        params = urllib.urlencode({'page': 'rss',  # Use RSS page
+                                   'order': '1',  # Sort Descending By Date
+                                   'cats': '1_37'  # Limit to English-translated Anime (for now)
+                                   })

-        data = self.getRSSFeed(url)
-        if data and 'entries' in data:
-            return data.entries
-        return []
+        return self.provider.get_data(getrss_func=self.getRSSFeed,
+                                      search_url='%s?%s' % (self.provider.url, params))


 provider = NyaaProvider()
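The reworked Nyaa parser no longer trusts RSS items blindly; it pulls seed/leech/size figures out of each entry's summary with the 'stats' pattern before filtering on peer counts. A sketch against a summary string in the shape that pattern expects (the exact feed text format is assumed for illustration):

    import re

    summary = '5 seeder(s), 2 leecher(s), 40 download(s) - 233.3 MiB'  # hypothetical
    rc_stats = re.compile(r'(?i)(\d+)\W+seed[^\d]+(\d+)\W+leech[^\d]+\d+\W+down[^\d]+([\d.,]+\s\w+)')
    print(rc_stats.findall(summary))  # [('5', '2', '233.3 MiB')]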
@@ -203,7 +203,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                 except generic.HaltParseException:
-                    time.sleep(1.1)
-                except Exception:
+                    pass
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                 mode = (mode, search_mode)['Propers' == search_mode]

@@ -222,7 +222,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             title, url = self._title_and_url(item)
             try:
                 result_date = datetime.fromtimestamp(int(item['usenetage']))
-            except:
+            except (StandardError, Exception):
                 result_date = None

             if result_date:

@@ -236,7 +236,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             api_key = self._check_auth()
             if not api_key.startswith('cookie:'):
                 return api_key
-        except Exception:
+        except (StandardError, Exception):
             return None

         self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', api_key)
@@ -40,7 +40,8 @@ class PiSexyProvider(generic.TorrentProvider):

     def _authorised(self, **kwargs):

-        return super(PiSexyProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass', 'pcode', 'pisexy']))
+        return super(PiSexyProvider, self)._authorised(
+            logged_in=(lambda y=None: self.has_all_cookies(['uid', 'pass', 'pcode', 'pisexy'])))

     def _search_provider(self, search_params, **kwargs):

@@ -81,13 +82,10 @@ class PiSexyProvider(generic.TorrentProvider):
                                 continue

                             info = tr.find('a', href=rc['info'])
-                            title = 'title' in info.attrs and rc['title'].sub('', info.attrs['title'])\
-                                or info.get_text().strip()
+                            title = (rc['title'].sub('', info.attrs.get('title', '')) or info.get_text()).strip()
                             size = tr.find_all('td')[3].get_text().strip()
-
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
-                        except (AttributeError, TypeError, ValueError, IndexError):
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
+                        except (AttributeError, TypeError, ValueError, KeyError, IndexError):
                             continue

                         if title and download_url:

@@ -95,14 +93,12 @@ class PiSexyProvider(generic.TorrentProvider):

             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -52,11 +52,11 @@ class PreToMeProvider(generic.TorrentProvider):
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                 search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]

-                data = RSSFeeds(self).get_feed(search_url)
+                xml_data = RSSFeeds(self).get_feed(search_url)

                 cnt = len(items[mode])
-                if data and 'entries' in data:
-                    for entry in data['entries']:
+                if xml_data and 'entries' in xml_data:
+                    for entry in xml_data['entries']:
                         try:
                             if entry['title'] and 'download' in entry['link']:
                                 items[mode].append((entry['title'], entry['link'], None, None))
@@ -46,8 +46,8 @@ class PrivateHDProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(PrivateHDProvider, self)._authorised(
-            logged_in=lambda x=None: self.has_all_cookies(['love']),
-            post_params={'email_username': self.username})
+            logged_in=(lambda y=None: self.has_all_cookies('love')),
+            post_params={'email_username': self.username, 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -80,7 +80,7 @@ class PrivateHDProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'table'})
+                        torrent_table = soup.find('table', class_='table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):

@@ -89,14 +89,12 @@ class PrivateHDProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 title = rc['info'].sub('', tr.find('a', attrs={'title': rc['info']})['title'])
-
-                                download_url = tr.find('a', href=rc['get'])['href']
-
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue

@@ -105,14 +103,12 @@ class PrivateHDProvider(generic.TorrentProvider):

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -47,8 +47,8 @@ class PTFProvider(generic.TorrentProvider):

     def _authorised(self, **kwargs):

-        return super(PTFProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['session_key']),
-                                                    post_params={'force_ssl': 'on', 'ssl': ''})
+        return super(PTFProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session_key')),
+                                                    post_params={'force_ssl': 'on', 'ssl': '', 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -98,9 +98,7 @@ class PTFProvider(generic.TorrentProvider):
                                 title = tr.find('a', href=rc['info']).get_text().strip()
                                 snatches = tr.find('a', href=rc['snatch']).get_text().strip()
                                 size = tr.find_all('td')[-3].get_text().strip().replace(snatches, '')
-
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue

@@ -109,14 +107,12 @@ class PTFProvider(generic.TorrentProvider):

             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

-            self._sort_seeders(mode, items)
-
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -38,7 +38,7 @@ class RarbgProvider(generic.TorrentProvider):
                      'api_list': self.url_api + 'mode=list',
                      'api_search': self.url_api + 'mode=search'}

-        self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked=%(ranked)s&token=%(token)s',
+        self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked=%(r)s&token=%(t)s',
                        'param_iid': '&search_imdb=%(sid)s',
                        'param_tid': '&search_tvdb=%(sid)s',
                        'param_str': '&search_string=%(str)s',

@@ -90,7 +90,8 @@ class RarbgProvider(generic.TorrentProvider):
             id_search = self.params[search_with] % {'sid': sid}

         dedupe = []
-        search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True)  # sort type "_only" as first to process
+        # sort type "_only" as first to process
+        search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True)
         for mode_params in search_types:
             mode_search = mode_params[0]
             mode = mode_search.replace('_only', '')

@@ -121,41 +122,40 @@ class RarbgProvider(generic.TorrentProvider):
                         time_out += 1
                         time.sleep(1)

-                searched_url = search_url % {'ranked': int(self.confirmed), 'token': self.token}
+                searched_url = search_url % {'r': int(self.confirmed), 't': self.token}

-                data = self.get_url(searched_url, json=True)
+                data_json = self.get_url(searched_url, json=True)

                 self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
                 self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)
-                if not data:
+                if not data_json:
                     continue

-                if 'error' in data:
-                    if 5 == data['error_code']:  # Too many requests per second.
+                if 'error' in data_json:
+                    if 5 == data_json['error_code']:  # Too many requests per second.
                         continue

-                    elif 2 == data['error_code']:  # Invalid token set
+                    elif 2 == data_json['error_code']:  # Invalid token set
                         if self._authorised(reset=True):
                             continue
-                        self.log_result(mode, len(items[mode]) - cnt, searched_url)
-                        return items[mode]
+                        break

-                if 'error' not in data:
-                    for item in data['torrent_results']:
+                if 'error' not in data_json:
+                    for item in data_json['torrent_results']:
                         title, download_magnet, seeders, size = [
                             item.get(x) for x in 'title', 'download', 'seeders', 'size']
                         title = None is title and item.get('filename') or title
                         if not (title and download_magnet) or download_magnet in dedupe:
                             continue
                         dedupe += [download_magnet]

                         items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))

                     self._log_search(mode, len(items[mode]) - cnt, searched_url)

-                    self._sort_seeders(mode, items)
-
-                    results = list(set(results + items[mode]))
+                    results = self._sort_seeding(mode, results + items[mode])

                 if '_only' in mode_search and len(results):
                     break
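Rarbg keeps a dedupe list across its search passes so a magnet already queued by an earlier '_only' pass is skipped, and backfills a missing title from the API's 'filename' field. A sketch with hypothetical API rows:

    rows = [{'title': 'Show.S01E01', 'download': 'magnet:?xt=urn:btih:AAA'},
            {'filename': 'Show.S01E01.Alt', 'download': 'magnet:?xt=urn:btih:AAA'},
            {'title': 'Show.S01E02', 'download': 'magnet:?xt=urn:btih:BBB'}]

    dedupe, items = [], []
    for item in rows:
        title, magnet = [item.get(x) for x in ('title', 'download')]
        title = (title is None and item.get('filename')) or title
        if not (title and magnet) or magnet in dedupe:
            continue  # drop incomplete rows and repeated magnets
        dedupe += [magnet]
        items.append((title, magnet))
    print([t for t, m in items])  # ['Show.S01E01', 'Show.S01E02']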
@@ -45,7 +45,7 @@ class RevTTProvider(generic.TorrentProvider):

     def _authorised(self, **kwargs):

-        return super(RevTTProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))
+        return super(RevTTProvider, self)._authorised()

     def _search_provider(self, search_params, **kwargs):

@@ -80,15 +80,13 @@ class RevTTProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                     continue

                                 title = tr.find('a', href=rc['info']).get_text().strip()
                                 size = rc['size'].sub(r'\1', size)
-
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue

@@ -97,14 +95,12 @@ class RevTTProvider(generic.TorrentProvider):

             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

-            self._sort_seeders(mode, items)
-
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -100,7 +100,7 @@ class TorrentRssProvider(generic.TorrentProvider):
                 try:
                     bdecode(torrent_file)
                     break
-                except Exception:
+                except (StandardError, Exception):
                     pass
             else:
                 return False, '%s fetched RSS feed data: %s' % \
@@ -34,17 +34,17 @@ class SCCProvider(generic.TorrentProvider):
         self.url_home = ['https://sceneaccess.%s/' % u for u in 'eu', 'org']

         self.url_vars = {
-            'login': 'login', 'search': 'browse?search=%s&method=1&c27=27&c17=17&c11=11', 'get': '%s',
+            'login_action': 'login', 'search': 'browse?search=%s&method=1&c27=27&c17=17&c11=11', 'get': '%s',
             'nonscene': 'nonscene?search=%s&method=1&c44=44&c45=44', 'archive': 'archive?search=%s&method=1&c26=26'}
         self.url_tmpl = {
-            'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s',
+            'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s',
             'get': '%(home)s%(vars)s', 'nonscene': '%(home)s%(vars)s', 'archive': '%(home)s%(vars)s'}

         self.username, self.password, self.minseed, self.minleech = 4 * [None]

     def _authorised(self, **kwargs):

-        return super(SCCProvider, self)._authorised(post_params={'submit': 'come+on+in'})
+        return super(SCCProvider, self)._authorised(post_params={'form_tmpl': 'method'})

     def _search_provider(self, search_params, **kwargs):

@@ -76,7 +76,7 @@ class SCCProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'torrents-table'})
+                        torrent_table = soup.find(id='torrents-table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):

@@ -85,17 +85,14 @@ class SCCProvider(generic.TorrentProvider):
                         for tr in torrent_table.find_all('tr')[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find('td', attrs={'class': x}).get_text().strip()
-                                    for x in ('ttr_seeders', 'ttr_leechers', 'ttr_size')]]
+                                    tr.find('td', class_='ttr_' + x).get_text().strip()
+                                    for x in 'seeders', 'leechers', 'size']]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
-
-                                link = str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                                download_url = self.urls['get'] % link
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -104,13 +101,11 @@ class SCCProvider(generic.TorrentProvider):

                 except generic.HaltParseException:
                     time.sleep(1.1)
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -33,7 +33,7 @@ class SceneTimeProvider(generic.TorrentProvider):

         self.url_base = 'https://www.scenetime.com/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'takelogin.php',
+                     'login_action': self.url_base + 'login.php',
                      'browse': self.url_base + 'browse_API.php',
                      'params': {'sec': 'jax', 'cata': 'yes'},
                      'get': self.url_base + 'download.php/%(id)s/%(title)s.torrent'}

@@ -46,7 +46,7 @@ class SceneTimeProvider(generic.TorrentProvider):

     def _authorised(self, **kwargs):

-        return super(SceneTimeProvider, self)._authorised(post_params={'submit': 'Log in'})
+        return super(SceneTimeProvider, self)._authorised(post_params={'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -91,19 +91,18 @@ class SceneTimeProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                                 if None is tr.find('a', href=rc['cats'])\
                                         or self.freeleech and None is rc['fl'].search(tr.find_all('td')[1].get_text())\
                                         or self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = info.attrs.get('title') or info.get_text().strip()
-
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 download_url = self.urls['get'] % {
                                     'id': re.sub(rc['get'], r'\1', str(info.attrs['href'])),
                                     'title': str(title).replace(' ', '.')}
-                            except (AttributeError, TypeError, ValueError):
+                            except (AttributeError, TypeError, ValueError, KeyError):
                                 continue

                             if title and download_url:

@@ -111,15 +110,13 @@ class SceneTimeProvider(generic.TorrentProvider):

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                 self._log_search(mode, len(items[mode]) - cnt,
                                  ('search string: ' + search_string, self.name)['Cache' == mode])

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -50,10 +50,9 @@ class ShazbatProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(ShazbatProvider, self)._authorised(
-            logged_in=(lambda x=None: '<input type="password"' not in helpers.getURL(
+            logged_in=(lambda y=None: '<input type="password"' not in helpers.getURL(
                 self.urls['feeds'], session=self.session)),
-            post_params={'tv_login': self.username, 'tv_password': self.password,
-                         'referer': 'login', 'query': '', 'email': ''})
+            post_params={'tv_login': self.username, 'tv_password': self.password, 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -116,8 +115,7 @@ class ShazbatProvider(generic.TorrentProvider):
                                         title = unicode(element).strip()
                                         break

-                                link = str(tr.find('a', href=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
-                                download_url = self.urls['get'] % link
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -126,13 +124,11 @@ class ShazbatProvider(generic.TorrentProvider):

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -44,7 +44,7 @@ class SpeedCDProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(SpeedCDProvider, self)._authorised(
-            logged_in=(lambda x=None: self.has_all_cookies('inSpeed_speedian')))
+            logged_in=(lambda y=None: self.has_all_cookies('inSpeed_speedian')))

     def _search_provider(self, search_params, **kwargs):

@@ -81,32 +81,28 @@ class SpeedCDProvider(generic.TorrentProvider):
                     for tr in torrent_rows[1:]:
                         try:
                             seeders, leechers, size = [tryInt(n, n) for n in [
-                                tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+                                tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                             if None is tr.find('a', href=rc['cats']) \
                                     or self.freeleech and None is rc['fl'].search(tr.find_all('td')[1].get_text()) \
                                     or self._peers_fail(mode, seeders, leechers):
                                 continue

                             info = tr.find('a', 'torrent')
-                            title = info.attrs.get('title') or info.get_text().strip()
-
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+                            title = (info.attrs.get('title') or info.get_text()).strip()
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                         except (AttributeError, TypeError, ValueError):
                             continue

                         if title and download_url:
                             items[mode].append((title, download_url, seeders, self._bytesizer(size)))

-                except Exception:
+                except (StandardError, Exception):
                     time.sleep(1.1)

                 self._log_search(mode, len(items[mode]) - cnt,
                                  ('search string: ' + search_string, self.name)['Cache' == mode])

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
@@ -26,6 +26,7 @@ from . import generic
 from sickbeard import config, logger, show_name_helpers
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.common import Quality, mediaExtensions
+from sickbeard.helpers import tryInt
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from lib.unidecode import unidecode

@@ -35,7 +36,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, 'The Pirate Bay', cache_update_freq=20)

-        self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org']
+        self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org'] + \
+                        ['piratebay.usbypass.xyz/']

         self.url_vars = {'search': 'search/%s/0/7/200', 'browse': 'tv/latest/'}
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',

@@ -135,9 +137,9 @@ class ThePirateBayProvider(generic.TorrentProvider):

         items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

-        rc = dict((k, re.compile('(?i)' + v))
-                  for (k, v) in {'info': 'detail', 'get': 'download[^"]+magnet', 'tid': r'.*/(\d{5,}).*',
-                                 'verify': '(?:helper|moderator|trusted|vip)'}.items())
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+            'info': 'detail', 'get': 'download[^"]+magnet', 'tid': r'.*/(\d{5,}).*',
+            'verify': '(?:helper|moderator|trusted|vip)', 'size': 'size[^\d]+(\d+(?:[.,]\d+)?\W*[bkmgt]\w+)'}.items())

         for mode in search_params.keys():
             for search_string in search_params[mode]:

@@ -153,7 +155,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive'], attr='id="searchResult"') as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'searchResult'})
+                        torrent_table = soup.find(id='searchResult')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):

@@ -161,14 +163,13 @@ class ThePirateBayProvider(generic.TorrentProvider):

                         for tr in torrent_table.find_all('tr')[1:]:
                             try:
-                                seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
+                                seeders, leechers = [tryInt(tr.find_all('td')[x].get_text().strip()) for x in -2, -1]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', title=rc['info'])
                                 title = info.get_text().strip().replace('_', '.')
                                 tid = rc['tid'].sub(r'\1', str(info['href']))
-
                                 download_magnet = tr.find('a', title=rc['get'])['href']
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -186,22 +187,19 @@ class ThePirateBayProvider(generic.TorrentProvider):
                             if title and download_magnet:
                                 size = None
                                 try:
-                                    size = re.findall('(?i)size[^\d]+(\d+(?:[\.,]\d+)?\W*[bkmgt]\w+)',
-                                                      tr.find_all(class_='detDesc')[0].get_text())[0]
-                                except Exception:
+                                    size = rc['size'].findall(tr.find_all(class_='detDesc')[0].get_text())[0]
+                                except (StandardError, Exception):
                                     pass

                                 items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))

                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-                self._sort_seeders(mode, items)
-
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
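Moving the Pirate Bay size pattern into the precompiled rc dict avoids recompiling it on every result row. The pattern itself, run against a hypothetical detDesc cell:

    import re

    desc = 'Uploaded 03-08 2016, Size 723.9 MiB, ULed by someone'  # hypothetical cell text
    rc_size = re.compile(r'(?i)size[^\d]+(\d+(?:[.,]\d+)?\W*[bkmgt]\w+)')
    size = None
    try:
        size = rc_size.findall(desc)[0]
    except IndexError:
        pass  # no size in the cell; _bytesizer later receives None
    print(size)  # 723.9 MiB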
@@ -1,5 +1,3 @@
-# Author: Mr_Orange
-# URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickGear.
 #
@@ -16,11 +14,13 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
+import re
 import traceback
 import urllib
 
 from . import generic
 from sickbeard import logger, show_name_helpers, tvcache
+from sickbeard.helpers import tryInt
 from sickbeard.bs4_parser import BS4Parser
 
 
@@ -29,7 +29,7 @@ class TokyoToshokanProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, 'TokyoToshokan', anime_only=True)
 
-        self.url_base = self.url = 'http://tokyotosho.info/'
+        self.url_base = self.url = 'https://tokyotosho.info/'
 
         self.cache = TokyoToshokanCache(self)
 
@@ -39,36 +39,49 @@ class TokyoToshokanProvider(generic.TorrentProvider):
         if self.show and not self.show.is_anime:
             return results
 
-        params = {'terms': search_string.encode('utf-8'),
-                  'type': 1}  # get anime types
+        params = urllib.urlencode({'terms': search_string.encode('utf-8'),
+                                   'type': 1})  # get anime types
 
-        search_url = self.url + 'search.php?' + urllib.urlencode(params)
-        logger.log(u'Search string: ' + search_url, logger.DEBUG)
+        search_url = '%ssearch.php?%s' % (self.url, params)
+        mode = ('Episode', 'Season')['sponly' == search_mode]
+
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+            'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
 
         html = self.get_url(search_url)
         if html:
             try:
                 with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                    torrent_table = soup.find('table', attrs={'class': 'listing'})
-                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+                    torrent_table = soup.find('table', class_='listing')
+                    torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                     if torrent_rows:
-                        a = (0, 1)[None is not torrent_rows[0].find('td', attrs={'class': 'centertext'})]
+                        a = (0, 1)[None is not torrent_rows[0].find('td', class_='centertext')]
 
-                        for top, bottom in zip(torrent_rows[a::2], torrent_rows[a::2]):
-                            title = top.find('td', attrs={'class': 'desc-top'}).text
-                            url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
+                        for top, bottom in zip(torrent_rows[a::2], torrent_rows[a+1::2]):
+                            try:
+                                bottom_text = bottom.get_text() or ''
+                                stats = rc['stats'].findall(bottom_text)
+                                seeders, leechers = (0, 0) if not stats else [tryInt(n) for n in stats[0]]
 
-                            if title and url:
-                                results.append((title.lstrip(), url))
+                                size = rc['size'].findall(bottom_text)
+                                size = size and size[0] or -1
 
-            except Exception:
-                logger.log(u'Failed to parsing ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
+                                info = top.find('td', class_='desc-top')
+                                title = info and re.sub(r'[ .]{2,}', '.', info.get_text().strip())
+                                urls = info and sorted([x.get('href') for x in info.find_all('a') or []])
+                                download_url = urls and urls[0].startswith('http') and urls[0] or urls[1]
+                            except (AttributeError, TypeError, ValueError, IndexError):
+                                continue
 
-        return results
+                            if title and download_url:
+                                results.append((title, download_url, seeders, self._bytesizer(size)))
 
-    def find_search_results(self, show, episodes, search_mode, manual_search=False):
+            except (StandardError, Exception):
+                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
-        return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)
+        self._log_search(mode, len(results), search_url)
+
+        return self._sort_seeding(mode, results)
 
     def _season_strings(self, ep_obj, **kwargs):
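The zip() fix above is the functional core of this hunk: TokyoToshokan lists each torrent as two consecutive table rows, a 'top' row carrying the title and links and a 'bottom' row carrying the stats, and the old slicing paired every top row with itself. A self-contained illustration:

    rows = ['top1', 'bottom1', 'top2', 'bottom2']
    a = 0  # offset is 1 when a header row precedes the listing
    # old: zip(rows[a::2], rows[a::2]) yields ('top1', 'top1') - bottom is never read
    # new: offset the second slice by one to pair each row with its stats row
    assert list(zip(rows[a::2], rows[a + 1::2])) == [('top1', 'bottom1'), ('top2', 'bottom2')]
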
@@ -84,18 +97,35 @@ class TokyoToshokanCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
 
-        self.update_freq = 15  # cache update frequency
+        self.update_freq = 15
 
     def _cache_data(self):
-        params = {'filter': '1'}
 
-        url = self.provider.url + 'rss.php?' + urllib.urlencode(params)
-        logger.log(u'TokyoToshokan cache update URL: ' + url, logger.DEBUG)
+        mode = 'Cache'
+        search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'}))
+        data = self.getRSSFeed(search_url)
 
-        data = self.getRSSFeed(url)
+        results = []
         if data and 'entries' in data:
-            return data.entries
-        return []
+
+            rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
+
+            for cur_item in data.get('entries', []):
+                try:
+                    title, download_url = self._title_and_url(cur_item)
+                    size = rc['size'].findall(cur_item.get('summary_detail', {'value': ''}).get('value', ''))
+                    size = size and size[0] or -1
+
+                except (AttributeError, TypeError, ValueError):
+                    continue
+
+                if title and download_url:
+                    # feed does not carry seed, leech counts
+                    results.append((title, download_url, 0, self.provider._bytesizer(size)))
+
+        self.provider._log_search(mode, len(results), search_url)
+
+        return results
 
 
 provider = TokyoToshokanProvider()
 
@@ -32,8 +32,8 @@ class TorrentBytesProvider(generic.TorrentProvider):
 
         self.url_home = ['https://www.torrentbytes.net/']
 
-        self.url_vars = {'login': 'takelogin.php', 'search': 'browse.php?search=%s&%s', 'get': '%s'}
-        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
+        self.url_vars = {'login_action': 'login.php', 'search': 'browse.php?search=%s&%s', 'get': '%s'}
+        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s',
                          'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
 
         self.categories = {'Season': [41, 32], 'Episode': [33, 37, 38]}
@@ -43,7 +43,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
 
     def _authorised(self, **kwargs):
 
-        return super(TorrentBytesProvider, self)._authorised(post_params={'login': 'Log in!'})
+        return super(TorrentBytesProvider, self)._authorised(post_params={'form_tmpl': True})
 
     def _search_provider(self, search_params, **kwargs):
 
@@ -78,15 +78,14 @@ class TorrentBytesProvider(generic.TorrentProvider):
                         try:
                             info = tr.find('a', href=rc['info'])
                             seeders, leechers, size = [tryInt(n, n) for n in [
-                                tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                             if self.freeleech and (len(info.contents) < 2 or not rc['fl'].search(
                                     info.contents[1].string.strip())) or self._peers_fail(mode, seeders, leechers):
                                 continue
 
-                            title = info.attrs.get('title') or info.contents[0]
-                            title = (isinstance(title, list) and title[0] or title).strip()
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                        except (AttributeError, TypeError, ValueError):
+                            title = (info.attrs.get('title') or info.get_text()).strip()
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
+                        except (AttributeError, TypeError, ValueError, KeyError):
                             continue
 
                         if title and download_url:
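`self._link(...)` is a new base-class helper that this and the following providers switch to; its body is outside this diff. Judging by the inline expression it replaces, a sketch of the idea (an assumption about its behaviour, not the actual implementation):

    def _link(self, url):
        # hypothetical: resolve a scraped, possibly relative href against the
        # provider's 'get' url template, as the removed inline code did
        return self.urls['get'] % str(url).lstrip('/')
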
@@ -94,14 +93,12 @@ class TorrentBytesProvider(generic.TorrentProvider):
 
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
                 self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results
 
@@ -35,7 +35,7 @@ class TorrentDayProvider(generic.TorrentProvider):
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
                          'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
 
-        self.categories = {'Season': [31, 33, 14], 'Episode': [24, 32, 26, 7, 2], 'Anime': [29]}
+        self.categories = {'Season': [31, 33, 14], 'Episode': [24, 32, 26, 7, 34, 2], 'Anime': [29]}
         self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
 
         self.proper_search_terms = None
@@ -45,9 +45,10 @@ class TorrentDayProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
 
         return super(TorrentDayProvider, self)._authorised(
-            logged_in=(lambda x='': ('RSS URL' in x) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y='': all(
+                ['RSS URL' in y, self.has_all_cookies()] +
+                [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
 
     @staticmethod
     def _has_signature(data=None):
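The rewritten logged_in lambda keeps the same test but hardens it: self.digest holds the user-supplied cookie details, and the `or 'sg!no!pw'` fallback turns a missing session cookie into a guaranteed mismatch rather than the KeyError the old `cookies['uid']` indexing could raise. A stand-alone illustration of that behaviour (values are examples):

    digest = 'uid=1234; pass=abcd'    # illustrative user-entered cookie details
    cookies = {'uid': '1234'}         # 'pass' cookie absent from the session
    # old style: cookies['pass'] raises KeyError; new style just fails the check
    assert not all((cookies.get(x) or 'sg!no!pw') in digest for x in ('uid', 'pass'))
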
@@ -87,15 +88,14 @@ class TorrentDayProvider(generic.TorrentProvider):
 
                     for tr in torrent_rows[1:]:
                         try:
-                            seeders, leechers = [tryInt(tr.find('td', attrs={'class': x}).get_text().strip())
-                                                 for x in ('seedersInfo', 'leechersInfo')]
+                            seeders, leechers = [tryInt(tr.find('td', class_=x + 'ersInfo').get_text().strip())
+                                                 for x in 'seed', 'leech']
                             if self._peers_fail(mode, seeders, leechers):
                                 continue
 
                             title = tr.find('a', href=rc['info']).get_text().strip()
                             size = tr.find_all('td')[-3].get_text().strip()
 
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                         except (AttributeError, TypeError, ValueError):
                             continue
 
@@ -104,14 +104,12 @@ class TorrentDayProvider(generic.TorrentProvider):
 
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     time.sleep(1.1)
 
                 self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results
 
@@ -43,9 +43,10 @@ class TorrentingProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
 
         return super(TorrentingProvider, self)._authorised(
-            logged_in=(lambda x='': ('RSS link' in x) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y='': all(
+                ['RSS link' in y, self.has_all_cookies()] +
+                [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
 
     @staticmethod
     def _has_signature(data=None):
@@ -84,14 +85,13 @@ class TorrentingProvider(generic.TorrentProvider):
                     for tr in torrent_rows[1:]:
                         try:
                             seeders, leechers, size = [tryInt(n, n) for n in [
-                                tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+                                tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                             if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
                                 continue
 
                             info = tr.find('a', href=rc['info'])
-                            title = info.attrs.get('title') or info.get_text().strip()
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-
+                            title = (info.attrs.get('title') or info.get_text()).strip()
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                         except (AttributeError, TypeError, ValueError):
                             continue
 
@@ -100,14 +100,12 @@ class TorrentingProvider(generic.TorrentProvider):
 
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
                 self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results
 
@@ -21,6 +21,7 @@ import traceback
 from . import generic
 from sickbeard import logger
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode
 
 
@@ -30,7 +31,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
 
         self.url_base = 'https://torrentleech.org/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'user/account/login/',
+                     'login_action': self.url_base,
                      'browse': self.url_base + 'torrents/browse/index/categories/%(cats)s',
                      'search': self.url_base + 'torrents/browse/index/query/%(query)s/categories/%(cats)s',
                      'get': self.url_base + '%s'}
@@ -43,8 +44,8 @@ class TorrentLeechProvider(generic.TorrentProvider):
 
     def _authorised(self, **kwargs):
 
-        return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='tl')),
-                                                             post_params={'remember_me': 'on', 'login': 'submit'})
+        return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='tl')),
+                                                             post_params={'remember_me': 'on', 'form_tmpl': True})
 
     def _search_provider(self, search_params, **kwargs):
 
@@ -69,7 +70,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
                         raise generic.HaltParseException
 
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'torrenttable'})
+                        torrent_table = soup.find(id='torrenttable')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
 
                         if 2 > len(torrent_rows):
@@ -77,16 +78,15 @@ class TorrentLeechProvider(generic.TorrentProvider):
 
                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
-                                                     for x in ('seeders', 'leechers')]
+                                seeders, leechers = [tryInt(n) for n in [
+                                    tr.find('td', class_=x).get_text().strip() for x in 'seeders', 'leechers']]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
 
-                                info = tr.find('td', {'class': 'name'}).a
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                                info = tr.find('td', class_='name').a
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 size = tr.find_all('td')[-5].get_text().strip()
 
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
 
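The switch from int() to helpers.tryInt here (and the new import above) trades exceptions for a default value when a cell is empty or non-numeric. A sketch of tryInt's apparent contract, inferred only from its call sites in this diff (`tryInt(n)` and `tryInt(n, n)`):

    def try_int(value, default=0):
        # hypothetical equivalent: coerce to int, fall back instead of raising
        try:
            return int(value)
        except (TypeError, ValueError):
            return default
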
@@ -95,13 +95,11 @@ class TorrentLeechProvider(generic.TorrentProvider):
 
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results
 
@@ -110,5 +108,4 @@ class TorrentLeechProvider(generic.TorrentProvider):
         return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
 
 
-
 provider = TorrentLeechProvider()
 
@@ -34,7 +34,7 @@ class TorrentShackProvider(generic.TorrentProvider):
 
         self.url_base = 'https://torrentshack.me/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'login.php?lang=',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'torrents.php?searchstr=%s&%s&' + '&'.join(
                          ['release_type=both', 'searchtags=', 'tags_type=0',
                           'order_by=s3', 'order_way=desc', 'torrent_preset=all']),
@@ -48,8 +48,8 @@ class TorrentShackProvider(generic.TorrentProvider):
 
     def _authorised(self, **kwargs):
 
-        return super(TorrentShackProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
-                                                             post_params={'keeplogged': '1', 'login': 'Login'})
+        return super(TorrentShackProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')),
+                                                             post_params={'keeplogged': '1', 'form_tmpl': True})
 
     def _search_provider(self, search_params, **kwargs):
 
@@ -59,8 +59,8 @@ class TorrentShackProvider(generic.TorrentProvider):
 
         items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
-        rc = dict((k, re.compile('(?i)' + v))
-                  for (k, v) in {'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+'}.items())
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+            'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+', 'size': '\s{2,}.*'}.iteritems())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
@@ -75,7 +75,7 @@ class TorrentShackProvider(generic.TorrentProvider):
                         raise generic.HaltParseException
 
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'torrent_table'})
+                        torrent_table = soup.find('table', class_='torrent_table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
 
                         if 2 > len(torrent_rows):
@@ -84,17 +84,15 @@ class TorrentShackProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
 
+                                size = rc['size'].sub('', size)
                                 info = tr.find('a', title=rc['info'])
-                                title = 'title' in info.attrs and rc['title'].sub('', info.attrs['title']) \
-                                    or info.get_text().strip()
-
-                                link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
-                                download_url = self.urls['get'] % link
-                            except (AttributeError, TypeError, ValueError):
+                                title = (rc['title'].sub('', info.attrs.get('title', '')) or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', title=rc['get'])['href'])
+                            except (AttributeError, TypeError, ValueError, KeyError):
                                 continue
 
                             if title and download_url:
@@ -102,13 +100,11 @@ class TorrentShackProvider(generic.TorrentProvider):
 
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results
 
@@ -47,8 +47,8 @@ class TransmithenetProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
 
         if not super(TransmithenetProvider, self)._authorised(
-                logged_in=(lambda x=None: self.has_all_cookies('session')),
-                post_params={'keeplogged': '1', 'login': 'Login'}):
+                logged_in=(lambda y=None: self.has_all_cookies('session')),
+                post_params={'keeplogged': '1', 'form_tmpl': True}):
             return False
         if not self.user_authkey:
             response = helpers.getURL(self.urls['user'], session=self.session, json=True)
@@ -102,13 +102,11 @@ class TransmithenetProvider(generic.TorrentProvider):
                         if title and download_url:
                             items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results
 
@@ -45,7 +45,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
 
     def _authorised(self, **kwargs):
 
-        return super(TVChaosUKProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='c_secure_')))
+        return super(TVChaosUKProvider, self)._authorised(
+            logged_in=(lambda y=None: self.has_all_cookies(pre='c_secure_')))
 
     def _search_provider(self, search_params, **kwargs):
 
@@ -83,29 +84,30 @@ class TVChaosUKProvider(generic.TorrentProvider):
                     for tr in torrent_rows[1:]:
                         try:
                             seeders, leechers, size = [tryInt(n, n) for n in [
-                                tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]]
+                                tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                             if self._peers_fail(mode, seeders, leechers) \
                                     or self.freeleech and None is tr.find_all('td')[1].find('img', title=rc['fl']):
                                 continue
 
                             info = tr.find('a', href=rc['info'])
-                            title = (tr.find('div', attrs={'class': 'tooltip-content'}).get_text() or info.get_text()).strip()
+                            title = (tr.find('div', class_='tooltip-content').get_text() or info.get_text()).strip()
                             title = re.findall('(?m)(^[^\r\n]+)', title)[0]
-                            download_url = str(tr.find('a', href=rc['get'])['href'])
-                            if not download_url.startswith('http'):
-                                download_url = self.urls['get'] % download_url.lstrip('/')
-                        except Exception:
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
+                        except (StandardError, Exception):
                             continue
 
                         if get_detail and title.endswith('...'):
                             try:
-                                with BS4Parser(self.get_url('%s%s' % (self.urls['config_provider_home_uri'], info['href'].lstrip(
-                                        '/').replace(self.urls['config_provider_home_uri'], ''))), 'html.parser') as soup_detail:
-                                    title = soup_detail.find('td', attrs={'colspan': '3', 'class': 'thead'}).get_text().strip()
+                                with BS4Parser(self.get_url('%s%s' % (
+                                        self.urls['config_provider_home_uri'], info['href'].lstrip('/').replace(
+                                            self.urls['config_provider_home_uri'], ''))),
+                                               'html.parser') as soup_detail:
+                                    title = soup_detail.find(
+                                        'td', class_='thead', attrs={'colspan': '3'}).get_text().strip()
                                     title = re.findall('(?m)(^[^\r\n]+)', title)[0]
                             except IndexError:
                                 continue
-                            except Exception:
+                            except (StandardError, Exception):
                                 get_detail = False
 
                         try:
@@ -114,11 +116,13 @@ class TVChaosUKProvider(generic.TorrentProvider):
                                 rc_xtras = re.compile('(?i)([. _-]|^)(special|extra)s?\w*([. _-]|$)')
                                 has_special = rc_xtras.findall(has_series[0][1])
                                 if has_special:
-                                    title = has_series[0][0] + rc_xtras.sub(list(
-                                        set(list(has_special[0][0]) + list(has_special[0][2])))[0], has_series[0][1])
+                                    title = has_series[0][0] + rc_xtras.sub(list(set(
+                                        list(has_special[0][0]) + list(has_special[0][2])))[0], has_series[0][1])
                                 title = re.sub('(?i)series', r'Season', title)
 
-                            title_parts = re.findall('(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title)
+                            title_parts = re.findall(
+                                '(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?' +
+                                '(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title)
                             if len(title_parts):
                                 new_parts = [tryInt(part, part.strip()) for part in title_parts[0]]
                                 if not new_parts[1]:
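The re-wrapped title_parts pattern (behaviour unchanged) decomposes TVChaosUK's 'NofM'-style pack titles into name, season, part number and tail. A worked example with an illustrative title:

    import re
    parts = re.findall(
        r'(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?'
        r'(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$',
        'Show Season 1 3of6 HDTV')
    assert parts == [('Show ', '1', '3', ' HDTV')]  # name, season, part, tail
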
@@ -126,24 +130,26 @@ class TVChaosUKProvider(generic.TorrentProvider):
                                 new_parts[2] = ('E%02d', ' Pack %d')[mode in 'Season'] % new_parts[2]
                                 title = '%s.S%02d%s.%s' % tuple(new_parts)
 
-                            dated = re.findall('(?i)([\(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([\)\s]*)', title)
+                            dated = re.findall(
+                                '(?i)([(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([)\s]*)', title)
                             if dated:
                                 title = title.replace(''.join(dated[0]), '%s%s%s' % (
                                     ('', ' ')[1 < len(dated[0][0])], parse(dated[0][1]).strftime('%Y-%m-%d'),
                                     ('', ' ')[1 < len(dated[0][2])]))
-                                add_pad = re.findall('((?:19|20)\d\d\-\d\d\-\d\d)([\w\W])', title)
+                                add_pad = re.findall('((?:19|20)\d\d[-]\d\d[-]\d\d)([\w\W])', title)
                                 if len(add_pad) and add_pad[0][1] not in [' ', '.']:
-                                    title = title.replace(''.join(add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1]))
+                                    title = title.replace(''.join(
+                                        add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1]))
                                 title = re.sub(r'(?sim)(.*?)(?:Episode|Season).\d+.(.*)', r'\1\2', title)
 
                             if title and download_url:
                                 items[mode].append((title, download_url, seeders, self._bytesizer(size)))
-                        except Exception:
+                        except (StandardError, Exception):
                             pass
 
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
                 self._log_search(mode, len(items[mode]) - cnt,
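The dated block, now re-wrapped with the redundant escapes dropped, rewrites a bracketed air date into ISO form so date-named releases can match. A worked example (relies on dateutil's parse, which this module already imports; the title is illustrative):

    import re
    from dateutil.parser import parse

    title = 'Some Show (12 Mar 2016) HDTV'
    dated = re.findall(r'(?i)([(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([)\s]*)', title)
    if dated:
        title = title.replace(''.join(dated[0]), '%s%s%s' % (
            ('', ' ')[1 < len(dated[0][0])], parse(dated[0][1]).strftime('%Y-%m-%d'),
            ('', ' ')[1 < len(dated[0][2])]))
    assert title == 'Some Show 2016-03-12 HDTV'
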
@@ -152,17 +158,16 @@ class TVChaosUKProvider(generic.TorrentProvider):
                 if mode in 'Season' and len(items[mode]):
                     break
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results
 
     def _season_strings(self, ep_obj, **kwargs):
 
         return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, prefix='%', sp_detail=(
-            lambda e: [(('', 'Series %(seasonnumber)d%%')[1 < tryInt(e.get('seasonnumber'))] + '%(episodenumber)dof') % e,
-                       'Series %(seasonnumber)d' % e]))
+            lambda e: [
+                (('', 'Series %(seasonnumber)d%%')[1 < tryInt(e.get('seasonnumber'))] + '%(episodenumber)dof') % e,
+                'Series %(seasonnumber)d' % e]))
 
     def _episode_strings(self, ep_obj, **kwargs):
 
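The re-indented sp_detail lambda is unchanged in behaviour; it builds TVChaosUK's 'Series N' style search terms. A worked example for series 2, episode 3 (using plain ints in place of tryInt for brevity):

    e = {'seasonnumber': 2, 'episodenumber': 3}
    terms = [(('', 'Series %(seasonnumber)d%%')[1 < e['seasonnumber']] + '%(episodenumber)dof') % e,
             'Series %(seasonnumber)d' % e]
    assert terms == ['Series 2%3of', 'Series 2']
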
@@ -174,7 +179,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
     @staticmethod
     def ui_string(key):
 
-        return 'tvchaosuk_tip' == key and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or ''
+        return ('tvchaosuk_tip' == key
+                and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or '')
 
 
 provider = TVChaosUKProvider()
 
|
@ -35,19 +35,19 @@ class WombleCache(tvcache.TVCache):
|
|||
def __init__(self, this_provider):
|
||||
tvcache.TVCache.__init__(self, this_provider)
|
||||
|
||||
self.update_freq = 6 # cache update frequency
|
||||
self.update_freq = 6
|
||||
|
||||
def _cache_data(self):
|
||||
|
||||
result = []
|
||||
for section in ['sd', 'hd', 'x264', 'dvd']:
|
||||
url = '%srss/?sec=tv-%s&fr=false' % (self.provider.url, section)
|
||||
data = self.getRSSFeed(url)
|
||||
xml_data = self.getRSSFeed(url)
|
||||
time.sleep(1.1)
|
||||
cnt = len(result)
|
||||
for entry in (data and data.get('entries', []) or []):
|
||||
for entry in (xml_data and xml_data.get('entries', []) or []):
|
||||
if entry.get('title') and entry.get('link', '').startswith('http'):
|
||||
result.append((entry['title'], entry['link'], None, None))
|
||||
result.append((entry.get('title'), entry.get('link'), None, None))
|
||||
|
||||
self.provider.log_result(count=len(result) - cnt, url=url)
|
||||
|
||||
|
|
|
@@ -82,9 +82,7 @@ class ZooqleProvider(generic.TorrentProvider):
                             info = td[1].find('a', href=rc['info'])
                             title = info and info.get_text().strip()
                             size = td[-3].get_text().strip()
-
                             download_url = info and (self.urls['get'] % rc['info'].findall(info['href'])[0])
-
                         except (AttributeError, TypeError, ValueError, IndexError):
                             continue
 
@@ -93,14 +91,12 @@ class ZooqleProvider(generic.TorrentProvider):
 
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
 
                 self._log_search(mode, len(items[mode]) - cnt, search_url)
 
-            self._sort_seeders(mode, items)
-
-        results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
 
         return results