From 0d50a4b345e861020e960d33f46be83aad81f6b1 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Sat, 27 Aug 2016 00:36:01 +0100 Subject: [PATCH] Add indicator for public access search providers. Change improve probability selecting most seeded release. Change add the TorrentDay x265 category to search. Change torrent provider code PEP8 and refactoring. Add BTScene torrent provider. Add Extratorrent provider. Add Limetorrents provider. Add nCore torrent provider. Remove Usenet Crawler provider. --- CHANGES.md | 8 ++ gui/slick/images/providers/btscene.png | Bin 0 -> 548 bytes gui/slick/images/providers/extratorrent.png | Bin 0 -> 497 bytes gui/slick/images/providers/limetorrents.png | Bin 0 -> 682 bytes gui/slick/images/providers/ncore.png | Bin 0 -> 482 bytes .../interfaces/default/config_providers.tmpl | 17 ++- sickbeard/properFinder.py | 4 +- sickbeard/providers/__init__.py | 12 +- sickbeard/providers/alpharatio.py | 20 ++- sickbeard/providers/beyondhd.py | 8 +- sickbeard/providers/bithdtv.py | 16 ++- sickbeard/providers/bitmetv.py | 16 ++- sickbeard/providers/btn.py | 53 ++++---- sickbeard/providers/btscene.py | 117 ++++++++++++++++++ sickbeard/providers/dh.py | 16 +-- sickbeard/providers/extratorrent.py | 108 ++++++++++++++++ sickbeard/providers/fano.py | 14 +-- sickbeard/providers/filelist.py | 12 +- sickbeard/providers/freshontv.py | 23 ++-- sickbeard/providers/funfile.py | 23 ++-- sickbeard/providers/generic.py | 105 +++++++++++----- sickbeard/providers/gftracker.py | 13 +- sickbeard/providers/grabtheinfo.py | 26 ++-- sickbeard/providers/hd4free.py | 12 +- sickbeard/providers/hdbits.py | 15 ++- sickbeard/providers/hdspace.py | 28 +++-- sickbeard/providers/ilt.py | 14 +-- sickbeard/providers/iptorrents.py | 26 ++-- sickbeard/providers/limetorrents.py | 109 ++++++++++++++++ sickbeard/providers/morethan.py | 22 ++-- sickbeard/providers/ncore.py | 112 +++++++++++++++++ sickbeard/providers/newznab.py | 2 +- sickbeard/providers/nyaatorrents.py | 85 +++++++------ sickbeard/providers/omgwtfnzbs.py | 6 +- sickbeard/providers/pisexy.py | 18 ++- sickbeard/providers/pretome.py | 6 +- sickbeard/providers/privatehd.py | 18 ++- sickbeard/providers/ptf.py | 14 +-- sickbeard/providers/rarbg.py | 26 ++-- sickbeard/providers/revtt.py | 14 +-- sickbeard/providers/rsstorrent.py | 2 +- sickbeard/providers/scc.py | 25 ++-- sickbeard/providers/scenetime.py | 17 ++- sickbeard/providers/shazbat.py | 14 +-- sickbeard/providers/speedcd.py | 16 +-- sickbeard/providers/thepiratebay.py | 26 ++-- sickbeard/providers/tokyotoshokan.py | 84 +++++++++---- sickbeard/providers/torrentbytes.py | 21 ++-- sickbeard/providers/torrentday.py | 22 ++-- sickbeard/providers/torrenting.py | 20 ++- sickbeard/providers/torrentleech.py | 27 ++-- sickbeard/providers/torrentshack.py | 30 ++--- sickbeard/providers/transmithe_net.py | 10 +- sickbeard/providers/tvchaosuk.py | 56 +++++---- sickbeard/providers/womble.py | 8 +- sickbeard/providers/zooqle.py | 8 +- 56 files changed, 991 insertions(+), 533 deletions(-) create mode 100644 gui/slick/images/providers/btscene.png create mode 100644 gui/slick/images/providers/extratorrent.png create mode 100644 gui/slick/images/providers/limetorrents.png create mode 100644 gui/slick/images/providers/ncore.png create mode 100644 sickbeard/providers/btscene.py create mode 100644 sickbeard/providers/extratorrent.py create mode 100644 sickbeard/providers/limetorrents.py create mode 100644 sickbeard/providers/ncore.py diff --git a/CHANGES.md b/CHANGES.md index 886e5992..46e9555a 100644 --- a/CHANGES.md +++ 
b/CHANGES.md @@ -89,6 +89,11 @@ * Add PTF torrent provider * Add ILT torrent provider * Add Fano torrent provider +* Add BTScene torrent provider +* Add Extratorrent provider +* Add Limetorrents provider +* Add nCore torrent provider +* Remove Usenet-Crawler provider * Change CPU throttling on General Config/Advanced to "Disabled" by default for new installs * Change provider OMGWTFNZBS api url and auto reject nuked releases * Change Search Provider page to load torrent settings only when Search torrents is enabled in Search Settings @@ -120,6 +125,9 @@ * Change post process to join incrementally named (i.e. file.001 to file.nnn) split files * Change replace unrar2 lib with rarfile 3.0 and UnRAR.exe 5.40 freeware * Change post process "Copy" to delete redundant files after use +* Add indicator for public access search providers +* Change improve probability selecting most seeded release +* Change add the TorrentDay x265 category to search [develop changelog] * Change send nzb data to NZBGet for Anizb instead of url diff --git a/gui/slick/images/providers/btscene.png b/gui/slick/images/providers/btscene.png new file mode 100644 index 0000000000000000000000000000000000000000..142436e040321ffc3892c411110876c2fcfa6f4b GIT binary patch literal 548 zcmV+<0^9wGP) zOY1RE5XN2q?n3>uvJtlINZDA(Qfw?_r+flQme%qeEPMhM_Oek#Qb;7^vU1J)y1YM* z*DOwRW}caO<~c{|_xmLhiBu}(l*{FXLc!nM~r`@Ap>-(AP-(dE4Lr z2B=!CnoK5{OeSETPD-WHY&MTZBapY-4V*BF$K$cn>4bt{AyK@IOCpf~@pwF_`4&(N znPRaB=I7_<1qC6v#^Z5LOw1K=d?F5qL-Zmd`|ihdqMx}WWUmwx>h*fF+0BJxUL z;Ii2)Jwzvr`s?*No6UaTW;7b1^MZ;AIPG@(+a8C=a=GkwyW8#7>-FmOdO=BoIG9?k zwpy)7ez{zZ$KztLXti3y;gD;&T;A<=nM_7-k;5bu3LzX(i^U=ubJ}b+nBchGZk0*} zy-KC>qF}e%Bauk2*XwjT>-GA4KC>t%JJ6n$fFe{l9RBRkXf!+?Pc#~3&i4h7Tu3aw z1M$sfBYXur(P_Wm=W;pTTc^{3V?QCCc$357FrUx4l2XKCvFUW0&*xn(SHIs!N1j}K z1j^t|rNLl0olZhh#bh#JEjD3dsZ-Vs1Oh~Bwc21X&}y}* mR0@RnV7v^v;ZQGTqQ^gL&1aHT!3k#o00005QV>)`|{$(6$N7>kyIMB2#FA*o3JSqK`VQ^fT)EMY!tN?Z8XLr+6Y2GgH~1+ z*@z0y2#N~g50b({$hv{J(d_dld&lCsyFn3ln#;X+&YYQZMy3lhW*$UD_oFJ8Sqd-7O) z*3DbGum)iSICVga+Wa>U43C_exOL;tcO2*Xi}}HU);Mke)G4s+rIZ4&dgI{mxq8V< zr;qgZbgydP5M+;lpb}6q1@7IxSt`x}QWMIxwmGK_hG5xGle7v@D$d;>JF`+KRRs&+ z2yo5;zFIB+@WAro{Os%pMGU<8?3%TkIy&l}gq9>IlKwJa--!#?Cq|djO327$+72Du z-`}@mg-R}r?!Ko>;WfMWaJJS@GC4kZ=WcG-&cA2qY|ow_yI8*xj5UkOpY|Ns-B``S nRN|YC!c-E8=!5B*=GXf--&e-t7g9}s00000NkvXXu0mjfEV1lR literal 0 HcmV?d00001 diff --git a/gui/slick/images/providers/limetorrents.png b/gui/slick/images/providers/limetorrents.png new file mode 100644 index 0000000000000000000000000000000000000000..47ea1d55aaa96455de772965fbd82c371bbf3fb8 GIT binary patch literal 682 zcmV;b0#*HqP)1wkoO|z#Gox|xOH_hjQYwO|k!5!gwy7TcHUij{jM|#Q>vB5^V1Y!XY z5b$IyBZP#sBgncyC;^4c(bNGsm&lv!#_oW^AmYRIO%>D&Vlc89@S@rj6hQ>R2#7bD zOT>5o8?3Ixr+W%LCzTe8A{dZ7s3DLp6i=oB0;o?=90E8eIF0`f=9|?mxh$Zq5MtN} zC`bxeR2Be>QNJ##L-1H*nW~M^(>35tu@u#xP7Psg(K}tn>3d>VCCz3@v$rJK42hp5 zc9W$39holSymjL}s^qS7d$#=bOY? 
z6{kxpk1BsSYa^SJ&PAh=j`sKUkKTEBvu$E_E>+rFVls8U!D?Dz%}tW}I>v+~t}}Tu zSuuZ&JPh{@91&G}&7S`4=e2UF9M|R=2k!qfyvc=5!*HAAf-Q&?6?(Oml;zbzohOxH z?mZrge*N*d+10=Q`hp$48Z4pBu2S^irvroSKFgi?^X^FRPC3#cj!jSp0( zY9z6@#}1!h|IUNAQoDSq&TuLKV|u}s3A-DMYrnjho;&{R`Siic|I@Z!Uqp=1+tbMp zdk5LKYp~Y0ZR_RyhT!%X7vCI`F_kY#j_X+|v#FG*>#NVN?DT1{$4Gv=k-!8d*NCFv zzxIyesKve<;`h~~Qq|AZe$BL=mmGyi2=fM$1FME8acJuum1K`x(QkUDPxIm~HY&Cm Q+yDRo07*qoM6N<$g0gZ=v;Y7A literal 0 HcmV?d00001 diff --git a/gui/slick/images/providers/ncore.png b/gui/slick/images/providers/ncore.png new file mode 100644 index 0000000000000000000000000000000000000000..40af14e0a986498fe935be972f30f4c4b65dfaeb GIT binary patch literal 482 zcmV<80UiE{P)<21S!t2>Co0!4jmZG)2Ov*CJ^aCT+&9?Haw->5+?7M!2Z@WZ zI6sBfs82N$T_^AtU-0WcfHE#baXA^ac*JmZzpXT3S-C*WF%Na5-EZ0Ez{5WB!kuo8GUls|K`%=~~UxbJ|YTpe(0Cv0^Si^XDlNluDXULTG9Cc~2%_Gfs2>M?)k?^wf& Yf3r5ToI5-&NB{r;07*qoM6N<$f}l~{oB#j- literal 0 HcmV?d00001 diff --git a/gui/slick/interfaces/default/config_providers.tmpl b/gui/slick/interfaces/default/config_providers.tmpl index f5e0596f..fdf61759 100644 --- a/gui/slick/interfaces/default/config_providers.tmpl +++ b/gui/slick/interfaces/default/config_providers.tmpl @@ -87,7 +87,7 @@

     Provider Priorities
     Check off and drag the providers into the order you want them to be used.
-    At least one provider is required but two are recommended.
+    At least one provider is required, two are recommended.
 #if $methods_notused
     <%= '/'.join(x for x in methods_notused) %> providers can be enabled in Search Settings
@@ -109,7 +109,10 @@
             /> $tip $cur_provider.name$state
-            <%= '*' if not cur_provider.supports_backlog else '' %>
+            #if $cur_provider.is_public_access()#
+            (PA)
+            #end if#
+            #if not $cur_provider.supports_backlog#*#end if#
         #end for
@@ -117,10 +120,12 @@
-    *     Provider does not support backlog searches at this time
-#if $sickbeard.USE_TORRENTS
-    **    Provider supports limited backlog searches, some episodes/qualities may not be available
-#end if
+    (PA)  Public access, no account required
+          Searches current and past releases
+    *     Searches current but not past releases
+## #if $sickbeard.USE_TORRENTS
+##   **  Supports limited backlog searches, some episodes/qualities may not be available
+## #end if ##
     !     Provider is NOT WORKING
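
The new (PA) legend is driven by the is_public_access() helper this patch adds to GenericProvider further down: it pattern-matches known public source names and otherwise treats a provider as public when no authentication hook is defined. A minimal sketch of that decision, assuming a hypothetical ProviderStub (its name, digest and has_login fields are invented here purely for illustration; only the name regex comes from the patch):

    import re

    class ProviderStub(object):
        """Hypothetical stand-in for GenericProvider, reduced to what the legend needs."""
        def __init__(self, name, digest=None, has_login=False):
            self.name = name            # provider display name
            self.digest = digest        # cookie digest that only private trackers set
            self.has_login = has_login  # stands in for a provider-specific _authorised()

        def is_public_access(self):
            # Known public sources match by name; anything else counts as public
            # only when no authentication hook is configured.
            return bool(re.search('(?i)rarbg|sick|womble|anizb', self.name)) \
                or not (self.has_login or self.digest)

    def legend_mark(provider):
        # Mirrors the template change above: tag public access providers '(PA)'.
        return ('', '(PA)')[provider.is_public_access()]

    print(legend_mark(ProviderStub('Rarbg')))                        # -> (PA)
    print(legend_mark(ProviderStub('BTN', digest='uid=1; pass=2')))  # -> ''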

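The "improve probability selecting most seeded release" change rests on the new _sort_seeding() in generic.py: the old flow sorted items in place with _sort_seeders() and then de-duplicated with list(set(...)), which discarded the ordering again, whereas the new flow de-duplicates first and then sorts by the seeder count in the third tuple field. A small standalone sketch of the difference, with made-up result tuples:

    # Result tuples are (title, download_url, seeders, size), the shape the
    # providers append to items[mode]; the data below is illustrative only.
    results = [
        ('Show.S01E01.720p', 'http://example/a', 4, None),
        ('Show.S01E01.720p', 'http://example/a', 4, None),   # duplicate entry
        ('Show.S01E01.1080p', 'http://example/b', 42, None),
    ]

    # Old flow: _sort_seeders() sorted in place, then list(set(...))
    # threw that order away, so the top result was effectively arbitrary.
    arbitrary_order = list(set(results))

    # New flow (_sort_seeding): de-duplicate first, then order by seeders
    # (tup[2]) so the most seeded release is reliably tried first.
    best_first = sorted(set(results), key=lambda tup: tup[2], reverse=True)
    assert 42 == best_first[0][2]
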
diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index 2d14d147..c358b69e 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -78,6 +78,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime): # for each provider get a list of the orig_thread_name = threading.currentThread().name providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()] + np = NameParser(False, try_scene_exceptions=True) for cur_provider in providers: if not recent_anime and cur_provider.anime_only: continue @@ -99,7 +100,6 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime): # if they haven't been added by a different provider than add the proper to the list count = 0 - np = NameParser(False, try_scene_exceptions=True) for x in found_propers: name = _generic_name(x.name) if name not in propers: @@ -125,6 +125,8 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime): for cur_proper in sorted_propers: + parse_result = np.parse(cur_proper.name) + # set the indexerid in the db to the show's indexerid cur_proper.indexerid = parse_result.show.indexerid diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index a5a6852b..d60a0015 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -26,8 +26,10 @@ from sickbeard import logger, encodingKludge as ek # usenet from . import newznab, omgwtfnzbs, womble # torrent -from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, dh, fano, filelist, freshontv, funfile, gftracker, grabtheinfo, \ - hd4free, hdbits, hdspace, ilt, iptorrents, morethan, pisexy, pretome, privatehd, ptf, rarbg, revtt, scc, scenetime, shazbat, speedcd, \ +from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, btscene, dh, extratorrent, \ + fano, filelist, freshontv, funfile, gftracker, grabtheinfo, hd4free, hdbits, hdspace, \ + ilt, iptorrents, limetorrents, morethan, ncore, pisexy, pretome, privatehd, ptf, \ + rarbg, revtt, scc, scenetime, shazbat, speedcd, \ thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, torrentshack, transmithe_net, tvchaosuk, zooqle # anime from . 
import anizb, nyaatorrents, tokyotoshokan @@ -45,8 +47,10 @@ __all__ = ['omgwtfnzbs', 'bithdtv', 'bitmetv', 'btn', + 'btscene', 'custom01', 'dh', + 'extratorrent', 'fano', 'filelist', 'freshontv', @@ -58,7 +62,9 @@ __all__ = ['omgwtfnzbs', 'hdspace', 'ilt', 'iptorrents', + 'limetorrents', 'morethan', + 'ncore', 'pisexy', 'pretome', 'privatehd', @@ -227,7 +233,7 @@ def getDefaultNewznabProviders(): return '!!!'.join(['Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0', 'NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0', 'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0', - 'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0']) + ]) def getProviderModule(name): diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py index 13478d7f..c6fae1fd 100644 --- a/sickbeard/providers/alpharatio.py +++ b/sickbeard/providers/alpharatio.py @@ -35,7 +35,7 @@ class AlphaRatioProvider(generic.TorrentProvider): self.url_base = 'https://alpharatio.cc/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'login.php', + 'login_action': self.url_base + 'login.php', 'search': self.url_base + 'torrents.php?searchstr=%s%s&' + '&'.join( ['tags_type=1', 'order_by=time', 'order_way=desc'] + ['filter_cat[%s]=1' % c for c in 1, 2, 3, 4, 5] + @@ -48,8 +48,8 @@ class AlphaRatioProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')), - post_params={'keeplogged': '1', 'login': 'Login'}) + return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')), + post_params={'keeplogged': '1', 'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -73,7 +73,7 @@ class AlphaRatioProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'id': 'torrent_table'}) + torrent_table = soup.find(id='torrent_table') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -82,14 +82,12 @@ class AlphaRatioProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if self._peers_fail(mode, seeders, leechers): continue title = tr.find('a', title=rc['info']).get_text().strip() - - link = str(tr.find('a', title=rc['get'])['href']).replace('&', '&').lstrip('/') - download_url = self.urls['get'] % link + download_url = self._link(tr.find('a', title=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -98,13 +96,11 @@ class AlphaRatioProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/beyondhd.py b/sickbeard/providers/beyondhd.py index cf764c01..d18bcfe8 100644 --- a/sickbeard/providers/beyondhd.py +++ b/sickbeard/providers/beyondhd.py @@ -71,7 +71,7 @@ class BeyondHDProvider(generic.TorrentProvider): search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_url = self.urls['browse'] % (self.passkey, self.categories[mode_cats]) if 'Cache' != mode: - search_url += self.urls['search'] % re.sub('[\.\s]+', ' ', search_string) + search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string) data_json = self.get_url(search_url, json=True) @@ -82,16 +82,14 @@ class BeyondHDProvider(generic.TorrentProvider): seeders, leechers = item.get('seeders', 0), item.get('leechers', 0) if self._peers_fail(mode, seeders, leechers): continue - title, download_url = item.get('file'), item.get('get') + title, download_url = item.get('file'), self._link(item.get('get')) if title and download_url: items[mode].append((title, download_url, seeders, self._bytesizer(item.get('size')))) time.sleep(1.1) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/bithdtv.py b/sickbeard/providers/bithdtv.py index 5fabd99a..86621cbe 100644 --- a/sickbeard/providers/bithdtv.py +++ b/sickbeard/providers/bithdtv.py @@ -44,7 +44,7 @@ class BitHDTVProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(BitHDTVProvider, self)._authorised( - logged_in=(lambda x=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls + logged_in=(lambda y=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls @staticmethod def _has_signature(data=None): @@ -82,15 +82,15 @@ class BitHDTVProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]] + tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]] if self.freeleech and not tr.attrs.get('bgcolor').endswith('FF99') or \ self._peers_fail(mode, seeders, leechers): continue info = tr.find('a', href=rc['info']) - title = (info.attrs.get('title') or info.contents[0].get_text()).strip() - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - except (AttributeError, TypeError, ValueError): + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) + except (AttributeError, TypeError, ValueError, KeyError): continue if title and download_url: @@ -98,14 +98,12 @@ class BitHDTVProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/bitmetv.py b/sickbeard/providers/bitmetv.py index 550addd6..f00eaff3 100644 --- a/sickbeard/providers/bitmetv.py +++ b/sickbeard/providers/bitmetv.py @@ -46,9 +46,9 @@ class BitmetvProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(BitmetvProvider, self)._authorised( - logged_in=(lambda x=None: (None is x or 'Other Links' in x) and self.has_all_cookies() and + logged_in=(lambda y=None: (None is y or 'Other Links' in y) and self.has_all_cookies() and self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest), - failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) def _search_provider(self, search_params, **kwargs): @@ -81,13 +81,13 @@ class BitmetvProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]] + (tr.find_all('td')[x].get_text().strip()) for x in -3, -2, -5]] if self._peers_fail(mode, seeders, leechers): continue info = tr.find('a', href=rc['info']) - title = info.attrs.get('title') or info.get_text().strip() - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -96,14 +96,12 @@ class BitmetvProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 58949db9..80aad729 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -75,15 +75,18 @@ class BTNProvider(generic.TorrentProvider): try: response = helpers.getURL(self.url_api, post_data=json_rpc(params), session=self.session, json=True) error_text = response['error']['message'] - logger.log(('Call Limit' in error_text and u'Action aborted because the %(prov)s 150 calls/hr limit was reached' or - u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING) + logger.log( + ('Call Limit' in error_text + and u'Action aborted because the %(prov)s 150 calls/hr limit was reached' + or u'Action prematurely ended. %(prov)s server error response = %(desc)s') % + {'prov': self.name, 'desc': error_text}, logger.WARNING) return results - except: + except (KeyError, Exception): data_json = response and 'result' in response and response['result'] or {} if data_json: - found_torrents = {} if 'torrents' not in data_json else data_json['torrents'] + found_torrents = 'torrents' in data_json and data_json['torrents'] or {} # We got something, we know the API sends max 1000 results at a time. 
# See if there are more than 1000 results for our query, if not we @@ -101,37 +104,45 @@ class BTNProvider(generic.TorrentProvider): for page in range(1, pages_needed + 1): try: - response = helpers.getURL(self.url_api, json=True, session=self.session, - post_data=json_rpc(params, results_per_page, page * results_per_page)) + response = helpers.getURL( + self.url_api, json=True, session=self.session, + post_data=json_rpc(params, results_per_page, page * results_per_page)) error_text = response['error']['message'] - logger.log(('Call Limit' in error_text and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached' or - u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING) + logger.log( + ('Call Limit' in error_text + and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached' + or u'Action prematurely ended. %(prov)s server error response = %(desc)s') % + {'prov': self.name, 'desc': error_text}, logger.WARNING) return results - except: + except (KeyError, Exception): data_json = response and 'result' in response and response['result'] or {} - # Note that this these are individual requests and might time out individually. This would result in 'gaps' - # in the results. There is no way to fix this though. + # Note that this these are individual requests and might time out individually. + # This would result in 'gaps' in the results. There is no way to fix this though. if 'torrents' in data_json: found_torrents.update(data_json['torrents']) cnt = len(results) for torrentid, torrent_info in found_torrents.iteritems(): - seeders, leechers = [tryInt(n) for n in torrent_info.get('Seeders'), torrent_info.get('Leechers')] + seeders, leechers, size = (tryInt(n, n) for n in [torrent_info.get(x) for x in + 'Seeders', 'Leechers', 'Size']) if self._peers_fail(mode, seeders, leechers) or \ self.reject_m2ts and re.match(r'(?i)m2?ts', torrent_info.get('Container', '')): continue - title, url = self._title_and_url(torrent_info) + title, url = self._get_title_and_url(torrent_info) if title and url: - results.append(torrent_info) + results.append((title, url, seeders, self._bytesizer(size))) self._log_search(mode, len(results) - cnt, ('search_param: ' + str(search_param), self.name)['Cache' == mode]) + results = self._sort_seeding(mode, results) + return results - def _title_and_url(self, data_json): + @staticmethod + def _get_title_and_url(data_json): # The BTN API gives a lot of information in response, # however SickGear is built mostly around Scene or @@ -189,7 +200,7 @@ class BTNProvider(generic.TorrentProvider): series_param.update(base_params) search_params.append(series_param) - return [dict({'Season': search_params})] + return [dict(Season=search_params)] def _episode_strings(self, ep_obj, **kwargs): @@ -231,7 +242,7 @@ class BTNProvider(generic.TorrentProvider): series_param.update(base_params) search_params.append(series_param) - return [dict({'Episode': search_params})] + return [dict(Episode=search_params)] def cache_data(self, **kwargs): @@ -246,11 +257,11 @@ class BTNProvider(generic.TorrentProvider): # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, # older items will be done through backlog if 86400 < seconds_since_last_update: - logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on %s was over 24 hours' - % self.name, logger.WARNING) + logger.log(u'Only trying to fetch the last 24 hours even 
though the last known successful update on ' + + '%s was over 24 hours' % self.name, logger.WARNING) seconds_since_last_update = 86400 - return self._search_provider(dict({'Cache': ['']}), age=seconds_since_last_update) + return self._search_provider(dict(Cache=['']), age=seconds_since_last_update) class BTNCache(tvcache.TVCache): @@ -258,7 +269,7 @@ class BTNCache(tvcache.TVCache): def __init__(self, this_provider): tvcache.TVCache.__init__(self, this_provider) - self.update_freq = 15 # cache update frequency + self.update_freq = 15 def _cache_data(self): diff --git a/sickbeard/providers/btscene.py b/sickbeard/providers/btscene.py new file mode 100644 index 00000000..fddd2cb5 --- /dev/null +++ b/sickbeard/providers/btscene.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# +# This file is part of SickGear. +# +# SickGear is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickGear is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickGear. If not, see . + +import re +import traceback +import urllib + +from . import generic +from sickbeard import logger +from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt +from lib.unidecode import unidecode + + +class BTSceneProvider(generic.TorrentProvider): + + def __init__(self): + generic.TorrentProvider.__init__(self, 'BTScene') + + self.url_home = ['http://www.btstorrent.cc/', 'http://bittorrentstart.com/', + 'http://diriri.xyz/', 'http://mytorrentz.tv/'] + + self.url_vars = {'search': 'results.php?q=%s&category=series&order=1', 'browse': 'lastdaycat/type/Series/', + 'get': 'torrentdownload.php?id=%s'} + self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s', + 'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'} + + self.minseed, self.minleech = 2 * [None] + self.confirmed = False + + @staticmethod + def _has_signature(data=None): + return data and re.search(r'(?i)(?:btscene|bts[-]official|full\sindex)', data) + + def _search_provider(self, search_params, **kwargs): + + results = [] + if not self.url: + return results + + items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in { + 'info': '\w+?(\d+)[.]html', 'verified': 'Verified'}.iteritems()) + for mode in search_params.keys(): + for search_string in search_params[mode]: + + search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string + + search_url = self.urls['browse'] if 'Cache' == mode \ + else self.urls['search'] % (urllib.quote_plus(search_string)) + + html = self.get_url(search_url) + + cnt = len(items[mode]) + try: + if not html or self._has_no_results(html): + raise generic.HaltParseException + with BS4Parser(html, features=['html5lib', 'permissive']) as soup: + torrent_rows = soup.select('tr[class$="_tr"]') + + if not len(torrent_rows): + raise generic.HaltParseException + + for tr in torrent_rows: + try: + seeders, leechers, size = [tryInt(n, n) for n in [ + tr.find_all('td')[x].get_text().strip() for x in -4, -3, -5]] + if self._peers_fail(mode, seeders, leechers) or \ + self.confirmed 
and not (tr.find('img', src=rc['verified']) + or tr.find('img', title=rc['verified'])): + continue + + info = tr.find('a', href=rc['info']) + title = info and info.get_text().strip() + tid_href = info and rc['info'].findall(info['href']) + tid_href = tid_href and tryInt(tid_href[0], 0) or 0 + tid_tr = tryInt(tr['id'].strip('_'), 0) + tid = (tid_tr, tid_href)[tid_href > tid_tr] + + download_url = info and (self.urls['get'] % tid) + except (AttributeError, TypeError, ValueError, IndexError): + continue + + if title and download_url: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) + + except generic.HaltParseException: + pass + except (StandardError, Exception): + logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + + self._log_search(mode, len(items[mode]) - cnt, search_url) + + results = self._sort_seeding(mode, results + items[mode]) + + return results + + def _episode_strings(self, ep_obj, **kwargs): + return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs) + + +provider = BTSceneProvider() diff --git a/sickbeard/providers/dh.py b/sickbeard/providers/dh.py index 3fcc7dc9..fb3a29c1 100644 --- a/sickbeard/providers/dh.py +++ b/sickbeard/providers/dh.py @@ -46,9 +46,9 @@ class DHProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(DHProvider, self)._authorised( - logged_in=(lambda x=None: (None is x or re.search('(?i)rss\slink', x)) and self.has_all_cookies() and + logged_in=(lambda y=None: (None is y or re.search('(?i)rss\slink', y)) and self.has_all_cookies() and self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest), - failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings')) + failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) def _search_provider(self, search_params, **kwargs): @@ -82,14 +82,12 @@ class DHProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]] + tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]] if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']): continue title = tr.find('a', href=rc['info']).get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -98,14 +96,12 @@ class DHProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url')) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/extratorrent.py b/sickbeard/providers/extratorrent.py new file mode 100644 index 00000000..23154d59 --- /dev/null +++ b/sickbeard/providers/extratorrent.py @@ -0,0 +1,108 @@ +# coding=utf-8 +# +# This file is part of SickGear. 
+# +# SickGear is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickGear is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickGear. If not, see . + +import re +import traceback +import urllib + +from . import generic +from sickbeard import logger +from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt +from lib.unidecode import unidecode + + +class ExtraTorrentProvider(generic.TorrentProvider): + + def __init__(self): + generic.TorrentProvider.__init__(self, 'ExtraTorrent') + + self.url_home = ['https://www.extratorrent%s/' % u for u in '.works', 'live.com', 'online.com', '.cc'] + \ + ['https://etmirror.com/', 'https://etproxy.com/', 'https://extratorrent.usbypass.xyz/'] + + self.url_vars = {'search': 'search/?new=1&search=%s&s_cat=8', 'browse': 'view/today/TV.html', + 'get': '%s'} + self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s', + 'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'} + + self.minseed, self.minleech = 2 * [None] + + @staticmethod + def _has_signature(data=None): + return data and re.search(r'(?i)ExtraTorrent', data[33:1024:]) + + def _search_provider(self, search_params, **kwargs): + + results = [] + if not self.url: + return results + + items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in { + 'get': 'download', 'title': '(?:^download|torrent$)', 'get_url': '^/(torrent_)?'}.iteritems()) + + for mode in search_params.keys(): + for search_string in search_params[mode]: + + search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string + + search_url = self.urls['browse'] if 'Cache' == mode \ + else self.urls['search'] % (urllib.quote_plus(search_string)) + + html = self.get_url(search_url) + + cnt = len(items[mode]) + try: + if not html or self._has_no_results(html): + raise generic.HaltParseException + with BS4Parser(html, features=['html5lib', 'permissive']) as soup: + torrent_table = soup.find('table', class_='tl') + torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') + + if 2 > len(torrent_rows): + raise generic.HaltParseException + + for tr in torrent_rows[1:]: + try: + seeders, leechers, size = [tryInt(n.replace('---', '0'), n) for n in [ + tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]] + if self._peers_fail(mode, seeders, leechers): + continue + + info = tr.find('a', title=rc['get']) or {} + title = rc['title'].sub('', info.get('title') or '').strip() + download_url = self._link(rc['get_url'].sub('', info['href'])) + except (AttributeError, TypeError, ValueError, IndexError): + continue + + if title and download_url: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) + + except generic.HaltParseException: + pass + except (StandardError, Exception): + logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) + + self._log_search(mode, len(items[mode]) - cnt, search_url) + + results = self._sort_seeding(mode, results + items[mode]) + + return results + + +provider = ExtraTorrentProvider() diff --git a/sickbeard/providers/fano.py b/sickbeard/providers/fano.py index 1f26d61e..0494fa73 100644 --- a/sickbeard/providers/fano.py +++ b/sickbeard/providers/fano.py @@ -45,7 +45,7 @@ class FanoProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(FanoProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass'])) + return super(FanoProvider, self)._authorised() def _search_provider(self, search_params, **kwargs): @@ -82,14 +82,12 @@ class FanoProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']): continue title = tr.find('a', href=rc['info']).get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -98,14 +96,12 @@ class FanoProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/filelist.py b/sickbeard/providers/filelist.py index 8655d3e5..545d2e11 100644 --- a/sickbeard/providers/filelist.py +++ b/sickbeard/providers/filelist.py @@ -78,14 +78,12 @@ class FLProvider(generic.TorrentProvider): for tr in torrent_rows: try: seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.select('span[style*="cell"]')[x].get_text().strip()) for x in (-3, -2, -5)]] + tr.select('span[style*="cell"]')[x].get_text().strip() for x in -3, -2, -5]] if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']): continue title = tr.find('a', href=rc['info']).get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -94,14 +92,12 @@ class FLProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url')) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 34c8ba3a..8f30ee34 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -32,7 +32,7 @@ class FreshOnTVProvider(generic.TorrentProvider): self.url_base = 'https://freshon.tv/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'login.php?action=makelogin', + 'login_action': self.url_base + 'login.php', 'search': self.url_base + 'browse.php?incldead=%s&words=0&%s&search=%s', 'get': self.url_base + '%s'} @@ -45,8 +45,8 @@ class FreshOnTVProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(FreshOnTVProvider, self)._authorised( - post_params={'login': 'Do it!'}, - failed_msg=(lambda x=None: 'DDoS protection by CloudFlare' in x and + post_params={'form_tmpl': True}, + failed_msg=(lambda y=None: 'DDoS protection by CloudFlare' in y and u'Unable to login to %s due to CloudFlare DDoS javascript check' or 'Username does not exist' in x and u'Invalid username or password for %s. Check settings' or @@ -80,7 +80,7 @@ class FreshOnTVProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'class': 'frame'}) + torrent_table = soup.find('table', class_='frame') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -92,14 +92,13 @@ class FreshOnTVProvider(generic.TorrentProvider): continue seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if self._peers_fail(mode, seeders, leechers): continue - info = tr.find('a', href=rc['info'], attrs={'class': rc['name']}) - title = info.attrs.get('title') or info.get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') + info = tr.find('a', href=rc['info'], class_=rc['name']) + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -108,13 +107,11 @@ class FreshOnTVProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/funfile.py b/sickbeard/providers/funfile.py index a5ab41ee..a5d18646 100644 --- a/sickbeard/providers/funfile.py +++ b/sickbeard/providers/funfile.py @@ -32,7 +32,7 @@ class FunFileProvider(generic.TorrentProvider): self.url_base = 'https://www.funfile.org/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'takelogin.php', + 'login_action': self.url_base + 'login.php', 'search': self.url_base + 'browse.php?%s&search=%s&incldead=0&showspam=1&', 'get': self.url_base + '%s'} @@ -45,9 +45,9 @@ class FunFileProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(FunFileProvider, self)._authorised( - logged_in=(lambda x=None: None is not self.session.cookies.get('uid', domain='.funfile.org') and - None is not self.session.cookies.get('pass', domain='.funfile.org')), - post_params={'login': 'Login', 'returnto': '/'}, timeout=self.url_timeout) + logged_in=(lambda y=None: all( + [None is not self.session.cookies.get(x, domain='.funfile.org') for x in 'uid', 'pass'])), + post_params={'form_tmpl': True}, timeout=self.url_timeout) def _search_provider(self, search_params, **kwargs): @@ -72,7 +72,7 @@ class FunFileProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('td', attrs={'class': 'colhead'}).find_parent('table') + torrent_table = soup.find('td', class_='colhead').find_parent('table') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -85,13 +85,12 @@ class FunFileProvider(generic.TorrentProvider): continue seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers): continue - title = info.attrs.get('title') or info.get_text().strip() - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -100,14 +99,12 @@ class FunFileProvider(generic.TorrentProvider): except (generic.HaltParseException, AttributeError): pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index 3c236249..cb73e0e3 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -33,6 +33,7 @@ import sickbeard import requests import requests.cookies from hachoir_parser import guessParser +from hachoir_core.error import HachoirError from hachoir_core.stream import FileInputStream from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek @@ -77,7 +78,8 @@ class GenericProvider: self.headers = { # Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases, # otherwise session might be broken and download fail, asking again for authentication - # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'} + # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' + + # 'Chrome/32.0.1700.107 Safari/537.36'} 'User-Agent': USER_AGENT} def get_id(self): @@ -99,9 +101,17 @@ class GenericProvider: def _authorised(self): return True - def _check_auth(self): + def _check_auth(self, is_required=None): return True + def is_public_access(self): + try: + return bool(re.search('(?i)rarbg|sick|womble|anizb', self.name)) \ + or False is bool(('_authorised' in self.__class__.__dict__ or hasattr(self, 'digest') + or self._check_auth(is_required=True))) + except AuthException: + return False + def is_active(self): if GenericProvider.NZB == self.providerType and sickbeard.USE_NZBS: return self.is_enabled() @@ -176,7 +186,7 @@ class GenericProvider: urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,)) for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'), ('s', 'torrentproject.se'), ('', 'thetorrent.org'))] - except: + except (StandardError, Exception): link_type = 'torrent' urls = [result.url] @@ -204,7 +214,7 @@ class GenericProvider: try: helpers.moveFile(cache_file, final_file) msg = 'moved' - except: + except (OSError, Exception): msg = 'copied cached file' logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file)) saved = True @@ -234,13 +244,13 @@ class GenericProvider: try: stream = FileInputStream(file_name) parser = guessParser(stream) - except: + except (HachoirError, Exception): pass result = parser and 'application/x-bittorrent' == parser.mime_type try: stream._input.close() - except: + except (HachoirError, Exception): pass return result @@ -282,7 +292,7 @@ class GenericProvider: try: title, url = isinstance(item, tuple) and (item[0], item[1]) or \ (item.get('title', None), item.get('link', None)) - except Exception: + except (StandardError, Exception): pass title = title and re.sub(r'\s+', '.', u'%s' % title) @@ -290,6 +300,15 @@ class GenericProvider: return title, url + def _link(self, url, url_tmpl=None): + + url = url and str(url).strip().replace('&', '&') or '' + try: + url_tmpl = url_tmpl or self.urls['get'] + except (StandardError, Exception): + url_tmpl = '%s' + return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/')) + def find_search_results(self, show, episodes, search_mode, manual_search=False): self._check_auth() @@ -391,8 +410,9 @@ class GenericProvider: logger.log(u'The result 
' + title + u' doesn\'t seem to be a valid season that we are trying' + u' to snatch, ignoring', logger.DEBUG) add_cache_entry = True - elif len(parse_result.episode_numbers) and not [ep for ep in episodes if - ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: + elif len(parse_result.episode_numbers) and not [ + ep for ep in episodes if ep.season == parse_result.season_number and + ep.episode in parse_result.episode_numbers]: logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' + u' to snatch, ignoring', logger.DEBUG) add_cache_entry = True @@ -409,8 +429,8 @@ class GenericProvider: else: airdate = parse_result.air_date.toordinal() my_db = db.DBConnection() - sql_results = my_db.select('SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?', - [show_obj.indexerid, airdate]) + sql_results = my_db.select('SELECT season, episode FROM tv_episodes ' + + 'WHERE showid = ? AND airdate = ?', [show_obj.indexerid, airdate]) if 1 != len(sql_results): logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' + @@ -507,6 +527,7 @@ class GenericProvider: def log_result(self, mode='Cache', count=0, url='url missing'): """ Simple function to log the result of any search + :param mode: string that this log relates to :param count: count of successfully processed items :param url: source url of item(s) """ @@ -541,8 +562,8 @@ class GenericProvider: def has_all_cookies(self, cookies=None, pre=''): - cookies = cookies or ['uid', 'pass'] - return False not in ['%s%s' % (pre, item) in self.session.cookies for item in ([cookies], cookies)[isinstance(cookies, list)]] + cookies = cookies and ([cookies], cookies)[isinstance(cookies, list)] or ['uid', 'pass'] + return all(['%s%s' % (pre, item) in self.session.cookies for item in cookies]) def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'): @@ -558,7 +579,7 @@ class GenericProvider: def _bytesizer(size_dim=''): try: - value = float('.'.join(re.findall('(?i)(\d+)(?:[\.,](\d+))?', size_dim)[0])) + value = float('.'.join(re.findall('(?i)(\d+)(?:[.,](\d+))?', size_dim)[0])) except TypeError: return size_dim except IndexError: @@ -587,7 +608,7 @@ class NZBProvider(object, GenericProvider): return (getattr(self, 'key', '') and self.key) or (getattr(self, 'api_key', '') and self.api_key) or None return False - def _check_auth(self): + def _check_auth(self, is_required=None): has_key = self.maybe_apikey() if has_key: @@ -703,9 +724,16 @@ class TorrentProvider(object, GenericProvider): @staticmethod def _sort_seeders(mode, items): - + """ legacy function used by a custom provider, do not remove """ mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True) + @staticmethod + def _sort_seeding(mode, items): + + if mode in ['Season', 'Episode']: + return sorted(set(items), key=lambda tup: tup[2], reverse=True) + return items + def _peers_fail(self, mode, seeders=0, leechers=0): return 'Cache' != mode and (seeders < getattr(self, 'minseed', 0) or leechers < getattr(self, 'minleech', 0)) @@ -744,7 +772,7 @@ class TorrentProvider(object, GenericProvider): ep_dict = self._ep_dict(ep_obj) sp_detail = (show.air_by_date or show.is_sports) and str(ep_obj.airdate).split('-')[0] or \ (show.is_anime and ep_obj.scene_absolute_number or - 'S%(seasonnumber)02d' % ep_dict if 'sp_detail' not in kwargs.keys() else kwargs['sp_detail'](ep_dict)) + ('sp_detail' in kwargs.keys() and kwargs['sp_detail'](ep_dict)) 
or 'S%(seasonnumber)02d' % ep_dict) sp_detail = ([sp_detail], sp_detail)[isinstance(sp_detail, list)] detail = ({}, {'Season_only': sp_detail})[detail_only and not self.show.is_sports and not self.show.is_anime] return [dict({'Season': self._build_search_strings(sp_detail, scene, prefix)}.items() + detail.items())] @@ -792,7 +820,7 @@ class TorrentProvider(object, GenericProvider): prefix = ([prefix], prefix)[isinstance(prefix, list)] search_params = [] - crop = re.compile(r'([\.\s])(?:\1)+') + crop = re.compile(r'([.\s])(?:\1)+') for name in set(allPossibleShowNames(self.show)): if process_name: name = helpers.sanitizeSceneName(name) @@ -861,11 +889,14 @@ class TorrentProvider(object, GenericProvider): def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30): - maxed_out = (lambda x: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', x)) + maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' + + '(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y)) logged_in, failed_msg = [None is not a and a or b for (a, b) in ( - (logged_in, (lambda x=None: self.has_all_cookies())), - (failed_msg, (lambda x='': maxed_out(x) and u'Urgent abort, running low on login attempts. Password flushed to prevent service disruption to %s.' or - (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', x) and + (logged_in, (lambda y=None: self.has_all_cookies())), + (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' + + u'Password flushed to prevent service disruption to %s.' or + (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' + + '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and u'Invalid username or password for %s. 
Check settings' or u'Failed to authenticate or parse a response from %s, abort provider'))) )] @@ -896,17 +927,25 @@ class TorrentProvider(object, GenericProvider): if url: response = helpers.getURL(url, session=self.session) try: - action = re.findall('[<]form[\w\W]+?action=[\'\"]([^\'\"]+)', response)[0] + post_params = isinstance(post_params, type({})) and post_params or {} + form = 'form_tmpl' in post_params and post_params.pop('form_tmpl') + if form: + form = re.findall( + '(?is)(]+%s.*?)' % (True is form and 'login' or form), response) + response = form and form[0] or response + + action = re.findall(']+action=[\'"]([^\'"]*)', response)[0] url = action if action.startswith('http') else \ + url if not action else \ + (url + action) if action.startswith('?') else \ (self.urls.get('login_base') or self.urls['config_provider_home_uri']) + action.lstrip('/') - tags = re.findall(r'(?is)()', response) + tags = re.findall(r'(?is)(]*)', response) nv = [(tup[0]) for tup in [ - re.findall(r'(?is)name=[\'\"]([^\'\"]+)[\'\"](?:.*?value=[\'\"]([^\'\"]+)[\'\"])?', x) + re.findall(r'(?is)name=[\'"]([^\'"]+)(?:[^>]*?value=[\'"]([^\'"]+))?', x) for x in tags]] for name, value in nv: if name not in ('username', 'password'): - post_params = isinstance(post_params, type({})) and post_params or {} post_params.setdefault(name, value) except KeyError: return super(TorrentProvider, self)._authorised() @@ -936,7 +975,7 @@ class TorrentProvider(object, GenericProvider): return False - def _check_auth(self): + def _check_auth(self, is_required=False): if hasattr(self, 'username') and hasattr(self, 'password'): if self.username and self.password: @@ -963,7 +1002,7 @@ class TorrentProvider(object, GenericProvider): return True setting = 'Passkey' else: - return GenericProvider._check_auth(self) + return not is_required and GenericProvider._check_auth(self) raise AuthException('%s for %s is empty in config provider options' % (setting, self.name)) @@ -982,7 +1021,7 @@ class TorrentProvider(object, GenericProvider): items = self._search_provider({'Propers': search_terms}) - clean_term = re.compile(r'(?i)[^a-z1-9\|\.]+') + clean_term = re.compile(r'(?i)[^a-z1-9|.]+') for proper_term in search_terms: proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term)) @@ -995,10 +1034,10 @@ class TorrentProvider(object, GenericProvider): @staticmethod def _has_no_results(*html): - return re.search(r'(?i)<(?:b|div|h\d|p|span|strong)[^>]*>(?:' + - 'your\ssearch\sdid\snot\smatch|' + - 'nothing\sfound|' + - '(sorry,\s)?no\storrents\s(found|match)|' + + return re.search(r'(?i)<(?:b|div|h\d|p|span|strong)[^>]*>\s*(?:' + + 'your\ssearch.*?did\snot\smatch|' + + '(?:nothing|0\s+torrents)\sfound|' + + '(sorry,\s)?no\s(?:results|torrents)\s(found|match)|' + '.*?there\sare\sno\sresults|' + '.*?no\shits\.\sTry\sadding' + ')', html[0]) diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index 462403f6..24ca21ae 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -47,7 +47,7 @@ class GFTrackerProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(GFTrackerProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='gft_')), + return super(GFTrackerProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='gft_')), url=[self.urls['login_init']]) def _search_provider(self, search_params, **kwargs): @@ -90,10 +90,9 @@ class GFTrackerProvider(generic.TorrentProvider): continue info = tr.find('a', 
href=rc['info']) - title = ('title' in info.attrs and info['title']) or info.get_text().strip() + title = (info.attrs.get('title') or info.get_text()).strip() size = tr.find_all('td')[-2].get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -102,13 +101,11 @@ class GFTrackerProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/grabtheinfo.py b/sickbeard/providers/grabtheinfo.py index dd6e92d3..e1142df0 100644 --- a/sickbeard/providers/grabtheinfo.py +++ b/sickbeard/providers/grabtheinfo.py @@ -81,35 +81,27 @@ class GrabTheInfoProvider(generic.TorrentProvider): for tr in torrent_rows[1 + shows_found:]: try: - info = tr.find('a', href=rc['info']) - if None is info: - continue - title = (('title' in info.attrs.keys() and info['title']) or info.get_text()).strip() - - download_url = tr.find('a', href=rc['get']) - if None is download_url: - continue - seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -3)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]] if self._peers_fail(mode, seeders, leechers): continue + + info = tr.find('a', href=rc['info']) + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, KeyError): continue - if title: - items[mode].append((title, self.urls['get'] % str(download_url['href'].lstrip('/')), - seeders, self._bytesizer(size))) + if title and download_url: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/hd4free.py b/sickbeard/providers/hd4free.py index ba9b89c5..0a1f50fe 100644 --- a/sickbeard/providers/hd4free.py +++ b/sickbeard/providers/hd4free.py @@ -52,10 +52,10 @@ class HD4FreeProvider(generic.TorrentProvider): for mode in search_params.keys(): for search_string in search_params[mode]: params['search'] = '+'.join(search_string.split()) - data_json = self.get_url(self.urls['search'], params=params, json=True) + json_resp = self.get_url(self.urls['search'], params=params, json=True) cnt = len(items[mode]) - for k, item in data_json.items(): + for k, item in json_resp.items(): if 'error' == k or not item.get('total_results'): break seeders, leechers, size = [tryInt(n, n) for n in [ @@ -63,17 +63,15 @@ class HD4FreeProvider(generic.TorrentProvider): if self._peers_fail(mode, seeders, leechers): continue title = item.get('release_name') - download_url = (self.urls['get'] % (item.get('torrentid'), item.get('torrentpass')), None)[ - not (item.get('torrentid') and item.get('torrentpass'))] + tid, tpass = [item.get('torrent' + x) for x in 'id', 'pass'] + download_url = all([tid, tpass]) and (self.urls['get'] % (tid, tpass)) if title and download_url: items[mode].append((title, download_url, seeders, self._bytesizer('%smb' % size))) self._log_search(mode, len(items[mode]) - cnt, self.session.response['url']) time.sleep(1.1) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index 8ba0c731..3a8e78f5 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -51,7 +51,7 @@ class HDBitsProvider(generic.TorrentProvider): def check_auth_from_data(self, parsed_json): if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json: - logger.log(u'Incorrect username or password for %s : %s' % (self.name, parsed_json['message']), logger.DEBUG) + logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG) raise AuthException('Your username or password for %s is incorrect, check your config.' 
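
Hunks throughout this patch normalise stat parsing to `tryInt(n, n)` from sickbeard.helpers. The helper itself is not shown in this diff; its assumed behaviour, judging by the call sites, is an int() that returns a fallback instead of raising, so non-numeric text can pass through untouched:

def try_int(value, default=0):
    # best-effort int(); hand back the default rather than raise
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

# numeric text converts; non-numeric text survives as its own default,
# leaving e.g. '1.2 GB' intact for a later _bytesizer()-style parser
assert 12 == try_int('12') and '1.2 GB' == try_int('1.2 GB', '1.2 GB')
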
% self.name) return True @@ -120,13 +120,14 @@ class HDBitsProvider(generic.TorrentProvider): cnt = len(items[mode]) for item in json_resp['data']: try: - seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seeders', 'leechers', 'size']] + seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in + 'seeders', 'leechers', 'size']] if self._peers_fail(mode, seeders, leechers)\ or self.freeleech and re.search('(?i)no', item.get('freeleech', 'no')): continue + title = item['name'] download_url = self.urls['get'] % urllib.urlencode({'id': item['id'], 'passkey': self.passkey}) - except (AttributeError, TypeError, ValueError): continue @@ -136,12 +137,10 @@ class HDBitsProvider(generic.TorrentProvider): self._log_search(mode, len(items[mode]) - cnt, ('search_param: ' + str(search_param), self.name)['Cache' == mode]) - self._sort_seeders(mode, items) + results = self._sort_seeding(mode, results + items[mode]) - if id_search and len(items[mode]): - return items[mode] - - results = list(set(results + items[mode])) + if id_search and len(results): + return results return results diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index f0b1522b..9be1ac19 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -21,6 +21,7 @@ import traceback from . import generic from sickbeard import logger from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt from lib.unidecode import unidecode @@ -31,8 +32,9 @@ class HDSpaceProvider(generic.TorrentProvider): self.url_base = 'https://hd-space.org/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'index.php?page=login', - 'browse': self.url_base + 'index.php?page=torrents&' + '&'.join(['options=0', 'active=1', 'category=']), + 'login_action': self.url_base + 'index.php?page=login', + 'browse': self.url_base + 'index.php?page=torrents&' + '&'.join( + ['options=0', 'active=1', 'category=']), 'search': '&search=%s', 'get': self.url_base + '%s'} @@ -44,7 +46,8 @@ class HDSpaceProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(HDSpaceProvider, self)._authorised(post_params={'uid': self.username, 'pwd': self.password}) + return super(HDSpaceProvider, self)._authorised( + post_params={'uid': self.username, 'pwd': self.password, 'form_tmpl': 'name=[\'"]login[\'"]'}) def _search_provider(self, search_params, **kwargs): @@ -71,8 +74,9 @@ class HDSpaceProvider(generic.TorrentProvider): if not html or self._has_no_results(html): raise generic.HaltParseException - with BS4Parser(html, features=['html5lib', 'permissive'], attr='width="100%"\Wclass="lista"') as soup: - torrent_table = soup.find_all('table', attrs={'class': 'lista'})[-1] + with BS4Parser(html, features=['html5lib', 'permissive'], + attr='width="100%"\Wclass="lista"') as soup: + torrent_table = soup.find_all('table', class_='lista')[-1] torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -85,16 +89,16 @@ class HDSpaceProvider(generic.TorrentProvider): if None is downlink: continue try: - seeders, leechers = [int(x.get_text().strip()) for x in tr.find_all('a', href=rc['peers'])] + seeders, leechers = [tryInt(x.get_text().strip()) + for x in tr.find_all('a', href=rc['peers'])] if self._peers_fail(mode, seeders, leechers)\ or self.freeleech and None is tr.find('img', title=rc['fl']): continue info = tr.find('a', href=rc['info']) - title = ('title' in info.attrs and info['title']) or 
info.get_text().strip() + title = (info.attrs.get('title') or info.get_text()).strip() size = tr.find_all('td')[-5].get_text().strip() - - download_url = self.urls['get'] % str(downlink['href']).lstrip('/') + download_url = self._link(downlink['href']) except (AttributeError, TypeError, ValueError): continue @@ -103,13 +107,11 @@ class HDSpaceProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/ilt.py b/sickbeard/providers/ilt.py index 6d887a63..482573bc 100644 --- a/sickbeard/providers/ilt.py +++ b/sickbeard/providers/ilt.py @@ -45,7 +45,7 @@ class ILTProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(ILTProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass'])) + return super(ILTProvider, self)._authorised() def _search_provider(self, search_params, **kwargs): @@ -79,14 +79,12 @@ class ILTProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]] + tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]] if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']): continue title = tr.find('a', href=rc['info']).get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -95,14 +93,12 @@ class ILTProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url')) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index b144f8bd..72fd2cb6 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -21,6 +21,7 @@ import traceback from . import generic from sickbeard import logger from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt from lib.unidecode import unidecode @@ -45,9 +46,10 @@ class IPTorrentsProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(IPTorrentsProvider, self)._authorised( - logged_in=(lambda x='': ('RSS Link' in x) and self.has_all_cookies() and - self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest), - failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings')) + logged_in=(lambda y='': all( + ['RSS Link' in y, self.has_all_cookies()] + + [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])), + failed_msg=(lambda y=None: u'Invalid cookie details for %s. 
Check settings')) @staticmethod def _has_signature(data=None): @@ -78,8 +80,7 @@ class IPTorrentsProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'id': 'torrents'}) or \ - soup.find('table', attrs={'class': 'torrents'}) + torrent_table = soup.find(id='torrents') or soup.find('table', class_='torrents') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -87,16 +88,15 @@ class IPTorrentsProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: - seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip()) - for x in ('t_seeders', 't_leechers')] + seeders, leechers = [tryInt(tr.find('td', class_='t_' + x).get_text().strip()) + for x in 'seeders', 'leechers'] if self._peers_fail(mode, seeders, leechers): continue info = tr.find('a', href=rc['info']) - title = ('title' in info.attrs and info['title']) or info.get_text().strip() + title = (info.attrs.get('title') or info.get_text()).strip() size = tr.find_all('td')[-4].get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -105,13 +105,11 @@ class IPTorrentsProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/limetorrents.py b/sickbeard/providers/limetorrents.py new file mode 100644 index 00000000..280b70a3 --- /dev/null +++ b/sickbeard/providers/limetorrents.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# +# This file is part of SickGear. +# +# SickGear is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickGear is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickGear. If not, see . + +import re +import traceback +import urllib + +from . 
import generic +from sickbeard import logger +from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt +from lib.unidecode import unidecode + + +class LimeTorrentsProvider(generic.TorrentProvider): + + def __init__(self): + generic.TorrentProvider.__init__(self, 'LimeTorrents') + + self.url_home = ['https://www.limetorrents.cc/', 'https://limetorrents.usbypass.xyz/'] + + self.url_vars = {'search': 'search/tv/%s/', 'browse': 'browse-torrents/TV-shows/'} + self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s', + 'browse': '%(home)s%(vars)s'} + + self.minseed, self.minleech = 2 * [None] + + @staticmethod + def _has_signature(data=None): + return data and re.search(r'(?i)LimeTorrents', data[33:1024:]) + + def _search_provider(self, search_params, **kwargs): + + results = [] + if not self.url: + return results + + items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'dl'}.iteritems()) + + for mode in search_params.keys(): + for search_string in search_params[mode]: + + search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string + + search_url = self.urls['browse'] if 'Cache' == mode \ + else self.urls['search'] % (urllib.quote_plus(search_string)) + + html = self.get_url(search_url) + + cnt = len(items[mode]) + try: + if not html or self._has_no_results(html): + raise generic.HaltParseException + with BS4Parser(html, features=['html5lib', 'permissive']) as soup: + torrent_table = soup.find_all('table', class_='table2') + torrent_rows = [] if not torrent_table else [ + t.select('tr[bgcolor]') for t in torrent_table if + all([x in ' '.join(x.get_text() for x in t.find_all('th')).lower() for x in + ['torrent', 'size']])] + + if not len(torrent_rows): + raise generic.HaltParseException + + for tr in torrent_rows[0]: # 0 = all rows + try: + seeders, leechers, size = [tryInt(n.replace(',', ''), n) for n in [ + tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]] + if self._peers_fail(mode, seeders, leechers): + continue + + anchors = tr.td.find_all('a') + stats = anchors and [len(a.get_text()) for a in anchors] + title = stats and anchors[stats.index(max(stats))].get_text().strip() + download_url = self._link((tr.td.find('a', class_=rc['get']) or {}).get('href')) + except (AttributeError, TypeError, ValueError, IndexError): + continue + + if title and download_url: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) + + except generic.HaltParseException: + pass + except (StandardError, Exception): + logger.log(u'Failed to parse. 
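
The new LimeTorrents parser above avoids brittle table indexes: it keeps only tables whose header cells mention both 'torrent' and 'size', treats rows carrying a bgcolor attribute as data rows, and, because a row's first cell holds several anchors, assumes the release title is the anchor with the longest text. A self-contained sketch of that selection logic with plain BeautifulSoup (the sample markup is invented):

from bs4 import BeautifulSoup

html = '''<table class="table2">
<tr><th>Torrent Name</th><th>Size</th><th>S</th></tr>
<tr bgcolor="#F4F4F4"><td><a href="/how">?</a>
<a href="/some-show-s01e02">Some.Show.S01E02.720p.HDTV.x264</a></td></tr>
</table>'''

soup = BeautifulSoup(html, 'html.parser')
rows = []
for table in soup.find_all('table', class_='table2'):
    heads = ' '.join(th.get_text() for th in table.find_all('th')).lower()
    if all(word in heads for word in ('torrent', 'size')):
        rows += table.select('tr[bgcolor]')  # data rows carry a bgcolor

for tr in rows:
    anchors = tr.td.find_all('a')
    # the longest anchor text is taken to be the release title
    title = max(anchors, key=lambda a: len(a.get_text())).get_text().strip()
    assert 'Some.Show.S01E02.720p.HDTV.x264' == title
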
Traceback: %s' % traceback.format_exc(), logger.ERROR) + + self._log_search(mode, len(items[mode]) - cnt, search_url) + + results = self._sort_seeding(mode, results + items[mode]) + + return results + + +provider = LimeTorrentsProvider() diff --git a/sickbeard/providers/morethan.py b/sickbeard/providers/morethan.py index 4ecdb796..69410949 100644 --- a/sickbeard/providers/morethan.py +++ b/sickbeard/providers/morethan.py @@ -34,7 +34,7 @@ class MoreThanProvider(generic.TorrentProvider): self.url_base = 'https://www.morethan.tv/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'login.php', + 'login_action': self.url_base + 'login.php', 'search': self.url_base + 'torrents.php?searchstr=%s&' + '&'.join([ 'tags_type=1', 'order_by=time', 'order_way=desc', 'filter_cat[2]=1', 'action=basic', 'searchsubmit=1']), @@ -46,8 +46,8 @@ class MoreThanProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(MoreThanProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')), - post_params={'keeplogged': '1', 'login': 'Log in'}) + return super(MoreThanProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')), + post_params={'keeplogged': '1', 'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -72,7 +72,7 @@ class MoreThanProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'class': 'torrent_table'}) + torrent_table = soup.find('table', class_='torrent_table') torrent_rows = [] if torrent_table: torrent_rows = torrent_table.find_all('tr') @@ -86,17 +86,15 @@ class MoreThanProvider(generic.TorrentProvider): try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if self._peers_fail(mode, seeders, leechers): continue title = tr.find('a', title=rc['info']).get_text().strip() if title.lower().startswith('season '): - title = '%s %s' % (tr.find('div', attrs={'class': rc['name']}).get_text().strip(), - title) + title = '%s %s' % (tr.find('div', class_=rc['name']).get_text().strip(), title) - link = str(tr.find('a', href=rc['get'])['href']).replace('&', '&').lstrip('/') - download_url = self.urls['get'] % link + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -105,14 +103,12 @@ class MoreThanProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/ncore.py b/sickbeard/providers/ncore.py new file mode 100644 index 00000000..0ac76f09 --- /dev/null +++ b/sickbeard/providers/ncore.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# +# Author: SickGear +# +# This file is part of SickGear. +# +# SickGear is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+
+from . import generic
+from sickbeard import logger
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
+from lib.unidecode import unidecode
+
+
+class NcoreProvider(generic.TorrentProvider):
+
+ def __init__(self):
+ generic.TorrentProvider.__init__(self, 'nCore')
+
+ self.url_base = 'https://ncore.cc/'
+ self.urls = {'config_provider_home_uri': self.url_base,
+ 'login_action': self.url_base + 'login.php',
+ 'search': self.url_base + 'torrents.php?mire=%s&' + '&'.join([
+ 'miszerint=fid', 'hogyan=DESC', 'tipus=kivalasztottak_kozott',
+ 'kivalasztott_tipus=xvidser,dvdser,hdser', 'miben=name']),
+ 'get': self.url_base + '%s'}
+
+ self.url = self.urls['config_provider_home_uri']
+
+ self.username, self.password, self.minseed, self.minleech = 4 * [None]
+ self.chk_td = True
+
+ def _authorised(self, **kwargs):
+
+ return super(NcoreProvider, self)._authorised(
+ logged_in=(lambda y='': all([bool(y), 'action="login' not in y, self.has_all_cookies('PHPSESSID')])),
+ post_params={'nev': self.username, 'pass': self.password, 'form_tmpl': 'name=[\'"]login[\'"]'})
+
+ def _search_provider(self, search_params, **kwargs):
+
+ results = []
+ if not self._authorised():
+ return results
+
+ items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'list': '.*?torrent_all', 'info': 'details'}.iteritems())
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+ search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+ search_url = self.urls['search'] % search_string
+
+ # fetches 15 results by default, and up to 100 if allowed in user profile
+ html = self.get_url(search_url)
+
+ cnt = len(items[mode])
+ try:
+ if not html or self._has_no_results(html):
+ raise generic.HaltParseException
+
+ with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+ torrent_table = soup.find('div', class_=rc['list'])
+ torrent_rows = [] if not torrent_table else torrent_table.find_all('div', class_='box_torrent')
+
+ if not len(torrent_rows):
+ raise generic.HaltParseException
+
+ for tr in torrent_rows:
+ try:
+ seeders, leechers, size = [tryInt(n, n) for n in [
+ tr.find('div', class_=x).get_text().strip()
+ for x in 'box_s2', 'box_l2', 'box_meret2']]
+ if self._peers_fail(mode, seeders, leechers):
+ continue
+
+ anchor = tr.find('a', href=rc['info'])
+ title = (anchor.get('title') or anchor.get_text()).strip()
+ download_url = self._link(anchor.get('href').replace('details', 'download'))
+ except (AttributeError, TypeError, ValueError):
+ continue
+
+ if title and download_url:
+ items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+
+ except generic.HaltParseException:
+ pass
+ except (StandardError, Exception):
+ logger.log(u'Failed to parse.
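
The nCore parser above, like most hunks in this patch, builds its download link through `self._link(...)`, which replaces the old `self.urls['get'] % str(...).lstrip('/')` idiom. `_link` itself is defined in generic.py outside this excerpt; a plausible sketch of the behaviour its call sites rely on (a guess, not the actual implementation):

def link(url_base, href):
    # join a scraped href onto the provider base, passing absolute
    # URLs through and normalising the slashes of relative ones
    href = (href or '').strip()
    if not href:
        return None
    return href if href.startswith('http') else (
        url_base.rstrip('/') + '/' + href.lstrip('/'))

assert 'https://x.org/download.php?id=1' == link('https://x.org/', '/download.php?id=1')
assert 'https://cdn.y.net/a.torrent' == link('https://x.org/', 'https://cdn.y.net/a.torrent')
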
Traceback: %s' % traceback.format_exc(), logger.ERROR)
+
+ self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+ results = self._sort_seeding(mode, results + items[mode])
+
+ return results
+
+
+provider = NcoreProvider()
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 530cfba1..7f1403fe 100755 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -359,7 +359,7 @@ class NewznabCache(tvcache.TVCache): def __init__(self, provider): tvcache.TVCache.__init__(self, provider)
- self.update_freq = 5 # cache update frequency
+ self.update_freq = 5
def updateCache(self): diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index b131a9ef..9082f4b4 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -1,5 +1,3 @@
-# Author: Mr_Orange
-# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
# @@ -16,10 +14,12 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+import re
import urllib
from . import generic from sickbeard import logger, show_name_helpers, tvcache
+from sickbeard.helpers import tryInt
class NyaaProvider(generic.TorrentProvider): @@ -27,43 +27,55 @@ class NyaaProvider(generic.TorrentProvider): def __init__(self): generic.TorrentProvider.__init__(self, 'NyaaTorrents', anime_only=True)
- self.url_base = self.url = 'http://www.nyaa.se/'
+ self.url_base = self.url = 'https://www.nyaa.se/'
+
+ self.minseed, self.minleech = 2 * [None]
self.cache = NyaaCache(self)
- def _search_provider(self, search_string, **kwargs):
+ def _search_provider(self, search_string, search_mode='eponly', **kwargs):
+
+ if self.show and not self.show.is_anime:
+ return []
+
+ params = urllib.urlencode({'term': search_string.encode('utf-8'),
+ 'cats': '1_37', # Limit to English-translated Anime (for now)
+ # 'sort': '2', # Sort Descending By Seeders
+ })
+
+ return self.get_data(getrss_func=self.cache.getRSSFeed,
+ search_url='%s?page=rss&%s' % (self.url, params),
+ mode=('Episode', 'Season')['sponly' == search_mode])
+
+ def get_data(self, getrss_func, search_url, mode='cache'):
+
+ data = getrss_func(search_url)
results = []
- if self.show and not self.show.is_anime:
- return results
-
- params = {'term': search_string.encode('utf-8'),
- 'cats': '1_37', # Limit to English-translated Anime (for now)
- # 'sort': '2', # Sort Descending By Seeders
- }
-
- search_url = self.url + '?page=rss&' + urllib.urlencode(params)
-
- logger.log(u'Search string: ' + search_url, logger.DEBUG)
-
- data = self.cache.getRSSFeed(search_url)
if data and 'entries' in data:
- items = data.entries
- for curItem in items:
- title, url = self._title_and_url(curItem)
+ rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+ 'stats': '(\d+)\W+seed[^\d]+(\d+)\W+leech[^\d]+\d+\W+down[^\d]+([\d.,]+\s\w+)'}.iteritems())
- if title and url:
- results.append(curItem)
- else:
- logger.log(u'The data returned from ' + self.name + ' is incomplete, this result is unusable',
- logger.DEBUG)
+ for cur_item in data.get('entries', []):
+ try:
+ seeders, leechers, size = 0, 0, 0
+ stats = rc['stats'].findall(cur_item.get('summary_detail', {'value': ''}).get('value', ''))
+ if len(stats):
+ seeders, leechers, size = (tryInt(n, n) for n in stats[0])
+ if self._peers_fail(mode, seeders, leechers):
+ continue
+ title, download_url = self._title_and_url(cur_item)
+ download_url = self._link(download_url)
+ except (AttributeError, TypeError,
ValueError, IndexError): + continue - return results + if title and download_url: + results.append((title, download_url, seeders, self._bytesizer(size))) - def find_search_results(self, show, episodes, search_mode, manual_search=False): + self._log_search(mode, len(results), search_url) - return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search) + return self._sort_seeding(mode, results) def _season_strings(self, ep_obj, **kwargs): @@ -79,20 +91,17 @@ class NyaaCache(tvcache.TVCache): def __init__(self, this_provider): tvcache.TVCache.__init__(self, this_provider) - self.update_freq = 15 # cache update frequency + self.update_freq = 15 def _cache_data(self): - params = {'page': 'rss', # Use RSS page - 'order': '1', # Sort Descending By Date - 'cats': '1_37'} # Limit to English-translated Anime (for now) - url = self.provider.url + '?' + urllib.urlencode(params) - logger.log(u'NyaaTorrents cache update URL: ' + url, logger.DEBUG) + params = urllib.urlencode({'page': 'rss', # Use RSS page + 'order': '1', # Sort Descending By Date + 'cats': '1_37' # Limit to English-translated Anime (for now) + }) - data = self.getRSSFeed(url) - if data and 'entries' in data: - return data.entries - return [] + return self.provider.get_data(getrss_func=self.getRSSFeed, + search_url='%s?%s' % (self.provider.url, params)) provider = NyaaProvider() diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index 14f2e299..81255afe 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -203,7 +203,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): except generic.HaltParseException: time.sleep(1.1) pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
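
The reworked NyaaTorrents provider above no longer returns raw RSS entries: `get_data` digs seeder, leecher and size figures out of each entry's `summary_detail` text with a single regex, so feed results can join the same seeder-sorted pipeline as the HTML scrapers. A sketch of just that extraction, reusing the hunk's pattern against an invented summary string:

import re

stats_re = re.compile(r'(?i)(\d+)\W+seed[^\d]+(\d+)\W+leech[^\d]+\d+\W+down[^\d]+([\d.,]+\s\w+)')

summary = '12 seeder(s), 3 leecher(s), 440 downloads - 356.5 MiB'
stats = stats_re.findall(summary)
seeders, leechers, size = stats[0] if stats else (0, 0, 0)
assert ('12', '3', '356.5 MiB') == (seeders, leechers, size)
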
Traceback: %s' % traceback.format_exc(), logger.ERROR) mode = (mode, search_mode)['Propers' == search_mode] @@ -222,7 +222,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): title, url = self._title_and_url(item) try: result_date = datetime.fromtimestamp(int(item['usenetage'])) - except: + except (StandardError, Exception): result_date = None if result_date: @@ -236,7 +236,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider): api_key = self._check_auth() if not api_key.startswith('cookie:'): return api_key - except Exception: + except (StandardError, Exception): return None self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', api_key) diff --git a/sickbeard/providers/pisexy.py b/sickbeard/providers/pisexy.py index 74f247d7..c3fbb882 100644 --- a/sickbeard/providers/pisexy.py +++ b/sickbeard/providers/pisexy.py @@ -40,7 +40,8 @@ class PiSexyProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(PiSexyProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass', 'pcode', 'pisexy'])) + return super(PiSexyProvider, self)._authorised( + logged_in=(lambda y=None: self.has_all_cookies(['uid', 'pass', 'pcode', 'pisexy']))) def _search_provider(self, search_params, **kwargs): @@ -81,13 +82,10 @@ class PiSexyProvider(generic.TorrentProvider): continue info = tr.find('a', href=rc['info']) - title = 'title' in info.attrs and rc['title'].sub('', info.attrs['title'])\ - or info.get_text().strip() + title = (rc['title'].sub('', info.attrs.get('title', '')) or info.get_text()).strip() size = tr.find_all('td')[3].get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - - except (AttributeError, TypeError, ValueError, IndexError): + download_url = self._link(tr.find('a', href=rc['get'])['href']) + except (AttributeError, TypeError, ValueError, KeyError, IndexError): continue if title and download_url: @@ -95,14 +93,12 @@ class PiSexyProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index 7a49c1a9..5fb2e042 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -52,11 +52,11 @@ class PreToMeProvider(generic.TorrentProvider): search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode] - data = RSSFeeds(self).get_feed(search_url) + xml_data = RSSFeeds(self).get_feed(search_url) cnt = len(items[mode]) - if data and 'entries' in data: - for entry in data['entries']: + if xml_data and 'entries' in xml_data: + for entry in xml_data['entries']: try: if entry['title'] and 'download' in entry['link']: items[mode].append((entry['title'], entry['link'], None, None)) diff --git a/sickbeard/providers/privatehd.py b/sickbeard/providers/privatehd.py index 03f53ffe..d316135b 100644 --- a/sickbeard/providers/privatehd.py +++ b/sickbeard/providers/privatehd.py @@ -46,8 +46,8 @@ class PrivateHDProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(PrivateHDProvider, self)._authorised( - logged_in=lambda x=None: self.has_all_cookies(['love']), - post_params={'email_username': self.username}) + logged_in=(lambda y=None: self.has_all_cookies('love')), + post_params={'email_username': self.username, 'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -80,7 +80,7 @@ class PrivateHDProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'class': 'table'}) + torrent_table = soup.find('table', class_='table') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -89,14 +89,12 @@ class PrivateHDProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]] if self._peers_fail(mode, seeders, leechers): continue title = rc['info'].sub('', tr.find('a', attrs={'title': rc['info']})['title']) - - download_url = tr.find('a', href=rc['get'])['href'] - + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -105,14 +103,12 @@ class PrivateHDProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
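
PiSexy above, and several providers that follow, now express "am I logged in?" as a `logged_in` callback handed to `_authorised`, usually just a check that the tracker's session cookies exist. `has_all_cookies` lives in the generic provider outside this excerpt; its assumed shape, demonstrated with a requests session:

import requests

def has_all_cookies(session, names, pre=''):
    # True when every expected cookie name (optionally prefixed) is set
    return all((pre + name) in session.cookies for name in names)

session = requests.Session()
session.cookies.set('uid', '1001')
session.cookies.set('pass', 'deadbeef')
# the lambda passed as logged_in= reduces to a presence test like this
logged_in = lambda y=None: has_all_cookies(session, ['uid', 'pass'])
assert logged_in()
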
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/ptf.py b/sickbeard/providers/ptf.py index 94d6bb15..f48e6f05 100644 --- a/sickbeard/providers/ptf.py +++ b/sickbeard/providers/ptf.py @@ -47,8 +47,8 @@ class PTFProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(PTFProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['session_key']), - post_params={'force_ssl': 'on', 'ssl': ''}) + return super(PTFProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session_key')), + post_params={'force_ssl': 'on', 'ssl': '', 'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -98,9 +98,7 @@ class PTFProvider(generic.TorrentProvider): title = tr.find('a', href=rc['info']).get_text().strip() snatches = tr.find('a', href=rc['snatch']).get_text().strip() size = tr.find_all('td')[-3].get_text().strip().replace(snatches, '') - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -109,14 +107,12 @@ class PTFProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url')) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py index 6e520d0f..4e5a45b3 100644 --- a/sickbeard/providers/rarbg.py +++ b/sickbeard/providers/rarbg.py @@ -38,7 +38,7 @@ class RarbgProvider(generic.TorrentProvider): 'api_list': self.url_api + 'mode=list', 'api_search': self.url_api + 'mode=search'} - self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked=%(ranked)s&token=%(token)s', + self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked=%(r)s&token=%(t)s', 'param_iid': '&search_imdb=%(sid)s', 'param_tid': '&search_tvdb=%(sid)s', 'param_str': '&search_string=%(str)s', @@ -90,7 +90,8 @@ class RarbgProvider(generic.TorrentProvider): id_search = self.params[search_with] % {'sid': sid} dedupe = [] - search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True) # sort type "_only" as first to process + # sort type "_only" as first to process + search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True) for mode_params in search_types: mode_search = mode_params[0] mode = mode_search.replace('_only', '') @@ -121,41 +122,40 @@ class RarbgProvider(generic.TorrentProvider): time_out += 1 time.sleep(1) - searched_url = search_url % {'ranked': int(self.confirmed), 'token': self.token} + searched_url = search_url % {'r': int(self.confirmed), 't': self.token} - data = self.get_url(searched_url, json=True) + data_json = self.get_url(searched_url, json=True) self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14) self.request_throttle = datetime.datetime.now() + 
datetime.timedelta(seconds=3) - if not data: + if not data_json: continue - if 'error' in data: - if 5 == data['error_code']: # Too many requests per second. + if 'error' in data_json: + if 5 == data_json['error_code']: # Too many requests per second. continue - elif 2 == data['error_code']: # Invalid token set + elif 2 == data_json['error_code']: # Invalid token set if self._authorised(reset=True): continue self.log_result(mode, len(items[mode]) - cnt, searched_url) return items[mode] break - if 'error' not in data: - for item in data['torrent_results']: + if 'error' not in data_json: + for item in data_json['torrent_results']: title, download_magnet, seeders, size = [ item.get(x) for x in 'title', 'download', 'seeders', 'size'] title = None is title and item.get('filename') or title if not (title and download_magnet) or download_magnet in dedupe: continue dedupe += [download_magnet] + items[mode].append((title, download_magnet, seeders, self._bytesizer(size))) self._log_search(mode, len(items[mode]) - cnt, searched_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) if '_only' in mode_search and len(results): break diff --git a/sickbeard/providers/revtt.py b/sickbeard/providers/revtt.py index 862f6614..90e9f882 100644 --- a/sickbeard/providers/revtt.py +++ b/sickbeard/providers/revtt.py @@ -45,7 +45,7 @@ class RevTTProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(RevTTProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass'])) + return super(RevTTProvider, self)._authorised() def _search_provider(self, search_params, **kwargs): @@ -80,15 +80,13 @@ class RevTTProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']): continue title = tr.find('a', href=rc['info']).get_text().strip() size = rc['size'].sub(r'\1', size) - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError, IndexError): continue @@ -97,14 +95,12 @@ class RevTTProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
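
The RARBG hunk above shows the provider's API etiquette: a token that is refreshed when absent or stale (the code stamps a 14-minute expiry after each call) and a roughly 3 second gap between requests. A distilled sketch of that bookkeeping, with the token fetch stubbed out since the real call lives elsewhere in the provider:

import datetime
import time

class TokenClient(object):

    def __init__(self):
        self.token = self.token_expiry = self.request_throttle = None

    def _fetch_token(self):
        return 'dummy-token'  # stand-in for the provider's token request

    def request(self):
        now = datetime.datetime.now()
        if not self.token or now > self.token_expiry:
            self.token = self._fetch_token()  # renew an absent/stale token
        if self.request_throttle and now < self.request_throttle:
            # respect the pause the previous request asked for
            time.sleep((self.request_throttle - now).total_seconds())
        # ... perform the API call with self.token here ...
        self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
        self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)

client = TokenClient()
client.request()
client.request()  # waits out the throttle window before proceeding
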
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url')) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index 7b5903b6..73daaa8b 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -100,7 +100,7 @@ class TorrentRssProvider(generic.TorrentProvider): try: bdecode(torrent_file) break - except Exception: + except (StandardError, Exception): pass else: return False, '%s fetched RSS feed data: %s' % \ diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index acce2694..df6aa952 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -34,17 +34,17 @@ class SCCProvider(generic.TorrentProvider): self.url_home = ['https://sceneaccess.%s/' % u for u in 'eu', 'org'] self.url_vars = { - 'login': 'login', 'search': 'browse?search=%s&method=1&c27=27&c17=17&c11=11', 'get': '%s', + 'login_action': 'login', 'search': 'browse?search=%s&method=1&c27=27&c17=17&c11=11', 'get': '%s', 'nonscene': 'nonscene?search=%s&method=1&c44=44&c45=44', 'archive': 'archive?search=%s&method=1&c26=26'} self.url_tmpl = { - 'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s', + 'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s', 'nonscene': '%(home)s%(vars)s', 'archive': '%(home)s%(vars)s'} self.username, self.password, self.minseed, self.minleech = 4 * [None] def _authorised(self, **kwargs): - return super(SCCProvider, self)._authorised(post_params={'submit': 'come+on+in'}) + return super(SCCProvider, self)._authorised(post_params={'form_tmpl': 'method'}) def _search_provider(self, search_params, **kwargs): @@ -76,7 +76,7 @@ class SCCProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'id': 'torrents-table'}) + torrent_table = soup.find(id='torrents-table') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -85,17 +85,14 @@ class SCCProvider(generic.TorrentProvider): for tr in torrent_table.find_all('tr')[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find('td', attrs={'class': x}).get_text().strip() - for x in ('ttr_seeders', 'ttr_leechers', 'ttr_size')]] + tr.find('td', class_='ttr_' + x).get_text().strip() + for x in 'seeders', 'leechers', 'size']] if self._peers_fail(mode, seeders, leechers): continue info = tr.find('a', href=rc['info']) - title = ('title' in info.attrs and info['title']) or info.get_text().strip() - - link = str(tr.find('a', href=rc['get'])['href']).lstrip('/') - download_url = self.urls['get'] % link - + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -104,13 +101,11 @@ class SCCProvider(generic.TorrentProvider): except generic.HaltParseException: time.sleep(1.1) - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index a337f539..b53094fb 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -33,7 +33,7 @@ class SceneTimeProvider(generic.TorrentProvider): self.url_base = 'https://www.scenetime.com/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'takelogin.php', + 'login_action': self.url_base + 'login.php', 'browse': self.url_base + 'browse_API.php', 'params': {'sec': 'jax', 'cata': 'yes'}, 'get': self.url_base + 'download.php/%(id)s/%(title)s.torrent'} @@ -46,7 +46,7 @@ class SceneTimeProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(SceneTimeProvider, self)._authorised(post_params={'submit': 'Log in'}) + return super(SceneTimeProvider, self)._authorised(post_params={'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -91,19 +91,18 @@ class SceneTimeProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]] if None is tr.find('a', href=rc['cats'])\ or self.freeleech and None is rc['fl'].search(tr.find_all('td')[1].get_text())\ or self._peers_fail(mode, seeders, leechers): continue info = tr.find('a', href=rc['info']) - title = info.attrs.get('title') or info.get_text().strip() - + title = (info.attrs.get('title') or info.get_text()).strip() download_url = self.urls['get'] % { 'id': re.sub(rc['get'], r'\1', str(info.attrs['href'])), 'title': str(title).replace(' ', '.')} - except (AttributeError, TypeError, ValueError): + except (AttributeError, TypeError, ValueError, KeyError): continue if title and download_url: @@ -111,15 +110,13 @@ class SceneTimeProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
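
Almost every parse loop in this patch drops rows via `self._peers_fail(mode, seeders, leechers)` before doing further work. The method is defined in generic.py outside this excerpt; a plausible reading from its call sites is a floor check against the user's minseed/minleech settings that is skipped for cache sweeps, roughly:

def peers_fail(mode, seeders, leechers, minseed=None, minleech=None):
    # reject under-seeded results, but let cache (recent releases)
    # sweeps through so the provider cache still fills up
    return 'Cache' != mode and (
        seeders < (minseed or 0) or leechers < (minleech or 0))

assert peers_fail('Episode', 0, 0, minseed=1)
assert not peers_fail('Cache', 0, 0, minseed=1)
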
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, ('search string: ' + search_string, self.name)['Cache' == mode])
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py index bc5650cc..c8b6c433 100644 --- a/sickbeard/providers/shazbat.py +++ b/sickbeard/providers/shazbat.py @@ -50,10 +50,9 @@ class ShazbatProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(ShazbatProvider, self)._authorised(
- logged_in=(lambda x=None: ' len(torrent_rows): @@ -161,14 +163,13 @@ class ThePirateBayProvider(generic.TorrentProvider): for tr in torrent_table.find_all('tr')[1:]: try:
- seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
+ seeders, leechers = [tryInt(tr.find_all('td')[x].get_text().strip()) for x in -2, -1]
if self._peers_fail(mode, seeders, leechers): continue info = tr.find('a', title=rc['info']) title = info.get_text().strip().replace('_', '.') tid = rc['tid'].sub(r'\1', str(info['href']))
- download_magnet = tr.find('a', title=rc['get'])['href']
except (AttributeError, TypeError, ValueError): continue @@ -186,22 +187,19 @@ if title and download_magnet: size = None try:
- size = re.findall('(?i)size[^\d]+(\d+(?:[\.,]\d+)?\W*[bkmgt]\w+)',
- tr.find_all(class_='detDesc')[0].get_text())[0]
- except Exception:
+ size = rc['size'].findall(tr.find_all(class_='detDesc')[0].get_text())[0]
+ except (StandardError, Exception):
pass items[mode].append((title, download_magnet, seeders, self._bytesizer(size))) except generic.HaltParseException: pass
- except Exception:
+ except (StandardError, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url)
- self._sort_seeders(mode, items)
-
- results = list(set(results + items[mode]))
+ results = self._sort_seeding(mode, results + items[mode])
return results diff --git a/sickbeard/providers/tokyotoshokan.py b/sickbeard/providers/tokyotoshokan.py index 1aab206c..b4807832 100644 --- a/sickbeard/providers/tokyotoshokan.py +++ b/sickbeard/providers/tokyotoshokan.py @@ -1,5 +1,3 @@
-# Author: Mr_Orange
-# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
# @@ -16,11 +14,13 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+import re
import traceback import urllib
from . import generic from sickbeard import logger, show_name_helpers, tvcache
+from sickbeard.helpers import tryInt
from sickbeard.bs4_parser import BS4Parser @@ -29,7 +29,7 @@ class TokyoToshokanProvider(generic.TorrentProvider): def __init__(self): generic.TorrentProvider.__init__(self, 'TokyoToshokan', anime_only=True)
- self.url_base = self.url = 'http://tokyotosho.info/'
+ self.url_base = self.url = 'https://tokyotosho.info/'
self.cache = TokyoToshokanCache(self) @@ -39,36 +39,49 @@ class TokyoToshokanProvider(generic.TorrentProvider): if self.show and not self.show.is_anime: return results
- params = {'terms': search_string.encode('utf-8'),
- 'type': 1} # get anime types
+ params = urllib.urlencode({'terms': search_string.encode('utf-8'),
+ 'type': 1}) # get anime types
- search_url = self.url + 'search.php?'
+ urllib.urlencode(params) - logger.log(u'Search string: ' + search_url, logger.DEBUG) + search_url = '%ssearch.php?%s' % (self.url, params) + mode = ('Episode', 'Season')['sponly' == search_mode] + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in { + 'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems()) html = self.get_url(search_url) if html: try: with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'class': 'listing'}) - torrent_rows = torrent_table.find_all('tr') if torrent_table else [] + torrent_table = soup.find('table', class_='listing') + torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if torrent_rows: - a = (0, 1)[None is not torrent_rows[0].find('td', attrs={'class': 'centertext'})] + a = (0, 1)[None is not torrent_rows[0].find('td', class_='centertext')] - for top, bottom in zip(torrent_rows[a::2], torrent_rows[a::2]): - title = top.find('td', attrs={'class': 'desc-top'}).text - url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href'] + for top, bottom in zip(torrent_rows[a::2], torrent_rows[a+1::2]): + try: + bottom_text = bottom.get_text() or '' + stats = rc['stats'].findall(bottom_text) + seeders, leechers = (0, 0) if not stats else [tryInt(n) for n in stats[0]] - if title and url: - results.append((title.lstrip(), url)) + size = rc['size'].findall(bottom_text) + size = size and size[0] or -1 - except Exception: - logger.log(u'Failed to parsing ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR) + info = top.find('td', class_='desc-top') + title = info and re.sub(r'[ .]{2,}', '.', info.get_text().strip()) + urls = info and sorted([x.get('href') for x in info.find_all('a') or []]) + download_url = urls and urls[0].startswith('http') and urls[0] or urls[1] + except (AttributeError, TypeError, ValueError, IndexError): + continue - return results + if title and download_url: + results.append((title, download_url, seeders, self._bytesizer(size))) - def find_search_results(self, show, episodes, search_mode, manual_search=False): + except (StandardError, Exception): + logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) - return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search) + self._log_search(mode, len(results), search_url) + + return self._sort_seeding(mode, results) def _season_strings(self, ep_obj, **kwargs): @@ -84,18 +97,35 @@ class TokyoToshokanCache(tvcache.TVCache): def __init__(self, this_provider): tvcache.TVCache.__init__(self, this_provider) - self.update_freq = 15 # cache update frequency + self.update_freq = 15 def _cache_data(self): - params = {'filter': '1'} - url = self.provider.url + 'rss.php?' 
+ urllib.urlencode(params) - logger.log(u'TokyoToshokan cache update URL: ' + url, logger.DEBUG) + mode = 'Cache' + search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'})) + data = self.getRSSFeed(search_url) - data = self.getRSSFeed(url) + results = [] if data and 'entries' in data: - return data.entries - return [] + + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems()) + + for cur_item in data.get('entries', []): + try: + title, download_url = self._title_and_url(cur_item) + size = rc['size'].findall(cur_item.get('summary_detail', {'value': ''}).get('value', '')) + size = size and size[0] or -1 + + except (AttributeError, TypeError, ValueError): + continue + + if title and download_url: + # feed does not carry seed, leech counts + results.append((title, download_url, 0, self.provider._bytesizer(size))) + + self.provider._log_search(mode, len(results), search_url) + + return results provider = TokyoToshokanProvider() diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 6dbaa82a..1de627a0 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -32,8 +32,8 @@ class TorrentBytesProvider(generic.TorrentProvider): self.url_home = ['https://www.torrentbytes.net/'] - self.url_vars = {'login': 'takelogin.php', 'search': 'browse.php?search=%s&%s', 'get': '%s'} - self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s', + self.url_vars = {'login_action': 'login.php', 'search': 'browse.php?search=%s&%s', 'get': '%s'} + self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'} self.categories = {'Season': [41, 32], 'Episode': [33, 37, 38]} @@ -43,7 +43,7 @@ class TorrentBytesProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(TorrentBytesProvider, self)._authorised(post_params={'login': 'Log in!'}) + return super(TorrentBytesProvider, self)._authorised(post_params={'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -78,15 +78,14 @@ class TorrentBytesProvider(generic.TorrentProvider): try: info = tr.find('a', href=rc['info']) seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if self.freeleech and (len(info.contents) < 2 or not rc['fl'].search( info.contents[1].string.strip())) or self._peers_fail(mode, seeders, leechers): continue - title = info.attrs.get('title') or info.contents[0] - title = (isinstance(title, list) and title[0] or title).strip() - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - except (AttributeError, TypeError, ValueError): + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) + except (AttributeError, TypeError, ValueError, KeyError): continue if title and download_url: @@ -94,14 +93,12 @@ class TorrentBytesProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
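
A quiet bug fix hides in the TokyoToshokan hunk above: the site lists each release as a description row followed by a stats row, but the old loop zipped the even-row slice against itself, so every "bottom" row was a copy of its "top" row and the stats went unread. The patched slicing pairs each row with its successor:

rows = ['header', 'desc 1', 'stats 1', 'desc 2', 'stats 2']
a = 1  # offset found at runtime: 1 when a header row is present

# pre-patch pairing: both halves come from the same slice
assert ('desc 1', 'desc 1') == list(zip(rows[a::2], rows[a::2]))[0]

# patched pairing: every description row meets its stats row
for top, bottom in zip(rows[a::2], rows[a + 1::2]):
    assert top.startswith('desc') and bottom.startswith('stats')
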
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 16d095be..0ba358ce 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -35,7 +35,7 @@ class TorrentDayProvider(generic.TorrentProvider): self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'} - self.categories = {'Season': [31, 33, 14], 'Episode': [24, 32, 26, 7, 2], 'Anime': [29]} + self.categories = {'Season': [31, 33, 14], 'Episode': [24, 32, 26, 7, 34, 2], 'Anime': [29]} self.categories['Cache'] = self.categories['Season'] + self.categories['Episode'] self.proper_search_terms = None @@ -45,9 +45,10 @@ class TorrentDayProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(TorrentDayProvider, self)._authorised( - logged_in=(lambda x='': ('RSS URL' in x) and self.has_all_cookies() and - self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest), - failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings')) + logged_in=(lambda y='': all( + ['RSS URL' in y, self.has_all_cookies()] + + [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])), + failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) @staticmethod def _has_signature(data=None): @@ -87,15 +88,14 @@ class TorrentDayProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: - seeders, leechers = [tryInt(tr.find('td', attrs={'class': x}).get_text().strip()) - for x in ('seedersInfo', 'leechersInfo')] + seeders, leechers = [tryInt(tr.find('td', class_=x + 'ersInfo').get_text().strip()) + for x in 'seed', 'leech'] if self._peers_fail(mode, seeders, leechers): continue title = tr.find('a', href=rc['info']).get_text().strip() size = tr.find_all('td')[-3].get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -104,14 +104,12 @@ class TorrentDayProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): time.sleep(1.1) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/torrenting.py b/sickbeard/providers/torrenting.py index 6379a4e0..e262ec33 100644 --- a/sickbeard/providers/torrenting.py +++ b/sickbeard/providers/torrenting.py @@ -43,9 +43,10 @@ class TorrentingProvider(generic.TorrentProvider): def _authorised(self, **kwargs): return super(TorrentingProvider, self)._authorised( - logged_in=(lambda x='': ('RSS link' in x) and self.has_all_cookies() and - self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest), - failed_msg=(lambda x=None: u'Invalid cookie details for %s. 
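
TorrentDay above and Torrenting below validate a user-supplied cookie digest the same defensive way: each expected cookie value is looked up with a fallback sentinel ('sg!no!pw') that can never occur in the digest, so a missing cookie fails the `in` test cleanly instead of raising on None. Reduced to a function:

def cookies_match_digest(cookies, digest, names=('uid', 'pass')):
    # every expected cookie value must appear in the configured digest;
    # the sentinel turns an absent cookie into a guaranteed mismatch
    return all((cookies.get(name) or 'sg!no!pw') in digest for name in names)

digest = 'uid=1001; pass=deadbeef'
assert cookies_match_digest({'uid': '1001', 'pass': 'deadbeef'}, digest)
assert not cookies_match_digest({'uid': '1001'}, digest)  # 'pass' absent
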
Check settings')) + logged_in=(lambda y='': all( + ['RSS link' in y, self.has_all_cookies()] + + [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])), + failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) @staticmethod def _has_signature(data=None): @@ -84,14 +85,13 @@ class TorrentingProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]] if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers): continue info = tr.find('a', href=rc['info']) - title = info.attrs.get('title') or info.get_text().strip() - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') - + title = (info.attrs.get('title') or info.get_text()).strip() + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -100,14 +100,12 @@ class TorrentingProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index cfe5f05b..420bae0c 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -21,6 +21,7 @@ import traceback from . 
import generic from sickbeard import logger from sickbeard.bs4_parser import BS4Parser +from sickbeard.helpers import tryInt from lib.unidecode import unidecode @@ -30,7 +31,7 @@ class TorrentLeechProvider(generic.TorrentProvider): self.url_base = 'https://torrentleech.org/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'user/account/login/', + 'login_action': self.url_base, 'browse': self.url_base + 'torrents/browse/index/categories/%(cats)s', 'search': self.url_base + 'torrents/browse/index/query/%(query)s/categories/%(cats)s', 'get': self.url_base + '%s'} @@ -43,8 +44,8 @@ class TorrentLeechProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='tl')), - post_params={'remember_me': 'on', 'login': 'submit'}) + return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='tl')), + post_params={'remember_me': 'on', 'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -69,7 +70,7 @@ class TorrentLeechProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'id': 'torrenttable'}) + torrent_table = soup.find(id='torrenttable') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -77,16 +78,15 @@ class TorrentLeechProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: - seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip()) - for x in ('seeders', 'leechers')] + seeders, leechers = [tryInt(n) for n in [ + tr.find('td', class_=x).get_text().strip() for x in 'seeders', 'leechers']] if self._peers_fail(mode, seeders, leechers): continue - info = tr.find('td', {'class': 'name'}).a - title = ('title' in info.attrs and info['title']) or info.get_text().strip() + info = tr.find('td', class_='name').a + title = (info.attrs.get('title') or info.get_text()).strip() size = tr.find_all('td')[-5].get_text().strip() - - download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/') + download_url = self._link(tr.find('a', href=rc['get'])['href']) except (AttributeError, TypeError, ValueError): continue @@ -95,13 +95,11 @@ class TorrentLeechProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results @@ -110,5 +108,4 @@ class TorrentLeechProvider(generic.TorrentProvider): return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs) - provider = TorrentLeechProvider() diff --git a/sickbeard/providers/torrentshack.py b/sickbeard/providers/torrentshack.py index 19ea689b..de6c15a5 100644 --- a/sickbeard/providers/torrentshack.py +++ b/sickbeard/providers/torrentshack.py @@ -34,7 +34,7 @@ class TorrentShackProvider(generic.TorrentProvider): self.url_base = 'https://torrentshack.me/' self.urls = {'config_provider_home_uri': self.url_base, - 'login': self.url_base + 'login.php?lang=', + 'login_action': self.url_base + 'login.php', 'search': self.url_base + 'torrents.php?searchstr=%s&%s&' + '&'.join( ['release_type=both', 'searchtags=', 'tags_type=0', 'order_by=s3', 'order_way=desc', 'torrent_preset=all']), @@ -48,8 +48,8 @@ class TorrentShackProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(TorrentShackProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')), - post_params={'keeplogged': '1', 'login': 'Login'}) + return super(TorrentShackProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')), + post_params={'keeplogged': '1', 'form_tmpl': True}) def _search_provider(self, search_params, **kwargs): @@ -59,8 +59,8 @@ class TorrentShackProvider(generic.TorrentProvider): items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} - rc = dict((k, re.compile('(?i)' + v)) - for (k, v) in {'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+'}.items()) + rc = dict((k, re.compile('(?i)' + v)) for (k, v) in { + 'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+', 'size': '\s{2,}.*'}.iteritems()) for mode in search_params.keys(): for search_string in search_params[mode]: search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string @@ -75,7 +75,7 @@ class TorrentShackProvider(generic.TorrentProvider): raise generic.HaltParseException with BS4Parser(html, features=['html5lib', 'permissive']) as soup: - torrent_table = soup.find('table', attrs={'class': 'torrent_table'}) + torrent_table = soup.find('table', class_='torrent_table') torrent_rows = [] if not torrent_table else torrent_table.find_all('tr') if 2 > len(torrent_rows): @@ -84,17 +84,15 @@ class TorrentShackProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]] + tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]] if self._peers_fail(mode, seeders, leechers): continue + size = rc['size'].sub('', size) info = tr.find('a', title=rc['info']) - title = 'title' in info.attrs and rc['title'].sub('', info.attrs['title']) \ - or info.get_text().strip() - - link = str(tr.find('a', title=rc['get'])['href']).replace('&', '&').lstrip('/') - download_url = self.urls['get'] % link - except (AttributeError, TypeError, ValueError): + title = (rc['title'].sub('', info.attrs.get('title', '')) or info.get_text()).strip() + download_url = self._link(tr.find('a', title=rc['get'])['href']) + except (AttributeError, TypeError, ValueError, KeyError): continue if title and download_url: @@ 
-102,13 +100,11 @@ class TorrentShackProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/transmithe_net.py b/sickbeard/providers/transmithe_net.py index b7cd5c35..56e373f7 100644 --- a/sickbeard/providers/transmithe_net.py +++ b/sickbeard/providers/transmithe_net.py @@ -47,8 +47,8 @@ class TransmithenetProvider(generic.TorrentProvider): def _authorised(self, **kwargs): if not super(TransmithenetProvider, self)._authorised( - logged_in=(lambda x=None: self.has_all_cookies('session')), - post_params={'keeplogged': '1', 'login': 'Login'}): + logged_in=(lambda y=None: self.has_all_cookies('session')), + post_params={'keeplogged': '1', 'form_tmpl': True}): return False if not self.user_authkey: response = helpers.getURL(self.urls['user'], session=self.session, json=True) @@ -102,13 +102,11 @@ class TransmithenetProvider(generic.TorrentProvider): if title and download_url: items[mode].append((title, download_url, seeders, self._bytesizer(size))) - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results diff --git a/sickbeard/providers/tvchaosuk.py b/sickbeard/providers/tvchaosuk.py index 49e29cbf..f80c7728 100644 --- a/sickbeard/providers/tvchaosuk.py +++ b/sickbeard/providers/tvchaosuk.py @@ -45,7 +45,8 @@ class TVChaosUKProvider(generic.TorrentProvider): def _authorised(self, **kwargs): - return super(TVChaosUKProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='c_secure_'))) + return super(TVChaosUKProvider, self)._authorised( + logged_in=(lambda y=None: self.has_all_cookies(pre='c_secure_'))) def _search_provider(self, search_params, **kwargs): @@ -83,29 +84,30 @@ class TVChaosUKProvider(generic.TorrentProvider): for tr in torrent_rows[1:]: try: seeders, leechers, size = [tryInt(n, n) for n in [ - tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]] + tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]] if self._peers_fail(mode, seeders, leechers) \ or self.freeleech and None is tr.find_all('td')[1].find('img', title=rc['fl']): continue info = tr.find('a', href=rc['info']) - title = (tr.find('div', attrs={'class': 'tooltip-content'}).get_text() or info.get_text()).strip() + title = (tr.find('div', class_='tooltip-content').get_text() or info.get_text()).strip() title = re.findall('(?m)(^[^\r\n]+)', title)[0] - download_url = str(tr.find('a', href=rc['get'])['href']) - if not download_url.startswith('http'): - download_url = self.urls['get'] % download_url.lstrip('/') - except Exception: + download_url = self._link(tr.find('a', href=rc['get'])['href']) + except (StandardError, Exception): continue if get_detail and title.endswith('...'): try: - with BS4Parser(self.get_url('%s%s' % (self.urls['config_provider_home_uri'], info['href'].lstrip( - '/').replace(self.urls['config_provider_home_uri'], ''))), 'html.parser') as soup_detail: - title = 
soup_detail.find('td', attrs={'colspan': '3', 'class': 'thead'}).get_text().strip() + with BS4Parser(self.get_url('%s%s' % ( + self.urls['config_provider_home_uri'], info['href'].lstrip('/').replace( + self.urls['config_provider_home_uri'], ''))), + 'html.parser') as soup_detail: + title = soup_detail.find( + 'td', class_='thead', attrs={'colspan': '3'}).get_text().strip() title = re.findall('(?m)(^[^\r\n]+)', title)[0] except IndexError: continue - except Exception: + except (StandardError, Exception): get_detail = False try: @@ -114,11 +116,13 @@ class TVChaosUKProvider(generic.TorrentProvider): rc_xtras = re.compile('(?i)([. _-]|^)(special|extra)s?\w*([. _-]|$)') has_special = rc_xtras.findall(has_series[0][1]) if has_special: - title = has_series[0][0] + rc_xtras.sub(list( - set(list(has_special[0][0]) + list(has_special[0][2])))[0], has_series[0][1]) + title = has_series[0][0] + rc_xtras.sub(list(set( + list(has_special[0][0]) + list(has_special[0][2])))[0], has_series[0][1]) title = re.sub('(?i)series', r'Season', title) - title_parts = re.findall('(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title) + title_parts = re.findall( + '(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?' + + '(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title) if len(title_parts): new_parts = [tryInt(part, part.strip()) for part in title_parts[0]] if not new_parts[1]: @@ -126,24 +130,26 @@ class TVChaosUKProvider(generic.TorrentProvider): new_parts[2] = ('E%02d', ' Pack %d')[mode in 'Season'] % new_parts[2] title = '%s.S%02d%s.%s' % tuple(new_parts) - dated = re.findall('(?i)([\(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([\)\s]*)', title) + dated = re.findall( + '(?i)([(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([)\s]*)', title) if dated: title = title.replace(''.join(dated[0]), '%s%s%s' % ( ('', ' ')[1 < len(dated[0][0])], parse(dated[0][1]).strftime('%Y-%m-%d'), ('', ' ')[1 < len(dated[0][2])])) - add_pad = re.findall('((?:19|20)\d\d\-\d\d\-\d\d)([\w\W])', title) + add_pad = re.findall('((?:19|20)\d\d[-]\d\d[-]\d\d)([\w\W])', title) if len(add_pad) and add_pad[0][1] not in [' ', '.']: - title = title.replace(''.join(add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1])) + title = title.replace(''.join( + add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1])) title = re.sub(r'(?sim)(.*?)(?:Episode|Season).\d+.(.*)', r'\1\2', title) if title and download_url: items[mode].append((title, download_url, seeders, self._bytesizer(size))) - except Exception: + except (StandardError, Exception): pass except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. 
Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, @@ -152,17 +158,16 @@ class TVChaosUKProvider(generic.TorrentProvider): if mode in 'Season' and len(items[mode]): break - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results def _season_strings(self, ep_obj, **kwargs): return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, prefix='%', sp_detail=( - lambda e: [(('', 'Series %(seasonnumber)d%%')[1 < tryInt(e.get('seasonnumber'))] + '%(episodenumber)dof') % e, - 'Series %(seasonnumber)d' % e])) + lambda e: [ + (('', 'Series %(seasonnumber)d%%')[1 < tryInt(e.get('seasonnumber'))] + '%(episodenumber)dof') % e, + 'Series %(seasonnumber)d' % e])) def _episode_strings(self, ep_obj, **kwargs): @@ -174,7 +179,8 @@ class TVChaosUKProvider(generic.TorrentProvider): @staticmethod def ui_string(key): - return 'tvchaosuk_tip' == key and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or '' + return ('tvchaosuk_tip' == key + and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or '') provider = TVChaosUKProvider() diff --git a/sickbeard/providers/womble.py b/sickbeard/providers/womble.py index e0f479c6..80b3b5bb 100644 --- a/sickbeard/providers/womble.py +++ b/sickbeard/providers/womble.py @@ -35,19 +35,19 @@ class WombleCache(tvcache.TVCache): def __init__(self, this_provider): tvcache.TVCache.__init__(self, this_provider) - self.update_freq = 6 # cache update frequency + self.update_freq = 6 def _cache_data(self): result = [] for section in ['sd', 'hd', 'x264', 'dvd']: url = '%srss/?sec=tv-%s&fr=false' % (self.provider.url, section) - data = self.getRSSFeed(url) + xml_data = self.getRSSFeed(url) time.sleep(1.1) cnt = len(result) - for entry in (data and data.get('entries', []) or []): + for entry in (xml_data and xml_data.get('entries', []) or []): if entry.get('title') and entry.get('link', '').startswith('http'): - result.append((entry['title'], entry['link'], None, None)) + result.append((entry.get('title'), entry.get('link'), None, None)) self.provider.log_result(count=len(result) - cnt, url=url) diff --git a/sickbeard/providers/zooqle.py b/sickbeard/providers/zooqle.py index 68c73054..d4d017b6 100644 --- a/sickbeard/providers/zooqle.py +++ b/sickbeard/providers/zooqle.py @@ -82,9 +82,7 @@ class ZooqleProvider(generic.TorrentProvider): info = td[1].find('a', href=rc['info']) title = info and info.get_text().strip() size = td[-3].get_text().strip() - download_url = info and (self.urls['get'] % rc['info'].findall(info['href'])[0]) - except (AttributeError, TypeError, ValueError, IndexError): continue @@ -93,14 +91,12 @@ class ZooqleProvider(generic.TorrentProvider): except generic.HaltParseException: pass - except Exception: + except (StandardError, Exception): logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) self._log_search(mode, len(items[mode]) - cnt, search_url) - self._sort_seeders(mode, items) - - results = list(set(results + items[mode])) + results = self._sort_seeding(mode, results + items[mode]) return results