Mirror of https://github.com/SickGear/SickGear.git (synced 2024-12-01 00:43:37 +00:00)
Merge pull request #767 from JackDandy/feature/AddTorrentz

Add Torrentz2 provider.

Commit d9f55d168c: 12 changed files with 192 additions and 22 deletions.
@@ -93,6 +93,7 @@
* Add Extratorrent provider
* Add Limetorrents provider
* Add nCore torrent provider
* Add Torrentz2 provider
* Remove Usenet-Crawler provider
* Change CPU throttling on General Config/Advanced to "Disabled" by default for new installs
* Change provider OMGWTFNZBS api url and auto reject nuked releases
@@ -137,6 +138,8 @@
* Fix Add from Trakt
* Change unpack files once only in auto post processing copy mode
* Fix data logger for clients
* Change handle when a torrent provider goes down and its urls are cleared
* Add handler for when rar files can not be opened during post processing


### 0.11.14 (2016-07-25 03:10:00 UTC)
BIN gui/slick/images/providers/torrentz2.png (new binary file, 245 B; not shown)
@@ -1,4 +1,5 @@
#import sickbeard
#from sickbeard.clients import get_client_instance
#from sickbeard.providers.generic import GenericProvider
#from sickbeard.providers import thepiratebay
#from sickbeard.helpers import anon_url, starify
@@ -446,28 +447,30 @@
</label>
</div>
#end if
#if $hasattr($cur_torrent_provider, '_seed_ratio') and $sickbeard.TORRENT_METHOD not in ('blackhole', 'qbittorrent'):
#set $torrent_method_text = {'deluge': 'Deluge', 'qbittorrent': 'qBittorrent', 'rtorrent': 'rTorrent', 'download_station': 'Synology DS', 'transmission': 'Transmission', 'utorrent': 'uTorrent'}
<%
client = {} if 'blackhole' == sickbeard.TORRENT_METHOD else get_client_instance(sickbeard.TORRENT_METHOD)().__class__.__dict__
name = '' if not client else get_client_instance(sickbeard.TORRENT_METHOD)().name
%>
#if ($hasattr($cur_torrent_provider, '_seed_ratio') and '_set_torrent_ratio' in $client)
<div class="field-pair">
<label for="${cur_torrent_provider.get_id()}_ratio">
<span class="component-title" id="${cur_torrent_provider.get_id()}_ratio_desc">Seed until ratio (the goal)</span>
<span class="component-desc">
<input type="number" name="${cur_torrent_provider.get_id()}_ratio" id="${cur_torrent_provider.get_id()}_ratio" value="$cur_torrent_provider._seed_ratio" class="form-control input-sm input75" />
<p>this ratio is requested of each item sent to $torrent_method_text[$sickbeard.TORRENT_METHOD]</p>
<div class="clear-left"><p>(#if 'Transmission' in $torrent_method_text[$sickbeard.TORRENT_METHOD]#set -1 to seed forever, or #end if#leave blank for the $torrent_method_text[$sickbeard.TORRENT_METHOD] setting)</p></div>
<p>this ratio is requested of each item sent to $name</p>
<div class="clear-left"><p>(#if 'transmission' == $sickbeard.TORRENT_METHOD#set -1 to seed forever, or #end if#leave blank for the $name setting)</p></div>
</span>
</label>
</div>
#end if
#if $hasattr($cur_torrent_provider, 'seed_time') and 'utorrent' == $sickbeard.TORRENT_METHOD:
#set $torrent_method_text = {'utorrent': 'uTorrent'}
#set $use_default = 'to use the %s min <a href="%s/config/search/#core-component-group3">torrent search setting minumum default</a>' % ($sickbeard.TORRENT_SEED_TIME, $sbRoot) if $sickbeard.TORRENT_SEED_TIME else 'for the %s setting' % $torrent_method_text[$sickbeard.TORRENT_METHOD]
#if ($hasattr($cur_torrent_provider, 'seed_time') and '_set_torrent_seed_time' in $client)
#set $use_default = 'to use the %s min <a href="%s/config/search/#core-component-group3">torrent search setting minumum default</a>' % ($sickbeard.TORRENT_SEED_TIME, $sbRoot) if $sickbeard.TORRENT_SEED_TIME else 'for the %s setting' % $name
<div class="field-pair">
<label for="${cur_torrent_provider.get_id()}_seed_time">
<span class="component-title" id="${cur_torrent_provider.get_id()}_seed_time_desc">Seed time (provider default)</span>
<span class="component-desc">
<input type="number" name="${cur_torrent_provider.get_id()}_seed_time" id="${cur_torrent_provider.get_id()}_seed_time" value="$cur_torrent_provider.seed_time" class="form-control input-sm input75" />
<p>set 1 or more minimum minutes for each item sent to $torrent_method_text[$sickbeard.TORRENT_METHOD]</p>
<p>set 1 or more minimum minutes for each item sent to $name</p>
<div class="clear-left"><p>(leave blank $use_default)</p></div>
</span>
</label>
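As a rough illustration of the capability check this template change switches to, here is a minimal Python sketch: instead of a hard-coded client-name map, the page asks the configured client class whether it implements the ratio and seed-time setters. FakeClient and its printed messages are illustrative stand-ins, not SickGear classes.

class FakeClient(object):
    # stand-in for a SickGear torrent client class; only its shape matters here
    name = 'Transmission'

    def _set_torrent_ratio(self, result):
        pass

client = FakeClient().__class__.__dict__   # mirrors get_client_instance(sickbeard.TORRENT_METHOD)().__class__.__dict__
name = FakeClient().name                   # the display name now comes from the client itself

if '_set_torrent_ratio' in client:         # render the "seed until ratio" field only when the client supports it
    print('show ratio field for %s' % name)
if '_set_torrent_seed_time' in client:     # same gate for the seed time field (not defined on FakeClient)
    print('show seed time field for %s' % name)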
@@ -104,8 +104,9 @@ def remove_non_release_groups(name, is_anime=False):
'([\s\.\-_\[\{\(]*(no-rar|nzbgeek|ripsalot|rp|siklopentan)[\s\.\-_\]\}\)]*)$',
'(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*(www\.\w+.\w+)[\s\.\-_]*[\]\}\)][\s\.\-_]*)$',
'(?<=\w)([\s\.\-_]*[\[\{\(]\s*(rar(bg|tv)|((e[tz]|v)tv))[\s\.\-_]*[\]\}\)][\s\.\-_]*)$'] +
(['(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)$'], [])[is_anime]]
rename = name
(['(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)$',
'^([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)(?=\w)'], [])[is_anime]]
rename = name = remove_extension(name)
while rename:
for regex in rc:
name = regex.sub('', name)
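A hedged standalone example (not the SickGear helper itself) of what the two anime-only patterns above strip; the release name is invented.

import re

# trailing "[info]" after a word character, and the newly added leading "[group]" before one
trailing = re.compile(r'(?<=\w)([\s.\-_]*[\[{(][\s.\-_]*[\w\s.\-_]+[\s.\-_]*[\]})][\s.\-_]*)$')
leading = re.compile(r'^([\s.\-_]*[\[{(][\s.\-_]*[\w\s.\-_]+[\s.\-_]*[\]})][\s.\-_]*)(?=\w)')

name = '[SomeGroup] Show Name - 01 [720p]'   # hypothetical anime release name
name = leading.sub('', name)                 # -> 'Show Name - 01 [720p]'
name = trailing.sub('', name)                # -> 'Show Name - 01'
print(name)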
@@ -108,12 +108,13 @@ class NameParser(object):

for regex in self.compiled_regexes:
for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[regex]:
match = cur_regex.match(name)
new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name)
match = cur_regex.match(new_name)

if not match:
continue

result = ParseResult(name)
result = ParseResult(new_name)
result.which_regex = [cur_regex_name]
result.score = 0 - cur_regex_num
@@ -195,7 +196,7 @@ class NameParser(object):
result.score += 1

if 'release_group' in named_groups:
result.release_group = helpers.remove_non_release_groups(match.group('release_group'))
result.release_group = match.group('release_group')
result.score += 1

if 'version' in named_groups:
@@ -240,7 +241,8 @@ class NameParser(object):
return best_result

# get quality
best_result.quality = common.Quality.nameQuality(name, show.is_anime)
new_name = helpers.remove_non_release_groups(name, show.is_anime)
best_result.quality = common.Quality.nameQuality(new_name, show.is_anime)

new_episode_numbers = []
new_season_numbers = []
@@ -476,7 +476,11 @@ class ProcessTVShow(object):

try:
rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive))

except (StandardError, Exception):
self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR)
self._set_process_success(False)
continue
try:
# Skip extraction if any file in archive has previously been extracted
skip_file = False
for file_in_archive in [ek.ek(os.path.basename, x.filename)
@@ -504,7 +508,7 @@ class ProcessTVShow(object):
self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR)
self._set_process_success(False)
self.fail_detected = True
except Exception as e:
except (StandardError, Exception):
self._log_helper(u'Failed to unpack archive: %s' % archive, logger.ERROR)
self._set_process_success(False)
finally:
@@ -516,13 +520,17 @@ class ProcessTVShow(object):
for archive in rar_files:
try:
rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive))
except (StandardError, Exception):
self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR)
continue
try:
if rar_handle.needs_password():
self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR)
self._set_process_success(False)
self.failure_detected = True
rar_handle.close()
del rar_handle
except Exception:
except (StandardError, Exception):
pass

return unpacked_files
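A hedged sketch of the guard pattern these hunks introduce: each archive is opened inside its own try/except so one unreadable rar is logged and skipped instead of aborting post processing. The function name, paths and print() logging below are illustrative stand-ins for the real _log_helper calls; rarfile is the same third-party module the diff already uses.

import os
import rarfile   # third-party module already used by processTV

def list_rar_contents(path, rar_files):
    names = []
    for archive in rar_files:
        try:
            rar_handle = rarfile.RarFile(os.path.join(path, archive))
        except Exception:
            print('Failed to open archive: %s' % archive)   # stands in for self._log_helper(..., logger.ERROR)
            continue
        try:
            if rar_handle.needs_password():
                print('Failed to unpack archive PasswordRequired: %s' % archive)
                continue
            names.extend(x.filename for x in rar_handle.infolist())
        except Exception:
            print('Failed to unpack archive: %s' % archive)
    return names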
@@ -30,7 +30,8 @@ from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, btscene, dh, extrator
fano, filelist, freshontv, funfile, gftracker, grabtheinfo, hd4free, hdbits, hdspace, \
ilt, iptorrents, limetorrents, morethan, ncore, pisexy, pretome, privatehd, ptf, \
rarbg, revtt, scc, scenetime, shazbat, speedcd, \
thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, torrentshack, transmithe_net, tvchaosuk, zooqle
thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, \
torrentshack, torrentz2, transmithe_net, tvchaosuk, zooqle
# anime
from . import anizb, nyaatorrents, tokyotoshokan
# custom
@@ -81,6 +82,7 @@ __all__ = ['omgwtfnzbs',
'torrenting',
'torrentleech',
'torrentshack',
'torrentz2',
'transmithe_net',
'tvchaosuk',
'zooqle',
@@ -26,6 +26,7 @@ import os
import re
import time
import urlparse
from urllib import quote_plus
import zlib
from base64 import b16encode, b32decode
@@ -309,6 +310,36 @@ class GenericProvider:
url_tmpl = '%s'
return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))

@staticmethod
def _dhtless_magnet(btih, name=None):
"""
:param btih: torrent hash
:param name: torrent name
:return: a magnet loaded with default trackers for clients without enabled DHT or None if bad hash
"""
try:
btih = btih.lstrip('/').upper()
if 32 == len(btih):
btih = b16encode(b32decode(btih)).lower()
btih = re.search('(?i)[0-9a-f]{32,40}', btih) and btih or None
except (StandardError, Exception):
btih = None
return (btih and 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (btih, quote_plus(name or btih), '&tr='.join(
[quote_plus(tr) for tr in
'http://atrack.pow7.com/announce', 'http://mgtracker.org:2710/announce',
'http://pow7.com/announce', 'http://t1.pow7.com/announce',
'http://tracker.tfile.me/announce', 'udp://9.rarbg.com:2710/announce',
'udp://9.rarbg.me:2710/announce', 'udp://9.rarbg.to:2710/announce',
'udp://eddie4.nl:6969/announce', 'udp://explodie.org:6969/announce',
'udp://inferno.demonoid.pw:3395/announce', 'udp://inferno.subdemon.com:3395/announce',
'udp://ipv4.tracker.harry.lu:80/announce', 'udp://p4p.arenabg.ch:1337/announce',
'udp://shadowshq.yi.org:6969/announce', 'udp://tracker.aletorrenty.pl:2710/announce',
'udp://tracker.coppersurfer.tk:6969', 'udp://tracker.coppersurfer.tk:6969/announce',
'udp://tracker.internetwarriors.net:1337', 'udp://tracker.internetwarriors.net:1337/announce',
'udp://tracker.leechers-paradise.org:6969', 'udp://tracker.leechers-paradise.org:6969/announce',
'udp://tracker.opentrackr.org:1337/announce', 'udp://tracker.torrent.eu.org:451/announce',
'udp://tracker.trackerfix.com:80/announce'])) or None)

def find_search_results(self, show, episodes, search_mode, manual_search=False):

self._check_auth()
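A hedged usage sketch of the new _dhtless_magnet helper's core idea, normalising a 32-character base32 info hash to its 40-character hex form before building the magnet URI. The hash and name are invented, and the Python 3 urllib.parse import stands in for the Python 2 urllib.quote_plus used in the diff.

from base64 import b16encode, b32decode
from urllib.parse import quote_plus   # Python 2: from urllib import quote_plus

btih = 'ORSXG5A7ORSXG5A7ORSXG5A7ORSXG5A7'            # hypothetical 32-char base32 info hash
btih = b16encode(b32decode(btih)).lower().decode()   # -> 40-char hex form expected in magnet URIs
magnet = 'magnet:?xt=urn:btih:%s&dn=%s' % (btih, quote_plus('Show.Name.S01E01'))
print(magnet)   # the helper then appends the default tracker list as repeated &tr= parameters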
@@ -797,7 +828,7 @@ class TorrentProvider(object, GenericProvider):
ep_detail = sickbeard.config.naming_ep_type[2] % ep_dict \
if 'ep_detail' not in kwargs.keys() else kwargs['ep_detail'](ep_dict)
if sickbeard.scene_exceptions.has_abs_episodes(ep_obj):
ep_detail = [ep_detail] + ['%d' % ep_dict['episodenumber']]
ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)] + ['%d' % ep_dict['episodenumber']]
ep_detail = ([ep_detail], ep_detail)[isinstance(ep_detail, list)]
detail = ({}, {'Episode_only': ep_detail})[detail_only and not show.is_sports and not show.is_anime]
return [dict({'Episode': self._build_search_strings(ep_detail, scene, prefix)}.items() + detail.items())]
@@ -901,7 +932,7 @@ class TorrentProvider(object, GenericProvider):
u'Failed to authenticate or parse a response from %s, abort provider')))
)]

if logged_in():
if logged_in() and (not hasattr(self, 'urls') or bool(len(getattr(self, 'urls')))):
return True

if not self._valid_home():
sickbeard/providers/torrentz2.py (new file, 114 lines)

@@ -0,0 +1,114 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import time
import traceback
from urllib import quote_plus

from . import generic
from sickbeard import config, logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class Torrentz2Provider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'Torrentz2')

        self.url_home = ['https://torrentz2.eu/']

        self.url_vars = {'search': 'searchA?f=%s&safe=1', 'searchv': 'verifiedA?f=%s&safe=1'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s',
                         'search': '%(home)s%(vars)s', 'searchv': '%(home)s%(vars)s'}

        self.proper_search_terms = '.proper.|.repack.'
        self.minseed, self.minleech = 2 * [None]
        self.confirmed = False

    @staticmethod
    def _has_signature(data=None):
        return data and re.search(r'(?i)Torrentz', data)

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self.url:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': r'>>.*tv'}.iteritems())
        for mode in search_params.keys():
            for search_string in search_params[mode]:

                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['search' + ('', 'v')[self.confirmed]] % (
                    'tv%s' % ('+' + quote_plus(search_string), '')['Cache' == mode])

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException
                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_rows = soup.select('dl')

                        if not len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows:
                            try:
                                if not rc['info'].search(unidecode(tr.dt.get_text().strip())):
                                    continue
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    tr.dd.find_all('span')[x].get_text().strip() for x in -2, -1, -3]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.dt.a
                                title = info and info.get_text().strip()
                                title = title and isinstance(title, unicode) and unidecode(title) or title
                                download_url = info and title and self._dhtless_magnet(info['href'], title)
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    time.sleep(1.1)
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _episode_strings(self, ep_obj, **kwargs):
        return generic.TorrentProvider._episode_strings(
            self, ep_obj, date_detail=(lambda d: [x % str(d).replace('-', '.') for x in ('"%s"', '%s')]),
            ep_detail=(lambda ep_dict: [x % (config.naming_ep_type[2] % ep_dict) for x in ('"%s"', '%s')]), **kwargs)


provider = Torrentz2Provider()
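A hedged illustration of how the provider above composes a search URL from url_home, url_vars and url_tmpl; the query string is an invented example, and the endpoint is only what the code declares, not a guarantee the site responds.

from urllib.parse import quote_plus   # Python 2: from urllib import quote_plus

url_tmpl = 'https://torrentz2.eu/searchA?f=%s&safe=1'   # '%(home)s%(vars)s' with home and the 'search' var filled in
search_string = 'Show Name S01E01'                      # hypothetical Episode-mode search string
search_url = url_tmpl % ('tv%s' % ('+' + quote_plus(search_string)))
print(search_url)   # https://torrentz2.eu/searchA?f=tv+Show+Name+S01E01&safe=1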
@@ -476,6 +476,10 @@ def search_providers(show, episodes, manual_search=False):
try:
cur_provider.cache._clearCache()
search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search)
if any(search_results):
logger.log(', '.join(['%s%s has %s candidate%s' % (
('S', 'Ep')['ep' in search_mode], k, len(v), helpers.maybe_plural(len(v)))
for (k, v) in search_results.iteritems()]))
except exceptions.AuthException as e:
logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
break
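A hedged, self-contained rendering of the summary line added above; maybe_plural is a stand-in for the SickGear helper, and the result mapping is invented example data.

def maybe_plural(number=1):
    # stand-in for sickbeard.helpers.maybe_plural
    return ('s', '')[1 == number]

search_mode = 'eponly'
search_results = {1: ['result-a', 'result-b'], 2: ['result-c']}   # hypothetical: key -> candidate list
print(', '.join(['%s%s has %s candidate%s' % (
    ('S', 'Ep')['ep' in search_mode], k, len(v), maybe_plural(len(v)))
    for (k, v) in search_results.items()]))
# e.g. Ep1 has 2 candidates, Ep2 has 1 candidate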
@@ -2165,12 +2165,12 @@ class TVEpisode(object):

def release_name(name, is_anime=False):
if name:
name = helpers.remove_non_release_groups(helpers.remove_extension(name), is_anime)
name = helpers.remove_non_release_groups(name, is_anime)
return name

def release_group(show, name):
if name:
name = helpers.remove_non_release_groups(helpers.remove_extension(name), show.is_anime)
name = helpers.remove_non_release_groups(name, show.is_anime)
else:
return ''
@@ -39,6 +39,8 @@ simple_test_cases = {
'Show.Name.S06E01.Other.WEB-DL': parser.ParseResult(None, 'Show Name', 6, [1], 'Other.WEB-DL'),
'Show.Name.S06E01 Some-Stuff Here': parser.ParseResult(None, 'Show Name', 6, [1], 'Some-Stuff Here'),
'Show.Name.S01E15-11001001': parser.ParseResult(None, 'Show Name', 1, [15], None),
'Show.Name.S01E02.Source.Quality.Etc-Group - [stuff]':
parser.ParseResult(None, 'Show Name', 1, [2], 'Source.Quality.Etc', 'Group'),
},

'fov': {