Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-11 21:53:37 +00:00
Change TorrentDay and fix for Emby updater when no folders are returned from API.
Parent: 30fc002692
Commit: 52218fcd6b

3 changed files with 16 additions and 9 deletions
@@ -1,4 +1,10 @@
-### 0.16.19 (2018-07-05 18:10:00 UTC)
+### 0.16.20 (2018-07-17 14:30:00 UTC)
 
+* Change TorrentDay
+* Fix for Emby updater when no folders are returned from API
+
+
+### 0.16.19 (2018-07-05 18:10:00 UTC)
+
 * Fix Uuid1 Python Bug, add fallback to uuid4 when uuid1 fails with ValueError https://bugs.python.org/issue32502
 

@@ -44,7 +44,7 @@ class TorrentDayProvider(generic.TorrentProvider):
             'Vmbq', 'WL10', 'ZyZ', 'rFW', '5yc', '12bj', 'q=0']]
             ]]]
 
-        self.url_vars = {'login': 'rss.php', 'search': 'browse.php?cata=yes&%s%s&search=%s%s'}
+        self.url_vars = {'login': 'rss.php', 'search': 't?%s%s&qf=&q=%s'}
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
                          'search': '%(home)s%(vars)s'}
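
For context, a minimal sketch of how the changed 'search' template expands, assuming self.urls is derived by combining url_tmpl with the provider's home URL and url_vars as the lines above suggest; the home URL is an illustrative assumption, not a value from this commit:

```python
# Illustrative only: plain dicts stand in for the provider attributes above.
home = 'https://www.torrentday.com/'  # assumed provider home for the example

url_vars = {'login': 'rss.php', 'search': 't?%s%s&qf=&q=%s'}
url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
            'search': '%(home)s%(vars)s'}

# Expand each template with the home URL and its vars (how self.urls appears to be built)
urls = dict((k, url_tmpl[k] % {'home': home, 'vars': url_vars.get(k, '')})
            for k in ('login', 'search'))

print(urls['search'])
# https://www.torrentday.com/t?%s%s&qf=&q=%s
```

The remaining %s placeholders are filled in later with the category string, the optional freeleech flag and the query (see the next hunk); the old browse.php endpoint, which took the query via &search=, is replaced by the t? endpoint, which takes it via &q=.
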
@@ -76,14 +76,13 @@ class TorrentDayProvider(generic.TorrentProvider):
 
         items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
 
-        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'download'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 search_string = '+'.join(search_string.split())
 
                 search_url = self.urls['search'] % (
-                    self._categories_string(mode), ('&free=on', '')[not self.freeleech],
-                    search_string, ('&sort=7&type=desc', '')['Cache' == mode])
+                    self._categories_string(mode, '%s=on'), ('&free=on', '')[not self.freeleech], search_string)
 
                 html = self.get_url(search_url)
                 if self.should_skip():
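
A hypothetical expansion of the rewritten call, with made-up category IDs and query just to show the shape of the final URL; the '%s=on' template passed to _categories_string suggests per-category flags such as '7=on':

```python
# All values below are illustrative stand-ins, not output captured from the provider.
search_fmt = 'https://www.torrentday.com/t?%s%s&qf=&q=%s'  # assumed expanded self.urls['search']

categories = '7=on&26=on'            # what _categories_string(mode, '%s=on') might return
freeleech = False                    # provider setting
search_string = 'Show+Name+S01E02'   # already '+'-joined above

search_url = search_fmt % (categories, ('&free=on', '')[not freeleech], search_string)
print(search_url)
# https://www.torrentday.com/t?7=on&26=on&qf=&q=Show+Name+S01E02
```

Note that the second line of the old call, which appended &sort=7&type=desc outside of Cache mode, is dropped entirely; the new endpoint is passed only the categories, the optional free flag, and the query.
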
@@ -114,9 +113,11 @@ class TorrentDayProvider(generic.TorrentProvider):
                         if self._peers_fail(mode, seeders, leechers):
                             continue
 
-                        title = tr.find('a', href=rc['info']).get_text().strip()
-                        download_url = self._link(tr.find('a', href=rc['get'])['href'])
-                    except (AttributeError, TypeError, ValueError):
+                        dl = tr.find('a', href=rc['get'])['href']
+                        title = tr.find('a', href=re.compile(
+                            '/t/%s' % re.findall('download.*?/([^/]+)', dl)[0])).get_text().strip()
+                        download_url = self._link(dl)
+                    except (AttributeError, TypeError, ValueError, IndexError):
                         continue
 
                     if title and download_url:
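
A standalone sketch of the new extraction order on an invented table row (the markup, torrent id and paths below are made up): the download link is found first, the torrent id is pulled out of its href, and the title anchor is then located via its '/t/<id>' detail link. The widened except also catches the IndexError that re.findall(...)[0] raises when no id can be extracted.

```python
import re

from bs4 import BeautifulSoup

# Invented row markup for illustration; real TorrentDay listings will differ.
html = '''
<table><tr>
  <td><a href="/t/1234567">Some.Show.S01E02.720p</a></td>
  <td><a href="/download.php/1234567/Some.Show.S01E02.720p.torrent">get</a></td>
</tr></table>
'''
tr = BeautifulSoup(html, 'html.parser').find('tr')

rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'download'}.items())

# Download link first, then derive the detail-link pattern from its torrent id.
dl = tr.find('a', href=rc['get'])['href']
title = tr.find('a', href=re.compile(
    '/t/%s' % re.findall('download.*?/([^/]+)', dl)[0])).get_text().strip()

print(title)  # Some.Show.S01E02.720p
print(dl)     # /download.php/1234567/Some.Show.S01E02.720p.torrent
```
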
@@ -5369,7 +5369,7 @@ class History(MainHandler):
                 folder = sickbeard.helpers.getURL('%s/Items/%s' % (user_url, folder_id), headers=headers,
                                                   params=dict(format='json'), timeout=10, json=True)
 
-                if 'tvshows' != folder.get('CollectionType', ''):
+                if not folder or 'tvshows' != folder.get('CollectionType', ''):
                     continue
 
                 items = sickbeard.helpers.getURL('%s/Items' % user_url, headers=headers,
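
The Emby change guards against the folder lookup returning nothing, which is what the commit message describes as no folders being returned from the API. A minimal, self-contained illustration; the helper and URL below are hypothetical stand-ins, not SickGear code:

```python
def get_url_json(url):
    """Hypothetical stand-in for sickbeard.helpers.getURL(..., json=True).
    Returns None here to simulate the Emby API yielding no data for a folder."""
    return None

for folder_id in ('101', '102'):
    folder = get_url_json('http://emby.example:8096/emby/Users/uid/Items/%s' % folder_id)

    # Old check: 'tvshows' != folder.get('CollectionType', '')
    # -> AttributeError when folder is None; the added 'not folder' guard skips it instead.
    if not folder or 'tvshows' != folder.get('CollectionType', ''):
        continue

    print('would update folder %s' % folder_id)  # only reached for real TV show libraries
```
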