Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-05 17:43:37 +00:00)
Fix issue creating xml metadata files.
Change improve searching torrent providers AReign, EZTV, HDB, SkyT, and SCD.
parent c47a48c91a
commit e41b4a442d
8 changed files with 32 additions and 37 deletions
@@ -1,4 +1,10 @@
-### 0.15.10 (2018-04-13 12:10:00 UTC)
+### 0.15.11 (2018-04-16 03:20:00 UTC)
+
+* Fix issue creating xml metadata files
+* Change improve searching torrent providers AReign, EZTV, HDB, SkyT, and SCD
+
+
+### 0.15.10 (2018-04-13 12:10:00 UTC)
 
 * Change accept theTVDB Url in addshow search field
 * Change Nzb.org usenet provider add config scene only/nuked
@@ -6,6 +12,7 @@
 * Change BTScene, LimeTorrents, SkyTorrents, Torlock, Torrentz, TPB torrent providers
 * Add AlphaReign, EZTV torrent providers
 
+
 ### 0.15.9 (2018-04-07 20:45:00 UTC)
 
 * Fix metadata show not found
@@ -65,13 +65,13 @@ from itertools import izip, cycle
 
 
 def indentXML(elem, level=0):
-    '''
+    """
     Does our pretty printing, makes Matt very happy
-    '''
-    i = "\n" + level * " "
+    """
+    i = '\n' + level * ' '
     if len(elem):
-        if not elem.text or not elem.text.strip():
-            elem.text = i + " "
+        if not elem.text or not ('%s' % elem.text).strip():
+            elem.text = i + ' '
         if not elem.tail or not elem.tail.strip():
             elem.tail = i
         for elem in elem:
@@ -81,7 +81,7 @@ def indentXML(elem, level=0):
     else:
         # Strip out the newlines from text
         if elem.text:
-            elem.text = elem.text.replace('\n', ' ')
+            elem.text = ('%s' % elem.text).replace('\n', ' ')
         if level and (not elem.tail or not elem.tail.strip()):
             elem.tail = i
 
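
The xml metadata fix above comes down to coercing `elem.text` to a string with `'%s' % elem.text` before calling `.strip()` or `.replace()`, so element text that was assigned a non-string value no longer raises AttributeError during pretty printing. A minimal Python 3 sketch of the pattern (a simplified stand-in, not the project's `indentXML` helper):

```python
import xml.etree.ElementTree as ET


def indent_xml(elem, level=0):
    """Pretty-print an ElementTree element in place, coercing text to str first."""
    i = '\n' + level * '  '
    if len(elem):
        if not elem.text or not ('%s' % elem.text).strip():
            elem.text = i + '  '
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for child in elem:
            indent_xml(child, level + 1)
        if not child.tail or not child.tail.strip():
            child.tail = i
    else:
        if elem.text:
            # '%s' % value is safe for str, int and similar; a bare .replace() is not
            elem.text = ('%s' % elem.text).replace('\n', ' ')
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i


root = ET.Element('tvshow')
ET.SubElement(root, 'season').text = 1          # non-string value, as metadata code can produce
ET.SubElement(root, 'title').text = 'Show Name'
indent_xml(root)                                # would raise AttributeError without the guards
print(ET.tostring(root, encoding='unicode'))
```
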
@@ -54,7 +54,7 @@ class AlphaReignProvider(generic.TorrentProvider):
                 if 'Cache' == mode:
                     search_url = self.urls['search'] % tuple(search_string.split(','))
                 else:
-                    search_url = self.urls['search'] % (search_string, '')
+                    search_url = self.urls['search'] % (search_string.replace('.', ' '), '')
                 html = self.get_url(search_url)
                 if self.should_skip():
                     return results
@@ -94,12 +94,6 @@ class AlphaReignProvider(generic.TorrentProvider):
 
         return results
 
-    def _season_strings(self, ep_obj, **kwargs):
-        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, **kwargs)
-
-    def _episode_strings(self, ep_obj, **kwargs):
-        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
-
    def _cache_data(self, **kwargs):
 
         return self._search_provider({'Cache': [',', ',2', ',3', ',4', ',5']})
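
Two changes for AlphaReign: free-text queries now swap release-style dots for spaces before being interpolated into the search URL, and the `scene=False` overrides of `_season_strings`/`_episode_strings` are removed so the base class defaults apply. A hedged sketch of the URL-building side (helper name and URL are illustrative, not SickGear's provider API):

```python
from urllib.parse import quote_plus


def build_search_url(search_tmpl, search_string, cache_mode=False):
    """Return a provider search URL; plain searches swap dots for spaces first."""
    if cache_mode:
        # cache entries are pre-built 'term,page' fragments, passed through as-is
        return search_tmpl % tuple(search_string.split(','))
    return search_tmpl % (quote_plus(search_string.replace('.', ' ')), '')


# hypothetical two-placeholder template, mirroring urls['search'] % (term, page)
print(build_search_url('https://tracker.example/search/%s%s', 'Show.Name.S01E02'))
print(build_search_url('https://tracker.example/search/%s%s', ',2', cache_mode=True))
```
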
@@ -56,7 +56,8 @@ class EztvProvider(generic.TorrentProvider):
                 'h1 id0opXZ', 'u8Wa15y Z', 'lhGdpFSmoZ', 'uVnL2 RnSe', 'ht2oY vxmY', 'nJ3obuwSGb']],
         ]]]
         self.url_vars = {'search': 'search/%s', 'browse': 'page_%s'}
-        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(vars)s', 'browse': '%(home)s%(vars)s'}
+        self.url_tmpl = {'config_provider_home_uri': '%(home)s',
+                         'search': '%(home)s%(vars)s', 'browse': '%(home)s%(vars)s'}
 
         self.minseed = None
 
@@ -77,7 +78,8 @@ class EztvProvider(generic.TorrentProvider):
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
-                search_url = self.urls[('search', 'browse')['Cache' == mode]] % search_string
+                search_url = self.urls['browse'] % search_string if 'Cache' == mode else \
+                    self.urls['search'] % search_string.replace('.', ' ')
 
                 html = self.get_url(search_url)
                 if self.should_skip():
@@ -117,7 +119,7 @@ class EztvProvider(generic.TorrentProvider):
                             if title and download_url:
                                 items[mode].append((title, download_url, seeders, self._bytesizer(size)))
 
-                except generic.HaltParseException:
+                except (generic.HaltParseException, IndexError):
                     pass
                 except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
@@ -128,12 +130,6 @@ class EztvProvider(generic.TorrentProvider):
 
         return results
 
-    def _season_strings(self, ep_obj, **kwargs):
-        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, **kwargs)
-
-    def _episode_strings(self, ep_obj, **kwargs):
-        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
-
     def _cache_data(self, **kwargs):
 
         return self._search_provider({'Cache': [0, 1]})
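
For EZTV, the first hunk fixes the 'search' URL template, which previously lacked the `%(home)s` prefix and so produced a relative URL; the second hunk picks the browse URL for cache mode and a dot-to-space query for searches, and the parser now also tolerates IndexError alongside HaltParseException. A hedged sketch of the `%`-style named template expansion (domain and helper function are illustrative, not the provider's real attributes):

```python
# templates use %-style named placeholders expanded in two steps
url_tmpl = {'config_provider_home_uri': '%(home)s',
            'search': '%(home)s%(vars)s', 'browse': '%(home)s%(vars)s'}
url_vars = {'search': 'search/%s', 'browse': 'page_%s'}
home = 'https://eztv.example/'


def provider_url(kind, value):
    # expand the inner pattern first (search term or page number), then the outer template
    return url_tmpl[kind] % {'home': home, 'vars': url_vars[kind] % value}


print(provider_url('search', 'Show.Name.S01E02'.replace('.', ' ')))
print(provider_url('browse', 1))
```
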
@@ -58,7 +58,7 @@ class HDBitsProvider(generic.TorrentProvider):
 
     def _season_strings(self, ep_obj, **kwargs):
 
-        params = super(HDBitsProvider, self)._season_strings(ep_obj, scene=False)
+        params = super(HDBitsProvider, self)._season_strings(ep_obj)
 
         show = ep_obj.show
         if indexer_config.INDEXER_TVDB == show.indexer and show.indexerid:
@@ -72,7 +72,7 @@ class HDBitsProvider(generic.TorrentProvider):
 
     def _episode_strings(self, ep_obj, **kwargs):
 
-        params = super(HDBitsProvider, self)._episode_strings(ep_obj, scene=False, sep_date='|')
+        params = super(HDBitsProvider, self)._episode_strings(ep_obj, sep_date='|')
 
         show = ep_obj.show
         if indexer_config.INDEXER_TVDB == show.indexer and show.indexerid:
@@ -105,7 +105,7 @@ class HDBitsProvider(generic.TorrentProvider):
                 post_data.update(search_param)
                 id_search = True
             else:
-                post_data['search'] = search_param
+                post_data['search'] = search_param = search_param.replace('.', ' ')
                 id_search = False
 
             post_data = json.dumps(post_data)
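
For HDBits the `scene=False` argument is dropped from the season and episode string builders, and free-text search terms have dots swapped for spaces before the JSON POST body is built. A hedged sketch of that request-shaping step (payload keys are illustrative, not the real HDBits API schema):

```python
import json


def build_post_data(search_param, base=None):
    """ID lookups post structured params as-is; free text is de-dotted first."""
    post_data = dict(base or {})
    if isinstance(search_param, dict):        # e.g. {'tvdb': {'id': 12345, 'season': 1}}
        post_data.update(search_param)
        id_search = True
    else:
        post_data['search'] = search_param = search_param.replace('.', ' ')
        id_search = False
    return json.dumps(post_data), id_search


print(build_post_data('Show.Name.S01'))
print(build_post_data({'tvdb': {'id': 12345, 'season': 1}}))
```
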
@@ -54,7 +54,7 @@ class SkytorrentsProvider(generic.TorrentProvider):
                 if 'Cache' == mode:
                     search_url = self.urls['search'] % tuple(search_string.split(','))
                 else:
-                    search_url = self.urls['search'] % (search_string, '')
+                    search_url = self.urls['search'] % (search_string.replace('.', ' '), '')
                 html = self.get_url(search_url)
                 if self.should_skip():
                     return results
@@ -103,12 +103,6 @@ class SkytorrentsProvider(generic.TorrentProvider):
 
         return results
 
-    def _season_strings(self, ep_obj, **kwargs):
-        return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, **kwargs)
-
-    def _episode_strings(self, ep_obj, **kwargs):
-        return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, **kwargs)
-
     def _cache_data(self, **kwargs):
 
         return self._search_provider({'Cache': ['x264,', 'x264,2', 'x264,3', 'x264,4', 'x264,5']})
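
SkyTorrents gets the same treatment as AlphaReign: dots become spaces in plain queries and the `scene=False` overrides go away. The cache entries such as `'x264,2'` look like term/page pairs split on the comma; a speculative sketch of how that expansion could drive a few recent-items pages (URL layout assumed, not the real site):

```python
# assumed two-placeholder search template: term, then page
search_tmpl = 'https://skytorrents.example/search/%s/page/%s'

for search_string in ['x264,', 'x264,2', 'x264,3']:
    term, page = tuple(search_string.split(','))
    print(search_tmpl % (term, page or 1))   # empty page falls back to page 1
```
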
@@ -49,7 +49,7 @@ class SpeedCDProvider(generic.TorrentProvider):
             [self.session.cookies.get_dict(domain='.speed.cd') and
              self.session.cookies.clear('.speed.cd') is None or True] +
             ['RSS' in y, 'type="password"' not in y, self.has_all_cookies(['speedian'], 'inSpeed_')] +
-            [(self.session.cookies.get('inSpeed_' + x) or 'sg!no!pw') in self.digest for x in ['speedian']])),
+            [(self.session.cookies.get('inSpeed_' + c) or 'sg!no!pw') in self.digest for c in ['speedian']])),
             failed_msg=(lambda y=None: u'Invalid cookie details for %s. Perhaps the cookie expired? Check settings'))
 
     def _search_provider(self, search_params, **kwargs):
@@ -65,7 +65,8 @@ class SpeedCDProvider(generic.TorrentProvider):
         for mode in search_params.keys():
             rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|'))
             for search_string in search_params[mode]:
-                post_data = dict((x.split('=') for x in self._categories_string(mode).split('&')), search=search_string,
+                post_data = dict((x.split('=') for x in self._categories_string(mode).split('&')),
+                                 search=search_string.replace('.', ' ').replace('^@^', '.'),
                                  jxt=2, jxw='b', freeleech=('on', None)[not self.freeleech])
 
                 data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
@@ -120,7 +121,7 @@ class SpeedCDProvider(generic.TorrentProvider):
 
     def _episode_strings(self, ep_obj, **kwargs):
 
-        return super(SpeedCDProvider, self)._episode_strings(ep_obj, scene=False, sep_date='.', **kwargs)
+        return super(SpeedCDProvider, self)._episode_strings(ep_obj, sep_date='^@^', **kwargs)
 
     @staticmethod
     def ui_string(key):
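
SpeedCD's episode strings now use `sep_date='^@^'` so that the later `.replace('.', ' ').replace('^@^', '.')` can turn release-style dots into spaces while keeping the dots inside an air date intact. A small standalone sketch of that two-step replacement (outside the provider class):

```python
def to_post_search(search_string):
    # release-style dots become spaces; the placeholder then restores real date dots
    return search_string.replace('.', ' ').replace('^@^', '.')


print(to_post_search('Show.Name.2018^@^04^@^16'))   # -> Show Name 2018.04.16
print(to_post_search('Show.Name.S01E02'))           # -> Show Name S01E02
```
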
@@ -100,7 +100,10 @@ class TVCache:
 
         if len(cl) > 0:
             myDB = self.get_db()
-            myDB.mass_action(cl)
+            try:
+                myDB.mass_action(cl)
+            except (StandardError, Exception) as e:
+                logger.log('Warning could not save cache value [%s], caught err: %s' % (cl, ex(e)))
 
         # set updated as time the attempt to fetch data is
         self.setLastUpdate()
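
The cache writer now wraps the batched `mass_action` call in try/except so a bad row is logged instead of aborting the whole cache update, and `setLastUpdate()` still runs afterwards. A minimal sketch of that guarded batch write, using sqlite3 as a stand-in for SickGear's DB wrapper:

```python
import logging
import sqlite3

logging.basicConfig(level=logging.INFO)


def save_cache_rows(conn, cl):
    """cl is a list of (sql, args) pairs; failures are logged, not fatal."""
    if len(cl) > 0:
        try:
            with conn:                      # one transaction for the whole batch
                for sql, args in cl:
                    conn.execute(sql, args)
        except Exception as e:
            logging.warning('could not save cache value [%s], caught err: %s', cl, e)


conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE provider_cache (name TEXT, url TEXT)')
save_cache_rows(conn, [('INSERT INTO provider_cache VALUES (?, ?)', ('Show.S01E02', 'magnet:...'))])
save_cache_rows(conn, [('INSERT INTO no_such_table VALUES (?)', ('x',))])  # logged, not raised
print(conn.execute('SELECT count(*) FROM provider_cache').fetchone())
```
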