Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-04 10:23:37 +00:00
Merge branch 'origin/dev'
commit 24f5bbae97
4 changed files with 56 additions and 27 deletions
@@ -31,7 +31,7 @@ from threading import Lock
 # apparently py2exe won't build these unless they're imported somewhere
 from sickbeard import providers, metadata, config
 from sickbeard.providers.generic import GenericProvider
-from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \
+from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
     omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
     naming_ep_type
@@ -22,7 +22,6 @@ __all__ = ['ezrss',
            'btn',
            'thepiratebay',
            'kat',
-           'publichd',
            'torrentleech',
            'scc',
            'hdtorrents',
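Note: the two hunks above retire the publichd provider together. The package-level provider import and the entry in the providers package's __all__ list have to be removed in the same commit, because the provider list is assembled from the names that package exports. A minimal, hypothetical sketch of that pattern follows; it is not SickGear's actual loader, and the module names are stand-ins.

# Hypothetical sketch of why both edits are needed: provider modules are imported
# by the names the providers package exports, so a retired provider must vanish
# from the import line and from __all__ at the same time. Names are stand-ins.
import importlib

__all__ = ['ezrss', 'btn', 'kat']  # 'publichd' removed

def make_provider_list(package='sickbeard.providers'):
    found = []
    for name in __all__:
        try:
            found.append(importlib.import_module('%s.%s' % (package, name)))
        except ImportError:
            # a name left in __all__ without a matching module would drop out here
            pass
    return found

print(make_provider_list())  # [] outside a SickGear checkout, the provider modules otherwise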
@@ -177,15 +177,19 @@ class KATProvider(generic.TorrentProvider):
         if not (ep_obj.show.air_by_date or ep_obj.show.sports):
             for show_name in set(allPossibleShowNames(self.show)):
                 if ep_obj.show.air_by_date or ep_obj.show.sports:
-                    ep_string = show_name + str(ep_obj.airdate).split('-')[0] + ' category:tv' #2) showName Season X
-                else:
-                    ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) + ' -S%02d' % int(ep_obj.scene_season) + 'E' + ' category:tv' #1) showName SXX -SXXE
-                search_string['Season'].append(ep_string)
-
-                if ep_obj.show.air_by_date or ep_obj.show.sports:
-                    ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0] + ' category:tv' #2) showName Season X
-                else:
-                    ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*' + ' category:tv' #2) showName Season X
-                search_string['Season'].append(ep_string)
+                    ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
+                    search_string['Season'].append(ep_string)
+                    ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0]
+                    search_string['Season'].append(ep_string)
+                elif ep_obj.show.anime:
+                    ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
+                    search_string['Season'].append(ep_string)
+                elif:
+                    ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) + ' -S%02d' % int(
+                        ep_obj.scene_season) + 'E' + ' category:tv' #1) showName SXX -SXXE
+                    search_string['Season'].append(ep_string)
+                    ep_string = show_name + ' Season ' + str(
+                        ep_obj.scene_season) + ' -Ep*' + ' category:tv' # 2) showName Season X
+                    search_string['Season'].append(ep_string)

         return [search_string]
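Note on the hunk above: the season search-string builder gains an anime branch keyed on the scene absolute number, the air-by-date/sports branch drops the ' category:tv' suffix, and the long concatenations are wrapped across lines. A rough standalone rendering of the resulting branches follows, using a stand-in episode object; SickGear's real ep_obj/show attributes are assumed, not imported.

# Stand-in rendering of the season search-string branches after this hunk.
from types import SimpleNamespace

def season_strings(show_name, ep_obj):
    out = []
    if ep_obj.show.air_by_date or ep_obj.show.sports:
        out.append(show_name + ' ' + str(ep_obj.airdate).split('-')[0])
        out.append(show_name + ' Season ' + str(ep_obj.airdate).split('-')[0])
    elif ep_obj.show.anime:
        out.append(show_name + ' %d' % ep_obj.scene_absolute_number)
    else:
        out.append(show_name + ' S%02d -S%02dE category:tv' % (ep_obj.scene_season, ep_obj.scene_season))
        out.append(show_name + ' Season %s -Ep* category:tv' % ep_obj.scene_season)
    return out

anime_ep = SimpleNamespace(show=SimpleNamespace(air_by_date=False, sports=False, anime=True),
                           scene_absolute_number=42)
print(season_strings('Example Show', anime_ep))  # ['Example Show 42']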
@@ -196,7 +200,7 @@ class KATProvider(generic.TorrentProvider):
         if self.show.air_by_date:
             for show_name in set(allPossibleShowNames(self.show)):
                 ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-','|')
+                            str(ep_obj.airdate).replace('-', '|')
                 search_string['Episode'].append(ep_string)
         elif self.show.sports:
             for show_name in set(allPossibleShowNames(self.show)):
@@ -204,14 +208,17 @@ class KATProvider(generic.TorrentProvider):
                             str(ep_obj.airdate).replace('-', '|') + '|' + \
                             ep_obj.airdate.strftime('%b')
                 search_string['Episode'].append(ep_string)
+        elif self.show.anime:
+            for show_name in set(allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + ' ' + \
+                            "%i" % int(ep_obj.scene_absolute_number)
+                search_string['Episode'].append(ep_string)
         else:
             for show_name in set(allPossibleShowNames(self.show)):
                 ep_string = sanitizeSceneName(show_name) + ' ' + \
                             sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
                                                                   'episodenumber': ep_obj.scene_episode} + '|' + \
                             sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode} + '|' + \
-                            sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.scene_season,
                                                                   'episodenumber': ep_obj.scene_episode} + ' %s category:tv' % add_string
                 search_string['Episode'].append(re.sub('\s+', ' ', ep_string))

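Note on the two hunks above: episode search strings get the same anime treatment ("%i" % scene_absolute_number), and the third naming_ep_type alternate is dropped from the pipe-separated pattern. A self-contained sketch of that structure follows; the two naming patterns are assumed stand-ins for sickbeard.config.naming_ep_type, chosen only to show the shape of the output.

# Stand-in for the episode search-string branches after this hunk. Only the
# structure (pipe-separated alternates plus a category filter) matters here.
import re

NAMING_EP_TYPE_2 = 'S%(seasonnumber)02dE%(episodenumber)02d'  # assumed stand-in
NAMING_EP_TYPE_0 = '%(seasonnumber)dx%(episodenumber)02d'     # assumed stand-in

def episode_strings(show_name, season, episode, absolute_number=None, anime=False, add_string=''):
    if anime:
        ep_string = show_name + ' ' + '%i' % int(absolute_number)
    else:
        ep_string = show_name + ' ' + \
                    NAMING_EP_TYPE_2 % {'seasonnumber': season, 'episodenumber': episode} + '|' + \
                    NAMING_EP_TYPE_0 % {'seasonnumber': season, 'episodenumber': episode} + \
                    ' %s category:tv' % add_string
    # the original wraps the result in re.sub to collapse the double space left by an empty add_string
    return re.sub(r'\s+', ' ', ep_string)

print(episode_strings('Example Show', 5, 3))            # Example Show S05E03|5x03 category:tv
print(episode_strings('Example Show', 5, 3, 42, True))  # Example Show 42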
@@ -252,7 +259,8 @@ class KATProvider(generic.TorrentProvider):
                     for tr in torrent_rows[1:]:

                         try:
-                            link = urlparse.urljoin(self.url,(tr.find('div', {'class': 'torrentname'}).find_all('a')[1])['href'])
+                            link = urlparse.urljoin(self.url,
+                                                    (tr.find('div', {'class': 'torrentname'}).find_all('a')[1])['href'])
                             id = tr.get('id')[-7:]
                             title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text \
                                     or (tr.find('div', {'class': 'torrentname'}).find_all('a')[2]).text
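Note on the hunk above: only the urlparse.urljoin call is re-wrapped; the row parsing itself is unchanged. For readers unfamiliar with it, a minimal BeautifulSoup sketch of that parsing against a made-up table row follows; the base URL and row id are invented.

# Minimal sketch of the row parsing the hunk above reflows, run against a made-up
# <tr>; assumes BeautifulSoup 4. Selectors mirror the diff: the second <a> inside
# div.torrentname carries the href and the visible title.
try:
    from urllib.parse import urljoin  # Python 3
except ImportError:
    from urlparse import urljoin      # Python 2, as in the original module

from bs4 import BeautifulSoup

html = '''<tr id="torrent_example1234567">
  <td><div class="torrentname">
    <a href="/icon"></a>
    <a href="/example-torrent-t1234567.html">Example.Show.S05E03.720p</a>
  </div></td>
</tr>'''

tr = BeautifulSoup(html, 'html.parser').find('tr')
link = urljoin('https://kat.example/', tr.find('div', {'class': 'torrentname'}).find_all('a')[1]['href'])
id = tr.get('id')[-7:]
title = tr.find('div', {'class': 'torrentname'}).find_all('a')[1].text
print(link, id, title)  # https://kat.example/example-torrent-t1234567.html 1234567 Example.Show.S05E03.720p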
@@ -456,7 +464,7 @@ class KATCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)

         return self._addCacheEntry(title, url)

@@ -1837,9 +1837,31 @@ class TVEpisode(object):
             logger.log(str(self.show.indexerid) + u": Not creating SQL queue - record is not dirty", logger.DEBUG)
             return

-        # use a custom update/insert method to get the data into the DB
-        return [
-            "INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES "
+        myDB = db.DBConnection()
+
+        rows = myDB.select(
+            'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
+            [self.show.indexerid, self.season, self.episode])
+
+        epID = None
+        if rows:
+            epID = int(rows[0]['episode_id'])
+
+        if epID:
+            # use a custom update method to get the data into the DB for existing records.
+            return [
+                "UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, "
+                "subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, "
+                "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
+                "absolute_number = ? WHERE episode_id = ?",
+                [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
+                 self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn,
+                 self.status, self.location, self.file_size,self.release_name, self.is_proper, self.show.indexerid,
+                 self.season, self.episode, self.absolute_number, epID]]
+        else:
+            # use a custom insert method to get the data into the DB.
+            return [
+                "INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES "
                 "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
                 [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description,
                  ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
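Note on the hunk above: the TVEpisode SQL-building method no longer emits a blanket INSERT OR REPLACE. It first looks up the existing episode_id, then returns an UPDATE for rows that already exist and an INSERT OR IGNORE otherwise, presumably to avoid the delete-and-re-insert semantics of SQLite's REPLACE. A trimmed, self-contained sqlite3 sketch of that update-or-insert pattern follows; the table is cut down to a few columns and the helper name is mine, not SickGear's.

# Standalone sqlite3 sketch of the update-or-insert pattern this hunk introduces:
# look up the existing primary key first, UPDATE in place when it exists, and only
# INSERT OR IGNORE when it does not.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.row_factory = sqlite3.Row
conn.execute('CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid INTEGER, '
             'season INTEGER, episode INTEGER, name TEXT)')

def get_sql(showid, season, episode, name):
    rows = conn.execute('SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                        [showid, season, episode]).fetchall()
    ep_id = int(rows[0]['episode_id']) if rows else None
    if ep_id:
        return ['UPDATE tv_episodes SET name = ? WHERE episode_id = ?', [name, ep_id]]
    return ['INSERT OR IGNORE INTO tv_episodes (showid, season, episode, name) VALUES (?,?,?,?)',
            [showid, season, episode, name]]

for title in ('first pass', 'second pass'):  # second call updates in place, keeps episode_id
    sql, args = get_sql(1001, 5, 3, title)
    conn.execute(sql, args)
print(dict(conn.execute('SELECT * FROM tv_episodes').fetchone()))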