Upgraded IMDbPY and improved its performance.
Fixed TVDB indexer API issues.
commit cfcc35ebcc (parent 4784b4619f)
6 changed files with 25 additions and 36 deletions
@@ -29,7 +29,7 @@
 [imdbpy]
 ## Default.
-accessSystem = http
+accessSystem = httpThin

 ## Optional (options common to every data access system):
 # Activate adult searches (on, by default).
@@ -69,7 +69,7 @@ accessSystem = http
 ## Set the threshold for logging messages.
 # Can be one of "debug", "info", "warning", "error", "critical" (default:
 # "warning").
-#loggingLevel = debug
+loggingLevel = debug

 ## Path to a configuration file for the logging facility;
 # see: http://docs.python.org/library/logging.html#configuring-logging
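Note: httpThin is a lighter variant of IMDbPY's default http data access system (available in the IMDbPY releases of that era); it fetches and parses less per page, which is presumably the performance gain the commit message refers to. A minimal sketch of selecting the access system from code rather than from the config file above; the show title is a placeholder, not data from this commit:

# Illustrative sketch only: choose IMDbPY's access system programmatically.
from imdb import IMDb

ia = IMDb('httpThin')                      # same effect as accessSystem = httpThin
results = ia.search_movie('example show')  # placeholder title
if results:
    show = ia.get_movie(results[0].movieID)
    print(show.get('title'))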
@@ -598,7 +598,7 @@ class Tvdb:
                 zipdata = StringIO.StringIO()
                 zipdata.write(resp.content)
                 myzipfile = zipfile.ZipFile(zipdata)
-                return xmltodict.parse(myzipfile.read('%s.xml' % language).strip().encode('utf-8'), postprocessor=process)
+                return xmltodict.parse(myzipfile.read('%s.xml' % language), postprocessor=process)
             except zipfile.BadZipfile:
                 raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
         else:
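Note: dropping the strip().encode('utf-8') round-trip hands the bytes read from the zip member straight to xmltodict.parse(), which honours the encoding declared in the XML document itself. A reduced, standalone sketch of the same download-and-parse pattern; the URL, function name and error type are placeholders, and the postprocessor used above is omitted:

# Sketch only: fetch a zip, pull one XML member, parse it with xmltodict.
import zipfile
import StringIO                      # Python 2, matching the surrounding code

import requests
import xmltodict

def fetch_series_xml(url, language='en'):
    resp = requests.get(url)
    zipdata = StringIO.StringIO()
    zipdata.write(resp.content)
    try:
        myzipfile = zipfile.ZipFile(zipdata)
        # No manual strip()/encode('utf-8'): the parser uses the encoding
        # declared in the XML prolog.
        return xmltodict.parse(myzipfile.read('%s.xml' % language))
    except zipfile.BadZipfile:
        raise RuntimeError('Bad zip file received, could not read it')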
setup.py
@@ -179,7 +179,7 @@ for curFile in auto_process_files:
 setup(
     options = {'py2exe': {'bundle_files': 1}},
     zipfile = None,
-    console = ['updater.py'],
+    console = ['updater.py'], requires=['Cheetah']
 )

 if 'test' in oldArgs:
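Note: requires=['Cheetah'] is plain distutils metadata; it records the Cheetah templating dependency but does not download or bundle it. A minimal py2exe setup() mirroring the shape of the call above (illustrative only; assumes py2exe is installed and an updater.py exists):

# Illustrative sketch, not the full SickGear setup.py.
from distutils.core import setup
import py2exe  # noqa: F401 -- registers the 'py2exe' command with distutils

setup(
    options={'py2exe': {'bundle_files': 1}},
    zipfile=None,
    console=['updater.py'],
    requires=['Cheetah'],   # metadata only; does not install Cheetah
)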
@@ -163,9 +163,9 @@ class AllShowsListUI:
                 continue

             if 'seriesname' in curShow:
-                seriesnames.append(curShow['seriesname'].encode('utf-8'))
+                seriesnames.append(curShow['seriesname'])
             if 'aliasnames' in curShow:
-                seriesnames.extend(curShow['aliasnames'].encode('utf-8').split('|'))
+                seriesnames.extend(curShow['aliasnames'].split('|'))

             for name in seriesnames:
                 if searchterm.lower() in name.lower():
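Note: with the encode('utf-8') calls removed, seriesnames keeps unicode objects, so the case-insensitive substring test below compares unicode with unicode instead of UTF-8 byte strings. A small standalone sketch of that matching step; the show data is made up:

# Sketch of the name-matching logic, kept entirely in unicode.
def matches(searchterm, curShow):
    seriesnames = []
    if 'seriesname' in curShow:
        seriesnames.append(curShow['seriesname'])
    if 'aliasnames' in curShow:
        seriesnames.extend(curShow['aliasnames'].split('|'))
    return any(searchterm.lower() in name.lower() for name in seriesnames)

show = {'seriesname': u'An Example Show', 'aliasnames': u'Example|Beispielserie'}
assert matches(u'beispiel', show)   # made-up data, for illustration only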
@@ -52,6 +52,7 @@ from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVE
 from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, \
     NAMING_LIMITED_EXTEND_E_PREFIXED

+
 class TVShow(object):
     def __init__(self, indexer, indexerid, lang=""):

@@ -238,13 +239,13 @@ class TVShow(object):

             # get scene absolute numbering
             ep.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering(self.indexerid,
-                                                                                               self.indexer,
-                                                                                               ep.absolute_number)
+                                                                                               self.indexer,
+                                                                                               ep.absolute_number)

             # get scene season and episode numbering
             ep.scene_season, ep.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.indexerid,
-                                                                                               self.indexer,
-                                                                                               season, episode)
+                                                                                               self.indexer,
+                                                                                               season, episode)

             if ep != None:
                 self.episodes[season][episode] = ep
@@ -891,23 +892,13 @@ class TVShow(object):
                 imdb_info[key] = imdbTv.get(key.replace('_', ' '))

             # Filter only the value
-            if imdb_info['runtimes']:
-                imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0)
-            else:
-                imdb_info['runtimes'] = self.runtime
-
-            if imdb_info['akas']:
-                imdb_info['akas'] = '|'.join(imdb_info['akas'])
-            else:
-                imdb_info['akas'] = ''
+            imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0) or self.runtime
+            imdb_info['akas'] = '|'.join(imdb_info['akas']) or ''

             # Join all genres in a string
-            if imdb_info['genres']:
-                imdb_info['genres'] = '|'.join(imdb_info['genres'])
-            else:
-                imdb_info['genres'] = ''
+            imdb_info['genres'] = '|'.join(imdb_info['genres']) or ''

             # Get only the production country certificate if any
             if imdb_info['certificates'] and imdb_info['countries']:
                 dct = {}
                 try:
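Note: the collapsed branches rely on Python's "x or default" idiom, which keeps the left value when it is truthy and otherwise falls back. As a reading aid only: re.search(...).group(0) still presumes the pattern matched, so the "or" fallback covers a falsy match result rather than a missing one. A tiny illustration with made-up values:

import re

runtimes = '60 min'                     # made-up IMDb-style value
runtime_fallback = 45
# '60' is truthy, so the left side wins; the fallback applies only to a falsy result.
value = re.search('\d+', runtimes).group(0) or runtime_fallback   # -> '60'

akas = []                               # empty list -> join gives '' -> fall back
akas_joined = '|'.join(akas) or ''      # -> ''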
@@ -921,11 +912,7 @@ class TVShow(object):
             else:
                 imdb_info['certificates'] = ''

-            if imdb_info['country_codes']:
-                imdb_info['country_codes'] = '|'.join(imdb_info['country_codes'])
-            else:
-                imdb_info['country_codes'] = ''
-
+            imdb_info['country_codes'] = '|'.join(imdb_info['country_codes']) or ''
             imdb_info['last_update'] = datetime.date.today().toordinal()

             # Rename dict keys without spaces for DB upsert
@@ -1855,18 +1842,21 @@ class TVEpisode(object):
                 "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
                 "absolute_number = ? WHERE episode_id = ?",
                 [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
-                 self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn,
-                 self.status, self.location, self.file_size,self.release_name, self.is_proper, self.show.indexerid,
+                 self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
+                 self.hastbn,
+                 self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
                  self.season, self.episode, self.absolute_number, epID]]
         else:
             # use a custom insert method to get the data into the DB.
             return [
                 "INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES "
                 "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
-                [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description,
+                [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
+                 self.description,
                  ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
                  self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
-                 self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number]]
+                 self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
+                 self.absolute_number]]

     def saveToDB(self, forceSave=False):
         """
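Note: the INSERT OR IGNORE ... VALUES ((SELECT episode_id ...), ...) form is a poor man's upsert: the subquery reuses an existing episode_id (so the statement collides with the primary key and is ignored) or yields NULL so SQLite assigns a fresh autoincrement id; existing rows are changed by the separate UPDATE branch in the first half of the hunk. A reduced, self-contained sketch of the same pattern against a throwaway table; the column set is trimmed down and is not the real SickGear schema:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute(
    "CREATE TABLE tv_episodes ("
    " episode_id INTEGER PRIMARY KEY AUTOINCREMENT,"
    " showid INTEGER, season INTEGER, episode INTEGER, name TEXT)")

def insert_episode(conn, showid, season, episode, name):
    # Subquery returns the existing episode_id (insert is then ignored on the
    # primary-key conflict) or NULL (SQLite assigns a new autoincrement id).
    conn.execute(
        "INSERT OR IGNORE INTO tv_episodes"
        " (episode_id, showid, season, episode, name) VALUES"
        " ((SELECT episode_id FROM tv_episodes"
        "   WHERE showid = ? AND season = ? AND episode = ?), ?, ?, ?, ?)",
        [showid, season, episode, showid, season, episode, name])

insert_episode(conn, 1, 2, 3, 'pilot')
insert_episode(conn, 1, 2, 3, 'pilot (renamed)')   # ignored: row already exists
print(conn.execute("SELECT * FROM tv_episodes").fetchall())
# one row only; the second insert was a no-op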
@@ -79,7 +79,6 @@ except ImportError:
 from sickbeard import browser
 from lib import adba

-
 def _handle_reverse_proxy():
     if sickbeard.HANDLE_REVERSE_PROXY:
         cherrypy.lib.cptools.proxy()
@@ -2660,7 +2659,7 @@ class Home:
                            ["Anime", anime]]
         else:
-            t.showlists = [["Shows", sickbeard.showList]]
+            t.showlists = [["Shows", sickbeard.showList]]

         t.submenu = HomeMenu()
         return _munge(t)
