mirror of
https://github.com/SickGear/SickGear.git
synced 2024-12-01 00:43:37 +00:00
Fix issue, when adding an existing show set its default group to ensure it appears on the show list page.
This commit is contained in:
parent
7241559650
commit
f463ad5705
4 changed files with 83 additions and 76 deletions
20
CHANGES.md
20
CHANGES.md
|
@ -50,14 +50,18 @@
|
|||
* Add a conclusive bottom line to the pp result report
|
||||
|
||||
[develop changelog]
|
||||
Fix issue changing a custom show list group name that is in use. The bug resulted in the db containing stale group names
|
||||
but worse, orphaned shows disappeared from the Show List page entirely. Changed texts to describe this feature more
|
||||
clearly from tag to group names. Repositioned this feature on the add show, edit show, and config pages. Change the drop
|
||||
down select to dynamically hide/show the list of group names and change the respectively selected help texts. Change
|
||||
insert space between items in the list of group names for readability. Notify user if an attempt to remove an in use
|
||||
group name is prevented. PEP8 and code cleanup to SG conventions. Removed fixed column widths, this means multiple groups
|
||||
have different column widths, but the original approach is preferred because fixed widths look bad with long show titles.
|
||||
Add newlines between error log lines on the Logs & Errors page.
|
||||
* Fix issue, when adding an existing show, set its default group to ensure it now appears on the show list page
|
||||
* Fix issue, changing a custom show list group name that is in use. The issue left db records with stale group names,
|
||||
worse, orphaned shows disappeared from the Show List page entirely
|
||||
* Changed texts to describe this feature more clearly from tag to group names
|
||||
* Repositioned this feature on the add show, edit show, and config pages
|
||||
* Change the drop down select to dynamically hide/show the group name list, and change respectively selected help texts
|
||||
* Change insert space between items in the list of group names for readability
|
||||
* Notify user if an attempt to remove an in use group name is prevented
|
||||
* PEP8 and code cleanup to SG conventions
|
||||
* Removed fixed column widths, this means multiple groups have different column widths, but the original approach is
|
||||
preferred because fixed widths look bad with long show titles
|
||||
* Add newlines between error log lines on the Logs &amp; Errors page, removed as a byproduct of trimming HTML output
|
||||
|
||||
|
||||
### 0.8.3 (2015-04-25 08:48:00 UTC)
|
||||
|
|
|
@ -558,30 +558,29 @@ class Tvdb:
|
|||
|
||||
@retry(tvdb_error)
|
||||
def _loadUrl(self, url, params=None, language=None):
|
||||
try:
|
||||
log().debug("Retrieving URL %s" % url)
|
||||
log().debug('Retrieving URL %s' % url)
|
||||
|
||||
session = requests.session()
|
||||
|
||||
# get response from TVDB
|
||||
if self.config['cache_enabled']:
|
||||
session = CacheControl(requests.session(), cache=caches.FileCache(self.config['cache_location']))
|
||||
if self.config['proxy']:
|
||||
log().debug("Using proxy for URL: %s" % url)
|
||||
session.proxies = {
|
||||
"http": self.config['proxy'],
|
||||
"https": self.config['proxy'],
|
||||
}
|
||||
session = CacheControl(session, cache=caches.FileCache(self.config['cache_location']))
|
||||
|
||||
resp = session.get(url, params=params)
|
||||
else:
|
||||
resp = requests.get(url, params=params)
|
||||
if self.config['proxy']:
|
||||
log().debug('Using proxy for URL: %s' % url)
|
||||
session.proxies = {'http': self.config['proxy'], 'https': self.config['proxy']}
|
||||
|
||||
session.headers.update({'Accept-Encoding': 'gzip,deflate'})
|
||||
|
||||
try:
|
||||
resp = session.get(url.strip(), params=params)
|
||||
except requests.exceptions.HTTPError, e:
|
||||
raise tvdb_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
|
||||
raise tvdb_error('HTTP error %s while loading URL %s' % (e.errno, url))
|
||||
except requests.exceptions.ConnectionError, e:
|
||||
raise tvdb_error("Connection error " + str(e.message) + " while loading URL " + str(url))
|
||||
raise tvdb_error('Connection error %s while loading URL %s' % (e.message, url))
|
||||
except requests.exceptions.Timeout, e:
|
||||
raise tvdb_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
|
||||
raise tvdb_error('Connection timed out %s while loading URL %s' % (e.message, url))
|
||||
except Exception:
|
||||
raise tvdb_error("Unknown exception while loading URL " + url + ": " + traceback.format_exc())
|
||||
raise tvdb_error('Unknown exception while loading URL %s: %s' % (url, traceback.format_exc()))
|
||||
|
||||
def process(path, key, value):
|
||||
key = key.lower()
|
||||
|
@ -589,34 +588,34 @@ class Tvdb:
|
|||
# clean up value and do type changes
|
||||
if value:
|
||||
try:
|
||||
if key == 'firstaired' and value in "0000-00-00":
|
||||
if key == 'firstaired' and value in '0000-00-00':
|
||||
new_value = str(dt.date.fromordinal(1))
|
||||
new_value = re.sub("([-]0{2}){1,}", "", new_value)
|
||||
fixDate = parse(new_value, fuzzy=True).date()
|
||||
value = fixDate.strftime("%Y-%m-%d")
|
||||
new_value = re.sub('([-]0{2})+', '', new_value)
|
||||
fix_date = parse(new_value, fuzzy=True).date()
|
||||
value = fix_date.strftime('%Y-%m-%d')
|
||||
elif key == 'firstaired':
|
||||
value = parse(value, fuzzy=True).date()
|
||||
value = value.strftime("%Y-%m-%d")
|
||||
value = value.strftime('%Y-%m-%d')
|
||||
|
||||
#if key == 'airs_time':
|
||||
# value = parse(value).time()
|
||||
# value = value.strftime("%I:%M %p")
|
||||
# value = value.strftime('%I:%M %p')
|
||||
except:
|
||||
pass
|
||||
|
||||
return (key, value)
|
||||
return key, value
|
||||
|
||||
if resp.ok:
|
||||
if 'application/zip' in resp.headers.get("Content-Type", ''):
|
||||
if 'application/zip' in resp.headers.get('Content-Type', ''):
|
||||
try:
|
||||
# TODO: The zip contains actors.xml and banners.xml, which are currently ignored [GH-20]
|
||||
log().debug("We recived a zip file unpacking now ...")
|
||||
log().debug('We recived a zip file unpacking now ...')
|
||||
zipdata = StringIO.StringIO()
|
||||
zipdata.write(resp.content)
|
||||
myzipfile = zipfile.ZipFile(zipdata)
|
||||
return xmltodict.parse(myzipfile.read('%s.xml' % language), postprocessor=process)
|
||||
except zipfile.BadZipfile:
|
||||
raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
|
||||
raise tvdb_error('Bad zip file received from thetvdb.com, could not read it')
|
||||
else:
|
||||
try:
|
||||
return xmltodict.parse(resp.content.strip(), postprocessor=process)
|
||||
|
@ -682,9 +681,12 @@ class Tvdb:
|
|||
self.config['params_getSeries']['seriesname'] = series
|
||||
|
||||
try:
|
||||
seriesFound = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries']).values()[0]
|
||||
return seriesFound
|
||||
seriesFound = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
||||
if seriesFound:
|
||||
return seriesFound.values()[0]
|
||||
except:
|
||||
pass
|
||||
|
||||
return []
|
||||
|
||||
def _getSeries(self, series):
|
||||
|
|
|
@ -391,31 +391,29 @@ class TVRage:
|
|||
|
||||
#@retry(tvrage_error)
|
||||
def _loadUrl(self, url, params=None):
|
||||
try:
|
||||
log().debug("Retrieving URL %s" % url)
|
||||
log().debug('Retrieving URL %s' % url)
|
||||
|
||||
session = requests.session()
|
||||
|
||||
# get response from TVRage
|
||||
if self.config['cache_enabled']:
|
||||
session = CacheControl(cache=caches.FileCache(self.config['cache_location']))
|
||||
session = CacheControl(session, cache=caches.FileCache(self.config['cache_location']))
|
||||
|
||||
if self.config['proxy']:
|
||||
log().debug("Using proxy for URL: %s" % url)
|
||||
session.proxies = {
|
||||
"http": self.config['proxy'],
|
||||
"https": self.config['proxy'],
|
||||
}
|
||||
log().debug('Using proxy for URL: %s' % url)
|
||||
session.proxies = {'http': self.config['proxy'], 'https': self.config['proxy']}
|
||||
|
||||
session.headers.update({'Accept-Encoding': 'gzip,deflate'})
|
||||
|
||||
try:
|
||||
resp = session.get(url.strip(), params=params)
|
||||
else:
|
||||
resp = requests.get(url.strip(), params=params)
|
||||
|
||||
except requests.exceptions.HTTPError, e:
|
||||
raise tvrage_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
|
||||
raise tvrage_error('HTTP error %s while loading URL %s' % (e.errno, url))
|
||||
except requests.exceptions.ConnectionError, e:
|
||||
raise tvrage_error("Connection error " + str(e.message) + " while loading URL " + str(url))
|
||||
raise tvrage_error('Connection error %s while loading URL %s' % (e.message, url))
|
||||
except requests.exceptions.Timeout, e:
|
||||
raise tvrage_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
|
||||
raise tvrage_error('Connection timed out %s while loading URL %s' % (e.message, url))
|
||||
except Exception:
|
||||
raise tvrage_error("Unknown exception while loading URL " + url + ": " + traceback.format_exc())
|
||||
raise tvrage_error('Unknown exception while loading URL %s: %s' % (url, traceback.format_exc()))
|
||||
|
||||
def remap_keys(path, key, value):
|
||||
name_map = {
|
||||
|
@ -456,22 +454,22 @@ class TVRage:
|
|||
value = filter(None, value)
|
||||
value = '|' + '|'.join(value) + '|'
|
||||
try:
|
||||
if key == 'firstaired' and value in "0000-00-00":
|
||||
if key == 'firstaired' and value in '0000-00-00':
|
||||
new_value = str(dt.date.fromordinal(1))
|
||||
new_value = re.sub("([-]0{2}){1,}", "", new_value)
|
||||
fixDate = parse(new_value, fuzzy=True).date()
|
||||
value = fixDate.strftime("%Y-%m-%d")
|
||||
new_value = re.sub('([-]0{2})+', '', new_value)
|
||||
fix_date = parse(new_value, fuzzy=True).date()
|
||||
value = fix_date.strftime('%Y-%m-%d')
|
||||
elif key == 'firstaired':
|
||||
value = parse(value, fuzzy=True).date()
|
||||
value = value.strftime("%Y-%m-%d")
|
||||
value = value.strftime('%Y-%m-%d')
|
||||
|
||||
#if key == 'airs_time':
|
||||
# value = parse(value).time()
|
||||
# value = value.strftime("%I:%M %p")
|
||||
# value = value.strftime('%I:%M %p')
|
||||
except:
|
||||
pass
|
||||
|
||||
return (key, value)
|
||||
return key, value
|
||||
|
||||
if resp.ok:
|
||||
try:
|
||||
|
@ -544,9 +542,12 @@ class TVRage:
|
|||
self.config['params_getSeries']['show'] = series
|
||||
|
||||
try:
|
||||
seriesFound = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries']).values()[0]
|
||||
return seriesFound
|
||||
seriesFound = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
||||
if seriesFound:
|
||||
return seriesFound.values()[0]
|
||||
except:
|
||||
pass
|
||||
|
||||
return []
|
||||
|
||||
def _getSeries(self, series):
|
||||
|
|
|
@ -294,13 +294,13 @@ class QueueItemAdd(ShowQueueItem):
|
|||
|
||||
# set up initial values
|
||||
self.show.location = self.showDir
|
||||
self.show.subtitles = self.subtitles if self.subtitles != None else sickbeard.SUBTITLES_DEFAULT
|
||||
self.show.subtitles = self.subtitles if None is not self.subtitles else sickbeard.SUBTITLES_DEFAULT
|
||||
self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT
|
||||
self.show.flatten_folders = self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT
|
||||
self.show.anime = self.anime if self.anime != None else sickbeard.ANIME_DEFAULT
|
||||
self.show.scene = self.scene if self.scene != None else sickbeard.SCENE_DEFAULT
|
||||
self.show.paused = self.paused if self.paused != None else False
|
||||
self.show.tag = self.tag
|
||||
self.show.flatten_folders = self.flatten_folders if None is not self.flatten_folders else sickbeard.FLATTEN_FOLDERS_DEFAULT
|
||||
self.show.anime = self.anime if None is not self.anime else sickbeard.ANIME_DEFAULT
|
||||
self.show.scene = self.scene if None is not self.scene else sickbeard.SCENE_DEFAULT
|
||||
self.show.paused = self.paused if None is not self.paused else False
|
||||
self.show.tag = self.tag if None is not self.tag else 'Show List'
|
||||
|
||||
if self.show.anime:
|
||||
self.show.release_groups = BlackAndWhiteList(self.show.indexerid)
|
||||
|
|
Loading…
Reference in a new issue