commit 6f426804f3
https://github.com/SickGear/SickGear.git
3 changed files with 36 additions and 12 deletions
sickbeard/postProcessor.py

@@ -859,17 +859,6 @@ class PostProcessor(object):
         # for curEp in [ep_obj] + ep_obj.relatedEps:
         #    curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
 
-        # delete the existing file (and company)
-        for cur_ep in [ep_obj] + ep_obj.relatedEps:
-            try:
-                self._delete(cur_ep.location, associated_files=True)
-                # clean up any left over folders
-                if cur_ep.location:
-                    helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
-                                                 keep_dir=ep_obj.show._location)
-            except (OSError, IOError):
-                raise exceptions.PostProcessingFailed("Unable to delete the existing files")
-
         # if the show directory doesn't exist then make it if allowed
         if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
             self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
@@ -977,6 +966,17 @@ class PostProcessor(object):
         except (OSError, IOError):
             raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
 
+        # delete the existing file (and company)
+        for cur_ep in [ep_obj] + ep_obj.relatedEps:
+            try:
+                self._delete(cur_ep.location, associated_files=True)
+                # clean up any left over folders
+                if cur_ep.location:
+                    helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
+                                                 keep_dir=ep_obj.show._location)
+            except (OSError, IOError):
+                raise exceptions.PostProcessingFailed("Unable to delete the existing files")
+
         # download subtitles
         if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
             for cur_ep in [ep_obj] + ep_obj.relatedEps:
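Note: taken together, the two postProcessor.py hunks reorder the method rather than change any single line. Deleting the pre-existing episode files (and pruning their emptied folders) used to happen before the new download was moved into the show directory; it now happens only after the move/copy step has succeeded, presumably so a failed move no longer leaves the episode with neither its old file nor its new one. A toy sketch of just that reordering; the class and helper names below are invented stand-ins for the much larger real PostProcessor.process body:

class Processor(object):
    """Toy stand-in for PostProcessor; only the step ordering is real."""

    def __init__(self):
        self.log = []

    def _delete_existing(self):
        self.log.append('delete existing files')

    def _move_new_files(self):
        self.log.append('move new files into place')

    def process_old(self):
        # order before this commit: delete first, then move
        self._delete_existing()
        self._move_new_files()

    def process_new(self):
        # order after this commit: move first, then delete
        self._move_new_files()
        self._delete_existing()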
@ -154,6 +154,26 @@ class OmgwtfnzbsCache(tvcache.TVCache):
|
||||||
tvcache.TVCache.__init__(self, provider)
|
tvcache.TVCache.__init__(self, provider)
|
||||||
self.minTime = 20
|
self.minTime = 20
|
||||||
|
|
||||||
|
def _get_title_and_url(self, item):
|
||||||
|
"""
|
||||||
|
Retrieves the title and URL data from the item XML node
|
||||||
|
|
||||||
|
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
|
||||||
|
|
||||||
|
Returns: A tuple containing two strings representing title and URL respectively
|
||||||
|
"""
|
||||||
|
|
||||||
|
title = item.title if item.title else None
|
||||||
|
if title:
|
||||||
|
title = u'' + title
|
||||||
|
title = title.replace(' ', '.')
|
||||||
|
|
||||||
|
url = item.link if item.link else None
|
||||||
|
if url:
|
||||||
|
url = url.replace('&', '&')
|
||||||
|
|
||||||
|
return (title, url)
|
||||||
|
|
||||||
def _getDailyData(self):
|
def _getDailyData(self):
|
||||||
params = {'user': provider.username,
|
params = {'user': provider.username,
|
||||||
'api': provider.api_key,
|
'api': provider.api_key,
|
||||||
|
|
|
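Note: the new OmgwtfnzbsCache._get_title_and_url normalizes daily-search items before caching: spaces in titles become dots, and HTML-escaped ampersands in links are unescaped. A quick illustration of those two transforms; FakeItem and the sample values are invented for the example, while the real item comes from the provider's feed parser:

class FakeItem(object):
    """Hypothetical stand-in for a parsed feed entry exposing .title and .link."""

    def __init__(self, title, link):
        self.title = title
        self.link = link

item = FakeItem(u'Some Show 1x01 720p HDTV', 'https://example.org/nzb?id=1&amp;user=u')

# the same transforms the override applies
title = item.title.replace(' ', '.') if item.title else None
url = item.link.replace('&amp;', '&') if item.link else None

print(title)  # Some.Show.1x01.720p.HDTV
print(url)    # https://example.org/nzb?id=1&user=u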
@ -96,6 +96,10 @@ class TVCache():
|
||||||
myDB = self._getDB()
|
myDB = self._getDB()
|
||||||
myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])
|
myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])
|
||||||
|
|
||||||
|
def _get_title_and_url(self, item):
|
||||||
|
# override this in the provider if daily search has a different data layout to backlog searches
|
||||||
|
return self.provider._get_title_and_url(item)
|
||||||
|
|
||||||
def _getRSSData(self):
|
def _getRSSData(self):
|
||||||
|
|
||||||
data = None
|
data = None
|
||||||
|
@ -128,7 +132,7 @@ class TVCache():
|
||||||
# parse data
|
# parse data
|
||||||
cl = []
|
cl = []
|
||||||
for item in data:
|
for item in data:
|
||||||
title, url = self.provider._get_title_and_url(item)
|
title, url = self._get_title_and_url(item)
|
||||||
ci = self._parseItem(title, url)
|
ci = self._parseItem(title, url)
|
||||||
if ci is not None:
|
if ci is not None:
|
||||||
cl.append(ci)
|
cl.append(ci)
|
||||||
|
|
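Note: the two tvcache.py hunks add an overridable hook. The cache's parse loop now calls self._get_title_and_url, which by default still delegates to the provider, so existing caches behave exactly as before, while a cache subclass (such as OmgwtfnzbsCache above) can reshape daily-search items without touching how backlog results are parsed. A minimal sketch of the pattern, using toy classes that are not part of SickGear:

class ToyProvider(object):
    def _get_title_and_url(self, item):
        # items in this toy example are already (title, url) tuples
        return item

class ToyCache(object):
    """Default behaviour: parsing is delegated to the provider."""

    def __init__(self, provider):
        self.provider = provider

    def _get_title_and_url(self, item):
        return self.provider._get_title_and_url(item)

    def parse_items(self, items):
        # the loop calls the hook on self, so subclass overrides take effect
        return [self._get_title_and_url(item) for item in items]

class DottedTitleCache(ToyCache):
    """Override for a feed whose daily layout differs from backlog results."""

    def _get_title_and_url(self, item):
        title, url = ToyCache._get_title_and_url(self, item)
        return (title.replace(' ', '.') if title else title, url)

cache = DottedTitleCache(ToyProvider())
print(cache.parse_items([(u'Some Show 1x01', 'https://example.org/1')]))
# [(u'Some.Show.1x01', 'https://example.org/1')]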