From a0ef748f3b2d00342ed5759cb2d9a9db6ed1bdc1 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 16:35:10 -0700
Subject: [PATCH 01/13] Fixed saving general settings rootDir error

---
 sickbeard/webserve.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 58d7435f..c9f264d1 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -1477,7 +1477,7 @@ class ConfigGeneral(MainHandler):
                     handle_reverse_proxy=None, sort_article=None, auto_update=None, notify_on_update=None,
                     proxy_setting=None, anon_redirect=None, git_path=None, calendar_unprotected=None,
                     fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
-                    indexer_timeout=None, play_videos=None):
+                    indexer_timeout=None, play_videos=None, rootDir=None):

         results = []

@@ -1488,6 +1488,7 @@ class ConfigGeneral(MainHandler):
         sickbeard.AUTO_UPDATE = config.checkbox_to_value(auto_update)
         sickbeard.NOTIFY_ON_UPDATE = config.checkbox_to_value(notify_on_update)
         # sickbeard.LOG_DIR is set in config.change_LOG_DIR()
+        sickbeard.ROOT_DIRS = rootDir

         sickbeard.UPDATE_SHOWS_ON_START = config.checkbox_to_value(update_shows_on_start)
         config.change_UPDATE_FREQUENCY(update_frequency)

From 4af28657afae4e47da30cda4f9a0781d06fd7926 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 16:55:39 -0700
Subject: [PATCH 02/13] Fixed "TypeError: list indices must be integers, not
 list" that occurred during searches

---
 sickbeard/providers/generic.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 07131109..56582cce 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -261,10 +261,10 @@ class GenericProvider:
             # search cache for episode result
             cacheResult = self.cache.searchCache(epObj, manualSearch)
             if cacheResult:
-                if epObj not in results:
-                    results = [cacheResult]
+                if epObj.episode not in results:
+                    results[epObj.episode] = [result]
                 else:
-                    results.append(cacheResult)
+                    results[epObj.episode].append(result)

                 # found result, search next episode
                 continue

From 65eda93b2b7ea4b9629f50cdd12f808a61437ed1 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 17:25:07 -0700
Subject: [PATCH 03/13] Fixed high CPU usage during searches by adding a
 conditional check that prevents unneeded name parsing of search results that
 were already parsed and checked while filtering out bad releases
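
The gist of the change: release names that already went through the name
parser while filtering out bad releases no longer get parsed a second
time. A rough sketch of the short-circuit added to filterBadReleases
(simplified signature, not the full helper):

    def filterBadReleases(name, parse=True):
        # Callers that validated the name in an earlier pass send
        # parse=False to skip the CPU-heavy NameParser step.
        if parse:
            try:
                NameParser().parse(name)
            except InvalidNameException:
                return False
        # ... resultFilters checks continue as before ...
        return True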
---
 sickbeard/properFinder.py      |  2 +-
 sickbeard/search.py            | 21 ++-------------------
 sickbeard/show_name_helpers.py |  5 +++--
 sickbeard/tvcache.py           |  2 +-
 4 files changed, 7 insertions(+), 23 deletions(-)

diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py
index d4911c6a..4f21b5ae 100644
--- a/sickbeard/properFinder.py
+++ b/sickbeard/properFinder.py
@@ -149,7 +149,7 @@ class ProperFinder():
                            logger.DEBUG)
                 continue

-            if not show_name_helpers.filterBadReleases(curProper.name):
+            if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
                 logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, ignoring it",
                            logger.DEBUG)
                 continue
diff --git a/sickbeard/search.py b/sickbeard/search.py
index f9b651a4..4b70ee9c 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -319,23 +319,6 @@ def isFirstBestMatch(result):

     return False

-
-def filterSearchResults(show, season, results):
-    foundResults = {}
-
-    # make a list of all the results for this provider
-    for curEp in results:
-        # skip non-tv crap
-        results[curEp] = filter(
-            lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show, results[curEp])
-
-        if curEp in foundResults:
-            foundResults[curEp] += results[curEp]
-        else:
-            foundResults[curEp] = results[curEp]
-
-    return foundResults
-
 def searchForNeededEpisodes():
     foundResults = {}

@@ -468,7 +451,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
             for curEp in searchResults:
                 # skip non-tv crap
                 searchResults[curEp] = filter(
-                    lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show, searchResults[curEp])
+                    lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, searchResults[curEp])

                 if curEp in foundResults:
                     foundResults[curProvider.name][curEp] += searchResults[curEp]
@@ -554,7 +537,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
             individualResults = nzbSplitter.splitResult(bestSeasonResult)

             individualResults = filter(
-                lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show, individualResults)
+                lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, individualResults)

             for curResult in individualResults:
                 if len(curResult.episodes) == 1:
diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py
index 1685d9e4..765890f6 100644
--- a/sickbeard/show_name_helpers.py
+++ b/sickbeard/show_name_helpers.py
@@ -36,7 +36,7 @@ resultFilters = ["sub(bed|ed|pack|s)", "(dk|fin|heb|kor|nor|nordic|pl|swe)sub(be
                  "(dir|sample|sub|nfo)fix", "sample", "(dvd)?extras", "dub(bed)?"]


-def filterBadReleases(name):
+def filterBadReleases(name, parse=True):
     """
     Filters out non-english and just all-around stupid releases by comparing them
     to the resultFilters contents.
@@ -47,7 +47,8 @@ def filterBadReleases(name):
     """

     try:
-        NameParser().parse(name)
+        if parse:
+            NameParser().parse(name)
     except InvalidNameException:
         logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
         return False
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 1dd52d49..6326cca7 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -309,7 +309,7 @@ class TVCache():

         for curResult in sqlResults:
             # skip non-tv crap
-            if not show_name_helpers.filterBadReleases(curResult["name"]):
+            if not show_name_helpers.filterBadReleases(curResult["name"], parse=False):
                 continue

             # get the show object, or if it's not one of our shows then ignore it

From 6ce43c77824004039c4cae30b255233f5ec06472 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 17:51:44 -0700
Subject: [PATCH 04/13] Added exception catching for timeouts to notifiers
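
In Python 2, socket.timeout is a subclass of socket.error, so catching
socket.error alongside SSLError and HTTPException covers connection
timeouts and resets. The shape of the guarded call, roughly (simplified
from the notifier code):

    import socket
    from httplib import HTTPSConnection, HTTPException
    from ssl import SSLError

    try:
        http_handler.request("POST", uri, body=data)
    except (SSLError, HTTPException, socket.error):
        # covers SSL failures, protocol errors, and timeouts/resets
        return False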
---
 sickbeard/notifiers/prowl.py      | 3 ++-
 sickbeard/notifiers/pushalot.py   | 3 ++-
 sickbeard/notifiers/pushbullet.py | 6 +++---
 3 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/sickbeard/notifiers/prowl.py b/sickbeard/notifiers/prowl.py
index 9ef85c54..54180e4e 100644
--- a/sickbeard/notifiers/prowl.py
+++ b/sickbeard/notifiers/prowl.py
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.

+import socket
 from httplib import HTTPSConnection, HTTPException
 from urllib import urlencode

@@ -87,7 +88,7 @@ class ProwlNotifier:
                                  "/publicapi/add",
                                  headers={'Content-type': "application/x-www-form-urlencoded"},
                                  body=urlencode(data))
-        except (SSLError, HTTPException):
+        except (SSLError, HTTPException, socket.error):
             logger.log(u"Prowl notification failed.", logger.ERROR)
             return False
         response = http_handler.getresponse()
diff --git a/sickbeard/notifiers/pushalot.py b/sickbeard/notifiers/pushalot.py
index 40515564..32ed2b6e 100644
--- a/sickbeard/notifiers/pushalot.py
+++ b/sickbeard/notifiers/pushalot.py
@@ -17,6 +17,7 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.

+import socket
 from httplib import HTTPSConnection, HTTPException
 from urllib import urlencode
 from ssl import SSLError
@@ -77,7 +78,7 @@ class PushalotNotifier:
                              "/api/sendmessage",
                              headers={'Content-type': "application/x-www-form-urlencoded"},
                              body=urlencode(data))
-        except (SSLError, HTTPException):
+        except (SSLError, HTTPException, socket.error):
             logger.log(u"Pushalot notification failed.", logger.ERROR)
             return False
         response = http_handler.getresponse()
diff --git a/sickbeard/notifiers/pushbullet.py b/sickbeard/notifiers/pushbullet.py
index 274d9715..fedf3fa2 100644
--- a/sickbeard/notifiers/pushbullet.py
+++ b/sickbeard/notifiers/pushbullet.py
@@ -17,7 +17,7 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.

-
+import socket
 import base64
 from httplib import HTTPSConnection, HTTPException
 import json
@@ -86,7 +86,7 @@ class PushbulletNotifier:
         try:
             logger.log(u"Testing Pushbullet authentication and retrieving the device list.", logger.DEBUG)
             http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString})
-        except (SSLError, HTTPException):
+        except (SSLError, HTTPException, socket.error):
             logger.log(u"Pushbullet notification failed.", logger.ERROR)
             return False
         else:
@@ -101,7 +101,7 @@ class PushbulletNotifier:
             http_handler.request(method, uri, body=data,
                                  headers={'Content-Type': 'application/json', 'Authorization': 'Basic %s' % authString})
             pass
-        except (SSLError, HTTPException):
+        except (SSLError, HTTPException, socket.error):
             return False
         response = http_handler.getresponse()

From 59675f27ac7584746b3138acb3de28314a8602fd Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 21:36:23 -0700
Subject: [PATCH 05/13] Fixed failed download handling. Improved search queue
 code.
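
Segments are now simpler: a backlog queue item carries one season's
worth of episode objects, while manual and failed queue items carry a
single episode object. Roughly (illustrative sketch, not the exact
code):

    segments = {}  # season number -> list of episode objects
    for epObj in wantedEps:
        segments.setdefault(epObj.season, []).append(epObj)

    # one backlog queue item per season segment
    for season, segment in segments.items():
        sickbeard.searchQueueScheduler.action.add_item(
            search_queue.BacklogQueueItem(show, segment))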
---
 sickbeard/failedProcessor.py        |  8 +--
 sickbeard/providers/ezrss.py        |  4 +-
 sickbeard/providers/generic.py      |  2 +-
 sickbeard/providers/nyaatorrents.py |  4 +-
 sickbeard/search.py                 | 26 +++------
 sickbeard/searchBacklog.py          |  9 +--
 sickbeard/search_queue.py           | 88 +++++++++++++++--------------
 sickbeard/traktChecker.py           | 29 ++++------
 sickbeard/tvcache.py                |  3 -
 sickbeard/webapi.py                 | 14 +++--
 sickbeard/webserve.py               | 37 ++++++------
 11 files changed, 104 insertions(+), 120 deletions(-)

diff --git a/sickbeard/failedProcessor.py b/sickbeard/failedProcessor.py
index cf967a05..586c5aed 100644
--- a/sickbeard/failedProcessor.py
+++ b/sickbeard/failedProcessor.py
@@ -64,13 +64,11 @@ class FailedProcessor(object):
             logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
             logger.log(u" - " + str(parsed.air_date), logger.DEBUG)

-        segment = {parsed.season_number: []}
         for episode in parsed.episode_numbers:
-            epObj = parsed.show.getEpisode(parsed.season_number, episode)
-            segment[parsed.season_number].append(epObj)
+            segment = parsed.show.getEpisode(parsed.season_number, episode)

-        cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
-        sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
+            cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
+            sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

         return True

diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index 6249c7ec..832cdd90 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -60,7 +60,7 @@ class EZRSSProvider(generic.TorrentProvider):

         return quality

-    def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
+    def findSearchResults(self, show, episodes, search_mode, manualSearch=False):

         self.show = show

@@ -71,7 +71,7 @@ class EZRSSProvider(generic.TorrentProvider):
                        logger.WARNING)
             return results

-        results = generic.TorrentProvider.findSearchResults(self, show, season, episodes, search_mode, manualSearch)
+        results = generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)

         return results

diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 56582cce..df4629b4 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -248,7 +248,7 @@ class GenericProvider:

         return title, url

-    def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
+    def findSearchResults(self, show, episodes, search_mode, manualSearch=False):

         self._checkAuth()
         self.show = show
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index d7f9e3b7..48b973f2 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -55,8 +55,8 @@ class NyaaProvider(generic.TorrentProvider):
         quality = Quality.sceneQuality(title)
         return quality

-    def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
-        return generic.TorrentProvider.findSearchResults(self, show, season, episodes, search_mode, manualSearch)
+    def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
+        return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)

     def _get_season_search_strings(self, ep_obj):
         return show_name_helpers.makeSceneShowSearchStrings(self.show)
diff --git a/sickbeard/search.py b/sickbeard/search.py
index 4b70ee9c..86b665aa 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -390,22 +390,12 @@ def searchForNeededEpisodes():
     return foundResults.values()


-def searchProviders(show, season, episodes, manualSearch=False):
+def searchProviders(show, episodes, manualSearch=False):
     foundResults = {}
     finalResults = []

     didSearch = False

-    # build name cache for show
-    sickbeard.name_cache.buildNameCache(show)
-
-    # check if we want to search for season packs instead of just season/episode
-    seasonSearch = False
-    if not manualSearch:
-        seasonEps = show.getAllEpisodes(season)
-        if len(seasonEps) == len(episodes):
-            seasonSearch = True
-
     origThreadName = threading.currentThread().name

     providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog]
@@ -417,23 +407,21 @@
         threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

         foundResults[curProvider.name] = {}
-        searchCount = 0

-        search_mode = 'eponly'
-        if seasonSearch and curProvider.search_mode == 'sponly':
-            search_mode = curProvider.search_mode
+        searchCount = 0
+        search_mode = curProvider.search_mode

         while(True):
             searchCount += 1

-            if search_mode == 'sponly':
-                logger.log(u"Searching for " + show.name + " Season " + str(season) + " pack")
+            if search_mode == 'eponly':
+                logger.log(u"Performing episode search for " + show.name)
             else:
-                logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season))
+                logger.log(u"Performing season pack search for " + show.name)

             try:
                 curProvider.cache.updateCache()
-                searchResults = curProvider.findSearchResults(show, season, episodes, search_mode, manualSearch)
+                searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
             except exceptions.AuthException, e:
                 logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                 break
diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index 1abe44c7..410ca9d3 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -99,12 +99,13 @@ class BacklogSearcher:

             segments = self._get_segments(curShow, fromDate)

-            if len(segments):
-                backlog_queue_item = search_queue.BacklogQueueItem(curShow, segments)
+            for season, segment in segments.items():
+                self.currentSearchInfo = {'title': self.show.name + " Season " + str(season)}
+
+                backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment)
                 sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)  # @UndefinedVariable
             else:
-                logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping this season",
-                           logger.DEBUG)
+                logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping", logger.DEBUG)

         # don't consider this an actual backlog search if we only did recent eps
         # or if we only did certain shows
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 5616fc15..59a430ae 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -49,6 +49,12 @@ class SearchQueue(generic_queue.GenericQueue):
                 return True
         return False

+    def is_ep_in_queue(self, ep_obj):
+        for cur_item in self.queue:
+            if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.ep_obj == ep_obj:
+                return True
+        return False
+
     def pause_backlog(self):
         self.min_priority = generic_queue.QueuePriorities.HIGH

@@ -72,13 +78,21 @@ class SearchQueue(generic_queue.GenericQueue):
         return False

     def add_item(self, item):
-        if isinstance(item, DailySearchQueueItem) or (
-                isinstance(item, (BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) and not self.is_in_queue(
-                item.show, item.segment)):
+        if isinstance(item, DailySearchQueueItem):
+            # daily searches
+            generic_queue.GenericQueue.add_item(self, item)
+        elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
+            # backlog searches
+            generic_queue.GenericQueue.add_item(self, item)
+        elif isinstance(item, (ManualSearchQueueItem, FailedQueueItem)) and not self.is_ep_in_queue(item.segment):
+            # manual and failed searches
             generic_queue.GenericQueue.add_item(self, item)
         else:
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
+            return

+        # build name cache for show
+        sickbeard.name_cache.buildNameCache(item.show)

 class DailySearchQueueItem(generic_queue.QueueItem):
     def __init__(self):
@@ -123,7 +137,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
         try:
             logger.log("Beginning manual search for [" + self.segment.prettyName() + "]")

-            searchResult = search.searchProviders(self.show, self.segment.season, [self.segment], True)
+            searchResult = search.searchProviders(self.show, [self.segment], True)

             if searchResult:
                 # just use the first result for now
@@ -161,25 +175,19 @@ class BacklogQueueItem(generic_queue.QueueItem):
         generic_queue.QueueItem.run(self)

         try:
-            for season in self.segment:
-                sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {
-                    'title': self.show.name + " Season " + str(season)}
+            logger.log("Beginning backlog search for [" + self.show.name + "]")
+            searchResult = search.searchProviders(self.show, self.segment, False)

-                wantedEps = self.segment[season]
+            if searchResult:
+                for result in searchResult:
+                    # just use the first result for now
+                    logger.log(u"Downloading " + result.name + " from " + result.provider.name)
+                    search.snatchEpisode(result)

-                logger.log("Beginning backlog search for [" + self.show.name + "]")
-                searchResult = search.searchProviders(self.show, season, wantedEps, False)
-
-                if searchResult:
-                    for result in searchResult:
-                        # just use the first result for now
-                        logger.log(u"Downloading " + result.name + " from " + result.provider.name)
-                        search.snatchEpisode(result)
-
-                        # give the CPU a break
-                        time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
-                else:
-                    logger.log(u"No needed episodes found during backlog search for [" + self.show.name + "]")
+                    # give the CPU a break
+                    time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
+            else:
+                logger.log(u"No needed episodes found during backlog search for [" + self.show.name + "]")

         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
@@ -199,31 +207,29 @@ class FailedQueueItem(generic_queue.QueueItem):
         generic_queue.QueueItem.run(self)

         try:
-            for season, episodes in self.segment.items():
-                for epObj in episodes:
-                    logger.log(u"Marking episode as bad: [" + epObj.prettyName() + "]")
-                    failed_history.markFailed(epObj)
+            logger.log(u"Marking episode as bad: [" + self.segment.prettyName() + "]")
+            failed_history.markFailed(self.segment)

-                    (release, provider) = failed_history.findRelease(epObj)
-                    if release:
-                        failed_history.logFailed(release)
-                        history.logFailed(epObj, release, provider)
+            (release, provider) = failed_history.findRelease(self.segment)
+            if release:
+                failed_history.logFailed(release)
+                history.logFailed(self.segment, release, provider)

-                    failed_history.revertEpisode(epObj)
-                    logger.log("Beginning failed download search for [" + epObj.prettyName() + "]")
+            failed_history.revertEpisode(self.segment)
+            logger.log("Beginning failed download search for [" + self.segment.prettyName() + "]")

-                    searchResult = search.searchProviders(self.show, season, [epObj], True)
+            searchResult = search.searchProviders(self.show, [self.segment], True)

-                    if searchResult:
-                        for result in searchResult:
-                            # just use the first result for now
-                            logger.log(u"Downloading " + result.name + " from " + result.provider.name)
-                            search.snatchEpisode(result)
+            if searchResult:
+                for result in searchResult:
+                    # just use the first result for now
+                    logger.log(u"Downloading " + result.name + " from " + result.provider.name)
+                    search.snatchEpisode(result)

-                            # give the CPU a break
-                            time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
-                    else:
-                        logger.log(u"No valid episode found to retry for [" + epObj.prettyName() + "]")
+                    # give the CPU a break
+                    time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
+            else:
+                logger.log(u"No valid episode found to retry for [" + self.segment.prettyName() + "]")

         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py
index a940fc8d..ab9916ff 100644
--- a/sickbeard/traktChecker.py
+++ b/sickbeard/traktChecker.py
@@ -31,7 +31,6 @@ from lib.trakt import *
 class TraktChecker():
     def __init__(self):
         self.todoWanted = []
-        self.todoBacklog = []

     def run(self, force=False):
         try:
@@ -207,7 +206,7 @@ class TraktChecker():
         epObj = show.getEpisode(int(s), int(e))
         if epObj:

-            ep_segment = {}
+            segments = {}

             with epObj.lock:
                 if epObj.status != SKIPPED:
@@ -217,35 +216,27 @@ class TraktChecker():
                 # figure out what segment the episode is in and remember it so we can backlog it
                 if epObj.season in ep_segment:
-                    ep_segment[epObj.season].append(epObj)
+                    segments[epObj.season].append(epObj)
                 else:
-                    ep_segment[epObj.season] = [epObj]
+                    segments[epObj.season] = [epObj]

                 epObj.status = WANTED
                 epObj.saveToDB()

-            backlog = (show, ep_segment)
-            if self.todoBacklog.count(backlog) == 0:
-                self.todoBacklog.append(backlog)
+            for season, segment in segments.items():
+                cur_backlog_queue_item = search_queue.BacklogQueueItem(show, segment[1])
+                sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)

+                logger.log(u"Starting backlog for " + show.name + " season " + str(
+                    season) + " because some eps were set to wanted")

     def manageNewShow(self, show):
         episodes = [i for i in self.todoWanted if i[0] == show.indexerid]
         for episode in episodes:
             self.todoWanted.remove(episode)
+
             if episode[1] == -1 and sickbeard.TRAKT_START_PAUSED:
                 show.paused = 1
                 continue

-            self.setEpisodeToWanted(show, episode[1], episode[2])
-            self.startBacklog(show)

-    def startBacklog(self, show):
-        segments = [i for i in self.todoBacklog if i[0] == show]
-        for segment in segments:
-            cur_backlog_queue_item = search_queue.BacklogQueueItem(show, segment[1])
-            sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)
-
-            for season in segment[1]:
-                logger.log(u"Starting backlog for " + show.name + " season " + str(
-                    season) + " because some eps were set to wanted")
-            self.todoBacklog.remove(segment)
+            self.setEpisodeToWanted(show, episode[1], episode[2])
\ No newline at end of file
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 6326cca7..f257a10d 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -341,9 +341,6 @@ class TVCache():
                            Quality.qualityStrings[curQuality], logger.DEBUG)
                 continue

-            # build name cache for show
-            sickbeard.name_cache.buildNameCache(showObj)
-
             if episode:
                 epObj = episode
             else:
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index f6260440..18e9b486 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -969,7 +969,7 @@ class CMD_EpisodeSetStatus(ApiCall):
         ep_results = []
         failure = False
         start_backlog = False
-        ep_segment = {}
+        segments = {}

         sql_l = []
         for epObj in ep_list:
@@ -977,9 +977,9 @@ class CMD_EpisodeSetStatus(ApiCall):
             if self.status == WANTED:
                 # figure out what episodes are wanted so we can backlog them
                 if epObj.season in ep_segment:
-                    ep_segment[epObj.season].append(epObj)
+                    segments[epObj.season].append(epObj)
                 else:
-                    ep_segment[epObj.season] = [epObj]
+                    segments[epObj.season] = [epObj]

             # don't let them mess up UNAIRED episodes
             if epObj.status == UNAIRED:
@@ -1009,11 +1009,13 @@ class CMD_EpisodeSetStatus(ApiCall):

         extra_msg = ""
         if start_backlog:
-            cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, ep_segment)
-            sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)  #@UndefinedVariable
-            for season in ep_segment:
+            for season, segment in segments.items():
+                cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, segment)
+                sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)  #@UndefinedVariable
+
                 logger.log(u"API :: Starting backlog for " + showObj.name + " season " + str(
                     season) + " because some episodes were set to WANTED")
+
             extra_msg = " Backlog started"

         if failure:
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index c9f264d1..8222457d 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -4121,7 +4121,7 @@ class Home(MainHandler):
             else:
                 return self._genericMessage("Error", errMsg)

-        segment = {}
+        segments = {}
         if eps is not None:

             sql_l = []
@@ -4138,10 +4138,10 @@ class Home(MainHandler):

                 if int(status) in [WANTED, FAILED]:
                     # figure out what episodes are wanted so we can backlog them
-                    if epObj.season in segment:
-                        segment[epObj.season].append(epObj)
+                    if epObj.season in segments:
+                        segments[epObj.season].append(epObj)
                     else:
-                        segment[epObj.season] = [epObj]
+                        segments[epObj.season] = [epObj]

                 with epObj.lock:
                     # don't let them mess up UNAIRED episodes
@@ -4175,30 +4175,34 @@ class Home(MainHandler):

         if int(status) == WANTED:
             msg = "Backlog was automatically started for the following seasons of " + showObj.name + ":<br /><ul>"
-            for season in segment:
+
+            for season, segment in segments.items():
+                cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, segment)
+                sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)  # @UndefinedVariable
+
                 msg += "<li>Season " + str(season) + "</li>"
                 logger.log(u"Sending backlog for " + showObj.name + " season " + str(
                     season) + " because some eps were set to wanted")
+
             msg += "</ul>"
-                cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, segment)
-                sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)  # @UndefinedVariable

-            if segment:
+            if segments:
                 ui.notifications.message("Backlog started", msg)

         if int(status) == FAILED:
             msg = "Retrying Search was automatically started for the following season of " + showObj.name + ":<br /><ul>"
-            for season in segment:
+
+            for season, segment in segments.items():
+                cur_failed_queue_item = search_queue.FailedQueueItem(showObj, segment)
+                sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)  # @UndefinedVariable
+
                 msg += "<li>Season " + str(season) + "</li>"
                 logger.log(u"Retrying Search for " + showObj.name + " season " + str(
                     season) + " because some eps were set to failed")
+
             msg += "</ul>"
-            cur_failed_queue_item = search_queue.FailedQueueItem(showObj, segment)
-            sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)  # @UndefinedVariable

-            if segment:
+            if segments:
                 ui.notifications.message("Retry Search started", msg)

         if direct:
@@ -4440,11 +4444,8 @@ class Home(MainHandler):
         if isinstance(ep_obj, str):
             return json.dumps({'result': 'failure'})

-        # create failed segment
-        segment = {season: [ep_obj]}
-
         # make a queue item for it and put it on the queue
-        ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, segment)
+        ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, ep_obj)
         sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)  # @UndefinedVariable

         # wait until the queue item tells us whether it worked or not

From 3efa9709afcb301c37b9b1e69b2e7d83961ef544 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 22:01:24 -0700
Subject: [PATCH 06/13] Fixed DailySearchQueueItem show attribute error during
 searches

---
 sickbeard/dailysearcher.py | 10 +++++++++-
 sickbeard/search_queue.py  | 10 ++++++----
 2 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py
index 221b6b8d..f64766d9 100644
--- a/sickbeard/dailysearcher.py
+++ b/sickbeard/dailysearcher.py
@@ -49,9 +49,17 @@ class DailySearcher():
                                  [common.UNAIRED, curDate])

         sql_l = []
+        show = None
+
         for sqlEp in sqlResults:
+
             try:
-                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
+                if not show or (show and int(sqlEp["showid"]) != show.indexerid):
+                    show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
+
+                    # build name cache for show
+                    sickbeard.name_cache.buildNameCache(show)
+
             except exceptions.MultipleShowObjectsException:
                 logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
                 continue
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 59a430ae..f021bdb7 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -82,17 +82,19 @@ class SearchQueue(generic_queue.GenericQueue):
             # daily searches
             generic_queue.GenericQueue.add_item(self, item)
         elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
+            # build name cache for show
+            sickbeard.name_cache.buildNameCache(item.show)
+
             # backlog searches
             generic_queue.GenericQueue.add_item(self, item)
         elif isinstance(item, (ManualSearchQueueItem, FailedQueueItem)) and not self.is_ep_in_queue(item.segment):
+            # build name cache for show
+            sickbeard.name_cache.buildNameCache(item.show)
+
             # manual and failed searches
             generic_queue.GenericQueue.add_item(self, item)
         else:
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
-            return
-
-        # build name cache for show
-        sickbeard.name_cache.buildNameCache(item.show)

 class DailySearchQueueItem(generic_queue.QueueItem):
     def __init__(self):

From 2fbae22f2f9ad9f020eff6de7cbe71038ee128ab Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 22:06:11 -0700
Subject: [PATCH 07/13] Fixed BacklogSearchQueueItem show attribute error
 during searches
---
 sickbeard/searchBacklog.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index 410ca9d3..5b5893bc 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -100,7 +100,7 @@ class BacklogSearcher:
             segments = self._get_segments(curShow, fromDate)

             for season, segment in segments.items():
-                self.currentSearchInfo = {'title': self.show.name + " Season " + str(season)}
+                self.currentSearchInfo = {'title': curShow.name + " Season " + str(season)}

                 backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment)
                 sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)  # @UndefinedVariable

From 20c0b4ea720ab2c3ff3e721020a95bad297183f8 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sat, 6 Sep 2014 22:25:01 -0700
Subject: [PATCH 08/13] Fixed global name season error during searches

---
 sickbeard/providers/generic.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index df4629b4..8b9dae32 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -37,6 +37,7 @@ from sickbeard import clients
 from hachoir_parser import createParser


+
 class GenericProvider:
     NZB = "nzb"
     TORRENT = "torrent"
@@ -63,7 +64,7 @@ class GenericProvider:
         self.session = requests.session()

         self.headers = {
-            #Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
+            # Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
             #otherwise session might be broken and download fail, asking again for authentication
             #'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
             'User-Agent': USER_AGENT}
@@ -335,16 +336,15 @@ class GenericProvider:
                                logger.DEBUG)
                     addCacheEntry = True
                 else:
-                    if not len(parse_result.episode_numbers) and (
-                            parse_result.season_number and parse_result.season_number != season) or (
-                            not parse_result.season_number and season != 1):
+                    if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in
+                                                                                                     episodes if
+                                                                                                     ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                         logger.log(
                             u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                             logger.DEBUG)
                         addCacheEntry = True
-                    elif len(parse_result.episode_numbers) and (
-                            parse_result.season_number != season or not [ep for ep in episodes if
-                                                                         ep.scene_episode in parse_result.episode_numbers]):
+                    elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
+                                                                    ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                         logger.log(
                             u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                             logger.DEBUG)
@@ -352,7 +352,7 @@ class GenericProvider:

                 if not addCacheEntry:
                     # we just use the existing info for normal searches
-                    actual_season = season
+                    actual_season = parse_result.season_number
                     actual_episodes = parse_result.episode_numbers
             else:
                 if not (parse_result.is_air_by_date):

From ab16430b1ad9affb77cfe6badcfcf8a2e28c4030 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 7 Sep 2014 00:44:48 -0700
Subject: [PATCH 09/13] Improved newznab offset code
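
The paging loop now tracks the total and offset reported by the indexer
directly. The intended shape, as a sketch (fetch_page is a hypothetical
stand-in for the RSS call):

    offset = total = 0
    while total >= offset:
        page = fetch_page(offset=offset)       # hypothetical helper
        results.extend(page.items)
        if total == 0:
            total = int(page.total or 0)       # reported by newznab once
        offset = int(page.offset or 0)
        params['offset'] += params['limit']    # request the next page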
---
 sickbeard/providers/newznab.py | 41 ++++++++++------------------------
 1 file changed, 12 insertions(+), 29 deletions(-)

diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 92e4e1c6..24a662b1 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -264,7 +264,10 @@ class NewznabProvider(generic.NZBProvider):

         results = []

-        while True:
+        # get and set total items available
+        offset = total = 0
+
+        while total >= offset:
             search_url = self.url + 'api?' + urllib.urlencode(params)
             logger.log(u"Search url: " + search_url, logger.DEBUG)
             data = self.cache.getRSSFeed(search_url)
@@ -281,37 +284,17 @@ class NewznabProvider(generic.NZBProvider):
                         u"The data returned from the " + self.name + " is incomplete, this result is unusable",
                         logger.DEBUG)

-            # attempt to grab the total and offset newznab responses
-            try:
-                total = int(data.feed.newznab_response['total'])
-                offset = int(data.feed.newznab_response['offset'])
-            except (AttributeError, TypeError):
-                break
-
-            # sanity check - limiting at 10 at getting 1000 results in-case incorrect total parameter is reported
-            if params['limit'] > 1000:
-                logger.log("Excessive results for search, ending search", logger.WARNING)
-                break
-
-            # sanity check - total should remain constant
-            if offset != 0 and total != initial_total:
-                logger.log("Total number of items on newznab response changed, ending search", logger.DEBUG)
-                break
-            else:
-                initial_total = total
+            # get total and offset attribs
+            if total == 0:
+                total = int(data.feed.newznab_response['total'] or 0)
+            offset = int(data.feed.newznab_response['offset'] or 0)

-            # if there are more items available then the amount given in one call, grab some more
-            if (total - params['limit']) > offset == params['offset']:
-                params['offset'] += params['limit']
-                logger.log(str(
-                    total - params['offset']) + " more items to be fetched from provider. Fetching another " + str(
-                    params['limit']) + " items.", logger.DEBUG)
-            else:
-                break
-
-        else:
-            break
+            # if there are more items available than the amount given in one call, grab some more
+            params['offset'] += params['limit']

+            logger.log(str(
+                total - offset) + " more items to be fetched from provider. Fetching another " + str(
+                params['limit']) + " items.", logger.DEBUG)

         return results

From ba4b408af315e126bd9bd3403563878d6cd1c7dc Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 7 Sep 2014 00:48:09 -0700
Subject: [PATCH 10/13] Improved newznab offset code
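
Same logic as before, restructured around an early break so the loop
body is not nested inside the auth check. The guard-clause shape, as a
sketch:

    while total >= offset:
        data = self.cache.getRSSFeed(search_url)
        if not data or not self._checkAuthFromData(data):
            break  # bail out instead of indenting everything below
        # ... process data.entries and bump the offset ...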
---
 sickbeard/providers/newznab.py | 42 +++++++++++++++++-----------------
 1 file changed, 21 insertions(+), 21 deletions(-)

diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 24a662b1..f6802008 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -263,38 +263,38 @@ class NewznabProvider(generic.NZBProvider):
             params['apikey'] = self.key

         results = []
-
-        # get and set total items available
         offset = total = 0
+
         while total >= offset:
             search_url = self.url + 'api?' + urllib.urlencode(params)
             logger.log(u"Search url: " + search_url, logger.DEBUG)
             data = self.cache.getRSSFeed(search_url)

-            if data and 'entries' in data and self._checkAuthFromData(data):
-                for item in data.entries:
+            if not data or not self._checkAuthFromData(data):
+                break

-                    (title, url) = self._get_title_and_url(item)
+            for item in data.entries:

-                    if title and url:
-                        results.append(item)
-                    else:
-                        logger.log(
-                            u"The data returned from the " + self.name + " is incomplete, this result is unusable",
-                            logger.DEBUG)
+                (title, url) = self._get_title_and_url(item)

-                # get total and offset attribs
-                if total == 0:
-                    total = int(data.feed.newznab_response['total'] or 0)
-                offset = int(data.feed.newznab_response['offset'] or 0)
+                if title and url:
+                    results.append(item)
+                else:
+                    logger.log(
+                        u"The data returned from the " + self.name + " is incomplete, this result is unusable",
+                        logger.DEBUG)

-                # if there are more items available then the amount given in one call, grab some more
-                params['offset'] += params['limit']
+            # get total and offset attribs
+            if total == 0:
+                total = int(data.feed.newznab_response['total'] or 0)
+            offset = int(data.feed.newznab_response['offset'] or 0)

-                logger.log(str(
-                    total - offset) + " more items to be fetched from provider. Fetching another " + str(
-                    params['limit']) + " items.", logger.DEBUG)
+            # if there are more items available than the amount given in one call, grab some more
+            params['offset'] += params['limit']
+
+            logger.log(str(
+                total - offset) + " more items to be fetched from provider. Fetching another " + str(
+                params['limit']) + " items.", logger.DEBUG)

         return results

From 991a93991e515630e75d82b723aa711e98f1bd43 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 7 Sep 2014 00:56:23 -0700
Subject: [PATCH 11/13] Fixed newznab to search no more than 1000 results
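
The (offset or 1000) guard is meant to cap paging at 1000 items when an
indexer keeps reporting more. A more explicit way to express that
intent, as a sketch (not the committed expression):

    while offset < min(total, 1000):
        # ... fetch and process the next page ...
        offset += params['limit']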
---
 sickbeard/providers/newznab.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index f6802008..ba7bada4 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -265,7 +265,7 @@ class NewznabProvider(generic.NZBProvider):
         results = []
         offset = total = 0

-        while total >= offset:
+        while total >= (offset or 1000):
             search_url = self.url + 'api?' + urllib.urlencode(params)
             logger.log(u"Search url: " + search_url, logger.DEBUG)
             data = self.cache.getRSSFeed(search_url)

From 2193a4bfd1e5ad9826f4c6ddffae0fcf2f9df46d Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 7 Sep 2014 02:08:24 -0700
Subject: [PATCH 12/13] Fixed search issue where the result variable was
 referenced before assignment

---
 sickbeard/providers/generic.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 8b9dae32..25fbdcf2 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -263,9 +263,9 @@ class GenericProvider:
             cacheResult = self.cache.searchCache(epObj, manualSearch)
             if cacheResult:
                 if epObj.episode not in results:
-                    results[epObj.episode] = [result]
+                    results[epObj.episode] = [cacheResult]
                 else:
-                    results[epObj.episode].append(result)
+                    results[epObj.episode].append(cacheResult)

                 # found result, search next episode
                 continue

From 8f6d0148307d1ba1b0bfcda43c55156725a28566 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 7 Sep 2014 02:41:21 -0700
Subject: [PATCH 13/13] Fixed issues with cache results being used during
 searches

---
 sickbeard/providers/generic.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 25fbdcf2..2b23d711 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -263,9 +263,9 @@ class GenericProvider:
             cacheResult = self.cache.searchCache(epObj, manualSearch)
             if cacheResult:
                 if epObj.episode not in results:
-                    results[epObj.episode] = [cacheResult]
+                    results[epObj.episode] = cacheResult
                 else:
-                    results[epObj.episode].append(cacheResult)
+                    results[epObj.episode].extend(cacheResult)

                 # found result, search next episode
                 continue