Merge branch 'dev'

echel0n 2014-09-07 02:49:31 -07:00
commit 7215ed8987
18 changed files with 167 additions and 204 deletions

View file

@@ -49,9 +49,17 @@ class DailySearcher():
[common.UNAIRED, curDate])
sql_l = []
show = None
for sqlEp in sqlResults:
try:
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
if not show or (show and int(sqlEp["showid"]) != show.indexerid):
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
# build name cache for show
sickbeard.name_cache.buildNameCache(show)
except exceptions.MultipleShowObjectsException:
logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
continue
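
The DailySearcher hunk hoists the show lookup out of the per-row work: the old code called findCertainShow for every result row, while the new code reuses the previous show object and only re-queries when the row's showid differs, rebuilding the name cache as each show is seen. A minimal sketch of the pattern (variable shapes assumed from the hunk):

    show = None
    for sqlEp in sqlResults:
        showid = int(sqlEp["showid"])
        # only hit the show list again when this row belongs to a different show
        if not show or showid != show.indexerid:
            show = helpers.findCertainShow(sickbeard.showList, showid)
        # refresh the scene-name cache once the show is known
        sickbeard.name_cache.buildNameCache(show)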

View file

@@ -64,13 +64,11 @@ class FailedProcessor(object):
logger.log(u" - " + str(parsed.release_group), logger.DEBUG)
logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
segment = {parsed.season_number: []}
for episode in parsed.episode_numbers:
epObj = parsed.show.getEpisode(parsed.season_number, episode)
segment[parsed.season_number].append(epObj)
segment = parsed.show.getEpisode(parsed.season_number, episode)
cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
return True
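
The FailedProcessor change flattens the segment shape: instead of one season-keyed dict of episode objects feeding a single FailedQueueItem, each parsed episode becomes its own queue item whose segment is the episode object itself. Roughly (shapes inferred from the hunk):

    # old shape: segment = {parsed.season_number: [epObj, ...]}, queued once
    # new shape: one FailedQueueItem per episode, segment == the episode
    for episode in parsed.episode_numbers:
        segment = parsed.show.getEpisode(parsed.season_number, episode)
        sickbeard.searchQueueScheduler.action.add_item(
            search_queue.FailedQueueItem(parsed.show, segment))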

View file

@@ -16,6 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import socket
from httplib import HTTPSConnection, HTTPException
from urllib import urlencode
@@ -87,7 +88,7 @@ class ProwlNotifier:
"/publicapi/add",
headers={'Content-type': "application/x-www-form-urlencoded"},
body=urlencode(data))
except (SSLError, HTTPException):
except (SSLError, HTTPException, socket.error):
logger.log(u"Prowl notification failed.", logger.ERROR)
return False
response = http_handler.getresponse()
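
This hunk, and the matching Pushalot and Pushbullet hunks below, widens the except clause so plain network failures are caught as well: in Python 2, socket.error covers connection refusals, resets, and timeouts that neither SSLError nor HTTPException would. The pattern as a standalone sketch (handler setup elided):

    import socket
    from ssl import SSLError
    from httplib import HTTPException

    try:
        http_handler.request("POST", "/publicapi/add",
                             headers={'Content-type': "application/x-www-form-urlencoded"},
                             body=urlencode(data))
    except (SSLError, HTTPException, socket.error):
        # socket.error is the base of low-level network errors in Python 2
        logger.log(u"Prowl notification failed.", logger.ERROR)
        return False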

View file

@@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import socket
from httplib import HTTPSConnection, HTTPException
from urllib import urlencode
from ssl import SSLError
@@ -77,7 +78,7 @@ class PushalotNotifier:
"/api/sendmessage",
headers={'Content-type': "application/x-www-form-urlencoded"},
body=urlencode(data))
except (SSLError, HTTPException):
except (SSLError, HTTPException, socket.error):
logger.log(u"Pushalot notification failed.", logger.ERROR)
return False
response = http_handler.getresponse()

View file

@@ -17,7 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import socket
import base64
from httplib import HTTPSConnection, HTTPException
import json
@@ -86,7 +86,7 @@ class PushbulletNotifier:
try:
logger.log(u"Testing Pushbullet authentication and retrieving the device list.", logger.DEBUG)
http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString})
except (SSLError, HTTPException):
except (SSLError, HTTPException, socket.error):
logger.log(u"Pushbullet notification failed.", logger.ERROR)
return False
else:
@@ -101,7 +101,7 @@ class PushbulletNotifier:
http_handler.request(method, uri, body=data,
headers={'Content-Type': 'application/json', 'Authorization': 'Basic %s' % authString})
pass
except (SSLError, HTTPException):
except (SSLError, HTTPException, socket.error):
return False
response = http_handler.getresponse()

View file

@@ -149,7 +149,7 @@ class ProperFinder():
logger.DEBUG)
continue
if not show_name_helpers.filterBadReleases(curProper.name):
if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, ignoring it",
logger.DEBUG)
continue

View file

@@ -60,7 +60,7 @@ class EZRSSProvider(generic.TorrentProvider):
return quality
def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
self.show = show
@@ -71,7 +71,7 @@ class EZRSSProvider(generic.TorrentProvider):
logger.WARNING)
return results
results = generic.TorrentProvider.findSearchResults(self, show, season, episodes, search_mode, manualSearch)
results = generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)
return results
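
The provider-facing signature drops the season argument; each episode object already carries its season, so callers now pass only the episode list. EZRSS here, and GenericProvider and Nyaa below, change in lockstep:

    # old: findSearchResults(self, show, season, episodes, search_mode, manualSearch=False)
    # new: findSearchResults(self, show, episodes, search_mode, manualSearch=False)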

View file

@@ -37,6 +37,7 @@ from sickbeard import clients
from hachoir_parser import createParser
class GenericProvider:
NZB = "nzb"
TORRENT = "torrent"
@@ -63,7 +64,7 @@ class GenericProvider:
self.session = requests.session()
self.headers = {
#Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
# Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
#otherwise session might be broken and download fail, asking again for authentication
#'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
'User-Agent': USER_AGENT}
@@ -248,7 +249,7 @@ class GenericProvider:
return title, url
def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
self._checkAuth()
self.show = show
@@ -261,10 +262,10 @@ class GenericProvider:
# search cache for episode result
cacheResult = self.cache.searchCache(epObj, manualSearch)
if cacheResult:
if epObj not in results:
results = [cacheResult]
if epObj.episode not in results:
results[epObj.episode] = cacheResult
else:
results.append(cacheResult)
results[epObj.episode].extend(cacheResult)
# found result, search next episode
continue
@@ -335,16 +336,15 @@ class GenericProvider:
logger.DEBUG)
addCacheEntry = True
else:
if not len(parse_result.episode_numbers) and (
parse_result.season_number and parse_result.season_number != season) or (
not parse_result.season_number and season != 1):
if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in
episodes if
ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
logger.log(
u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
logger.DEBUG)
addCacheEntry = True
elif len(parse_result.episode_numbers) and (
parse_result.season_number != season or not [ep for ep in episodes if
ep.scene_episode in parse_result.episode_numbers]):
elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
logger.log(
u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
logger.DEBUG)
@@ -352,7 +352,7 @@ class GenericProvider:
if not addCacheEntry:
# we just use the existing info for normal searches
actual_season = season
actual_season = parse_result.season_number
actual_episodes = parse_result.episode_numbers
else:
if not (parse_result.is_air_by_date):
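
Two behavioural changes land in GenericProvider: cached results accumulate into a dict keyed by episode number rather than a flat list, and a parsed release is validated against the wanted episode list itself (matching season and episode) instead of against the removed season argument; actual_season is likewise taken from the parse result. A condensed sketch, assuming searchCache returns a list of result objects:

    results = {}
    for epObj in episodes:
        cacheResult = self.cache.searchCache(epObj, manualSearch)
        if cacheResult:
            # equivalent to the hunk's explicit if/else on the key
            results.setdefault(epObj.episode, []).extend(cacheResult)
            continue  # cache hit, move on to the next episode

    # season-agnostic validation of a parsed release name
    wanted = [ep for ep in episodes
              if ep.season == parse_result.season_number
              and ep.episode in parse_result.episode_numbers]
    if parse_result.episode_numbers and not wanted:
        addCacheEntry = True  # not an episode we are searching for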

View file

@@ -263,56 +263,39 @@ class NewznabProvider(generic.NZBProvider):
params['apikey'] = self.key
results = []
offset = total = 0
while True:
while total >= (offset or 1000):
search_url = self.url + 'api?' + urllib.urlencode(params)
logger.log(u"Search url: " + search_url, logger.DEBUG)
data = self.cache.getRSSFeed(search_url)
if data and 'entries' in data and self._checkAuthFromData(data):
for item in data.entries:
(title, url) = self._get_title_and_url(item)
if title and url:
results.append(item)
else:
logger.log(
u"The data returned from the " + self.name + " is incomplete, this result is unusable",
logger.DEBUG)
# attempt to grab the total and offset newznab responses
try:
total = int(data.feed.newznab_response['total'])
offset = int(data.feed.newznab_response['offset'])
except (AttributeError, TypeError):
break
# sanity check - limiting at 10 at getting 1000 results in-case incorrect total parameter is reported
if params['limit'] > 1000:
logger.log("Excessive results for search, ending search", logger.WARNING)
break
# sanity check - total should remain constant
if offset != 0 and total != initial_total:
logger.log("Total number of items on newznab response changed, ending search", logger.DEBUG)
break
else:
initial_total = total
# if there are more items available then the amount given in one call, grab some more
if (total - params['limit']) > offset == params['offset']:
params['offset'] += params['limit']
logger.log(str(
total - params['offset']) + " more items to be fetched from provider. Fetching another " + str(
params['limit']) + " items.", logger.DEBUG)
else:
break
else:
if not data or not self._checkAuthFromData(data):
break
for item in data.entries:
(title, url) = self._get_title_and_url(item)
if title and url:
results.append(item)
else:
logger.log(
u"The data returned from the " + self.name + " is incomplete, this result is unusable",
logger.DEBUG)
# get total and offset attribs
if total == 0:
total = int(data.feed.newznab_response['total'] or 0)
offset = int(data.feed.newznab_response['offset'] or 0)
# if there are more items available then the amount given in one call, grab some more
params['offset'] += params['limit']
logger.log(str(
total - offset) + " more items to be fetched from provider. Fetching another " + str(
params['limit']) + " items.", logger.DEBUG)
return results
def findPropers(self, search_date=None):
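
The newznab paging rewrite replaces the unconditional while True loop and its stacked sanity checks (result cap, changing totals) with a loop whose condition does the bookkeeping: keep fetching while the reported total is at least the current offset, bumping params['offset'] by params['limit'] each pass. A sketch of the intended shape, assuming the feed exposes total/offset per the newznab response spec:

    offset = total = 0
    params['offset'] = 0
    while total >= offset:
        data = self.cache.getRSSFeed(self.url + 'api?' + urllib.urlencode(params))
        if not data or not self._checkAuthFromData(data):
            break
        for item in data.entries:
            (title, url) = self._get_title_and_url(item)
            if title and url:
                results.append(item)
        if total == 0:  # the first page tells us how many items exist in all
            total = int(data.feed.newznab_response['total'] or 0)
        offset = int(data.feed.newznab_response['offset'] or 0)
        params['offset'] += params['limit']  # request the next page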

View file

@@ -55,8 +55,8 @@ class NyaaProvider(generic.TorrentProvider):
quality = Quality.sceneQuality(title)
return quality
def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
return generic.TorrentProvider.findSearchResults(self, show, season, episodes, search_mode, manualSearch)
def findSearchResults(self, show, episodes, search_mode, manualSearch=False):
return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)
def _get_season_search_strings(self, ep_obj):
return show_name_helpers.makeSceneShowSearchStrings(self.show)

View file

@@ -319,23 +319,6 @@ def isFirstBestMatch(result):
return False
def filterSearchResults(show, season, results):
foundResults = {}
# make a list of all the results for this provider
for curEp in results:
# skip non-tv crap
results[curEp] = filter(
lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show,results[curEp])
if curEp in foundResults:
foundResults[curEp] += results[curEp]
else:
foundResults[curEp] = results[curEp]
return foundResults
def searchForNeededEpisodes():
foundResults = {}
@@ -407,22 +390,12 @@ def searchForNeededEpisodes():
return foundResults.values()
def searchProviders(show, season, episodes, manualSearch=False):
def searchProviders(show, episodes, manualSearch=False):
foundResults = {}
finalResults = []
didSearch = False
# build name cache for show
sickbeard.name_cache.buildNameCache(show)
# check if we want to search for season packs instead of just season/episode
seasonSearch = False
if not manualSearch:
seasonEps = show.getAllEpisodes(season)
if len(seasonEps) == len(episodes):
seasonSearch = True
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog]
@@ -434,23 +407,21 @@ def searchProviders(show, season, episodes, manualSearch=False):
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
foundResults[curProvider.name] = {}
searchCount = 0
search_mode = 'eponly'
if seasonSearch and curProvider.search_mode == 'sponly':
search_mode = curProvider.search_mode
searchCount = 0
search_mode = curProvider.search_mode
while(True):
searchCount += 1
if search_mode == 'sponly':
logger.log(u"Searching for " + show.name + " Season " + str(season) + " pack")
if search_mode == 'eponly':
logger.log(u"Performing episode search for " + show.name)
else:
logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season))
logger.log(u"Performing season pack search for " + show.name)
try:
curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, season, episodes, search_mode, manualSearch)
searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
break
@@ -468,7 +439,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
for curEp in searchResults:
# skip non-tv crap
searchResults[curEp] = filter(
lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show, searchResults[curEp])
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, searchResults[curEp])
if curEp in foundResults:
foundResults[curProvider.name][curEp] += searchResults[curEp]
@@ -554,7 +525,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
individualResults = nzbSplitter.splitResult(bestSeasonResult)
individualResults = filter(
lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show, individualResults)
lambda x: show_name_helpers.filterBadReleases(x.name, parse=False) and x.show == show, individualResults)
for curResult in individualResults:
if len(curResult.episodes) == 1:
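
searchProviders loses its season parameter: the name-cache build moves into the search queue (see search_queue below), the caller-side season-pack heuristic is gone, and each provider supplies its own search_mode ('eponly' for per-episode searches, 'sponly' for season packs). The bad-release filters also pass parse=False because these names were already parsed upstream. The per-provider control flow, condensed:

    for curProvider in providers:
        search_mode = curProvider.search_mode  # provider decides pack vs episode
        if search_mode == 'eponly':
            logger.log(u"Performing episode search for " + show.name)
        else:
            logger.log(u"Performing season pack search for " + show.name)
        searchResults = curProvider.findSearchResults(show, episodes,
                                                      search_mode, manualSearch)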

View file

@@ -99,12 +99,13 @@ class BacklogSearcher:
segments = self._get_segments(curShow, fromDate)
if len(segments):
backlog_queue_item = search_queue.BacklogQueueItem(curShow, segments)
for season, segment in segments.items():
self.currentSearchInfo = {'title': curShow.name + " Season " + str(season)}
backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment)
sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) # @UndefinedVariable
else:
logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping this season",
logger.DEBUG)
logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping",logger.DEBUG)
# don't consider this an actual backlog search if we only did recent eps
# or if we only did certain shows
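
BacklogSearcher now queues a single BacklogQueueItem per show, handing it the whole segments mapping; the per-season fan-out happens inside the queue item instead. The segment shape this relies on, inferred from the _get_segments call sites:

    # segments maps each season to its wanted episodes, e.g.
    segments = {1: [ep_s01e03, ep_s01e07], 4: [ep_s04e01]}  # illustrative
    backlog_queue_item = search_queue.BacklogQueueItem(curShow, segments)
    sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)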

View file

@@ -49,6 +49,12 @@ class SearchQueue(generic_queue.GenericQueue):
return True
return False
def is_ep_in_queue(self, ep_obj):
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.ep_obj == ep_obj:
return True
return False
def pause_backlog(self):
self.min_priority = generic_queue.QueuePriorities.HIGH
@@ -72,14 +78,24 @@ class SearchQueue(generic_queue.GenericQueue):
return False
def add_item(self, item):
if isinstance(item, DailySearchQueueItem) or (
isinstance(item, (BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) and not self.is_in_queue(
item.show, item.segment)):
if isinstance(item, DailySearchQueueItem):
# daily searches
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
# build name cache for show
sickbeard.name_cache.buildNameCache(item.show)
# backlog searches
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, (ManualSearchQueueItem, FailedQueueItem)) and not self.is_ep_in_queue(item.segment):
# build name cache for show
sickbeard.name_cache.buildNameCache(item.show)
# manual and failed searches
generic_queue.GenericQueue.add_item(self, item)
else:
logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
class DailySearchQueueItem(generic_queue.QueueItem):
def __init__(self):
generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
@@ -123,7 +139,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
try:
logger.log("Beginning manual search for [" + self.segment.prettyName() + "]")
searchResult = search.searchProviders(self.show, self.segment.season, [self.segment], True)
searchResult = search.searchProviders(self.show, [self.segment], True)
if searchResult:
# just use the first result for now
@@ -161,25 +177,19 @@ class BacklogQueueItem(generic_queue.QueueItem):
generic_queue.QueueItem.run(self)
try:
for season in self.segment:
sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {
'title': self.show.name + " Season " + str(season)}
logger.log("Beginning backlog search for [" + self.show.name + "]")
searchResult = search.searchProviders(self.show, self.segment, False)
wantedEps = self.segment[season]
if searchResult:
for result in searchResult:
# just use the first result for now
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
search.snatchEpisode(result)
logger.log("Beginning backlog search for [" + self.show.name + "]")
searchResult = search.searchProviders(self.show, season, wantedEps, False)
if searchResult:
for result in searchResult:
# just use the first result for now
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
search.snatchEpisode(result)
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
logger.log(u"No needed episodes found during backlog search for [" + self.show.name + "]")
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
logger.log(u"No needed episodes found during backlog search for [" + self.show.name + "]")
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
@@ -199,31 +209,29 @@ class FailedQueueItem(generic_queue.QueueItem):
generic_queue.QueueItem.run(self)
try:
for season, episodes in self.segment.items():
for epObj in episodes:
logger.log(u"Marking episode as bad: [" + epObj.prettyName() + "]")
failed_history.markFailed(epObj)
logger.log(u"Marking episode as bad: [" + self.segment.prettyName() + "]")
failed_history.markFailed(self.segment)
(release, provider) = failed_history.findRelease(epObj)
if release:
failed_history.logFailed(release)
history.logFailed(epObj, release, provider)
(release, provider) = failed_history.findRelease(self.segment)
if release:
failed_history.logFailed(release)
history.logFailed(self.segment, release, provider)
failed_history.revertEpisode(epObj)
logger.log("Beginning failed download search for [" + epObj.prettyName() + "]")
failed_history.revertEpisode(self.segment)
logger.log("Beginning failed download search for [" + self.segment.prettyName() + "]")
searchResult = search.searchProviders(self.show, season, [epObj], True)
searchResult = search.searchProviders(self.show, [self.segment], True)
if searchResult:
for result in searchResult:
# just use the first result for now
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
search.snatchEpisode(result)
if searchResult:
for result in searchResult:
# just use the first result for now
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
search.snatchEpisode(result)
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
logger.log(u"No valid episode found to retry for [" + epObj.prettyName() + "]")
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
logger.log(u"No valid episode found to retry for [" + self.segment.prettyName() + "]")
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
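
add_item now dispatches on item type: daily items always enter, backlog items are deduplicated per show/segment via is_in_queue, and manual/failed items per episode via the new is_ep_in_queue; the name cache is built at enqueue time rather than inside searchProviders. Usage sketch of the per-episode dedupe (episode lookup illustrative):

    ep_obj = show.getEpisode(3, 7)
    # enqueueing the same episode twice is a no-op: is_ep_in_queue guards add_item
    sickbeard.searchQueueScheduler.action.add_item(
        search_queue.FailedQueueItem(show, ep_obj))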

View file

@@ -36,7 +36,7 @@ resultFilters = ["sub(bed|ed|pack|s)", "(dk|fin|heb|kor|nor|nordic|pl|swe)sub(be
"(dir|sample|sub|nfo)fix", "sample", "(dvd)?extras",
"dub(bed)?"]
def filterBadReleases(name):
def filterBadReleases(name, parse=True):
"""
Filters out non-english and just all-around stupid releases by comparing them
to the resultFilters contents.
@@ -47,7 +47,8 @@ def filterBadReleases(name):
"""
try:
NameParser().parse(name)
if parse:
NameParser().parse(name)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
return False
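
The new parse flag lets callers that already hold a cache-vetted name skip the NameParser round-trip while still applying the resultFilters word checks; ProperFinder, TVCache, and the search filters above all pass parse=False for exactly that reason. Call-site sketch:

    ok_unknown = show_name_helpers.filterBadReleases(name)               # parses too
    ok_vetted = show_name_helpers.filterBadReleases(name, parse=False)   # word filters only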

View file

@@ -31,7 +31,6 @@ from lib.trakt import *
class TraktChecker():
def __init__(self):
self.todoWanted = []
self.todoBacklog = []
def run(self, force=False):
try:
@@ -207,7 +206,7 @@ class TraktChecker():
epObj = show.getEpisode(int(s), int(e))
if epObj:
ep_segment = {}
segments = {}
with epObj.lock:
if epObj.status != SKIPPED:
@@ -217,35 +216,27 @@ class TraktChecker():
# figure out what segment the episode is in and remember it so we can backlog it
if epObj.season in ep_segment:
ep_segment[epObj.season].append(epObj)
segments[epObj.season].append(epObj)
else:
ep_segment[epObj.season] = [epObj]
segments[epObj.season] = [epObj]
epObj.status = WANTED
epObj.saveToDB()
backlog = (show, ep_segment)
if self.todoBacklog.count(backlog) == 0:
self.todoBacklog.append(backlog)
for season, segment in segments.items():
cur_backlog_queue_item = search_queue.BacklogQueueItem(show, segment[1])
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)
logger.log(u"Starting backlog for " + show.name + " season " + str(
season) + " because some eps were set to wanted")
def manageNewShow(self, show):
episodes = [i for i in self.todoWanted if i[0] == show.indexerid]
for episode in episodes:
self.todoWanted.remove(episode)
if episode[1] == -1 and sickbeard.TRAKT_START_PAUSED:
show.paused = 1
continue
self.setEpisodeToWanted(show, episode[1], episode[2])
self.startBacklog(show)
def startBacklog(self, show):
segments = [i for i in self.todoBacklog if i[0] == show]
for segment in segments:
cur_backlog_queue_item = search_queue.BacklogQueueItem(show, segment[1])
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item)
for season in segment[1]:
logger.log(u"Starting backlog for " + show.name + " season " + str(
season) + " because some eps were set to wanted")
self.todoBacklog.remove(segment)
self.setEpisodeToWanted(show, episode[1], episode[2])
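
TraktChecker drops the deferred todoBacklog list and its startBacklog pass: as soon as episodes flip to WANTED, a BacklogQueueItem per season is enqueued directly. Condensed flow (the hunk's segment[1] indexing looks like a leftover from the old (show, segment) tuple shape; this sketch assumes the season's episode list is passed whole):

    for season, segment in segments.items():
        sickbeard.searchQueueScheduler.action.add_item(
            search_queue.BacklogQueueItem(show, segment))
        logger.log(u"Starting backlog for " + show.name + " season " +
                   str(season) + " because some eps were set to wanted")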

View file

@@ -309,7 +309,7 @@ class TVCache():
for curResult in sqlResults:
# skip non-tv crap
if not show_name_helpers.filterBadReleases(curResult["name"]):
if not show_name_helpers.filterBadReleases(curResult["name"], parse=False):
continue
# get the show object, or if it's not one of our shows then ignore it
@@ -341,9 +341,6 @@ class TVCache():
Quality.qualityStrings[curQuality], logger.DEBUG)
continue
# build name cache for show
sickbeard.name_cache.buildNameCache(showObj)
if episode:
epObj = episode
else:

View file

@@ -969,7 +969,7 @@ class CMD_EpisodeSetStatus(ApiCall):
ep_results = []
failure = False
start_backlog = False
ep_segment = {}
segments = {}
sql_l = []
for epObj in ep_list:
@@ -977,9 +977,9 @@ class CMD_EpisodeSetStatus(ApiCall):
if self.status == WANTED:
# figure out what episodes are wanted so we can backlog them
if epObj.season in ep_segment:
ep_segment[epObj.season].append(epObj)
segments[epObj.season].append(epObj)
else:
ep_segment[epObj.season] = [epObj]
segments[epObj.season] = [epObj]
# don't let them mess up UNAIRED episodes
if epObj.status == UNAIRED:
@@ -1009,11 +1009,13 @@ class CMD_EpisodeSetStatus(ApiCall):
extra_msg = ""
if start_backlog:
cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, ep_segment)
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) #@UndefinedVariable
for season in ep_segment:
for season, segment in segments.items():
cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) #@UndefinedVariable
logger.log(u"API :: Starting backlog for " + showObj.name + " season " + str(
season) + " because some episodes were set to WANTED")
extra_msg = " Backlog started"
if failure:

View file

@@ -4121,7 +4121,7 @@ class Home(MainHandler):
else:
return self._genericMessage("Error", errMsg)
segment = {}
segments = {}
if eps is not None:
sql_l = []
@@ -4138,10 +4138,10 @@ class Home(MainHandler):
if int(status) in [WANTED, FAILED]:
# figure out what episodes are wanted so we can backlog them
if epObj.season in segment:
segment[epObj.season].append(epObj)
if epObj.season in segments:
segments[epObj.season].append(epObj)
else:
segment[epObj.season] = [epObj]
segments[epObj.season] = [epObj]
with epObj.lock:
# don't let them mess up UNAIRED episodes
@@ -4175,30 +4175,34 @@ class Home(MainHandler):
if int(status) == WANTED:
msg = "Backlog was automatically started for the following seasons of <b>" + showObj.name + "</b>:<br />"
for season in segment:
for season, segment in segments.items():
cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) # @UndefinedVariable
msg += "<li>Season " + str(season) + "</li>"
logger.log(u"Sending backlog for " + showObj.name + " season " + str(
season) + " because some eps were set to wanted")
msg += "</ul>"
cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) # @UndefinedVariable
if segment:
if segments:
ui.notifications.message("Backlog started", msg)
if int(status) == FAILED:
msg = "Retrying Search was automatically started for the following season of <b>" + showObj.name + "</b>:<br />"
for season in segment:
for season, segment in segments.items():
cur_failed_queue_item = search_queue.FailedQueueItem(showObj, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item) # @UndefinedVariable
msg += "<li>Season " + str(season) + "</li>"
logger.log(u"Retrying Search for " + showObj.name + " season " + str(
season) + " because some eps were set to failed")
msg += "</ul>"
cur_failed_queue_item = search_queue.FailedQueueItem(showObj, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item) # @UndefinedVariable
if segment:
if segments:
ui.notifications.message("Retry Search started", msg)
if direct:
@@ -4440,11 +4444,8 @@ class Home(MainHandler):
if isinstance(ep_obj, str):
return json.dumps({'result': 'failure'})
# create failed segment
segment = {season: [ep_obj]}
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, segment)
ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, ep_obj)
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
# wait until the queue item tells us whether it worked or not
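
The web handler's retry path shows the end state of the segment rework: the {season: [ep_obj]} wrapper is gone and FailedQueueItem takes the episode object directly, while the WANTED/FAILED mass-update paths above fan out one queue item per season from the segments dict. End to end, a retry now reduces to (lookup illustrative):

    ep_obj = show.getEpisode(season, episode)
    ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, ep_obj)
    sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)
    # the queue dedupes per episode and reports back when the item runs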