Mirror of https://github.com/SickGear/SickGear.git (synced 2024-12-12 14:13:38 +00:00)
Fix split_result to return a list in the case of an internal exception
Check if result of split_result is a list to catch potential unhandled internal exceptions
This commit is contained in:
parent ca0bfd2ab1
commit bab7cb6b6d

3 changed files with 25 additions and 21 deletions
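For reference, here is a minimal self-contained sketch of the pattern this commit applies; the class and function bodies below are simplified stand-ins, not the real SickGear implementations. The splitter returns an empty list instead of False when something goes wrong, and the call site additionally checks isinstance(..., list) so a non-list value escaping from an unhandled internal exception cannot break the iteration.

# Minimal sketch of the pattern in this commit; `split_result` and `SearchResult`
# here are simplified stand-ins, not the real sickgear classes or logic.
from typing import List


class SearchResult(object):
    def __init__(self, name):
        self.name = name


def split_result(result):
    # type: (SearchResult) -> List[SearchResult]
    """Split a season result into per-episode results; return [] on failure."""
    if not result.name:
        # the old code returned False here, so every caller had to special-case it
        return []
    # pretend the season pack splits into two episode results
    return [SearchResult(result.name + '.E01'), SearchResult(result.name + '.E02')]


# Caller side (cf. the search_providers hunk below): only iterate when a list
# really came back, so a non-list value from an internal error cannot crash the loop.
individual_results = split_result(SearchResult('Show.S01'))
if isinstance(individual_results, list):
    for cur_result in individual_results:
        print(cur_result.name)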
@@ -1,4 +1,9 @@
-### 3.32.11 (2024-11-26 20:15:00 UTC)
+### 3.32.12 (2024-11-29 19:20:00 UTC)
 
+* Fix split_result to return a list in the case of an internal exception
+
+
+### 3.32.11 (2024-11-26 20:15:00 UTC)
+
 * Fix nzbSplitter needs to use binary data for xml parser
 
@@ -167,17 +167,15 @@ def _strip_ns(element, ns):
 
 
 def split_result(result):
     # type: (sickgear.classes.SearchResult) -> List[sickgear.classes.SearchResult]
     """
 
     :param result: search result
     :type result: sickgear.classes.SearchResult
     :return: list of search results
     :rtype: List[sickgear.classes.SearchResult]
     """
     resp = helpers.get_url(result.url, failure_monitor=False, as_binary=True)
     if None is resp:
         logger.error(f'Unable to load url {result.url}, can\'t download season NZB')
-        return False
+        return []
 
     # parse the season ep name
     try:
@@ -185,10 +183,10 @@ def split_result(result):
         parse_result = np.parse(result.name)
     except InvalidNameException:
         logger.debug(f'Unable to parse the filename {result.name} into a valid episode')
-        return False
+        return []
     except InvalidShowException:
         logger.debug(f'Unable to parse the filename {result.name} into a valid show')
-        return False
+        return []
 
     # bust it up
     season = parse_result.season_number if None is not parse_result.season_number else 1
@@ -207,10 +205,10 @@ def split_result(result):
             parse_result = np.parse(new_nzb)
         except InvalidNameException:
             logger.debug(f'Unable to parse the filename {new_nzb} into a valid episode')
-            return False
+            return []
         except InvalidShowException:
             logger.debug(f'Unable to parse the filename {new_nzb} into a valid show')
-            return False
+            return []
 
         # make sure the result is sane
         if (None is not parse_result.season_number and season != parse_result.season_number) \
@@ -972,19 +972,20 @@ def search_providers(
     # if not, break it apart and add them as the lowest priority results
     individual_results = nzbSplitter.split_result(best_season_result)
 
-    for cur_result in filter(
-            lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
-                r.name, parse=False, indexer_lookup=False, show_obj=r.show_obj), individual_results):
-        ep_num = None
-        if 1 == len(cur_result.ep_obj_list):
-            ep_num = cur_result.ep_obj_list[0].episode
-        elif 1 < len(cur_result.ep_obj_list):
-            ep_num = MULTI_EP_RESULT
+    if isinstance(individual_results, list):
+        for cur_result in filter(
+                lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
+                    r.name, parse=False, indexer_lookup=False, show_obj=r.show_obj), individual_results):
+            ep_num = None
+            if 1 == len(cur_result.ep_obj_list):
+                ep_num = cur_result.ep_obj_list[0].episode
+            elif 1 < len(cur_result.ep_obj_list):
+                ep_num = MULTI_EP_RESULT
 
-        if ep_num in found_results[provider_id]:
-            found_results[provider_id][ep_num].append(cur_result)
-        else:
-            found_results[provider_id][ep_num] = [cur_result]
+            if ep_num in found_results[provider_id]:
+                found_results[provider_id][ep_num].append(cur_result)
+            else:
+                found_results[provider_id][ep_num] = [cur_result]
 
     # If this is a torrent all we can do is leech the entire torrent,
     # user will have to select which eps not do download in his torrent client