Merge branch 'main' into dev
Some checks failed
Python Unit Tests / windows (windows-latest, 3.10) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.11) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.12) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.8) (push) Has been cancelled
Python Unit Tests / windows (windows-latest, 3.9) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.10) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.11) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.12) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.8) (push) Has been cancelled
Python Unit Tests / linux (ubuntu-latest, 3.9) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.10) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.11) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.12) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.8) (push) Has been cancelled
Python Unit Tests / macos (macos-latest, 3.9) (push) Has been cancelled

This commit is contained in:
JackDandy 2024-11-29 19:25:08 +00:00
commit 5532c70f59
3 changed files with 24 additions and 20 deletions

View file

@@ -14,6 +14,11 @@
* Fix credits in Kodi episode nfo writer
### 3.32.12 (2024-11-29 19:20:00 UTC)
* Fix split_result to return a list in the case of an internal exception
### 3.32.11 (2024-11-26 20:15:00 UTC)
* Fix nzbSplitter needs to use binary data for xml parser

View file

@@ -167,17 +167,15 @@ def _strip_ns(element, ns):
def split_result(result):
# type: (sickgear.classes.SearchResult) -> List[sickgear.classes.SearchResult]
"""
:param result: search result
:type result: sickgear.classes.SearchResult
:return: list of search results
:rtype: List[sickgear.classes.SearchResult]
"""
resp = helpers.get_url(result.url, failure_monitor=False, as_binary=True)
if None is resp:
logger.error(f'Unable to load url {result.url}, can\'t download season NZB')
return False
return []
# parse the season ep name
try:
@@ -185,10 +183,10 @@ def split_result(result):
parse_result = np.parse(result.name)
except InvalidNameException:
logger.debug(f'Unable to parse the filename {result.name} into a valid episode')
return False
return []
except InvalidShowException:
logger.debug(f'Unable to parse the filename {result.name} into a valid show')
return False
return []
# bust it up
season = parse_result.season_number if None is not parse_result.season_number else 1
@@ -207,10 +205,10 @@ def split_result(result):
parse_result = np.parse(new_nzb)
except InvalidNameException:
logger.debug(f'Unable to parse the filename {new_nzb} into a valid episode')
return False
return []
except InvalidShowException:
logger.debug(f'Unable to parse the filename {new_nzb} into a valid show')
return False
return []
# make sure the result is sane
if (None is not parse_result.season_number and season != parse_result.season_number) \

View file

@@ -972,19 +972,20 @@ def search_providers(
# if not, break it apart and add them as the lowest priority results
individual_results = nzbSplitter.split_result(best_season_result)
for cur_result in filter(
lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
r.name, parse=False, indexer_lookup=False, show_obj=r.show_obj), individual_results):
ep_num = None
if 1 == len(cur_result.ep_obj_list):
ep_num = cur_result.ep_obj_list[0].episode
elif 1 < len(cur_result.ep_obj_list):
ep_num = MULTI_EP_RESULT
if isinstance(individual_results, list):
for cur_result in filter(
lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
r.name, parse=False, indexer_lookup=False, show_obj=r.show_obj), individual_results):
ep_num = None
if 1 == len(cur_result.ep_obj_list):
ep_num = cur_result.ep_obj_list[0].episode
elif 1 < len(cur_result.ep_obj_list):
ep_num = MULTI_EP_RESULT
if ep_num in found_results[provider_id]:
found_results[provider_id][ep_num].append(cur_result)
else:
found_results[provider_id][ep_num] = [cur_result]
if ep_num in found_results[provider_id]:
found_results[provider_id][ep_num].append(cur_result)
else:
found_results[provider_id][ep_num] = [cur_result]
# If this is a torrent all we can do is leech the entire torrent,
# user will have to select which eps not do download in his torrent client