Merge pull request #963 from JackDandy/feature/ChangePPImprove

Change improve ABD (air-by-date) releases post processing.
Authored by JackDandy on 2017-08-20 03:50:29 +01:00; committed by GitHub
commit 3312ddc734
9 changed files with 139 additions and 62 deletions

View file

@@ -242,6 +242,9 @@ class Quality:
return Quality.FULLHDBLURAY
elif checkName(['2160p', 'web.?(dl|rip|.[hx]26[45])'], all):
return Quality.UHD4KWEB
# p2p
elif checkName(['720HD'], all) and not checkName(['(1080|2160)[pi]'], all):
return Quality.HDTV
else:
return Quality.UNKNOWN
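
A hedged sketch of the new p2p branch (check_name below is an approximation of the helper the method defines; the release name is illustrative): releases tagged 720HD with no explicit 1080/2160 marker are classed as HDTV.

import re

def check_name(name, patterns, combine=all):
    # approximation: every (or any) pattern must be found in the release name
    return combine(bool(re.search(p, name, re.I)) for p in patterns)

name = 'Show.Name.14.Jan.17.720HD.x264-GRP'  # illustrative p2p-style name
if check_name(name, ['720HD']) and not check_name(name, [r'(1080|2160)[pi]']):
    quality = 'HDTV'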

View file

@@ -125,6 +125,13 @@ class NameParser(object):
result.series_name = match.group('series_name')
if result.series_name:
result.series_name = self.clean_series_name(result.series_name)
name_parts = re.match('(?i)(.*)[ -]((?:part|pt)[ -]?\w+)$', result.series_name)
try:
result.series_name = name_parts.group(1)
result.extra_info = name_parts.group(2)
except (AttributeError, IndexError):
pass
result.score += 1
if 'series_num' in named_groups and match.group('series_num'):
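
A quick illustration of the new series-name split (input shown as it looks after clean_series_name; the try/except in the diff guards the no-match case the same way the `if` does here):

import re

series_name = 'Show Name Part Two'  # illustrative, already cleaned
name_parts = re.match(r'(?i)(.*)[ -]((?:part|pt)[ -]?\w+)$', series_name)
if name_parts:
    series_name, extra_info = name_parts.group(1), name_parts.group(2)
# series_name == 'Show Name', extra_info == 'Part Two'
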
@@ -151,7 +158,7 @@ class NameParser(object):
ep = tmp_show.getEpisode(parse_result.season_number, ep_num)
else:
ep = None
except:
except (StandardError, Exception):
ep = None
en = ep and ep.name and re.match(r'^\W*(\d+)', ep.name) or None
es = en and en.group(1) or None
@@ -174,7 +181,13 @@ class NameParser(object):
if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
year = int(match.group('air_year'))
month = int(match.group('air_month'))
try:
month = int(match.group('air_month'))
except ValueError:
try:
month = time.strptime(match.group('air_month')[0:3], '%b').tm_mon
except ValueError as e:
raise InvalidNameException(ex(e))
day = int(match.group('air_day'))
# make an attempt to detect YYYY-DD-MM formats
if 12 < month:
@@ -182,7 +195,8 @@ class NameParser(object):
month = day
day = tmp_month
try:
result.air_date = datetime.date(year, month, day)
result.air_date = datetime.date(
year + ((1900, 2000)[0 < year < 28], 0)[1900 < year], month, day)
except ValueError as e:
raise InvalidNameException(ex(e))
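
Sketching the two date changes together: a month name now falls back to time.strptime, and a two-digit year is pivoted to a full year (1-27 maps to the 2000s, 28-99 to the 1900s, four-digit years pass through). A minimal, hedged reconstruction:

import datetime
import time

def parse_air_date(day, month, year):
    try:
        month = int(month)
    except ValueError:
        # strptime month names are locale-dependent; English assumed
        month = time.strptime(month[0:3], '%b').tm_mon
    year = int(year)
    # pivot two-digit years (four-digit years are assumed > 1900)
    year += ((1900, 2000)[0 < year < 28], 0)[1900 < year]
    return datetime.date(year, month, int(day))

parse_air_date('14', 'Jan', '17')  # datetime.date(2017, 1, 14)
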
@@ -193,7 +207,10 @@ class NameParser(object):
if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
continue
result.extra_info = tmp_extra_info
if tmp_extra_info:
if result.extra_info:
tmp_extra_info = '%s %s' % (result.extra_info, tmp_extra_info)
result.extra_info = tmp_extra_info
result.score += 1
if 'release_group' in named_groups:
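
The effect of the block above, with illustrative values: extra info captured earlier (e.g. a part number split off the series name) is now prepended rather than overwritten.

result_extra_info, tmp_extra_info = 'Part Two', '720p HDTV x264'
if tmp_extra_info:
    if result_extra_info:
        tmp_extra_info = '%s %s' % (result_extra_info, tmp_extra_info)
    result_extra_info = tmp_extra_info
# result_extra_info == 'Part Two 720p HDTV x264'
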
@@ -251,18 +268,33 @@ class NameParser(object):
# if we have an air-by-date show then get the real season/episode numbers
if best_result.is_air_by_date:
season_number, episode_numbers = None, []
airdate = best_result.air_date.toordinal()
my_db = db.DBConnection()
sql_result = my_db.select(
'SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?',
[show.indexerid, show.indexer, airdate])
season_number = None
episode_numbers = []
'SELECT season, episode, name FROM tv_episodes ' +
'WHERE showid = ? and indexer = ? and airdate = ?', [show.indexerid, show.indexer, airdate])
if sql_result:
season_number = int(sql_result[0][0])
episode_numbers = [int(sql_result[0][1])]
season_number = int(sql_result[0]['season'])
episode_numbers = [int(sql_result[0]['episode'])]
if 1 < len(sql_result):
# multi-eps broadcast on this day
nums = {'1': 'one', '2': 'two', '3': 'three', '4': 'four', '5': 'five',
'6': 'six', '7': 'seven', '8': 'eight', '9': 'nine', '10': 'ten'}
patt = '(?i)(?:e(?:p(?:isode)?)?|part|pt)[. _-]?(%s)'
try:
src_num = str(re.findall(patt % '\w+', best_result.extra_info)[0])
alt_num = nums.get(src_num) or list(nums.keys())[list(nums.values()).index(src_num)]
re_partnum = re.compile(patt % ('%s|%s' % (src_num, alt_num)))
for ep_details in sql_result:
if re_partnum.search(ep_details['name']):
season_number = int(ep_details['season'])
episode_numbers = [int(ep_details['episode'])]
break
except (StandardError, Exception):
pass
if self.indexer_lookup and not season_number or not len(episode_numbers):
try:
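
A hedged walk-through of the multi-episode disambiguation above, with stand-in rows in place of the sqlite results: the part number found in the release's extra info is matched against same-day episode titles in both digit and word form.

import re

sql_result = [{'season': 5, 'episode': 3, 'name': 'Finale (Part One)'},
              {'season': 5, 'episode': 4, 'name': 'Finale (Part Two)'}]
nums = {'1': 'one', '2': 'two', '3': 'three', '4': 'four', '5': 'five',
        '6': 'six', '7': 'seven', '8': 'eight', '9': 'nine', '10': 'ten'}
patt = '(?i)(?:e(?:p(?:isode)?)?|part|pt)[. _-]?(%s)'

extra_info = 'Part 2'  # illustrative, parsed from the release name
src_num = str(re.findall(patt % r'\w+', extra_info)[0])  # '2'
# map digit -> word (or word -> digit) so either spelling matches
alt_num = nums.get(src_num) or list(nums.keys())[list(nums.values()).index(src_num)]
re_partnum = re.compile(patt % ('%s|%s' % (src_num, alt_num)))
picked = [ep for ep in sql_result if re_partnum.search(ep['name'])][0]
# picked -> season 5 episode 4, since 'Part Two' matches the word form of '2'
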
@@ -278,10 +310,12 @@ class NameParser(object):
season_number = int(ep_obj['seasonnumber'])
episode_numbers = [int(ep_obj['episodenumber'])]
except sickbeard.indexer_episodenotfound:
logger.log(u'Unable to find episode with date ' + str(best_result.air_date) + ' for show ' + show.name + ', skipping', logger.WARNING)
logger.log(u'Unable to find episode with date ' + str(best_result.air_date)
+ ' for show ' + show.name + ', skipping', logger.WARNING)
episode_numbers = []
except sickbeard.indexer_error as e:
logger.log(u'Unable to contact ' + sickbeard.indexerApi(show.indexer).name + ': ' + ex(e), logger.WARNING)
logger.log(u'Unable to contact ' + sickbeard.indexerApi(show.indexer).name
+ ': ' + ex(e), logger.WARNING)
episode_numbers = []
for epNo in episode_numbers:
@@ -411,7 +445,7 @@ class NameParser(object):
else:
number = 0
except:
except (StandardError, Exception):
# on error try converting from Roman numerals
roman_to_int_map = (('M', 1000), ('CM', 900), ('D', 500), ('CD', 400), ('C', 100),
('XC', 90), ('L', 50), ('XL', 40), ('X', 10),
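
For context, the fallback this widened except feeds into converts Roman numerals greedily with that map; a self-contained sketch (the hunk cuts the map off, so the IX/V/IV/I entries here are the standard values, assumed):

roman_to_int_map = (('M', 1000), ('CM', 900), ('D', 500), ('CD', 400), ('C', 100),
                    ('XC', 90), ('L', 50), ('XL', 40), ('X', 10),
                    ('IX', 9), ('V', 5), ('IV', 4), ('I', 1))

def roman_to_int(numeral):
    numeral, number = numeral.upper(), 0
    for roman, value in roman_to_int_map:
        while numeral.startswith(roman):
            number += value
            numeral = numeral[len(roman):]
    return number

roman_to_int('XIV')  # 14
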
@@ -496,7 +530,8 @@ class NameParser(object):
% name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))
# if there's no useful info in it then raise an exception
if None is final_result.season_number and not final_result.episode_numbers and None is final_result.air_date and not final_result.ab_episode_numbers and not final_result.series_name:
if None is final_result.season_number and not final_result.episode_numbers and None is final_result.air_date \
and not final_result.ab_episode_numbers and not final_result.series_name:
raise InvalidNameException('Unable to parse %s' % name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))
if cache_result:

View file

@@ -104,11 +104,13 @@ normal_regexes = [
('uk_date_format',
# Show.Name.23.11.2010.Source.Quality.Etc-Group
# Show Name - 23-11-2010 - Ep Name
# Show Name - 14-08-17 - Ep Name
# Show Name - 14 Jan 17 - Ep Name
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
(?P<air_day>\d{2})[. _-]+ # 23 and separator
(?P<air_month>\d{2})[. _-]+ # 11 and separator
(?P<air_year>(?:19|20)\d{2}) # 2010 and separator
\(?(?P<air_day>\d{2})[. _-]+ # 23 and separator
(?P<air_month>(?:\d{2}|(?:jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)\w*))[. _-]+ # 11 and separator
(?P<air_year>(?:19|20)?\d{2})\)? # 2010 and separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
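
The widened pattern can be sanity-checked in isolation (assuming, as elsewhere in the parser, compilation with re.VERBOSE | re.IGNORECASE; inline comments stripped for brevity):

import re

uk_date = re.compile(r'''
^((?P<series_name>.+?)[. _-]+)?
\(?(?P<air_day>\d{2})[. _-]+
(?P<air_month>(?:\d{2}|(?:jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)\w*))[. _-]+
(?P<air_year>(?:19|20)?\d{2})\)?
[. _-]*((?P<extra_info>.+?)((?<![. _-])(?<!WEB)-(?P<release_group>[^- ]+))?)?$
''', re.VERBOSE | re.IGNORECASE)

m = uk_date.match('Show Name - 14 Jan 17 - Ep Name')
m.group('series_name', 'air_day', 'air_month', 'air_year')
# ('Show Name', '14', 'Jan', '17')

The two-digit '17' is then pivoted to 2017 by the parser change shown earlier.
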
@@ -198,8 +200,9 @@ normal_regexes = [
# 01 - Ep Name
'''
^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator
(?P<ep_num>\d{1,2}) # 01
(?:-(?P<extra_ep_num>\d{1,2}))* # 02
(?P<ep_num>\d{1,3}(?!\d)) # 01
(?:-(?P<extra_ep_num>\d{1,3}(?!\d)))* # 02
(\s*(?:of)?\s*\d{1,3})? # of num eps
[. _-]+((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
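
And the loosened bare-number pattern now tolerates three-digit episode numbers and an 'NN of MM' suffix; a trimmed check (the release-group tail is omitted here for brevity):

import re

bare = re.compile(r'^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))?'
                  r'(?P<ep_num>\d{1,3}(?!\d))(?:-(?P<extra_ep_num>\d{1,3}(?!\d)))*'
                  r'(\s*(?:of)?\s*\d{1,3})?[. _-]+(?P<extra_info>.+?)$',
                  re.IGNORECASE)

bare.match('Show Name - 05 of 13 - Ep Name').group('ep_num')  # '05'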

View file

@@ -481,7 +481,7 @@ class PostProcessor(object):
parse_result = np.parse(name)
self._log(u'Parsed %s<br />.. from %s' % (str(parse_result).decode('utf-8', 'xmlcharrefreplace'), name), logger.DEBUG)
if parse_result.is_air_by_date:
if parse_result.is_air_by_date and (None is parse_result.season_number or not parse_result.episode_numbers):
season = -1
episodes = [parse_result.air_date]
else:
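
With this stricter gate, the date-based lookup is only the fallback; when the parser already resolved real season/episode numbers (e.g. via the new part-number logic), those win. Illustrative values:

is_air_by_date, season_number, episode_numbers = True, 5, [4]
if is_air_by_date and (None is season_number or not episode_numbers):
    season, episodes = -1, ['<air date>']              # date-based path
else:
    season, episodes = season_number, episode_numbers  # already resolved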

View file

@@ -163,13 +163,16 @@ class ProcessTVShow(object):
return result
def check_name(self, name):
@staticmethod
def check_name(name):
so = None
my_db = db.DBConnection()
sql_results = my_db.select('SELECT showid FROM history WHERE resource = ?'
'AND (' + ' OR '.join(
"action LIKE '%%%02d'" % x for x in (SNATCHED, SNATCHED_PROPER,
SNATCHED_BEST)) + ') ORDER BY rowid', [name])
sql_results = my_db.select(
'SELECT showid FROM history' +
' WHERE resource = ?' +
' AND (%s)' % ' OR '.join('action LIKE "%%%02d"' % x for x in (
SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)) +
' ORDER BY rowid', [name])
if sql_results:
try:
so = helpers.findCertainShow(sickbeard.showList, int(sql_results[-1]['showid']))
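
Besides the reformat, this fixes a join bug: the old adjacent-string concatenation produced '... resource = ?AND (...' with no space. The rebuilt statement, with assumed status codes (2/9/12 are the usual SickBeard-lineage values; treat them as placeholders):

SNATCHED, SNATCHED_PROPER, SNATCHED_BEST = 2, 9, 12  # assumed codes
clause = ' OR '.join('action LIKE "%%%02d"' % x
                     for x in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST))
sql = ('SELECT showid FROM history'
       ' WHERE resource = ?'
       ' AND (%s)'
       ' ORDER BY rowid') % clause
# sql: SELECT showid FROM history WHERE resource = ?
#      AND (action LIKE "%02" OR action LIKE "%09" OR action LIKE "%12")
#      ORDER BY rowid
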
@@ -180,7 +183,8 @@ class ProcessTVShow(object):
return so
def showObj_helper(self, showObj, base_dir, dir_name, nzb_name, pp_type, alt_showObj=None):
if None is showObj and base_dir == sickbeard.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type: # Scheduled Post Processing Active
if None is showObj and base_dir == sickbeard.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type:
# Scheduled Post Processing Active
return self.check_name(dir_name)
return (showObj, alt_showObj)[None is showObj and None is not alt_showObj]
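
For readers new to the codebase, the `(a, b)[condition]` form in that return is an index-based conditional, equivalent to a ternary:

showObj, alt_showObj = None, 'fallback show'   # illustrative values
picked = (showObj, alt_showObj)[None is showObj and None is not alt_showObj]
# picked == 'fallback show'; same as:
# alt_showObj if (showObj is None and alt_showObj is not None) else showObj
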
@@ -207,7 +211,8 @@ class ProcessTVShow(object):
return None
def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, pp_type='auto', cleanup=False, showObj=None):
def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None,
failed=False, pp_type='auto', cleanup=False, showObj=None):
"""
Scans through the files in dir_name and processes whatever media files it finds
@@ -225,7 +230,8 @@ class ProcessTVShow(object):
# if the client and SickGear are not on the same machine translate the directory in a network directory
elif dir_name and sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR)\
and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
dir_name = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
dir_name = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR,
ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickbeard.TV_DOWNLOAD_DIR)
if dir_name:
@@ -317,14 +323,17 @@ class ProcessTVShow(object):
video_batch = set(video_batch) - set(video_pick)
self._process_media(path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup, showObj=showObj)
self._process_media(path, video_pick, nzb_name, process_method, force, force_replace,
use_trash=cleanup, showObj=showObj)
except OSError as e:
logger.log('Batch skipped, %s%s' %
(ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)
# Process video files in TV subdirectories
for directory in [x for x in dirs if self._validate_dir(path, x, nzb_name_original, failed, showObj=self.showObj_helper(showObj, dir_name, x, nzb_name, pp_type))]:
for directory in [x for x in dirs if self._validate_dir(
path, x, nzb_name_original, failed,
showObj=self.showObj_helper(showObj, dir_name, x, nzb_name, pp_type))]:
# self._set_process_success(reset=True)
@@ -348,7 +357,9 @@ class ProcessTVShow(object):
# Don't Link media when the media is extracted from a rar in the same path
if process_method in ('hardlink', 'symlink') and video_in_rar:
self._process_media(walk_path, video_in_rar, nzb_name, 'move', force, force_replace, showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type, self.check_video_filenames(walk_dir, video_in_rar)))
self._process_media(walk_path, video_in_rar, nzb_name, 'move', force, force_replace,
showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type,
self.check_video_filenames(walk_dir, video_in_rar)))
video_batch = set(video_files) - set(video_in_rar)
else:
video_batch = video_files
@@ -366,7 +377,10 @@ class ProcessTVShow(object):
video_batch = set(video_batch) - set(video_pick)
self._process_media(walk_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup, showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type, self.check_video_filenames(walk_dir, video_pick)))
self._process_media(
walk_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup,
showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type,
self.check_video_filenames(walk_dir, video_pick)))
except OSError as e:
logger.log('Batch skipped, %s%s' %
@@ -732,10 +746,10 @@ class ProcessTVShow(object):
for wdata in iter(partial(part.read, 4096), b''):
try:
newfile.write(wdata)
except:
except (StandardError, Exception):
logger.log('Failed write to file %s' % f)
return result
except:
except (StandardError, Exception):
logger.log('Failed read from file %s' % f)
return result
result = base_filepath
@@ -759,22 +773,23 @@ class ProcessTVShow(object):
pass
if None is parse_result:
try:
parse_result = NameParser(try_scene_exceptions=True,convert=True).parse(dir_name, cache_result=False)
parse_result = NameParser(try_scene_exceptions=True, convert=True).parse(dir_name, cache_result=False)
except (InvalidNameException, InvalidShowException):
# If the filename doesn't parse, then return false as last
# resort. We can assume that unparseable filenames are not
# processed in the past
return False
showlink = ('for "<a href="%s/home/displayShow?show=%s" target="_blank">%s</a>"' % (sickbeard.WEB_ROOT, parse_result.show.indexerid, parse_result.show.name),
parse_result.show.name)[self.any_vid_processed]
showlink = ('for "<a href="%s/home/displayShow?show=%s" target="_blank">%s</a>"' % (
sickbeard.WEB_ROOT, parse_result.show.indexerid, parse_result.show.name),
parse_result.show.name)[self.any_vid_processed]
ep_detail_sql = ''
if parse_result.show.indexerid and 0 < len(parse_result.episode_numbers) and parse_result.season_number:
ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
% (str(parse_result.show.indexerid),
str(parse_result.season_number),
str(parse_result.episode_numbers[0]))
str(parse_result.season_number),
str(parse_result.episode_numbers[0]))
# Avoid processing the same directory again if we use a process method <> move
my_db = db.DBConnection()
@@ -793,7 +808,8 @@ class ProcessTVShow(object):
if not isinstance(videofile, unicode):
videofile = unicode(videofile, 'utf_8')
sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
sql_result = my_db.select(
'SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
if sql_result:
self._log_helper(u'Found a video, but that release %s was already processed,<br />.. skipping: %s'
% (showlink, videofile))
@@ -823,7 +839,8 @@ class ProcessTVShow(object):
return False
def _process_media(self, process_path, video_files, nzb_name, process_method, force, force_replace, use_trash=False, showObj=None):
def _process_media(self, process_path, video_files, nzb_name, process_method, force, force_replace,
use_trash=False, showObj=None):
processor = None
for cur_video_file in video_files:
@@ -835,7 +852,10 @@ class ProcessTVShow(object):
cur_video_file_path = ek.ek(os.path.join, process_path, cur_video_file)
try:
processor = postProcessor.PostProcessor(cur_video_file_path, nzb_name, process_method, force_replace, use_trash=use_trash, webhandler=self.webhandler, showObj=showObj)
processor = postProcessor.PostProcessor(
cur_video_file_path, nzb_name, process_method, force_replace,
use_trash=use_trash, webhandler=self.webhandler, showObj=showObj)
file_success = processor.process()
process_fail_message = ''
except exceptions.PostProcessingFailed:
@@ -862,14 +882,16 @@ class ProcessTVShow(object):
dirs = []
files = []
if dir_name == sickbeard.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type: # Scheduled Post Processing Active
if dir_name == sickbeard.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type:
# Scheduled Post Processing Active
# Get at first all the subdir in the dir_name
for path, dirs, files in ek.ek(os.walk, dir_name):
break
else:
path, dirs = ek.ek(os.path.split, dir_name) # Script Post Processing
if None is not nzb_name and not nzb_name.endswith('.nzb') and \
ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, nzb_name)): # For single torrent file without directory
ek.ek(os.path.isfile, ek.ek(os.path.join, dir_name, nzb_name)):
# For single torrent file without directory
dirs = []
files = [ek.ek(os.path.join, dir_name, nzb_name)]
else:
@@ -909,6 +931,9 @@ class ProcessTVShow(object):
# backward compatibility prevents the case of this function name from being updated to PEP8
def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, type='auto', cleanup=False, webhandler=None, showObj=None):
def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None,
failed=False, type='auto', cleanup=False, webhandler=None, showObj=None):
# backward compatibility prevents the case of this function name from being updated to PEP8
return ProcessTVShow(webhandler).process_dir(dir_name, nzb_name, process_method, force, force_replace, failed, type, cleanup, showObj)
return ProcessTVShow(webhandler).process_dir(
dir_name, nzb_name, process_method, force, force_replace, failed, type, cleanup, showObj)

View file

@@ -540,20 +540,15 @@ class GenericProvider:
u' didn\'t parse as one, skipping it', logger.DEBUG)
add_cache_entry = True
else:
airdate = parse_result.air_date.toordinal()
my_db = db.DBConnection()
sql_results = my_db.select('SELECT season, episode FROM tv_episodes ' +
'WHERE showid = ? AND airdate = ?', [show_obj.indexerid, airdate])
actual_season = parse_result.season_number
actual_episodes = parse_result.episode_numbers
if 1 != len(sql_results):
logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' +
u' give proper results, skipping it', logger.WARNING)
if not actual_episodes or \
not [ep for ep in episodes if ep.season == actual_season and ep.episode in actual_episodes]:
logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
u' to snatch, ignoring', logger.DEBUG)
add_cache_entry = True
if not add_cache_entry:
actual_season = int(sql_results[0]['season'])
actual_episodes = [int(sql_results[0]['episode'])]
# add parsed result to cache for usage later on
if add_cache_entry:
logger.log(u'Adding item from search to cache: ' + title, logger.DEBUG)
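
Net effect of this hunk: the per-result airdate query against tv_episodes is gone; the parser's converted season/episode numbers are trusted and simply validated against the episodes being searched. A hedged sketch with dict stand-ins for the episode objects:

actual_season, actual_episodes = 5, [3]    # from parse_result
episodes = [{'season': 5, 'episode': 3}]   # stand-ins for wanted TVEpisode objects
add_cache_entry = False
if not actual_episodes or not [
        ep for ep in episodes
        if ep['season'] == actual_season and ep['episode'] in actual_episodes]:
    add_cache_entry = True  # not an episode we want; cache the item only
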
@@ -1165,6 +1160,6 @@ class TorrentProvider(object, GenericProvider):
'[^<]*?no\shits\.\sTry\sadding' +
')', html)
def _cache_data(self):
def _cache_data(self, **kwargs):
return self._search_provider({'Cache': ['']})
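
The widened signature just lets callers pass optional keyword arguments without breaking providers that ignore them; a minimal sketch (the kwarg name is hypothetical):

class ExampleProvider(object):
    def _search_provider(self, search_params):
        return []  # stub

    def _cache_data(self, **kwargs):
        return self._search_provider({'Cache': ['']})

ExampleProvider()._cache_data(max_age=7)  # accepted and ignored: no TypeError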

View file

@@ -15,6 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import base64
import re
import traceback
@@ -31,7 +32,21 @@ class IPTorrentsProvider(generic.TorrentProvider):
generic.TorrentProvider.__init__(self, 'IPTorrents')
self.url_home = (['https://iptorrents.%s/' % u for u in 'eu', 'com', 'me', 'ru'] +
['http://rss.workisboring.com/', 'https://ipt-update.com'])
['http://rss.workisboring.com/', 'https://ipt-update.com'] +
[base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('(?i)[q\s1]+', '', x[::-1]) for x in [
'c0RHa', 'vo1QD', 'hJ2L', 'GdhdXe', 'vdnLoN', 'J21cptmc', '5yZulmcv', '02bj', '=iq=']],
[re.sub('(?i)[q\seg]+', '', x[::-1]) for x in [
'RqHEa', 'LvEoDc0', 'Zvex2', 'LuF2', 'NXdu Vn', 'XZwQxeWY1', 'Yu42bzJ', 'tgG92']],
[re.sub('(?i)[q\sek]+', '', x[::-1]) for x in [
'H qa', 'vQoDc0R', '2L ', 'bod', 'hNmLk0N3', 'WLlxemY', 'LtVGZv1', 'wZy9m', '=kQ=']],
[re.sub('(?i)[q\seg1]+', '', x[::-1]) for x in [
'HGa', 'voDc0R', '21L', 'bucmbvt', 'ZyZWQ1L0Vm', 'ycrFW', '02bej5', 'e=gq']],
[re.sub('(?i)[q\sei]+', '', x[::-1]) for x in [
'Q0RHa', 'voiQDc', 'asF2L', 'hVmLuVW', 'yZulGd', 'mbhdmcv1', 'Adl5mLjl', '==Qe']],
[re.sub('(?i)[q\si1g]+', '', x[::-1]) for x in [
'Dc0GRHa', 'vo', 'Cdwl2L', 'FWZy5', 'bvJWL1k', '9mLzt2', 'wZy', '=GG=q']]
]]])
self.url_vars = {'login': 't', 'search': 't?%s;q=%s;qf=ti%s%s#torrents', 'get': '%s'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
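
The added mirrors are lightly obfuscated: each entry is a list of fragments that are reversed, stripped of noise characters, joined, and base64-decoded at startup. A runnable toy of the same pipeline (fabricated fragments decoding to a placeholder URL, not one of the real mirrors):

import base64
import re

# toy fragments; the noise class here is [s\s2], and the payload avoids those characters
parts = ['0Rs Ha2', '6MsH2c', 'l9y2L', 'tFsGe', 'lx2Gc', 'vNsmL', '==Q2b']
url = base64.b64decode(''.join(re.sub(r'(?i)[s\s2]+', '', x[::-1]) for x in parts))
# url == 'https://example.com' (bytes on Python 3)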

View file

@@ -129,7 +129,7 @@ class TVChaosUKProvider(generic.TorrentProvider):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt,
('search string: ' + search_string.replace('%', ' '), self.name)['Cache' == mode])
('search string: ' + search_string.replace('%', '%%'), self.name)['Cache' == mode])
if mode in 'Season' and len(items[mode]):
break
@@ -251,6 +251,7 @@ class TVChaosUKProvider(generic.TorrentProvider):
return generic.TorrentProvider._episode_strings(self, ep_obj, scene=False, prefix='%', date_detail=(
lambda d: [x.strip('0') for x in (
['{0} {1}% {2}'.format(d.strftime('%d')[-1], d.strftime('%b'), d.strftime('%Y'))] +
[d.strftime('%d %b %Y')] + ([d.strftime('%d %B %Y')], [])[d.strftime('%b') == d.strftime('%B')])]),
ep_detail=(lambda e: [naming_ep_type[2] % e] + (
[], ['%(episodenumber)dof' % e])[1 == tryInt(e.get('seasonnumber'))]), **kwargs)

View file

@@ -109,7 +109,7 @@ class ZooqleProvider(generic.TorrentProvider):
def _episode_strings(self, ep_obj, **kwargs):
return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)
def _cache_data(self):
def _cache_data(self, **kwargs):
return self._search_provider({'Cache': ['*']})
provider = ZooqleProvider()