# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

from functools import partial
import datetime
import os
import re
import shutil
import stat
import sys
import time

import sickbeard
from sickbeard import postProcessor
from sickbeard import db, helpers, exceptions
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard import logger
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard import common
from sickbeard.history import reset_status

from sickbeard import failedProcessor

import lib.rarfile.rarfile as rarfile

try:
    import json
except ImportError:
    from lib import simplejson as json

try:
    from lib.send2trash import send2trash
except ImportError:
    pass


# noinspection PyArgumentList
class ProcessTVShow(object):
    """ Process a TV Show """

    def __init__(self, webhandler=None):
        self.files_passed = 0
        self.files_failed = 0
        self.fail_detected = False
        self._output = []
        self.webhandler = webhandler

    @property
    def any_vid_processed(self):
        return 0 < self.files_passed

    @property
    def result(self, pre=True):
        # accessed as a property, so pre always keeps its default of True (newline joins)
        return (('<br />', u'\n')[pre]).join(self._output)
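
    # Minimal illustration of the logging flow (not executed here): messages pass through
    # _log_helper to both the app log and an internal buffer, and `result` joins that
    # buffer for the caller, e.g.
    #     ptv = ProcessTVShow()
    #     ptv._log_helper(u'step one')
    #     ptv._log_helper(u'step two')
    #     ptv.result  # -> u'step one\nstep two'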

    def _buffer(self, text=None):
        if None is not text:
            self._output.append(text)
            if self.webhandler:
                logger_msg = re.sub(r'(?i)<br(?:[\s/]+)>', '\n', text)
                logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
                self.webhandler('%s%s' % (logger_msg, u'\n'))

    def _log_helper(self, message, log_level=logger.DEBUG):
        logger_msg = re.sub(r'(?i)<br(?:[\s/]+)>\.*', '', message)
        logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
        logger.log(u'%s' % logger_msg, log_level)
        self._buffer(message)
        return

    def _set_process_success(self, state=True, reset=False):
        if state:
            self.files_passed += 1
        else:
            self.files_failed += 1
        if reset:
            self.files_passed = 0
            self.files_failed = 0

    def _delete_folder(self, folder, check_empty=True):

        # check if it's a folder
        if not ek.ek(os.path.isdir, folder):
            return False

        # make sure it isn't TV_DOWNLOAD_DIR
        if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) == helpers.real_path(folder):
            return False

        # check that the folder is empty when an empty check is wanted
        if check_empty and ek.ek(os.listdir, folder):
            return False

        # try deleting folder
        try:
            shutil.rmtree(folder)
        except (OSError, IOError) as e:
            logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
            return False

        if ek.ek(os.path.isdir, folder):
            logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING)
            return False

        self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE)
        return True

    def _delete_files(self, process_path, notwanted_files, use_trash=False, force=False):

        if not self.any_vid_processed and not force:
            return

        result = True
        # Delete all files that are not needed
        for cur_file in notwanted_files:

            cur_file_path = ek.ek(os.path.join, process_path, cur_file)

            if not ek.ek(os.path.isfile, cur_file_path):
                continue  # Prevent error when a not-wanted file is an associated file

            # check the read-only attribute first
            file_attribute = ek.ek(os.stat, cur_file_path)[0]
            if not file_attribute & stat.S_IWRITE:
                # File is read-only, so make it writable
                self._log_helper(u'Changing ReadOnly flag for file ' + cur_file)
                try:
                    ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
                except OSError as e:
                    self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, str(e.strerror)))
            try:
                if use_trash:
                    ek.ek(send2trash, cur_file_path)
                else:
                    ek.ek(os.remove, cur_file_path)
            except OSError as e:
                self._log_helper(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)))

            if ek.ek(os.path.isfile, cur_file_path):
                result = False
            else:
                self._log_helper(u'Deleted file ' + cur_file)

        return result

    def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False,
                    force_replace=None, failed=False, pp_type='auto', cleanup=False, showObj=None):
        """
        Scan through the files in dir_name and process whatever media files are found.

        dir_name: The folder name to look in
        nzb_name: The NZB name which resulted in this folder being downloaded
        process_method: How to handle media files ('copy', 'move', 'hardlink' or 'symlink')
        force: True to post-process already post-processed files
        force_replace: True to replace files that already exist
        failed: True if the download failed
        pp_type: Type of post-processing, 'auto' or 'manual'
        cleanup: True to trash/delete leftover files after processing
        showObj: Optional show object that the release belongs to
        """

        # if they passed us a real directory then assume it's the one we want
        if dir_name and ek.ek(os.path.isdir, dir_name):
            dir_name = ek.ek(os.path.realpath, dir_name)

        # if the client and SickGear are not on the same machine, translate the directory into a network directory
        elif dir_name and sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR)\
                and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
            dir_name = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR,
                             ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
            self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickbeard.TV_DOWNLOAD_DIR)

        if dir_name:
            self._log_helper(u'Checking folder... ' + dir_name)

        # if we didn't find a real directory then process "failed" or just quit
        if not dir_name or not ek.ek(os.path.isdir, dir_name):
            if nzb_name and failed:
                self._process_failed(dir_name, nzb_name, showObj=showObj)
            else:
                self._log_helper(u'Unable to figure out what folder to process. ' +
                                 u'If your downloader and SickGear aren\'t on the same PC then make sure ' +
                                 u'you fill out your completed TV download folder in the PP config.')
            return self.result

        path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)

        if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
            self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
            return self.result

        if not process_method:
            process_method = sickbeard.PROCESS_METHOD

        self._log_helper(u'Processing folder... %s' % path)

        work_files = []
        joined = self.join(path)
        if joined:
            work_files += [joined]

        rar_files, rarfile_history = self.unused_archives(
            path, filter(helpers.is_first_rar_volume, files), pp_type, process_method)
        rar_content = self._unrar(path, rar_files, force)
        if self.fail_detected:
            self._process_failed(dir_name, nzb_name, showObj=showObj)
            return self.result
        path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
        video_files = filter(helpers.has_media_ext, files)
        video_in_rar = filter(helpers.has_media_ext, rar_content)
        work_files += [ek.ek(os.path.join, path, item) for item in rar_content]

        if 0 < len(files):
            self._log_helper(u'Process file%s: %s' % (helpers.maybe_plural(files), str(files)))
        if 0 < len(video_files):
            self._log_helper(u'Process video file%s: %s' % (helpers.maybe_plural(video_files), str(video_files)))
        if 0 < len(rar_content):
            self._log_helper(u'Process rar content: ' + str(rar_content))
        if 0 < len(video_in_rar):
            self._log_helper(u'Process video%s in rar: %s' % (helpers.maybe_plural(video_in_rar), str(video_in_rar)))

        # If nzb_name is set and there's more than one video file in the folder, files will be lost (overwritten).
        nzb_name_original = nzb_name
        if 2 <= len(video_files):
            nzb_name = None

        # self._set_process_success()

        # Don't link media when the media is extracted from a rar in the same path
        if process_method in ('hardlink', 'symlink') and video_in_rar:
            self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace, showObj=showObj)
            self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
            video_batch = set(video_files) - set(video_in_rar)
        else:
            video_batch = video_files

        try:
            while 0 < len(video_batch):
                # pick the largest remaining video file for this pass
                video_pick = ['']
                video_size = 0
                for cur_video_file in video_batch:
                    cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file))
                    if 0 == video_size or cur_video_size > video_size:
                        video_size = cur_video_size
                        video_pick = [cur_video_file]

                video_batch = set(video_batch) - set(video_pick)

                self._process_media(path, video_pick, nzb_name, process_method, force, force_replace,
                                    use_trash=cleanup, showObj=showObj)

        except OSError as e:
            logger.log('Batch skipped, %s%s' %
                       (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)

        # Process video files in TV subdirectories
        for directory in [x for x in dirs if self._validate_dir(path, x, nzb_name_original, failed, showObj=showObj)]:

            # self._set_process_success(reset=True)

            for walk_path, walk_dir, files in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):

                if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
                    self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
                    return self.result

                rar_files, rarfile_history = self.unused_archives(
                    walk_path, filter(helpers.is_first_rar_volume, files), pp_type, process_method, rarfile_history)
                rar_content = self._unrar(walk_path, rar_files, force)
                work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content]
                if self.fail_detected:
                    self._process_failed(dir_name, nzb_name, showObj=showObj)
                    continue
                files = list(set(files + rar_content))
                video_files = filter(helpers.has_media_ext, files)
                video_in_rar = filter(helpers.has_media_ext, rar_content)
                notwanted_files = [x for x in files if x not in video_files]

                # Don't link media when the media is extracted from a rar in the same path
                if process_method in ('hardlink', 'symlink') and video_in_rar:
                    self._process_media(walk_path, video_in_rar, nzb_name, 'move', force, force_replace, showObj=showObj)
                    video_batch = set(video_files) - set(video_in_rar)
                else:
                    video_batch = video_files

                try:
                    while 0 < len(video_batch):
                        video_pick = ['']
                        video_size = 0
                        for cur_video_file in video_batch:
                            cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, walk_path, cur_video_file))

                            if 0 == video_size or cur_video_size > video_size:
                                video_size = cur_video_size
                                video_pick = [cur_video_file]

                        video_batch = set(video_batch) - set(video_pick)

                        self._process_media(walk_path, video_pick, nzb_name, process_method, force, force_replace,
                                            use_trash=cleanup, showObj=showObj)

                except OSError as e:
                    logger.log('Batch skipped, %s%s' %
                               (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)

                if process_method in ('hardlink', 'symlink') and video_in_rar:
                    self._delete_files(walk_path, rar_content)
                else:
                    # Delete all files that are not needed
                    if not self.any_vid_processed\
                            or 'move' != process_method\
                            or ('manual' == pp_type and not cleanup):  # Avoid deleting files during Manual Postprocessing
                        continue

                    self._delete_files(walk_path, notwanted_files, use_trash=cleanup)

                    if 'move' == process_method\
                            and ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR) != ek.ek(os.path.normpath, walk_path):
                        self._delete_folder(walk_path, check_empty=False)

        if 'copy' == process_method and work_files:
            self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
            for f in sorted(list(set([ek.ek(os.path.dirname, item) for item in work_files]) - {path}),
                            key=len, reverse=True):
                self._delete_folder(f)

        def _bottom_line(text, log_level=logger.DEBUG):
            self._buffer('-' * len(text))
            self._log_helper(text, log_level)

        if self.any_vid_processed:
            if not self.files_failed:
                _bottom_line(u'Successfully processed.', logger.MESSAGE)
            else:
                _bottom_line(u'Successfully processed at least one video file%s.' %
                             (', others were skipped', ' and skipped another')[1 == self.files_failed], logger.MESSAGE)
        else:
            _bottom_line(u'Failed! Did not process any files.', logger.WARNING)

        return self.result
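
    # Hedged usage sketch for process_dir (assumes a configured, running SickGear
    # instance; the paths and release name are hypothetical):
    #     ptv = ProcessTVShow()
    #     summary = ptv.process_dir('/downloads/complete/Show.S01E02.720p.HDTV',
    #                               nzb_name='Show.S01E02.720p.HDTV.nzb', pp_type='auto')
    #     print(summary)  # newline-joined log of everything that happened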

    @staticmethod
    def unused_archives(path, archives, pp_type, process_method, archive_history=None):

        archive_history = (archive_history, {})[not archive_history]
        if ('auto' == pp_type and sickbeard.PROCESS_AUTOMATICALLY
                and 'copy' == process_method and sickbeard.UNPACK):

            archive_history_file = ek.ek(os.path.join, sickbeard.DATA_DIR, 'archive_history.txt')

            if not archive_history:
                try:
                    with open(archive_history_file, 'r') as fh:
                        archive_history = json.loads(fh.read(10 * 1024 * 1024))
                except (IOError, ValueError, Exception):
                    pass

            init_history_cnt = len(archive_history)

            # prune entries for archives that no longer exist on disk
            for archive in archive_history.keys():
                if not ek.ek(os.path.isfile, archive):
                    del archive_history[archive]

            unused_files = list(set([ek.ek(os.path.join, path, x) for x in archives]) - set(archive_history.keys()))
            archives = [ek.ek(os.path.basename, x) for x in unused_files]
            if unused_files:
                for f in unused_files:
                    archive_history.setdefault(f, time.mktime(datetime.datetime.utcnow().timetuple()))

            if init_history_cnt != len(archive_history):
                try:
                    with open(archive_history_file, 'w') as fh:
                        fh.write(json.dumps(archive_history))
                except (IOError, Exception):
                    pass

        return archives, archive_history
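
    # The archive history file is plain JSON that maps an archive's full path to the
    # epoch time it was first seen, e.g. (hypothetical content of archive_history.txt):
    #     {"/downloads/complete/Show.S01E02/show.s01e02.rar": 1472688000.0}
    # Entries whose archive has since vanished from disk are pruned before reuse.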

    def _validate_dir(self, path, dir_name, nzb_name_original, failed, showObj=None):

        self._log_helper(u'Processing sub dir: ' + dir_name)

        if ek.ek(os.path.basename, dir_name).startswith('_FAILED_'):
            self._log_helper(u'The directory name indicates it failed to extract.')
            failed = True
        elif ek.ek(os.path.basename, dir_name).startswith('_UNDERSIZED_'):
            self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.')
            failed = True
        elif ek.ek(os.path.basename, dir_name).upper().startswith('_UNPACK'):
            self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.')
            return False

        if failed:
            self._process_failed(os.path.join(path, dir_name), nzb_name_original, showObj=showObj)
            return False

        if helpers.is_hidden_folder(dir_name):
            self._log_helper(u'Ignoring hidden folder: ' + dir_name)
            return False

        # make sure the directory isn't inside a show directory
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT * FROM tv_shows')

        for sqlShow in sql_results:
            if dir_name.lower().startswith(ek.ek(os.path.realpath, sqlShow['location']).lower() + os.sep)\
                    or dir_name.lower() == ek.ek(os.path.realpath, sqlShow['location']).lower():
                self._log_helper(
                    u'Found an episode that has already been moved to its show dir, skipping',
                    logger.ERROR)
                return False

        # Get the video file list for the next checks
        all_files = []
        all_dirs = []
        process_path = None
        for process_path, process_dir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir_name), topdown=False):
            all_dirs += process_dir
            all_files += fileList

        video_files = filter(helpers.has_media_ext, all_files)
        all_dirs.append(dir_name)

        # check if the directory has at least one TV video file
        for video in video_files:
            try:
                NameParser(showObj=showObj).parse(video, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

        for directory in all_dirs:
            try:
                NameParser(showObj=showObj).parse(directory, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

        if sickbeard.UNPACK and process_path and all_files:
            # Search for packed release
            packed_files = filter(helpers.is_first_rar_volume, all_files)

            for packed in packed_files:
                try:
                    NameParser(showObj=showObj).parse(packed, cache_result=False)
                    return True
                except (InvalidNameException, InvalidShowException):
                    pass

        return False
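
    # For example (names hypothetical): a sub dir '_FAILED_Show.S01E02.720p' is routed to
    # failed processing, an '_UNPACK...' dir is skipped as still being unpacked, and any
    # other dir qualifies only if a contained video, dir or packed file name parses as an
    # episode via NameParser.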

    def _unrar(self, path, rar_files, force):

        unpacked_files = []

        if 'win32' == sys.platform:
            rarfile.UNRAR_TOOL = ek.ek(os.path.join, sickbeard.PROG_DIR, 'lib', 'rarfile', 'UnRAR.exe')

        if sickbeard.UNPACK and rar_files:

            self._log_helper(u'Packed releases detected: ' + str(rar_files))

            for archive in rar_files:

                self._log_helper(u'Unpacking archive: ' + archive)

                try:
                    rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive))
                except (StandardError, Exception):
                    self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR)
                    self._set_process_success(False)
                    continue
                try:
                    # Skip extraction if any file in archive has previously been extracted
                    skip_file = False
                    for file_in_archive in [ek.ek(os.path.basename, x.filename)
                                            for x in rar_handle.infolist() if not x.isdir()]:
                        if self._already_postprocessed(path, file_in_archive, force):
                            self._log_helper(
                                u'Archive file already processed, extraction skipped: ' + file_in_archive)
                            skip_file = True
                            break

                    if not skip_file:
                        # need to test for password since rar4 doesn't raise PasswordRequired
                        if rar_handle.needs_password():
                            raise rarfile.PasswordRequired

                        rar_handle.extractall(path=path)
                        rar_content = [ek.ek(os.path.normpath, x.filename)
                                       for x in rar_handle.infolist() if not x.isdir()]
                        renamed = self.cleanup_names(path, rar_content)
                        cur_unpacked = rar_content if not renamed else \
                            (list(set(rar_content) - set(renamed.keys())) + renamed.values())
                        self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map(unicode, cur_unpacked)))
                        unpacked_files += cur_unpacked
                except (rarfile.PasswordRequired, rarfile.RarWrongPassword):
                    self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR)
                    self._set_process_success(False)
                    self.fail_detected = True
                except (StandardError, Exception):
                    self._log_helper(u'Failed to unpack archive: %s' % archive, logger.ERROR)
                    self._set_process_success(False)
                finally:
                    rar_handle.close()
                    del rar_handle

        elif rar_files:
            # check for passworded rars
            for archive in rar_files:
                try:
                    rar_handle = rarfile.RarFile(ek.ek(os.path.join, path, archive))
                except (StandardError, Exception):
                    self._log_helper(u'Failed to open archive: %s' % archive, logger.ERROR)
                    continue
                try:
                    if rar_handle.needs_password():
                        self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR)
                        self._set_process_success(False)
                        self.fail_detected = True
                    rar_handle.close()
                    del rar_handle
                except (StandardError, Exception):
                    pass

        return unpacked_files
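
    # Minimal standalone sketch of the rarfile calls used above (the archive path is
    # hypothetical); `needs_password`, `infolist` and `extractall` are the same
    # lib.rarfile APIs `_unrar` relies on:
    #     handle = rarfile.RarFile('/tmp/example.rar')
    #     if not handle.needs_password():
    #         names = [x.filename for x in handle.infolist() if not x.isdir()]
    #         handle.extractall(path='/tmp/out')
    #     handle.close()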

    @staticmethod
    def cleanup_names(directory, files=None):

        is_renamed = {}
        num_videos = 0
        old_name = None
        new_name = None
        params = {
            'base_name': ek.ek(os.path.basename, directory),
            # tokens of common release tags spelled backwards (e.g. '.20e10s.' is '.s01e02.' reversed)
            'reverse_pattern': re.compile('|'.join([
                r'\.\d{2}e\d{2}s\.', r'\.p0(?:63|27|612)\.', r'\.[pi](?:084|675|0801)\.', r'\b[45]62[xh]\.',
                r'\.yarulb\.', r'\.vtd[hp]\.', r'\.(?:ld[.-]?)?bew\.', r'\.pir.?(?:shv|dov|dvd|bew|db|rb)\.',
                r'\brdvd\.', r'\.(?:vts|dcv)\.', r'\b(?:mac|pir)dh\b', r'\.(?:lanretni|reporp|kcaper|reneercs)\.',
                r'\b(?:caa|3ca|3pm)\b', r'\.cstn\.', r'\.5r\.', r'\brcs\b'
            ]), flags=re.IGNORECASE),
            'season_pattern': re.compile(r'(.*\.\d{2}e\d{2}s\.)(.*)', flags=re.IGNORECASE),
            'word_pattern': re.compile(r'([^A-Z0-9]*[A-Z0-9]+)'),
            'char_replace': [[r'(\w)1\.(\w)', r'\1i\2']],
            'garbage_name': re.compile(r'^[a-zA-Z0-9]{3,}$'),
            'media_pattern': re.compile('|'.join([
                r'\.s\d{2}e\d{2}\.', r'\.(?:36|72|216)0p\.', r'\.(?:480|576|1080)[pi]\.', r'\.[xh]26[45]\b',
                r'\.bluray\.', r'\.[hp]dtv\.', r'\.web(?:[.-]?dl)?\.', r'\.(?:vhs|vod|dvd|web|bd|br).?rip\.',
                r'\.dvdr\b', r'\.(?:stv|vcd)\.', r'\bhd(?:cam|rip)\b', r'\.(?:internal|proper|repack|screener)\.',
                r'\b(?:aac|ac3|mp3)\b', r'\.(?:ntsc|pal|secam)\.', r'\.r5\.', r'\bscr\b', r'\b(?:divx|xvid)\b'
            ]), flags=re.IGNORECASE)
        }

        def renamer(_dirpath, _filenames, _num_videos, _old_name, _new_name, base_name,
                    reverse_pattern, season_pattern, word_pattern, char_replace, garbage_name, media_pattern):

            for cur_filename in _filenames:

                file_name, file_extension = ek.ek(os.path.splitext, cur_filename)
                file_path = ek.ek(os.path.join, _dirpath, cur_filename)
                dir_name = ek.ek(os.path.dirname, file_path)

                if None is not reverse_pattern.search(file_name):
                    na_parts = season_pattern.search(file_name)
                    if None is not na_parts:
                        word_p = word_pattern.findall(na_parts.group(2))
                        new_words = ''
                        for wp in word_p:
                            if '.' == wp[0]:
                                new_words += '.'
                            new_words += re.sub(r'\W', '', wp)
                        for cr in char_replace:
                            new_words = re.sub(cr[0], cr[1], new_words)
                        new_filename = new_words[::-1] + na_parts.group(1)[::-1]
                    else:
                        new_filename = file_name[::-1]
                    logger.log('Reversing base filename "%s" to "%s"' % (file_name, new_filename))
                    try:
                        ek.ek(os.rename, file_path, ek.ek(os.path.join, _dirpath, new_filename + file_extension))
                        is_renamed[ek.ek(os.path.relpath, file_path, directory)] = ek.ek(
                            os.path.relpath, new_filename + file_extension, directory)
                    except OSError as e:
                        logger.log('Error unable to rename file "%s" because %s' % (cur_filename, ex(e)), logger.ERROR)
                elif helpers.has_media_ext(cur_filename) and \
                        None is not garbage_name.search(file_name) and None is not media_pattern.search(base_name):
                    _num_videos += 1
                    _old_name = file_path
                    _new_name = ek.ek(os.path.join, dir_name, '%s%s' % (base_name, file_extension))
            return is_renamed, _num_videos, _old_name, _new_name

        if files:
            is_renamed, num_videos, old_name, new_name = renamer(
                directory, files, num_videos, old_name, new_name, **params)
        else:
            for cur_dirpath, void, cur_filenames in ek.ek(os.walk, directory):
                is_renamed, num_videos, old_name, new_name = renamer(
                    cur_dirpath, cur_filenames, num_videos, old_name, new_name, **params)

        if all([not is_renamed, 1 == num_videos, old_name, new_name]):
            try_name = ek.ek(os.path.basename, new_name)
            logger.log('Renaming file "%s" using dirname as "%s"' % (ek.ek(os.path.basename, old_name), try_name))
            try:
                ek.ek(os.rename, old_name, new_name)
                is_renamed[ek.ek(os.path.relpath, old_name, directory)] = ek.ek(os.path.relpath, new_name, directory)
            except OSError as e:
                logger.log('Error unable to rename file "%s" because %s' % (old_name, ex(e)), logger.ERROR)

        return is_renamed
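
    # Illustrative example of the reversed-name repair (filename hypothetical): a file
    # named 'vtdh.p027.20e10s.emaN.wohS.mkv' trips reverse_pattern, and the word runs
    # are flipped back so the name parser sees 'Show.Name.s01e02.720p.hdtv.mkv';
    # char_replace additionally restores an 'i' that scene names spell as '1'.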

    def join(self, directory):

        result = False
        chunks = {}
        matcher = re.compile(r'\.[0-9]+$')  # numbered chunk extensions, e.g. 'file.avi.001'
        for dirpath, void, filenames in os.walk(directory):
            for filename in filenames:
                if None is not matcher.search(filename):
                    maybe_chunk = ek.ek(os.path.join, dirpath, filename)
                    base_filepath, ext = os.path.splitext(maybe_chunk)
                    if base_filepath not in chunks:
                        chunks[base_filepath] = []
                    chunks[base_filepath].append(maybe_chunk)

        if not chunks:
            return

        for base_filepath in chunks:
            chunks[base_filepath].sort()
            chunk_set = chunks[base_filepath]
            if ek.ek(os.path.isfile, base_filepath):
                base_filesize = ek.ek(os.path.getsize, base_filepath)
                chunk_sizes = [ek.ek(os.path.getsize, x) for x in chunk_set]
                largest_chunk = max(chunk_sizes)
                if largest_chunk >= base_filesize:
                    outfile = '%s.001' % base_filepath
                    if outfile not in chunk_set:
                        try:
                            ek.ek(os.rename, base_filepath, outfile)
                        except OSError:
                            logger.log('Error unable to rename file %s' % base_filepath, logger.ERROR)
                            return result
                        chunk_set.append(outfile)
                        chunk_set.sort()
                    else:
                        del_dir, del_file = ek.ek(os.path.split, base_filepath)
                        if not self._delete_files(del_dir, [del_file], force=True):
                            return result
                else:
                    if base_filesize == sum(chunk_sizes):
                        logger.log('Join skipped. Total size of %s input files equal to output.. %s (%s bytes)' % (
                            len(chunk_set), base_filepath, base_filesize))
                    else:
                        logger.log('Join skipped. Found output file larger than input.. %s (%s bytes)' % (
                            base_filepath, base_filesize))
                    return result

            with open(base_filepath, 'ab') as newfile:
                for f in chunk_set:
                    logger.log('Joining file %s' % f)
                    try:
                        with open(f, 'rb') as part:
                            for wdata in iter(partial(part.read, 4096), b''):
                                try:
                                    newfile.write(wdata)
                                except (IOError, OSError):
                                    logger.log('Failed write to file %s' % f)
                                    return result
                    except (IOError, OSError):
                        logger.log('Failed read from file %s' % f)
                        return result
            result = base_filepath

        return result
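
    # Hedged example of what join() handles (paths hypothetical): given
    #     /downloads/Show.S01E02/show.mkv.001
    #     /downloads/Show.S01E02/show.mkv.002
    # the chunks are appended in sorted order onto /downloads/Show.S01E02/show.mkv and
    # that path is returned, so process_dir can track the assembled file as a work file.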

    def _already_postprocessed(self, dir_name, videofile, force):

        if force or not self.any_vid_processed:
            return False

        # Needed for accessing DB with a unicode dir_name
        if not isinstance(dir_name, unicode):
            dir_name = unicode(dir_name, 'utf_8')

        parse_result = None
        try:
            parse_result = NameParser(try_scene_exceptions=True, convert=True).parse(videofile, cache_result=False)
        except (InvalidNameException, InvalidShowException):
            # Does not parse, move on to directory check
            pass
        if None is parse_result:
            try:
                parse_result = NameParser(try_scene_exceptions=True, convert=True).parse(dir_name, cache_result=False)
            except (InvalidNameException, InvalidShowException):
                # If the filename doesn't parse either, return False as a last resort;
                # we can assume that unparseable filenames were not processed in the past
                return False

        showlink = ('for "<a href="/home/displayShow?show=%s" target="_blank">%s</a>"'
                    % (parse_result.show.indexerid, parse_result.show.name),
                    parse_result.show.name)[self.any_vid_processed]

        ep_detail_sql = ''
        if parse_result.show.indexerid and 0 < len(parse_result.episode_numbers) and parse_result.season_number:
            ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
                            % (str(parse_result.show.indexerid),
                               str(parse_result.season_number),
                               str(parse_result.episode_numbers[0]))

        # Avoid processing the same directory again if we use a process method other than move
        my_db = db.DBConnection()
        sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name])
        if sql_result:
            self._log_helper(u'Found a release directory %s that has already been processed,<br />.. skipping: %s'
                             % (showlink, dir_name))
            if ep_detail_sql:
                reset_status(parse_result.show.indexerid,
                             parse_result.season_number,
                             parse_result.episode_numbers[0])
            return True

        else:
            # This is needed for videos whose name differs from dir_name
            if not isinstance(videofile, unicode):
                videofile = unicode(videofile, 'utf_8')

            sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
            if sql_result:
                self._log_helper(u'Found a video, but that release %s was already processed,<br />.. skipping: %s'
                                 % (showlink, videofile))
                if ep_detail_sql:
                    reset_status(parse_result.show.indexerid,
                                 parse_result.season_number,
                                 parse_result.episode_numbers[0])
                return True

            # Needed if we have downloaded the same episode at a different quality
            search_sql = 'SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history'\
                         + ' ON history.showid=tv_episodes.showid'\
                         + ' WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode'\
                         + ep_detail_sql\
                         + ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\
                         + ' and history.resource LIKE ?'

            sql_result = my_db.select(search_sql, [u'%' + videofile])
            if sql_result:
                self._log_helper(u'Found a video, but the episode %s is already processed,<br />.. skipping: %s'
                                 % (showlink, videofile))
                if ep_detail_sql:
                    reset_status(parse_result.show.indexerid,
                                 parse_result.season_number,
                                 parse_result.episode_numbers[0])
                return True

        return False

    def _process_media(self, process_path, video_files, nzb_name, process_method, force, force_replace,
                       use_trash=False, showObj=None):

        processor = None
        for cur_video_file in video_files:

            if self._already_postprocessed(process_path, cur_video_file, force):
                self._set_process_success(False)
                continue

            cur_video_file_path = ek.ek(os.path.join, process_path, cur_video_file)

            try:
                processor = postProcessor.PostProcessor(
                    cur_video_file_path, nzb_name, process_method, force_replace,
                    use_trash=use_trash, webhandler=self.webhandler, showObj=showObj)
                file_success = processor.process()
                process_fail_message = ''
            except exceptions.PostProcessingFailed:
                file_success = False
                process_fail_message = '<br />.. Post Processing Failed'

            self._set_process_success(file_success)

            if processor:
                self._buffer(processor.log.strip('\n'))

            if file_success:
                self._log_helper(u'Successfully processed ' + cur_video_file, logger.MESSAGE)
            elif self.any_vid_processed:
                self._log_helper(u'Warning fail for %s%s' % (cur_video_file_path, process_fail_message),
                                 logger.WARNING)
            else:
                self._log_helper(u'Did not use file %s%s' % (cur_video_file_path, process_fail_message),
                                 logger.WARNING)

    @staticmethod
    def _get_path_dir_files(dir_name, nzb_name, pp_type):
        path = ''
        dirs = []
        files = []

        if (dir_name == sickbeard.TV_DOWNLOAD_DIR and not nzb_name) or 'manual' == pp_type:  # Scheduled Post Processing Active
            # First, get all the subdirs and files directly under dir_name
            for path, dirs, files in ek.ek(os.walk, dir_name):
                break
        else:
            path, dirs = ek.ek(os.path.split, dir_name)  # Script Post Processing
            if None is not nzb_name and not nzb_name.endswith('.nzb') and os.path.isfile(
                    os.path.join(dir_name, nzb_name)):  # For a single torrent file without a directory
                dirs = []
                files = [os.path.join(dir_name, nzb_name)]
            else:
                dirs = [dirs]
                files = []

        return path, dirs, files
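
    # For example (values hypothetical): a scheduled run where dir_name is the completed
    # TV download folder returns that folder's immediate subdirs and files, while a
    # script run for '/downloads/Show.S01E02' returns ('/downloads', ['Show.S01E02'], []).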

    # noinspection PyArgumentList
    def _process_failed(self, dir_name, nzb_name, showObj=None):
        """ Process a download that did not complete correctly """

        if sickbeard.USE_FAILED_DOWNLOADS:
            processor = None

            try:
                processor = failedProcessor.FailedProcessor(dir_name, nzb_name, showObj)
                self._set_process_success(processor.process())
                process_fail_message = ''
            except exceptions.FailedProcessingFailed as e:
                self._set_process_success(False)
                process_fail_message = ex(e)

            if processor:
                self._buffer(processor.log.strip('\n'))

            if sickbeard.DELETE_FAILED and self.any_vid_processed:
                self._delete_folder(dir_name, check_empty=False)

            task = u'Failed download processing'
            if self.any_vid_processed:
                self._log_helper(u'Successful %s: (%s, %s)'
                                 % (task.lower(), str(nzb_name), dir_name), logger.MESSAGE)
            else:
                self._log_helper(u'%s failed: (%s, %s): %s'
                                 % (task, str(nzb_name), dir_name, process_fail_message), logger.WARNING)


# backward compatibility prevents the case of this function name from being updated to PEP8
def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None,
               failed=False, type='auto', cleanup=False, webhandler=None, showObj=None):
    return ProcessTVShow(webhandler).process_dir(dir_name, nzb_name, process_method, force, force_replace,
                                                 failed, type, cleanup, showObj)
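
# Hedged module-level usage sketch (assumes SickGear's configuration has been loaded;
# the path and release name are hypothetical):
#     from sickbeard import processTV
#     result = processTV.processDir('/downloads/complete/Show.S01E02.720p.HDTV',
#                                   nzb_name='Show.S01E02.720p.HDTV.nzb')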