# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import glob
import os
import re
import subprocess
import stat

import sickbeard

from sickbeard import db
from sickbeard import common
from sickbeard import exceptions
from sickbeard import helpers
from sickbeard import history
from sickbeard import logger
from sickbeard import notifiers
from sickbeard import show_name_helpers
from sickbeard import failed_history
from sickbeard import name_cache
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException

from lib import adba


class PostProcessor(object):
    """
    A class which will process a media file according to the post processing settings in the config.
    """

    EXISTS_LARGER = 1
    EXISTS_SAME = 2
    EXISTS_SMALLER = 3
    DOESNT_EXIST = 4

    IGNORED_FILESTRINGS = ["/.AppleDouble/", ".DS_Store"]

    NZB_NAME = 1
    FOLDER_NAME = 2
    FILE_NAME = 3

    def __init__(self, file_path, nzb_name=None, process_method=None, is_priority=None):
        """
        Creates a new post processor with the given file path and optionally an NZB name.

        file_path: The path to the file to be processed
        nzb_name: The name of the NZB which resulted in this file being downloaded (optional)
        """
        # absolute path to the folder that is being processed
        self.folder_path = ek.ek(os.path.dirname, ek.ek(os.path.abspath, file_path))

        # full path to the file
        self.file_path = file_path

        # file name only
        self.file_name = ek.ek(os.path.basename, file_path)

        # the name of the folder only
        self.folder_name = ek.ek(os.path.basename, self.folder_path)

        # name of the NZB that resulted in this folder
        self.nzb_name = nzb_name

        self.process_method = process_method if process_method else sickbeard.PROCESS_METHOD

        self.in_history = False
        self.release_group = None
        self.is_proper = False

        self.is_priority = is_priority

        self.good_results = {self.NZB_NAME: False,
                             self.FOLDER_NAME: False,
                             self.FILE_NAME: False}

        self.log = ''

    def __del__(self):
        pass

    def _log(self, message, level=logger.MESSAGE):
        """
        A wrapper for the internal logger which also keeps track of messages and saves them to a string for later.

        message: The string to log (unicode)
        level: The log level to use (optional)
        """
        logger.log(message, level)
        self.log += message + '\n'

    def _checkForExistingFile(self, existing_file):
        """
        Checks if a file exists already and, if it does, whether it is bigger or smaller than
        the file we are post processing.

        existing_file: The file to compare to

        Returns:
            DOESNT_EXIST if the file doesn't exist
            EXISTS_LARGER if the file exists and is larger than the file we are post processing
            EXISTS_SMALLER if the file exists and is smaller than the file we are post processing
            EXISTS_SAME if the file exists and is the same size as the file we are post processing
        """

        if not existing_file:
            self._log(u"There is no existing file so there's no worries about replacing it", logger.DEBUG)
            return PostProcessor.DOESNT_EXIST

        # if the existing file is present, return the appropriate code depending on its size
        if ek.ek(os.path.isfile, existing_file):

            # see if the existing file is bigger than the file we are post processing
            if ek.ek(os.path.getsize, existing_file) > ek.ek(os.path.getsize, self.file_path):
                self._log(u"File " + existing_file + " is larger than " + self.file_path, logger.DEBUG)
                return PostProcessor.EXISTS_LARGER

            elif ek.ek(os.path.getsize, existing_file) == ek.ek(os.path.getsize, self.file_path):
                self._log(u"File " + existing_file + " is the same size as " + self.file_path, logger.DEBUG)
                return PostProcessor.EXISTS_SAME

            else:
                self._log(u"File " + existing_file + " is smaller than " + self.file_path, logger.DEBUG)
                return PostProcessor.EXISTS_SMALLER

        else:
            self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it",
                      logger.DEBUG)
            return PostProcessor.DOESNT_EXIST

    def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False):
        """
        For a given file path, searches for files with the same name but a different extension and
        returns their absolute paths.

        file_path: The file to check for associated files
        base_name_only: When False (the default), a '.' is appended to the extensionless file_path
                        for a more conservative search
        subtitles_only: When True, only associated subtitle files are returned

        Returns: A list containing all files which are associated to the given file
        """

        if not file_path:
            return []

        file_path_list = []

        base_name = file_path.rpartition('.')[0]

        if not base_name_only:
            base_name = base_name + '.'
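            # appending '.' restricts the glob below to "<name>.<something>" siblings rather
            # than any file that merely shares the prefix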

        # don't strip it all and use cwd by accident
        if not base_name:
            return []

        # don't confuse glob with chars we didn't mean to use
        base_name = re.sub(r'[\[\]\*\?]', r'[\g<0>]', base_name)
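        # e.g. "Show [2014]" becomes "Show [[]2014[]]": each glob metacharacter is wrapped
        # in a character class so it only matches itself literally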

        for associated_file_path in ek.ek(glob.glob, base_name + '*'):
            # skip the file itself, we only want its associated files
            if associated_file_path == file_path:
                continue

            # only list it if the only non-shared part is the extension or if it is a subtitle
            if subtitles_only and not associated_file_path[len(associated_file_path) - 3:] in common.subtitleExtensions:
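                # the last three characters are compared against the known subtitle
                # extensions ("srt", "sub", ...), so only 3-character extensions match here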
                continue

            # exclude .rar files from the associated list
            if re.search(r'(^.+\.(rar|r\d+)$)', associated_file_path):
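                # matches multi-part archives too, e.g. "show.rar", "show.r00", "show.r01"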
                continue

            if ek.ek(os.path.isfile, associated_file_path):
                file_path_list.append(associated_file_path)

        return file_path_list

    def _delete(self, file_path, associated_files=False):
        """
        Deletes the file and optionally all associated files.

        file_path: The file to delete
        associated_files: True to delete all files which differ only by extension, False to leave them
        """

        if not file_path:
            return

        # figure out which files we want to delete
        file_list = [file_path]
        if associated_files:
            file_list = file_list + self.list_associated_files(file_path)

        if not file_list:
            self._log(u"There were no files associated with " + file_path + ", not deleting anything", logger.DEBUG)
            return

        # delete the file and any other files which we want to delete
        for cur_file in file_list:
            if ek.ek(os.path.isfile, cur_file):
                self._log(u"Deleting file " + cur_file, logger.DEBUG)

                # check first the read-only attribute
                file_attribute = ek.ek(os.stat, cur_file)[0]
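                # os.stat() result index 0 is st_mode; on Windows the S_IWRITE bit
                # reflects whether the read-only attribute is clear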
                if not file_attribute & stat.S_IWRITE:
                    # file is read-only, so make it writeable
                    self._log(u"Read only mode on file " + cur_file + ". Will try to make it writeable",
                              logger.DEBUG)
                    try:
                        ek.ek(os.chmod, cur_file, stat.S_IWRITE)
                    except OSError:
                        self._log(u"Cannot change permissions of " + cur_file, logger.WARNING)

                ek.ek(os.remove, cur_file)

                # do the library update for synoindex
                notifiers.synoindex_notifier.deleteFile(cur_file)

    def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None,
                                 subtitles=False):
        """
        Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
        and optionally move associated files too.

        file_path: The full path of the media file to act on
        new_path: Destination path where we want to move/copy the file to
        new_base_name: The base filename (no extension) to use during the copy. Use None to keep the same name.
        associated_files: Boolean, whether we should copy similarly-named files too
        action: function that takes an old path and a new path and performs an operation on them (move/copy)
        subtitles: Boolean, whether we should process associated subtitle files too
        """

        if not action:
            self._log(u"Must provide an action for the combined file operation", logger.ERROR)
            return

        file_list = [file_path]
        if associated_files:
            file_list = file_list + self.list_associated_files(file_path)
        elif subtitles:
            file_list = file_list + self.list_associated_files(file_path, subtitles_only=True)

        if not file_list:
            self._log(u"There were no files associated with " + file_path + ", not moving anything", logger.DEBUG)
            return

        # create the base name from file_path (the media file without its extension)
        old_base_name = file_path.rpartition('.')[0]
        old_base_name_length = len(old_base_name)

        # deal with all files
        for cur_file_path in file_list:

            cur_file_name = ek.ek(os.path.basename, cur_file_path)

            # get the extension without the leading '.'
            cur_extension = cur_file_path[old_base_name_length + 1:]
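            # e.g. "/tv/show.s01e01.eng.srt" with old base "/tv/show.s01e01" yields "eng.srt"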

            # check if the file has a subtitle language code before the extension (e.g. "eng.srt")
            if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions:
                cur_lang = os.path.splitext(cur_extension)[0]
                if cur_lang in sickbeard.SUBTITLES_LANGUAGES:
                    cur_extension = cur_lang + os.path.splitext(cur_extension)[1]

            # replace .nfo with .nfo-orig to avoid conflicts
            if cur_extension == 'nfo' and sickbeard.NFO_RENAME:
                cur_extension = 'nfo-orig'

            # if a new base name was given then use it for the new name
            if new_base_name:
                new_file_name = new_base_name + '.' + cur_extension
            # if we're not renaming we still want to change the extension sometimes
            else:
                new_file_name = helpers.replaceExtension(cur_file_name, cur_extension)

            if sickbeard.SUBTITLES_DIR and cur_extension in common.subtitleExtensions:
                subs_new_path = ek.ek(os.path.join, new_path, sickbeard.SUBTITLES_DIR)
                dir_exists = helpers.makeDir(subs_new_path)
                if not dir_exists:
                    logger.log(u"Unable to create subtitles folder " + subs_new_path, logger.ERROR)
                else:
                    helpers.chmodAsParent(subs_new_path)
                new_file_path = ek.ek(os.path.join, subs_new_path, new_file_name)
            else:
                new_file_path = ek.ek(os.path.join, new_path, new_file_name)

            action(cur_file_path, new_file_path)

    def _move(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
        """
        file_path: The full path of the media file to move
        new_path: Destination path where we want to move the file to
        new_base_name: The base filename (no extension) to use during the move. Use None to keep the same name.
        associated_files: Boolean, whether we should move similarly-named files too
        subtitles: Boolean, whether we should move associated subtitle files too
        """

        def _int_move(cur_file_path, new_file_path):

            self._log(u"Moving file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
            try:
                helpers.moveFile(cur_file_path, new_file_path)
                helpers.chmodAsParent(new_file_path)
            except (IOError, OSError), e:
                self._log(u"Unable to move file " + cur_file_path + " to " + new_file_path + ": " + ex(e),
                          logger.ERROR)
                raise

        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move,
                                      subtitles=subtitles)

    def _copy(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
        """
        file_path: The full path of the media file to copy
        new_path: Destination path where we want to copy the file to
        new_base_name: The base filename (no extension) to use during the copy. Use None to keep the same name.
        associated_files: Boolean, whether we should copy similarly-named files too
        subtitles: Boolean, whether we should copy associated subtitle files too
        """

        def _int_copy(cur_file_path, new_file_path):

            self._log(u"Copying file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
            try:
                helpers.copyFile(cur_file_path, new_file_path)
                helpers.chmodAsParent(new_file_path)
            except (IOError, OSError), e:
                self._log(u"Unable to copy file " + cur_file_path + " to " + new_file_path + ": " + ex(e),
                          logger.ERROR)
                raise

        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy,
                                      subtitles=subtitles)

    def _hardlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
        """
        file_path: The full path of the media file to hard link
        new_path: Destination path where we want to create a hard linked file
        new_base_name: The base filename (no extension) to use during the link. Use None to keep the same name.
        associated_files: Boolean, whether we should hard link similarly-named files too
        """

        def _int_hard_link(cur_file_path, new_file_path):

            self._log(u"Hard linking file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
            try:
                helpers.hardlinkFile(cur_file_path, new_file_path)
                helpers.chmodAsParent(new_file_path)
            except (IOError, OSError), e:
                self._log(u"Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e),
                          logger.ERROR)
                raise

        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link)

    def _moveAndSymlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
        """
        file_path: The full path of the media file to move
        new_path: Destination path where we want to move the file to, leaving a symbolic link behind
        new_base_name: The base filename (no extension) to use during the link. Use None to keep the same name.
        associated_files: Boolean, whether we should move similarly-named files too
        """

        def _int_move_and_sym_link(cur_file_path, new_file_path):

            self._log(u"Moving then symbolic linking file from " + cur_file_path + " to " + new_file_path,
                      logger.DEBUG)
            try:
                helpers.moveAndSymlinkFile(cur_file_path, new_file_path)
                helpers.chmodAsParent(new_file_path)
            except (IOError, OSError), e:
                self._log(u"Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e),
                          logger.ERROR)
                raise

        self._combined_file_operation(file_path, new_path, new_base_name, associated_files,
                                      action=_int_move_and_sym_link)

    def _history_lookup(self):
        """
        Look up the NZB name in the history and see if it contains a record for self.nzb_name

        Returns a (show, season, [], quality) tuple. The first two may be None if nothing was found.
        """

        to_return = (None, None, [], None)

        # if we don't have either of these then there's nothing to use to search the history for anyway
        if not self.nzb_name and not self.folder_name:
            self.in_history = False
            return to_return

        # make a list of possible names to use in the search
        names = []
        if self.nzb_name:
            names.append(self.nzb_name)
            if '.' in self.nzb_name:
                names.append(self.nzb_name.rpartition(".")[0])
        if self.folder_name:
            names.append(self.folder_name)

        # search the database for a possible match and return immediately if we find one
        myDB = db.DBConnection()
        for curName in names:
            search_name = re.sub(r"[\.\-\ ]", "_", curName)
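            # in SQL LIKE patterns "_" matches any single character, so normalizing the
            # separators to "_" makes the lookup tolerant of ".", "-" and " " differences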
            sql_results = myDB.select("SELECT * FROM history WHERE resource LIKE ?", [search_name])

            if not sql_results:
                continue

            show = helpers.findCertainShow(sickbeard.showList, int(sql_results[0]["showid"]))
            if not show:
                continue

            season = int(sql_results[0]["season"])
            quality = int(sql_results[0]["quality"])

            if quality == common.Quality.UNKNOWN:
                quality = None

            self.in_history = True
            to_return = (show, season, [], quality)
            self._log(u"Found result in history: " + str(to_return), logger.DEBUG)

            return to_return

        self.in_history = False
        return to_return

    def _finalize(self, parse_result):
        self.release_group = parse_result.release_group

        # remember whether it's a proper
        if parse_result.extra_info:
            self.is_proper = re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) is not None
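            # matches "proper"/"repack" only as a standalone token, e.g. "PROPER" in
            # "Show.S01E01.PROPER.720p" but not the "repack" in "Repackaged"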

        # if the result is complete then remember that for later
        if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
            test_name = ek.ek(os.path.basename, parse_result.original_name)
            if test_name == self.nzb_name:
                self.good_results[self.NZB_NAME] = True
            elif test_name == self.folder_name:
                self.good_results[self.FOLDER_NAME] = True
            elif test_name == self.file_name:
                self.good_results[self.FILE_NAME] = True
            else:
                logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(
                    self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
        else:
            logger.log(u"Parse result not sufficient (all of the following have to be set), not saving release name",
                       logger.DEBUG)
            logger.log(u"Parse result (series_name): " + str(parse_result.series_name), logger.DEBUG)
            logger.log(u"Parse result (season_number): " + str(parse_result.season_number), logger.DEBUG)
            logger.log(u"Parse result (episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
            logger.log(u"Parse result (release_group): " + str(parse_result.release_group), logger.DEBUG)

    def _analyze_name(self, name, file=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.

        name: A string which we want to analyze to determine show info from (unicode)

        Returns a (show, season, [episodes], quality) tuple. The first two may be None and episodes
        may be [] if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [], None)

        if not name:
            return to_return

        # parse the name to break it into show name, season, and episode
        try:
            np = NameParser(file, useIndexers=True, convert=True)
            parse_result = np.parse(name)
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.WARNING)
            return to_return

        if parse_result.air_by_date:
            season = -1
            episodes = [parse_result.air_date]
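            # season -1 is a sentinel for date-based shows; episodes holds the parsed date,
            # which _find_info later resolves to a real season/episode via the DB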
        elif parse_result.sports:
            season = -1
            episodes = [parse_result.sports_event_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (parse_result.show, season, episodes, parse_result.quality)

        self._finalize(parse_result)
        return to_return

    def _analyze_anidb(self, filePath):
        # TODO: rewrite this
        return (None, None, None, None)

        if not helpers.set_up_anidb_connection():
            return (None, None, None, None)

        ep = self._build_anidb_episode(sickbeard.ADBA_CONNECTION, filePath)
        try:
            self._log(u"Trying to look up " + str(filePath) + " on anidb", logger.MESSAGE)
            ep.load_data()
        except Exception, e:
            self._log(u"exception msg: " + str(e))
            raise InvalidNameException
        else:
            self.anidbEpisode = ep

        # TODO: clean up this code
        for name in ep.allNames:

            indexer_id = name_cache.retrieveNameFromCache(name)
            if not indexer_id:
                show = helpers.get_show_by_name(name)
                if show:
                    indexer_id = show.indexerid
                else:
                    indexer_id = 0

                if indexer_id:
                    name_cache.addNameToCache(name, indexer_id)
            if indexer_id:
                try:
                    show = helpers.findCertainShow(sickbeard.showList, indexer_id)
                    (season, episodes) = helpers.get_all_episodes_from_absolute_number(show, None, [ep.epno])
                except exceptions.EpisodeNotFoundByAbsoluteNumberException:
                    self._log(str(indexer_id) + ": Indexer object absolute number " + str(
                        ep.epno) + " is incomplete, skipping this episode")
                else:
                    if episodes:
                        self._log(u"Lookup successful from anidb. ", logger.DEBUG)
                        return (show, season, episodes, None)

        if ep.anidb_file_name:
            self._log(u"Lookup successful, using anidb filename " + str(ep.anidb_file_name), logger.DEBUG)
            return self._analyze_name(ep.anidb_file_name)

        raise InvalidNameException

    def _build_anidb_episode(self, connection, filePath):
        ep = adba.Episode(connection, filePath=filePath,
                          paramsF=["quality", "anidb_file_name", "crc32"],
                          paramsA=["epno", "english_name", "short_name_list", "other_name", "synonym_list"])

        return ep

    def _add_to_anidb_mylist(self, filePath):
        if helpers.set_up_anidb_connection():
            if not self.anidbEpisode:  # seems like we could parse the name before, now let's build the anidb object
                self.anidbEpisode = self._build_anidb_episode(sickbeard.ADBA_CONNECTION, filePath)

            self._log(u"Adding the file to the anidb mylist", logger.DEBUG)
            try:
                self.anidbEpisode.add_to_mylist(status=1)  # status = 1 sets the status of the file to "internal HDD"
            except Exception, e:
                self._log(u"exception msg: " + str(e))

    def _find_info(self):
        """
        For a given file try to find the show, season, and episodes.
        """

        show = season = quality = None
        episodes = []

        # try to look up the nzb in history
        attempt_list = [self._history_lookup,

                        # try to analyze the nzb name
                        lambda: self._analyze_name(self.nzb_name),

                        # try to analyze the file name
                        lambda: self._analyze_name(self.file_name),

                        # try to analyze the dir name
                        lambda: self._analyze_name(self.folder_name),

                        # try to analyze the file + dir names together
                        lambda: self._analyze_name(self.file_path),

                        # try to analyze the dir + file name together as one name
                        lambda: self._analyze_name(self.folder_name + u' ' + self.file_name),

                        # try to analyze the file path with the help of aniDB
                        lambda: self._analyze_anidb(self.file_path)
                        ]

        # attempt every possible method to get our info
        for cur_attempt in attempt_list:

            try:
                (cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
            except InvalidNameException, e:
                logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
                continue

            if cur_show:
                show = cur_show

            if cur_quality and not (self.in_history and quality):
                quality = cur_quality

            if cur_season is not None:
                season = cur_season
            if cur_episodes:
                episodes = cur_episodes

            # for air-by-date shows we need to look up the season/episode from the database
            if season == -1 and show and episodes:
                self._log(u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
                          logger.DEBUG)
                airdate = episodes[0].toordinal()
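                # toordinal() converts the parsed datetime.date into the proleptic Gregorian
                # ordinal, which is how airdate is stored in the tv_episodes table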
                myDB = db.DBConnection()
                sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
                                         [show.indexerid, show.indexer, airdate])

                if sql_result:
                    season = int(sql_result[0][0])
                    episodes = [int(sql_result[0][1])]
                else:
                    self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
                        show.indexerid) + u", skipping", logger.DEBUG)
                    # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                    episodes = []
                    continue

            # if there's no season then we can hopefully just use 1 automatically
            elif season is None and show:
                myDB = db.DBConnection()
                numseasonsSQlResult = myDB.select(
                    "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
                    [show.indexerid, show.indexer])
                if int(numseasonsSQlResult[0][0]) == 1 and season is None:
                    self._log(
                        u"Don't have a season number, but this show appears to only have 1 season, setting season number to 1...",
                        logger.DEBUG)
                    season = 1

            if show and season and episodes:
                return (show, season, episodes, quality)

        return (show, season, episodes, quality)

    def _get_ep_obj(self, show, season, episodes):
        """
        Retrieve the TVEpisode object requested.

        show: The show object belonging to the show we want to process
        season: The season of the episode (int)
        episodes: A list of episodes to find (list of ints)

        If the episode(s) can be found then a TVEpisode object with the correct related eps will
        be instantiated and returned. If the episode can't be found then None will be returned.
        """

        root_ep = None
        for cur_episode in episodes:
            self._log(u"Retrieving episode object for " + str(season) + "x" + str(cur_episode), logger.DEBUG)

            # now that we've figured out which episode this file is just load it manually
            try:
                curEp = show.getEpisode(season, cur_episode)
            except exceptions.EpisodeNotFoundException, e:
                self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
                raise exceptions.PostProcessingFailed()

            # associate all the episodes together under a single root episode
            if root_ep is None:
                root_ep = curEp
                root_ep.relatedEps = []
            elif curEp not in root_ep.relatedEps:
                root_ep.relatedEps.append(curEp)

        return root_ep

    def _get_quality(self, ep_obj):
        """
        Determines the quality of the file that is being post processed, first by checking if it is directly
        available in the TVEpisode's status or otherwise by parsing through the data available.

        ep_obj: The TVEpisode object related to the file we are post processing

        Returns: A quality value found in common.Quality
        """

        ep_quality = common.Quality.UNKNOWN

        # if there is a quality available in the status then we don't need to bother guessing from the filename
        if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)  # @UnusedVariable
            if ep_quality != common.Quality.UNKNOWN:
                self._log(
                    u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
                    logger.DEBUG)
                return ep_quality

        # the nzb name is the most reliable if it exists, followed by the folder name and lastly the file name
        name_list = [self.nzb_name, self.folder_name, self.file_name]

        # search all possible names for our new quality, in case the file or dir doesn't have it
        for cur_name in name_list:

            # some stuff might be None at this point still
            if not cur_name:
                continue

            ep_quality = common.Quality.nameQuality(cur_name, ep_obj.show.is_anime)
            self._log(
                u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality],
                logger.DEBUG)

            # if we find a good one then use it
            if ep_quality != common.Quality.UNKNOWN:
                logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[
                    ep_quality] + ", using that", logger.DEBUG)
                return ep_quality

        # try getting the quality from the episode's (snatched) status
        if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER:
            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)  # @UnusedVariable
            if ep_quality != common.Quality.UNKNOWN:
                self._log(u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
                          logger.DEBUG)
                return ep_quality

        # try guessing the quality from the file name
        ep_quality = common.Quality.assumeQuality(self.file_name)
        self._log(
            u"Guessing quality for name " + self.file_name + u", got " + common.Quality.qualityStrings[ep_quality],
            logger.DEBUG)
        if ep_quality != common.Quality.UNKNOWN:
            logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[
                ep_quality] + ", using that", logger.DEBUG)
            return ep_quality

        return ep_quality

    def _run_extra_scripts(self, ep_obj):
        """
        Executes any extra scripts defined in the config.

        ep_obj: The object to use when calling the extra script
        """
        for curScriptName in sickbeard.EXTRA_SCRIPTS:

            # generate a safe command line string to execute the script and provide all the parameters
            script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", curScriptName) if piece.strip()]
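            # splits on spaces while keeping quoted segments intact, so a configured
            # '"/path/with spaces/script.py" extra_arg' yields two pieces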
            script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
            self._log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)

            script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season),
                                       str(ep_obj.episode), str(ep_obj.airdate)]

            # use subprocess to run the command and capture the output
            self._log(u"Executing command " + str(script_cmd))
            try:
                p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
                out, err = p.communicate()  # @UnusedVariable
                self._log(u"Script result: " + str(out), logger.DEBUG)

            except OSError, e:
                self._log(u"Unable to run extra_script: " + ex(e))

            except Exception, e:
                self._log(u"Unable to run extra_script: " + ex(e))

    def _is_priority(self, ep_obj, new_ep_quality):
        """
        Determines if the episode is a priority download or not (if it is expected). Episodes which are expected
        (snatched) or larger than the existing episode are priority, others are not.

        ep_obj: The TVEpisode object in question
        new_ep_quality: The quality of the episode that is being processed

        Returns: True if the episode is priority, False otherwise.
        """

        if self.is_priority:
            return True

        # if SB downloaded this on purpose then this is a priority download
        if self.in_history or ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
            self._log(u"SB snatched this episode so I'm marking it as priority", logger.DEBUG)
            return True

        old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)

        # if the user downloaded it manually and it's higher quality than the existing episode then it's priority
        if new_ep_quality > old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
            self._log(
                u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority",
                logger.DEBUG)
            return True

        # if the user downloaded it manually and it appears to be a PROPER/REPACK then it's priority
        if self.is_proper and new_ep_quality >= old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
            self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority",
                      logger.DEBUG)
            return True

        return False

    def process(self):
        """
        Post-process a given file.
        """

        self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

        if ek.ek(os.path.isdir, self.file_path):
            self._log(u"File " + self.file_path + " seems to be a directory")
            return False

        for ignore_file in self.IGNORED_FILESTRINGS:
            if ignore_file in self.file_path:
                self._log(u"File " + self.file_path + " is an ignored type, skipping")
                return False

        # reset per-file stuff
        self.in_history = False

        # reset the anidb episode object
        self.anidbEpisode = None

        # try to find the file info
        (show, season, episodes, quality) = self._find_info()
        if not show:
            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
                      logger.ERROR)
            raise exceptions.PostProcessingFailed()
        elif season is None or not episodes:
            self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
            self._log(u"Quitting post-processing", logger.DEBUG)
            return False

        # retrieve/create the corresponding TVEpisode objects
        ep_obj = self._get_ep_obj(show, season, episodes)

        # get the quality of the episode we're processing
        if quality:
            self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality],
                      logger.DEBUG)
            new_ep_quality = quality
        else:
            new_ep_quality = self._get_quality(ep_obj)

        logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

        # see if this is a priority download (is it snatched, in history, PROPER, or BEST)
        priority_download = self._is_priority(ep_obj, new_ep_quality)
        self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

        # check for an existing file
        existing_file_status = self._checkForExistingFile(ep_obj.location)

        # if it's not priority then we don't want to replace smaller files in case it was a mistake
        if not priority_download:

            # if there's an existing file that we don't want to replace stop here
            if existing_file_status == PostProcessor.EXISTS_LARGER:
                if self.is_proper:
                    self._log(u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace",
                              logger.DEBUG)
                    return True

                else:
                    self._log(u"File exists and new file is smaller, marking it unsafe to replace", logger.DEBUG)
                    return False

            elif existing_file_status == PostProcessor.EXISTS_SAME:
                self._log(u"File exists and new file is same size, marking it unsafe to replace", logger.DEBUG)
                return False

        # if the file is priority then we're going to replace it even if it exists
        else:
            self._log(
                u"This download is marked a priority download so I'm going to replace an existing file if I find one",
                logger.DEBUG)

        # set the status of the episodes
        #for curEp in [ep_obj] + ep_obj.relatedEps:
        #    curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

        # delete the existing file (and company)
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            try:
                self._delete(cur_ep.location, associated_files=True)
                # clean up any left over folders
                if cur_ep.location:
                    helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
                                                 keep_dir=ep_obj.show._location)
            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to delete the existing files")

        # if the show directory doesn't exist then make it if allowed
        if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
            self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
            try:
                ek.ek(os.mkdir, ep_obj.show._location)
                # do the library update for synoindex
                notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

        # get metadata for the show (but not episode because it hasn't been fully processed)
        ep_obj.show.writeMetadata(True)

        # update the ep info before we rename so the quality & release name go into the name properly
        sql_l = []
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_release_name = None

                # use the best possible representation of the release name
                if self.good_results[self.NZB_NAME]:
                    cur_release_name = self.nzb_name
                    if cur_release_name.lower().endswith('.nzb'):
                        cur_release_name = cur_release_name.rpartition('.')[0]
                elif self.good_results[self.FOLDER_NAME]:
                    cur_release_name = self.folder_name
                elif self.good_results[self.FILE_NAME]:
                    cur_release_name = self.file_name
                    # take the extension off the filename, it's not needed
                    if '.' in self.file_name:
                        cur_release_name = self.file_name.rpartition('.')[0]

                if cur_release_name:
                    self._log(u"Found release name " + cur_release_name, logger.DEBUG)
                    cur_ep.release_name = cur_release_name
                else:
                    logger.log(u"good results: " + repr(self.good_results), logger.DEBUG)

                if ep_obj.status in common.Quality.SNATCHED_BEST:
                    cur_ep.status = common.Quality.compositeStatus(common.ARCHIVED, new_ep_quality)
                else:
                    cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

                cur_ep.subtitles = []
                cur_ep.subtitles_searchcount = 0
                cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'

                cur_ep.is_proper = self.is_proper

                sql_l.append(cur_ep.get_sql())

        # just want to keep this consistent for failed handling right now
        releaseName = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
        if releaseName is not None:
            failed_history.logSuccess(releaseName)
        else:
            self._log(u"Couldn't find release in snatch history", logger.WARNING)

        if sql_l:
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

        # find the destination folder
        try:
            proper_path = ep_obj.proper_path()
            proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)

            dest_path = ek.ek(os.path.dirname, proper_absolute_path)
        except exceptions.ShowDirNotFoundException:
            raise exceptions.PostProcessingFailed(
                u"Unable to post-process an episode if the show dir doesn't exist, quitting")

        self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

        # create any folders we need
        helpers.make_dirs(dest_path)

        # figure out the base name of the resulting episode file
        if sickbeard.RENAME_EPISODES:
            orig_extension = self.file_name.rpartition('.')[-1]
            new_base_name = ek.ek(os.path.basename, proper_path)
            new_file_name = new_base_name + '.' + orig_extension

        else:
            # if we're not renaming then there's no new base name, we'll just use the existing name
            new_base_name = None
            new_file_name = self.file_name

        # add to anidb
        if ep_obj.show.is_anime and sickbeard.ANIDB_USE_MYLIST:
            self._add_to_anidb_mylist(self.file_path)

        try:
            # move the episode and associated files to the show dir
            if self.process_method == "copy":
                self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                           sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            elif self.process_method == "move":
                self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                           sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            elif self.process_method == "hardlink":
                self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                               sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            elif self.process_method == "symlink":
                self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                                     sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            else:
                logger.log(u"Unknown process method: " + str(self.process_method), logger.ERROR)
                raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

        # download subtitles
        if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
            for cur_ep in [ep_obj]:
                with cur_ep.lock:
                    cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
                    cur_ep.downloadSubtitles(force=True)

        # put the new location in the database
        sql_l = []
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)

                sql_l.append(cur_ep.get_sql())

                # set the file's modify stamp to the show's airdate
                if sickbeard.AIRDATE_EPISODES:
                    ep_obj.show.airdateModifyStamp(cur_ep)

        # generate nfo/tbn
        ep_obj.createMetaFiles()
        sql_l.append(ep_obj.get_sql())

        if sql_l:
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

        # log it to history
        history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

        # send notifications
        notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
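        # the naming pattern expands to "<show name> - <season>x<padded episode number> - <episode name> - <quality name>"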

        # do the library update for XBMC
        notifiers.xbmc_notifier.update_library(ep_obj.show.name)

        # do the library update for Plex
        notifiers.plex_notifier.update_library()

        # do the library update for NMJ
        # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

        # do the library update for Synology Indexer
        notifiers.synoindex_notifier.addFile(ep_obj.location)

        # do the library update for pyTivo
        notifiers.pytivo_notifier.update_library(ep_obj)

        # do the library update for Trakt
        notifiers.trakt_notifier.update_library(ep_obj)

        self._run_extra_scripts(ep_obj)

        return True