2014-03-10 05:18:05 +00:00
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
2014-11-12 16:43:14 +00:00
# This file is part of SickGear.
2014-03-10 05:18:05 +00:00
#
2014-11-12 16:43:14 +00:00
# SickGear is free software: you can redistribute it and/or modify
2014-03-10 05:18:05 +00:00
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
2014-11-12 16:43:14 +00:00
# SickGear is distributed in the hope that it will be useful,
2014-03-10 05:18:05 +00:00
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
2014-07-19 23:08:50 +00:00
# GNU General Public License for more details.
2014-03-10 05:18:05 +00:00
#
# You should have received a copy of the GNU General Public License
2014-11-12 16:43:14 +00:00
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
2014-03-10 05:18:05 +00:00
from __future__ import with_statement
import glob
import os
import re
import subprocess
import stat
2016-02-01 22:51:51 +00:00
import threading
2014-03-10 05:18:05 +00:00
import sickbeard
from sickbeard import db
from sickbeard import common
from sickbeard import exceptions
from sickbeard import helpers
from sickbeard import history
from sickbeard import logger
from sickbeard import notifiers
from sickbeard import show_name_helpers
from sickbeard import failed_history
from sickbeard import encodingKludge as ek
from sickbeard . exceptions import ex
2014-07-06 13:11:04 +00:00
from sickbeard . name_parser . parser import NameParser , InvalidNameException , InvalidShowException
2014-03-10 05:18:05 +00:00
2014-05-26 06:29:22 +00:00
from lib import adba
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
try :
from lib . send2trash import send2trash
except ImportError :
pass
2014-07-19 23:08:50 +00:00
2014-03-10 05:18:05 +00:00
class PostProcessor(object):
    """
    A class which will process a media file according to the post processing settings in the config.
    """

    # result codes returned by _check_for_existing_file(), describing an already
    # existing file on disk relative to the file being post processed
    EXISTS_LARGER = 1    # existing file is larger than the new one
    EXISTS_SAME = 2      # existing file is the same size as the new one
    EXISTS_SMALLER = 3   # existing file is smaller than the new one
    DOESNT_EXIST = 4     # there is no existing file

    # path fragments identifying OS metadata entries that should never be processed
    IGNORED_FILESTRINGS = ['/.AppleDouble/', '.DS_Store']
2016-09-04 20:00:44 +00:00
def __init__ ( self , file_path , nzb_name = None , process_method = None , force_replace = None , use_trash = None , webhandler = None , showObj = None ) :
2014-03-10 05:18:05 +00:00
"""
Creates a new post processor with the given file path and optionally an NZB name .
file_path : The path to the file to be processed
nzb_name : The name of the NZB which resulted in this file being downloaded ( optional )
"""
# absolute path to the folder that is being processed
self . folder_path = ek . ek ( os . path . dirname , ek . ek ( os . path . abspath , file_path ) )
# full path to file
self . file_path = file_path
# file name only
2014-07-21 13:29:07 +00:00
self . file_name = ek . ek ( os . path . basename , file_path )
2014-03-10 05:18:05 +00:00
# the name of the folder only
2014-07-21 13:29:07 +00:00
self . folder_name = ek . ek ( os . path . basename , self . folder_path )
2014-03-10 05:18:05 +00:00
# name of the NZB that resulted in this folder
2014-07-21 13:29:07 +00:00
self . nzb_name = nzb_name
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
self . force_replace = force_replace
self . use_trash = use_trash
2014-03-10 05:18:05 +00:00
2016-02-01 22:51:51 +00:00
self . webhandler = webhandler
2016-09-04 20:00:44 +00:00
self . showObj = showObj
2014-03-10 05:18:05 +00:00
self . in_history = False
2014-07-19 23:08:50 +00:00
2014-03-10 05:18:05 +00:00
self . release_group = None
2014-07-19 23:08:50 +00:00
self . release_name = None
2014-03-10 05:18:05 +00:00
self . is_proper = False
self . log = ' '
2015-05-08 02:46:54 +00:00
self . process_method = process_method if process_method else sickbeard . PROCESS_METHOD
self . anime_version = None # anime equivalent of is_proper
self . anidbEpisode = None
2014-03-10 05:18:05 +00:00
def _log ( self , message , level = logger . MESSAGE ) :
"""
A wrapper for the internal logger which also keeps track of messages and saves them to a string for later .
message : The string to log ( unicode )
level : The log level to use ( optional )
"""
2015-05-08 02:46:54 +00:00
logger_msg = re . sub ( r ' (?i)<br(?:[ \ s/]+)> \ .* ' , ' ' , message )
logger_msg = re . sub ( ' (?i)<a[^>]+>([^<]+)<[/]a> ' , r ' \ 1 ' , logger_msg )
logger . log ( u ' %s ' % logger_msg , level )
2014-03-10 05:18:05 +00:00
self . log + = message + ' \n '
2015-05-08 02:46:54 +00:00
def _check_for_existing_file ( self , existing_file ) :
2014-03-10 05:18:05 +00:00
"""
Checks if a file exists already and if it does whether it ' s bigger or smaller than
the file we are post processing
existing_file : The file to compare to
Returns :
DOESNT_EXIST if the file doesn ' t exist
EXISTS_LARGER if the file exists and is larger than the file we are post processing
EXISTS_SMALLER if the file exists and is smaller than the file we are post processing
EXISTS_SAME if the file exists and is the same size as the file we are post processing
"""
if not existing_file :
2015-05-08 02:46:54 +00:00
self . _log ( u ' There is no existing file ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return PostProcessor . DOESNT_EXIST
# if the new file exists, return the appropriate code depending on the size
if ek . ek ( os . path . isfile , existing_file ) :
2015-05-08 02:46:54 +00:00
new_file = u ' New file %s <br />.. is ' % self . file_path
if ek . ek ( os . path . getsize , self . file_path ) == ek . ek ( os . path . getsize , existing_file ) :
self . _log ( u ' %s the same size as %s ' % ( new_file , existing_file ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return PostProcessor . EXISTS_SAME
2015-05-08 02:46:54 +00:00
elif ek . ek ( os . path . getsize , self . file_path ) < ek . ek ( os . path . getsize , existing_file ) :
self . _log ( u ' %s smaller than %s ' % ( new_file , existing_file ) , logger . DEBUG )
return PostProcessor . EXISTS_LARGER
2014-03-10 05:18:05 +00:00
else :
2015-05-08 02:46:54 +00:00
self . _log ( u ' %s larger than %s ' % ( new_file , existing_file ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return PostProcessor . EXISTS_SMALLER
else :
2015-05-08 02:46:54 +00:00
self . _log ( u ' File doesn \' t exist %s ' % existing_file ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
return PostProcessor . DOESNT_EXIST
2015-05-08 02:46:54 +00:00
@staticmethod
def list_associated_files ( file_path , base_name_only = False , subtitles_only = False ) :
2014-03-10 05:18:05 +00:00
"""
For a given file path searches for files with the same name but different extension and returns their absolute paths
file_path : The file to check for associated files
base_name_only : False add extra ' . ' ( conservative search ) to file_path minus extension
Returns : A list containing all files which are associated to the given file
"""
if not file_path :
return [ ]
file_path_list = [ ]
2016-06-01 14:25:38 +00:00
tmp_base = base_name = file_path . rpartition ( ' . ' ) [ 0 ]
2014-03-10 05:18:05 +00:00
if not base_name_only :
2016-06-01 14:25:38 +00:00
tmp_base + = ' . '
2014-03-10 05:18:05 +00:00
# don't strip it all and use cwd by accident
2016-06-01 14:25:38 +00:00
if not tmp_base :
2014-03-10 05:18:05 +00:00
return [ ]
# don't confuse glob with chars we didn't mean to use
base_name = re . sub ( r ' [ \ [ \ ] \ * \ ?] ' , r ' [ \ g<0>] ' , base_name )
2016-06-01 14:25:38 +00:00
for meta_ext in [ ' ' , ' -thumb ' , ' .ext ' , ' .ext.cover ' , ' .metathumb ' ] :
for associated_file_path in ek . ek ( glob . glob , ' %s %s .* ' % ( base_name , meta_ext ) ) :
# only add associated to list
if associated_file_path == file_path :
continue
# only list it if the only non-shared part is the extension or if it is a subtitle
if subtitles_only and not associated_file_path [ len ( associated_file_path ) - 3 : ] in common . subtitleExtensions :
continue
# Exclude .rar files from associated list
if re . search ( ' (^.+ \ .(rar|r \ d+)$) ' , associated_file_path ) :
continue
if ek . ek ( os . path . isfile , associated_file_path ) :
file_path_list . append ( associated_file_path )
2014-03-10 05:18:05 +00:00
return file_path_list
def _delete ( self , file_path , associated_files = False ) :
"""
Deletes the file and optionally all associated files .
file_path : The file to delete
associated_files : True to delete all files which differ only by extension , False to leave them
"""
if not file_path :
return
# figure out which files we want to delete
file_list = [ file_path ]
if associated_files :
file_list = file_list + self . list_associated_files ( file_path )
if not file_list :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Not deleting anything because there are no files associated with %s ' % file_path , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return
# delete the file and any other files which we want to delete
for cur_file in file_list :
if ek . ek ( os . path . isfile , cur_file ) :
2014-07-19 23:08:50 +00:00
# check first the read-only attribute
2014-03-10 05:18:05 +00:00
file_attribute = ek . ek ( os . stat , cur_file ) [ 0 ]
2015-05-08 02:46:54 +00:00
if not file_attribute & stat . S_IWRITE :
2014-03-10 05:18:05 +00:00
# File is read-only, so make it writeable
try :
2014-03-25 05:57:24 +00:00
ek . ek ( os . chmod , cur_file , stat . S_IWRITE )
2015-05-08 02:46:54 +00:00
self . _log ( u ' Changed read only permissions to writeable to delete file %s ' % cur_file , logger . DEBUG )
2014-03-10 05:18:05 +00:00
except :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Cannot change permissions to writeable to delete file: %s ' % cur_file , logger . WARNING )
try :
if self . use_trash :
ek . ek ( send2trash , cur_file )
else :
ek . ek ( os . remove , cur_file )
2015-06-08 12:47:01 +00:00
except OSError as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to delete file %s : %s ' % ( cur_file , str ( e . strerror ) ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
if True is not ek . ek ( os . path . isfile , cur_file ) :
self . _log ( u ' Deleted file ' + cur_file , logger . DEBUG )
2014-08-22 06:18:35 +00:00
2014-03-10 05:18:05 +00:00
# do the library update for synoindex
notifiers . synoindex_notifier . deleteFile ( cur_file )
2014-03-25 05:57:24 +00:00
    def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None,
                                 subtitles=False, action_tmpl=None):
        """
        Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
        and optionally move associated files too.

        file_path: The full path of the media file to act on
        new_path: Destination path where we want to move/copy the file to
        new_base_name: The base filename (no extension) to use during the copy. Use None to keep the same name.
        associated_files: Boolean, whether we should copy similarly-named files too
        action: function that takes an old path and new path and does an operation with them (move/copy)
        subtitles: Boolean, whether associated subtitle files should be included (only used when
                   associated_files is False)
        action_tmpl: optional template string passed through to `action` for log formatting
        """
        if not action:
            self._log(u'Must provide an action for the combined file operation', logger.ERROR)
            return

        # build the list of files to operate on: the media file itself plus, optionally,
        # every associated file (or only the subtitle subset)
        file_list = [file_path]
        if associated_files:
            file_list = file_list + self.list_associated_files(file_path)
        elif subtitles:
            file_list = file_list + self.list_associated_files(file_path, subtitles_only=True)

        if not file_list:
            self._log(u'Not moving anything because there are no files associated with %s' % file_path, logger.DEBUG)
            return

        # create base name with file_path (media_file without .extension)
        old_base_name = file_path.rpartition('.')[0]
        old_base_name_length = len(old_base_name)

        # deal with all files
        for cur_file_path in file_list:

            cur_file_name = ek.ek(os.path.basename, cur_file_path)

            # get the extension without .
            # NOTE: associated files share the media file's base name, so everything past
            # that prefix (e.g. 'nfo', 'en.srt') is treated as the extension
            cur_extension = cur_file_path[old_base_name_length + 1:]

            # replace .nfo with .nfo-orig to avoid conflicts
            if 'nfo' == cur_extension and True is sickbeard.NFO_RENAME:
                cur_extension = 'nfo-orig'

            # check if file have subtitles language
            # e.g. for 'en.srt' keep the language part so the renamed file stays 'name.en.srt'
            if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions:
                cur_lang = os.path.splitext(cur_extension)[0]
                if cur_lang in sickbeard.SUBTITLES_LANGUAGES:
                    cur_extension = cur_lang + os.path.splitext(cur_extension)[1]

            # If new base name then convert name
            if new_base_name:
                new_file_name = new_base_name + '.' + cur_extension
            # if we're not renaming we still want to change extensions sometimes
            else:
                new_file_name = helpers.replaceExtension(cur_file_name, cur_extension)

            # subtitles may be routed into a dedicated configured subfolder
            if sickbeard.SUBTITLES_DIR and cur_extension in common.subtitleExtensions:
                subs_new_path = ek.ek(os.path.join, new_path, sickbeard.SUBTITLES_DIR)
                dir_exists = helpers.makeDir(subs_new_path)
                if not dir_exists:
                    logger.log(u'Unable to create subtitles folder ' + subs_new_path, logger.ERROR)
                else:
                    helpers.chmodAsParent(subs_new_path)
                new_file_path = ek.ek(os.path.join, subs_new_path, new_file_name)
            else:
                new_file_path = ek.ek(os.path.join, new_path, new_file_name)

            # perform the actual move/copy/link via the supplied callable
            if None is action_tmpl:
                action(cur_file_path, new_file_path)
            else:
                action(cur_file_path, new_file_path, action_tmpl)
def _move ( self , file_path , new_path , new_base_name , associated_files = False , subtitles = False , action_tmpl = None ) :
2014-03-10 05:18:05 +00:00
"""
file_path : The full path of the media file to move
new_path : Destination path where we want to move the file to
new_base_name : The base filename ( no extension ) to use during the move . Use None to keep the same name .
associated_files : Boolean , whether we should move similarly - named files too
"""
2015-05-08 02:46:54 +00:00
def _int_move ( cur_file_path , new_file_path , success_tmpl = u ' %s to %s ' ) :
2014-03-10 05:18:05 +00:00
try :
helpers . moveFile ( cur_file_path , new_file_path )
helpers . chmodAsParent ( new_file_path )
2015-05-08 02:46:54 +00:00
self . _log ( u ' Moved file from ' + ( success_tmpl % ( cur_file_path , new_file_path ) ) , logger . DEBUG )
2015-06-08 12:47:01 +00:00
except ( IOError , OSError ) as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to move file %s <br />.. %s ' % ( success_tmpl % ( cur_file_path , new_file_path ) , str ( e ) ) , logger . ERROR )
2014-03-10 05:18:05 +00:00
raise e
2015-05-08 02:46:54 +00:00
self . _combined_file_operation ( file_path , new_path , new_base_name , associated_files , _int_move ,
subtitles = subtitles , action_tmpl = action_tmpl )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
def _copy ( self , file_path , new_path , new_base_name , associated_files = False , subtitles = False , action_tmpl = None ) :
2014-03-10 05:18:05 +00:00
"""
file_path : The full path of the media file to copy
new_path : Destination path where we want to copy the file to
new_base_name : The base filename ( no extension ) to use during the copy . Use None to keep the same name .
associated_files : Boolean , whether we should copy similarly - named files too
"""
2015-05-08 02:46:54 +00:00
def _int_copy ( cur_file_path , new_file_path , success_tmpl = u ' %s to %s ' ) :
2014-03-10 05:18:05 +00:00
try :
helpers . copyFile ( cur_file_path , new_file_path )
helpers . chmodAsParent ( new_file_path )
2015-05-08 02:46:54 +00:00
self . _log ( u ' Copied file from ' + ( success_tmpl % ( cur_file_path , new_file_path ) ) , logger . DEBUG )
2015-06-08 12:47:01 +00:00
except ( IOError , OSError ) as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to copy %s <br />.. %s ' % ( success_tmpl % ( cur_file_path , new_file_path ) , str ( e ) ) , logger . ERROR )
2014-03-10 05:18:05 +00:00
raise e
2015-05-08 02:46:54 +00:00
self . _combined_file_operation ( file_path , new_path , new_base_name , associated_files , _int_copy ,
subtitles = subtitles , action_tmpl = action_tmpl )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
def _hardlink ( self , file_path , new_path , new_base_name , associated_files = False , action_tmpl = None ) :
2014-03-10 05:18:05 +00:00
"""
file_path : The full path of the media file to move
new_path : Destination path where we want to create a hard linked file
new_base_name : The base filename ( no extension ) to use during the link . Use None to keep the same name .
associated_files : Boolean , whether we should move similarly - named files too
"""
2015-05-08 02:46:54 +00:00
def _int_hard_link ( cur_file_path , new_file_path , success_tmpl = u ' %s to %s ' ) :
2014-03-10 05:18:05 +00:00
try :
helpers . hardlinkFile ( cur_file_path , new_file_path )
helpers . chmodAsParent ( new_file_path )
2015-05-08 02:46:54 +00:00
self . _log ( u ' Hard linked file from ' + ( success_tmpl % ( cur_file_path , new_file_path ) ) , logger . DEBUG )
2015-06-08 12:47:01 +00:00
except ( IOError , OSError ) as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to link file %s <br />.. %s ' % ( success_tmpl % ( cur_file_path , new_file_path ) , str ( e ) ) , logger . ERROR )
2014-03-10 05:18:05 +00:00
raise e
2014-03-25 05:57:24 +00:00
2015-05-08 02:46:54 +00:00
self . _combined_file_operation ( file_path , new_path , new_base_name , associated_files , _int_hard_link ,
action_tmpl = action_tmpl )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
def _move_and_symlink ( self , file_path , new_path , new_base_name , associated_files = False , action_tmpl = None ) :
2014-03-10 05:18:05 +00:00
"""
file_path : The full path of the media file to move
new_path : Destination path where we want to move the file to create a symbolic link to
new_base_name : The base filename ( no extension ) to use during the link . Use None to keep the same name .
associated_files : Boolean , whether we should move similarly - named files too
"""
2015-05-08 02:46:54 +00:00
def _int_move_and_sym_link ( cur_file_path , new_file_path , success_tmpl = u ' %s to %s ' ) :
2014-03-10 05:18:05 +00:00
try :
helpers . moveAndSymlinkFile ( cur_file_path , new_file_path )
helpers . chmodAsParent ( new_file_path )
2015-05-08 02:46:54 +00:00
self . _log ( u ' Moved then symbolic linked file from ' + ( success_tmpl % ( cur_file_path , new_file_path ) ) ,
logger . DEBUG )
2015-06-08 12:47:01 +00:00
except ( IOError , OSError ) as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to link file %s <br />.. %s ' % ( success_tmpl % ( cur_file_path , new_file_path ) , str ( e ) ) , logger . ERROR )
2014-03-10 05:18:05 +00:00
raise e
2014-03-25 05:57:24 +00:00
2015-05-08 02:46:54 +00:00
self . _combined_file_operation ( file_path , new_path , new_base_name , associated_files , _int_move_and_sym_link ,
action_tmpl = action_tmpl )
2014-03-10 05:18:05 +00:00
def _history_lookup ( self ) :
"""
Look up the NZB name in the history and see if it contains a record for self . nzb_name
2015-05-08 02:46:54 +00:00
Returns a ( indexer_id , season , [ ] , quality ) tuple . indexer_id , season , quality may be None and episodes may be [ ] .
2014-03-10 05:18:05 +00:00
"""
2015-05-08 02:46:54 +00:00
to_return = ( None , None , [ ] , None )
self . in_history = False
2014-03-10 05:18:05 +00:00
# if we don't have either of these then there's nothing to use to search the history for anyway
if not self . nzb_name and not self . folder_name :
return to_return
# make a list of possible names to use in the search
names = [ ]
if self . nzb_name :
names . append ( self . nzb_name )
if ' . ' in self . nzb_name :
2015-05-08 02:46:54 +00:00
names . append ( self . nzb_name . rpartition ( ' . ' ) [ 0 ] )
2014-03-10 05:18:05 +00:00
if self . folder_name :
names . append ( self . folder_name )
2015-05-08 02:46:54 +00:00
my_db = db . DBConnection ( )
2014-03-10 05:18:05 +00:00
# search the database for a possible match and return immediately if we find one
2014-06-21 22:46:59 +00:00
for curName in names :
2015-05-08 02:46:54 +00:00
# The underscore character ( _ ) represents a single character to match a pattern from a word or string
search_name = re . sub ( ' [ \ . \ -] ' , ' _ ' , curName )
sql_results = my_db . select ( ' SELECT * FROM history WHERE resource LIKE ? ' , [ search_name ] )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
if 0 == len ( sql_results ) :
2014-06-21 22:46:59 +00:00
continue
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
indexer_id = int ( sql_results [ 0 ] [ ' showid ' ] )
season = int ( sql_results [ 0 ] [ ' season ' ] )
quality = int ( sql_results [ 0 ] [ ' quality ' ] )
self . anime_version = int ( sql_results [ 0 ] [ ' version ' ] )
2014-05-02 08:47:02 +00:00
2015-05-08 02:46:54 +00:00
if common . Quality . UNKNOWN == quality :
2014-06-21 22:46:59 +00:00
quality = None
2014-03-10 05:18:05 +00:00
2014-06-21 22:46:59 +00:00
self . in_history = True
2015-05-08 02:46:54 +00:00
show = helpers . findCertainShow ( sickbeard . showList , indexer_id )
to_return = ( show , season , [ ] , quality )
2016-04-24 02:54:24 +00:00
if not show :
self . _log ( u ' Unknown show, check availability on ShowList page ' , logger . DEBUG )
break
2015-05-08 02:46:54 +00:00
self . _log ( u ' Found a match in history for %s ' % show . name , logger . DEBUG )
break
2014-03-10 05:18:05 +00:00
return to_return
2015-05-08 02:46:54 +00:00
def _analyze_name ( self , name , resource = True ) :
2014-03-10 05:18:05 +00:00
"""
Takes a name and tries to figure out a show , season , and episode from it .
name : A string which we want to analyze to determine show info from ( unicode )
Returns a ( indexer_id , season , [ episodes ] ) tuple . The first two may be None and episodes may be [ ]
if none were found .
"""
2015-05-08 02:46:54 +00:00
logger . log ( u ' Analyzing name ' + repr ( name ) )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
to_return = ( None , None , [ ] , None )
2014-03-10 05:18:05 +00:00
if not name :
return to_return
# parse the name to break it into show name, season, and episode
2016-09-04 20:00:44 +00:00
np = NameParser ( resource , try_scene_exceptions = True , convert = True , showObj = self . showObj )
2014-07-06 22:06:19 +00:00
parse_result = np . parse ( name )
2015-09-25 14:33:25 +00:00
self . _log ( u ' Parsed %s <br />.. from %s ' % ( str ( parse_result ) . decode ( ' utf-8 ' , ' xmlcharrefreplace ' ) , name ) , logger . DEBUG )
2014-05-30 08:16:12 +00:00
2014-07-15 02:00:53 +00:00
if parse_result . is_air_by_date :
2014-03-10 05:18:05 +00:00
season = - 1
episodes = [ parse_result . air_date ]
else :
season = parse_result . season_number
episodes = parse_result . episode_numbers
2015-05-08 02:46:54 +00:00
# show object
show = parse_result . show
to_return = ( show , season , episodes , parse_result . quality )
2014-03-10 05:18:05 +00:00
2014-05-02 08:47:02 +00:00
self . _finalize ( parse_result )
2014-03-10 05:18:05 +00:00
return to_return
2015-05-08 02:46:54 +00:00
def _finalize ( self , parse_result ) :
2014-05-26 06:29:22 +00:00
2015-05-08 02:46:54 +00:00
self . release_group = parse_result . release_group
2014-05-26 06:29:22 +00:00
2015-05-08 02:46:54 +00:00
# remember whether it's a proper
if parse_result . extra_info :
self . is_proper = None is not re . search ( ' (^|[ \ . _-])(proper|repack)([ \ . _-]|$) ' , parse_result . extra_info , re . I )
2014-05-26 06:29:22 +00:00
2015-05-08 02:46:54 +00:00
# if the result is complete then set release name
if parse_result . series_name and \
( ( None is not parse_result . season_number and parse_result . episode_numbers ) or parse_result . air_date ) \
and parse_result . release_group :
if not self . release_name :
self . release_name = helpers . remove_extension ( ek . ek ( os . path . basename , parse_result . original_name ) )
else :
logger . log ( u ' Parse result not sufficient (all following have to be set). will not save release name ' , logger . DEBUG )
logger . log ( u ' Parse result(series_name): ' + str ( parse_result . series_name ) , logger . DEBUG )
logger . log ( u ' Parse result(season_number): ' + str ( parse_result . season_number ) , logger . DEBUG )
logger . log ( u ' Parse result(episode_numbers): ' + str ( parse_result . episode_numbers ) , logger . DEBUG )
logger . log ( u ' or Parse result(air_date): ' + str ( parse_result . air_date ) , logger . DEBUG )
logger . log ( u ' Parse result(release_group): ' + str ( parse_result . release_group ) , logger . DEBUG )
2014-05-26 06:29:22 +00:00
2014-03-10 05:18:05 +00:00
    def _find_info(self):
        """
        For a given file try to find the showid, season, and episode.

        Tries a sequence of strategies (history lookup, then parsing the nzb/file/folder
        names) and merges their partial results; stops early once show, season and
        episodes are all known.

        Returns a (show, season, episodes, quality) tuple; any element may be
        None/[] if it could not be determined.
        """
        show = season = quality = None
        episodes = []

        # try to look up the nzb in history
        attempt_list = [self._history_lookup,
                        # try to analyze the nzb name
                        lambda: self._analyze_name(self.nzb_name),
                        # try to analyze the file name
                        lambda: self._analyze_name(self.file_name),
                        # try to analyze the dir name
                        lambda: self._analyze_name(self.folder_name),
                        # try to analyze the file + dir names together
                        lambda: self._analyze_name(self.file_path),
                        # try to analyze the dir + file name together as one name
                        lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)]

        # attempt every possible method to get our info
        for cur_attempt in attempt_list:

            try:
                (cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
            except (InvalidNameException, InvalidShowException) as e:
                logger.log(u'Unable to parse, skipping: ' + ex(e), logger.DEBUG)
                continue

            if not cur_show:
                continue
            # if we already did a successful history lookup then keep that show value
            show = cur_show

            # quality from history wins over quality parsed from a name
            if cur_quality and not (self.in_history and quality):
                quality = cur_quality

            if None is not cur_season:
                season = cur_season

            if cur_episodes:
                episodes = cur_episodes

            # for air-by-date shows we need to look up the season/episode from database
            # (season == -1 is the marker _analyze_name uses for air-by-date results)
            if -1 == season and show and episodes:
                self._log(
                    u'Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode',
                    logger.DEBUG)
                airdate = episodes[0].toordinal()
                my_db = db.DBConnection()
                sql_result = my_db.select(
                    'SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?',
                    [show.indexerid, show.indexer, airdate])

                if sql_result:
                    season = int(sql_result[0][0])
                    episodes = [int(sql_result[0][1])]
                else:
                    self._log(u'Unable to find episode with date ' + str(episodes[0]) + u' for show ' + str(
                        show.indexerid) + u', skipping', logger.DEBUG)
                    # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                    episodes = []
                    continue

            # if there's no season then we can hopefully just use 1 automatically
            elif None is season and show:
                my_db = db.DBConnection()
                num_seasons_sql_result = my_db.select(
                    'SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0',
                    [show.indexerid, show.indexer])
                if 1 == int(num_seasons_sql_result[0][0]) and None is season:
                    self._log(
                        u'No season number found, but this show appears to only have 1 season, setting season number to 1...',
                        logger.DEBUG)
                    season = 1

            # everything known - no need to try the remaining strategies
            if show and season and episodes:
                break

        return show, season, episodes, quality
def _get_ep_obj ( self , show , season , episodes ) :
2014-03-10 05:18:05 +00:00
"""
Retrieve the TVEpisode object requested .
2014-05-26 10:42:34 +00:00
show : The show object belonging to the show we want to process
2014-03-10 05:18:05 +00:00
season : The season of the episode ( int )
episodes : A list of episodes to find ( list of ints )
If the episode ( s ) can be found then a TVEpisode object with the correct related eps will
be instantiated and returned . If the episode can ' t be found then None will be returned.
"""
root_ep = None
for cur_episode in episodes :
2015-05-08 02:46:54 +00:00
episode = int ( cur_episode )
self . _log ( u ' Retrieving episode object for %s x %s ' % ( str ( season ) , str ( episode ) ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# now that we've figured out which episode this file is just load it manually
try :
2015-05-08 02:46:54 +00:00
cur_ep = show . getEpisode ( season , episode )
2015-06-08 12:47:01 +00:00
except exceptions . EpisodeNotFoundException as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to create episode: ' + ex ( e ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
raise exceptions . PostProcessingFailed ( )
2014-03-25 05:57:24 +00:00
2014-03-10 05:18:05 +00:00
# associate all the episodes together under a single root episode
2015-05-08 02:46:54 +00:00
if None is root_ep :
root_ep = cur_ep
2014-03-10 05:18:05 +00:00
root_ep . relatedEps = [ ]
2015-05-08 02:46:54 +00:00
elif cur_ep not in root_ep . relatedEps :
root_ep . relatedEps . append ( cur_ep )
2014-03-10 05:18:05 +00:00
return root_ep
def _get_quality ( self , ep_obj ) :
"""
Determines the quality of the file that is being post processed , first by checking if it is directly
available in the TVEpisode ' s status or otherwise by parsing through the data available.
ep_obj : The TVEpisode object related to the file we are post processing
Returns : A quality value found in common . Quality
"""
# if there is a quality available in the status then we don't need to bother guessing from the filename
2014-03-19 23:33:49 +00:00
if ep_obj . status in common . Quality . SNATCHED + common . Quality . SNATCHED_PROPER + common . Quality . SNATCHED_BEST :
2015-05-08 02:46:54 +00:00
old_status , ep_quality = common . Quality . splitCompositeStatus ( ep_obj . status ) # @UnusedVariable
if common . Quality . UNKNOWN != ep_quality :
2014-03-25 05:57:24 +00:00
self . _log (
2015-05-08 02:46:54 +00:00
u ' Using " %s " quality from the old status ' % common . Quality . qualityStrings [ ep_quality ] ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
return ep_quality
# search all possible names for our new quality, in case the file or dir doesn't have it
2015-05-08 02:46:54 +00:00
# nzb name is the most reliable if it exists, followed by folder name and lastly file name
for thing , cur_name in { ' nzb name ' : self . nzb_name , ' folder name ' : self . folder_name , ' file name ' : self . file_name } . items ( ) :
2014-03-10 05:18:05 +00:00
# some stuff might be None at this point still
if not cur_name :
continue
2014-05-26 06:29:22 +00:00
ep_quality = common . Quality . nameQuality ( cur_name , ep_obj . show . is_anime )
2015-08-21 02:32:27 +00:00
quality_log = u ' " %s " quality parsed from the %s %s ' % ( common . Quality . qualityStrings [ ep_quality ] , thing , cur_name )
2014-03-10 05:18:05 +00:00
# if we find a good one then use it
2015-05-08 02:46:54 +00:00
if common . Quality . UNKNOWN != ep_quality :
self . _log ( u ' Using ' + quality_log , logger . DEBUG )
2014-05-02 22:37:41 +00:00
return ep_quality
2015-05-08 02:46:54 +00:00
else :
self . _log ( u ' Found ' + quality_log , logger . DEBUG )
2014-05-02 22:37:41 +00:00
2015-08-21 02:32:27 +00:00
ep_quality = common . Quality . fileQuality ( self . file_path )
if common . Quality . UNKNOWN != ep_quality :
self . _log ( u ' Using " %s " quality parsed from the metadata file content of %s '
% ( common . Quality . qualityStrings [ ep_quality ] , self . file_name ) , logger . DEBUG )
return ep_quality
2014-05-02 22:37:41 +00:00
# Try guessing quality from the file name
2014-03-10 05:18:05 +00:00
ep_quality = common . Quality . assumeQuality ( self . file_name )
2015-05-08 02:46:54 +00:00
self . _log ( u ' Using guessed " %s " quality from the file name %s '
% ( common . Quality . qualityStrings [ ep_quality ] , self . file_name ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return ep_quality
def _run_extra_scripts ( self , ep_obj ) :
"""
Executes any extra scripts defined in the config .
ep_obj : The object to use when calling the extra script
"""
for curScriptName in sickbeard . EXTRA_SCRIPTS :
# generate a safe command line string to execute the script and provide all the parameters
script_cmd = [ piece for piece in re . split ( " ( | \\ \" .*? \\ \" | ' .*? ' ) " , curScriptName ) if piece . strip ( ) ]
script_cmd [ 0 ] = ek . ek ( os . path . abspath , script_cmd [ 0 ] )
2015-05-08 02:46:54 +00:00
self . _log ( u ' Absolute path to script: ' + script_cmd [ 0 ] , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
script_cmd = script_cmd + [ ep_obj . location . encode ( sickbeard . SYS_ENCODING ) ,
self . file_path . encode ( sickbeard . SYS_ENCODING ) ,
str ( ep_obj . show . indexerid ) ,
str ( ep_obj . season ) ,
str ( ep_obj . episode ) ,
str ( ep_obj . airdate ) ]
2014-03-10 05:18:05 +00:00
# use subprocess to run the command and capture output
2015-05-08 02:46:54 +00:00
self . _log ( u ' Executing command ' + str ( script_cmd ) )
2014-03-10 05:18:05 +00:00
try :
2014-03-25 05:57:24 +00:00
p = subprocess . Popen ( script_cmd , stdin = subprocess . PIPE , stdout = subprocess . PIPE ,
stderr = subprocess . STDOUT , cwd = sickbeard . PROG_DIR )
out , err = p . communicate ( ) # @UnusedVariable
2015-05-08 02:46:54 +00:00
self . _log ( u ' Script result: ' + str ( out ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2015-06-08 12:47:01 +00:00
except OSError as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to run extra_script: ' + ex ( e ) )
2014-03-10 05:18:05 +00:00
2015-06-08 12:47:01 +00:00
except Exception as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Unable to run extra_script: ' + ex ( e ) )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
def _safe_replace ( self , ep_obj , new_ep_quality ) :
2014-03-10 05:18:05 +00:00
"""
2015-05-08 02:46:54 +00:00
Determines if the new episode can safely replace old episode .
Episodes which are expected ( snatched ) or larger than the existing episode are priority , others are not .
2014-03-10 05:18:05 +00:00
ep_obj : The TVEpisode object in question
new_ep_quality : The quality of the episode that is being processed
2015-05-08 02:46:54 +00:00
Returns : True if the episode can safely replace old episode , False otherwise .
2014-03-10 05:18:05 +00:00
"""
2015-05-08 02:46:54 +00:00
# if SickGear snatched this then assume it's safe
2015-10-13 20:29:14 +00:00
if ep_obj . status in common . Quality . SNATCHED + common . Quality . SNATCHED_PROPER + common . Quality . SNATCHED_BEST :
2015-05-08 02:46:54 +00:00
self . _log ( u ' SickGear snatched this episode, marking it safe to replace ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return True
old_ep_status , old_ep_quality = common . Quality . splitCompositeStatus ( ep_obj . status )
2015-05-08 02:46:54 +00:00
# if old episode is not downloaded/archived then it's safe
if common . DOWNLOADED != old_ep_status and common . ARCHIVED != old_ep_status :
self . _log ( u ' Existing episode status is not downloaded/archived, marking it safe to replace ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return True
2016-09-27 20:18:58 +00:00
if common . ARCHIVED == old_ep_status and common . Quality . NONE == old_ep_quality :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Marking it unsafe to replace because the existing episode status is archived ' , logger . DEBUG )
return False
# Status downloaded. Quality/ size checks
# if manual post process option is set to force_replace then it's safe
if self . force_replace :
self . _log ( u ' Force replace existing episode option is enabled, marking it safe to replace ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return True
2015-05-08 02:46:54 +00:00
# if the file processed is higher quality than the existing episode then it's safe
if new_ep_quality > old_ep_quality :
if common . Quality . UNKNOWN != new_ep_quality :
self . _log ( u ' Existing episode status is not snatched but the episode to process appears to be better quality than existing episode, marking it safe to replace ' , logger . DEBUG )
return True
else :
self . _log ( u ' Marking it unsafe to replace because an existing episode exists in the database and the episode to process has unknown quality ' , logger . DEBUG )
return False
# if there's an existing downloaded file with same quality, check filesize to decide
if new_ep_quality == old_ep_quality :
2016-09-27 20:18:58 +00:00
if re . search ( r ' \ bproper|repack \ b ' , self . nzb_name , re . I ) or re . search ( r ' \ bproper|repack \ b ' , self . file_name , re . I ) :
self . _log ( u ' Proper or repack with same quality, marking it safe to replace ' , logger . DEBUG )
return True
2015-05-08 02:46:54 +00:00
self . _log ( u ' An episode exists in the database with the same quality as the episode to process ' , logger . DEBUG )
existing_file_status = self . _check_for_existing_file ( ep_obj . location )
# check for an existing file
if PostProcessor . DOESNT_EXIST == existing_file_status :
if not ek . ek ( os . path . isdir , ep_obj . show . location ) and not sickbeard . CREATE_MISSING_SHOW_DIRS :
# File and show location does not exist, marking it unsafe to replace
self . _log ( u ' .. marking it unsafe to replace because show location does not exist ' , logger . DEBUG )
return False
else :
# File does not exist, marking it safe to replace
self . _log ( u ' .. there is no file to replace, marking it safe to continue ' , logger . DEBUG )
return True
self . _log ( u ' Checking size of existing file ' + ep_obj . location , logger . DEBUG )
if PostProcessor . EXISTS_SMALLER == existing_file_status :
# File exists and new file is larger, marking it safe to replace
self . _log ( u ' .. the existing smaller file will be replaced ' , logger . DEBUG )
return True
elif PostProcessor . EXISTS_LARGER == existing_file_status :
# File exists and new file is smaller, marking it unsafe to replace
self . _log ( u ' .. marking it unsafe to replace the existing larger file ' , logger . DEBUG )
return False
elif PostProcessor . EXISTS_SAME == existing_file_status :
# File exists and new file is same size, marking it unsafe to replace
self . _log ( u ' .. marking it unsafe to replace the existing same size file ' , logger . DEBUG )
return False
else :
self . _log ( u ' Unknown file status for: %s This should never happen, please log this as a bug. ' % ep_obj . location , logger . ERROR )
return False
# if there's an existing file with better quality
if old_ep_quality > new_ep_quality and old_ep_quality != common . Quality . UNKNOWN :
# Episode already exists in database and processed episode has lower quality, marking it unsafe to replace
self . _log ( u ' Marking it unsafe to replace the episode that already exists in database with a file of lower quality ' , logger . DEBUG )
return False
2015-10-13 20:29:14 +00:00
if self . in_history :
self . _log ( u ' SickGear snatched this episode, marking it safe to replace ' , logger . DEBUG )
return True
2015-05-08 02:46:54 +00:00
# None of the conditions were met, marking it unsafe to replace
self . _log ( u ' Marking it unsafe to replace because no positive condition is met, you may force replace but it would be better to examine the files ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return False
def process ( self ) :
"""
Post - process a given file
"""
2016-08-11 00:00:36 +00:00
self . _log ( u ' Processing... %s %s ' % ( ek . ek ( os . path . relpath , self . file_path , self . folder_path ) ,
( u ' <br />.. from nzb %s ' % self . nzb_name , u ' ' ) [ None is self . nzb_name ] ) )
2014-03-10 05:18:05 +00:00
if ek . ek ( os . path . isdir , self . file_path ) :
2016-08-11 00:00:36 +00:00
self . _log ( u ' Expecting file %s <br />.. is actually a directory, skipping ' % self . file_path )
2014-03-10 05:18:05 +00:00
return False
2014-05-02 08:47:02 +00:00
2014-03-10 05:18:05 +00:00
for ignore_file in self . IGNORED_FILESTRINGS :
if ignore_file in self . file_path :
2015-05-08 02:46:54 +00:00
self . _log ( u ' File %s <br />.. is ignored type, skipping ' % self . file_path )
2014-03-10 05:18:05 +00:00
return False
2014-05-02 08:47:02 +00:00
2014-03-10 05:18:05 +00:00
# reset per-file stuff
self . in_history = False
2014-05-30 13:00:04 +00:00
self . anidbEpisode = None
2014-03-12 05:28:30 +00:00
# try to find the file info
2015-05-08 02:46:54 +00:00
( show , season , episodes , quality ) = self . _find_info ( )
# if we don't have it then give up
2014-05-26 10:42:34 +00:00
if not show :
2016-08-11 00:00:36 +00:00
self . _log ( u ' Must add show to SickGear before trying to post process an episode ' , logger . WARNING )
2014-05-26 10:42:34 +00:00
raise exceptions . PostProcessingFailed ( )
2015-05-08 02:46:54 +00:00
elif None is season or not episodes :
self . _log ( u ' Quitting this post process, could not determine what episode this is ' , logger . DEBUG )
2014-03-10 20:31:41 +00:00
return False
2014-03-10 05:18:05 +00:00
# retrieve/create the corresponding TVEpisode objects
2014-05-26 10:42:34 +00:00
ep_obj = self . _get_ep_obj ( show , season , episodes )
2014-03-10 05:18:05 +00:00
# get the quality of the episode we're processing
2015-05-08 02:46:54 +00:00
if common . Quality . UNKNOWN == quality :
2014-05-02 08:47:02 +00:00
new_ep_quality = self . _get_quality ( ep_obj )
2014-07-22 04:53:32 +00:00
else :
2015-05-08 02:46:54 +00:00
new_ep_quality = quality
self . _log ( u ' Using " %s " quality from the snatch history ' % common . Quality . qualityStrings [ new_ep_quality ] , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
# see if it's safe to replace existing episode (is download snatched, PROPER, better quality)
if not self . _safe_replace ( ep_obj , new_ep_quality ) :
# if it's not safe to replace, stop here
self . _log ( u ' Quitting this post process ' , logger . DEBUG )
return False
2014-03-10 05:18:05 +00:00
2014-08-22 06:18:35 +00:00
# delete the existing file (and company)
for cur_ep in [ ep_obj ] + ep_obj . relatedEps :
try :
self . _delete ( cur_ep . location , associated_files = True )
# clean up any left over folders
if cur_ep . location :
helpers . delete_empty_folders ( ek . ek ( os . path . dirname , cur_ep . location ) ,
2015-05-08 02:46:54 +00:00
keep_dir = ep_obj . show . location )
2014-08-22 06:18:35 +00:00
except ( OSError , IOError ) :
2016-08-11 00:00:36 +00:00
raise exceptions . PostProcessingFailed ( u ' Unable to delete existing files ' )
2014-08-22 06:18:35 +00:00
2014-05-26 10:42:34 +00:00
# set the status of the episodes
2014-07-19 23:08:50 +00:00
# for curEp in [ep_obj] + ep_obj.relatedEps:
2014-05-26 10:42:34 +00:00
# curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
2014-03-10 05:18:05 +00:00
# if the show directory doesn't exist then make it if allowed
2015-05-08 02:46:54 +00:00
if not ek . ek ( os . path . isdir , ep_obj . show . location ) and sickbeard . CREATE_MISSING_SHOW_DIRS :
self . _log ( u ' Show directory does not exist, creating it ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
try :
2015-05-08 02:46:54 +00:00
ek . ek ( os . mkdir , ep_obj . show . location )
2014-03-10 05:18:05 +00:00
# do the library update for synoindex
2015-05-08 02:46:54 +00:00
notifiers . synoindex_notifier . addFolder ( ep_obj . show . location )
2014-03-10 05:18:05 +00:00
except ( OSError , IOError ) :
2015-05-08 02:46:54 +00:00
raise exceptions . PostProcessingFailed ( u ' Unable to create show directory: ' + ep_obj . show . location )
2014-03-10 05:18:05 +00:00
# get metadata for the show (but not episode because it hasn't been fully processed)
ep_obj . show . writeMetadata ( True )
2015-05-08 02:46:54 +00:00
# if we're processing an episode of type anime, get the anime version
2015-07-21 13:35:20 +00:00
anime_version = ( - 1 , self . anime_version ) [ ep_obj . show . is_anime and None is not self . anime_version ]
2015-05-08 02:46:54 +00:00
2014-03-10 05:18:05 +00:00
# update the ep info before we rename so the quality & release name go into the name properly
2014-05-30 10:01:49 +00:00
sql_l = [ ]
2014-03-10 05:18:05 +00:00
for cur_ep in [ ep_obj ] + ep_obj . relatedEps :
with cur_ep . lock :
2014-07-19 23:08:50 +00:00
if self . release_name :
2015-05-08 02:46:54 +00:00
self . _log ( u ' Found release name ' + self . release_name , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
cur_ep . release_name = self . release_name or ' '
2014-03-10 05:18:05 +00:00
2016-09-27 20:18:58 +00:00
any_qualities , best_qualities = common . Quality . splitQuality ( cur_ep . show . quality )
2015-05-08 02:46:54 +00:00
cur_ep . status = common . Quality . compositeStatus (
* * ( { ' status ' : common . DOWNLOADED , ' quality ' : new_ep_quality } ,
{ ' status ' : common . ARCHIVED , ' quality ' : new_ep_quality } )
2016-09-27 20:18:58 +00:00
[ ep_obj . status in common . Quality . SNATCHED_BEST or
( cur_ep . show . archive_firstmatch and new_ep_quality in best_qualities ) ] )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
cur_ep . release_group = self . release_group or ' '
2014-03-10 05:18:05 +00:00
cur_ep . is_proper = self . is_proper
2015-05-08 02:46:54 +00:00
cur_ep . version = anime_version
2014-07-22 04:53:32 +00:00
2015-05-08 02:46:54 +00:00
cur_ep . subtitles = [ ]
2014-07-22 04:53:32 +00:00
2015-05-08 02:46:54 +00:00
cur_ep . subtitles_searchcount = 0
cur_ep . subtitles_lastsearch = ' 0001-01-01 00:00:00 '
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
sql = cur_ep . get_sql ( )
if None is not sql :
sql_l . append ( sql )
if 0 < len ( sql_l ) :
my_db = db . DBConnection ( )
my_db . mass_action ( sql_l )
2014-06-30 15:57:32 +00:00
2014-07-21 13:44:01 +00:00
# Just want to keep this consistent for failed handling right now
2015-05-08 02:46:54 +00:00
release_name = show_name_helpers . determineReleaseName ( self . folder_path , self . nzb_name )
if None is not release_name :
failed_history . logSuccess ( release_name )
2014-07-21 13:44:01 +00:00
else :
2016-08-11 00:00:36 +00:00
self . _log ( u ' No snatched release found in history ' , logger . WARNING )
2014-05-30 10:01:49 +00:00
2014-03-10 05:18:05 +00:00
# find the destination folder
try :
proper_path = ep_obj . proper_path ( )
proper_absolute_path = ek . ek ( os . path . join , ep_obj . show . location , proper_path )
dest_path = ek . ek ( os . path . dirname , proper_absolute_path )
2015-05-08 02:46:54 +00:00
2014-03-10 05:18:05 +00:00
except exceptions . ShowDirNotFoundException :
2014-03-25 05:57:24 +00:00
raise exceptions . PostProcessingFailed (
2015-05-08 02:46:54 +00:00
u ' Unable to post process an episode because the show dir does not exist, quitting ' )
2014-03-10 05:18:05 +00:00
2015-05-08 02:46:54 +00:00
self . _log ( u ' Destination folder for this episode is ' + dest_path , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# create any folders we need
2015-05-08 02:46:54 +00:00
if not helpers . make_dirs ( dest_path ) :
raise exceptions . PostProcessingFailed ( u ' Unable to create destination folder: ' + dest_path )
2014-03-10 05:18:05 +00:00
# figure out the base name of the resulting episode file
if sickbeard . RENAME_EPISODES :
new_base_name = ek . ek ( os . path . basename , proper_path )
2015-05-08 02:46:54 +00:00
new_file_name = new_base_name + ' . ' + self . file_name . rpartition ( ' . ' ) [ - 1 ]
2014-03-10 05:18:05 +00:00
else :
# if we're not renaming then there's no new base name, we'll just use the existing name
new_base_name = None
new_file_name = self . file_name
2014-05-26 06:29:22 +00:00
# add to anidb
2015-05-08 02:46:54 +00:00
if sickbeard . ANIDB_USE_MYLIST and ep_obj . show . is_anime :
2014-05-26 06:29:22 +00:00
self . _add_to_anidb_mylist ( self . file_path )
2016-02-01 22:51:51 +00:00
if self . webhandler :
def keep_alive ( webh , stop_event ) :
while not stop_event . is_set ( ) :
stop_event . wait ( 60 )
webh ( ' . ' )
webh ( u ' \n ' )
keepalive_stop = threading . Event ( )
keepalive = threading . Thread ( target = keep_alive , args = ( self . webhandler , keepalive_stop ) )
2014-03-10 05:18:05 +00:00
try :
# move the episode and associated files to the show dir
2015-05-08 02:46:54 +00:00
args_link = { ' file_path ' : self . file_path , ' new_path ' : dest_path ,
' new_base_name ' : new_base_name ,
' associated_files ' : sickbeard . MOVE_ASSOCIATED_FILES }
args_cpmv = { ' subtitles ' : sickbeard . USE_SUBTITLES and ep_obj . show . subtitles ,
' action_tmpl ' : u ' %s <br />.. to %s ' }
args_cpmv . update ( args_link )
2016-02-01 22:51:51 +00:00
if self . webhandler :
self . webhandler ( ' Processing method is " %s " ' % self . process_method )
keepalive . start ( )
2015-05-08 02:46:54 +00:00
if ' copy ' == self . process_method :
self . _copy ( * * args_cpmv )
elif ' move ' == self . process_method :
self . _move ( * * args_cpmv )
elif ' hardlink ' == self . process_method :
self . _hardlink ( * * args_link )
elif ' symlink ' == self . process_method :
self . _move_and_symlink ( * * args_link )
2014-03-10 05:18:05 +00:00
else :
2015-05-08 02:46:54 +00:00
logger . log ( u ' Unknown process method: ' + str ( self . process_method ) , logger . ERROR )
raise exceptions . PostProcessingFailed ( u ' Unable to move the files to the new location ' )
2014-03-10 05:18:05 +00:00
except ( OSError , IOError ) :
2015-05-08 02:46:54 +00:00
raise exceptions . PostProcessingFailed ( u ' Unable to move the files to the new location ' )
2016-02-01 22:51:51 +00:00
finally :
if self . webhandler :
#stop the keep_alive
keepalive_stop . set ( )
2014-03-10 05:18:05 +00:00
# download subtitles
2015-05-08 02:46:54 +00:00
dosubs = sickbeard . USE_SUBTITLES and ep_obj . show . subtitles
2014-03-10 05:18:05 +00:00
# put the new location in the database
2014-05-30 10:01:49 +00:00
sql_l = [ ]
2014-03-10 05:18:05 +00:00
for cur_ep in [ ep_obj ] + ep_obj . relatedEps :
with cur_ep . lock :
cur_ep . location = ek . ek ( os . path . join , dest_path , new_file_name )
2015-05-08 02:46:54 +00:00
if dosubs :
cur_ep . downloadSubtitles ( force = True )
# set file modify stamp to show airdate
if sickbeard . AIRDATE_EPISODES :
2014-07-21 13:29:07 +00:00
cur_ep . airdateModifyStamp ( )
2015-05-08 02:46:54 +00:00
sql = cur_ep . get_sql ( )
if None is not sql :
sql_l . append ( sql )
if 0 < len ( sql_l ) :
my_db = db . DBConnection ( )
my_db . mass_action ( sql_l )
2014-07-21 13:29:07 +00:00
# generate nfo/tbn
ep_obj . createMetaFiles ( )
2014-05-30 10:01:49 +00:00
2014-03-10 05:18:05 +00:00
# log it to history
2015-05-08 02:46:54 +00:00
history . logDownload ( ep_obj , self . file_path , new_ep_quality , self . release_group , anime_version )
2014-03-10 05:18:05 +00:00
# send notifications
notifiers . notify_download ( ep_obj . _format_pattern ( ' % SN - % Sx %0E - %E N - % QN ' ) )
2016-02-19 17:38:38 +00:00
# do the library update for Emby
notifiers . emby_notifier . update_library ( ep_obj . show )
2014-03-10 05:18:05 +00:00
2015-02-25 12:33:40 +00:00
# do the library update for Kodi
notifiers . kodi_notifier . update_library ( ep_obj . show . name )
2016-02-19 17:38:38 +00:00
# do the library update for XBMC
notifiers . xbmc_notifier . update_library ( ep_obj . show . name )
2014-03-10 05:18:05 +00:00
# do the library update for Plex
2015-02-14 02:28:24 +00:00
notifiers . plex_notifier . update_library ( ep_obj )
2014-03-10 05:18:05 +00:00
# do the library update for NMJ
# nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)
# do the library update for Synology Indexer
notifiers . synoindex_notifier . addFile ( ep_obj . location )
# do the library update for pyTivo
notifiers . pytivo_notifier . update_library ( ep_obj )
# do the library update for Trakt
2015-11-19 22:05:19 +00:00
notifiers . trakt_notifier . update_collection ( ep_obj )
2014-03-10 05:18:05 +00:00
self . _run_extra_scripts ( ep_obj )
return True
2015-05-08 02:46:54 +00:00
@staticmethod
def _build_anidb_episode ( connection , filepath ) :
ep = adba . Episode ( connection , filePath = filepath ,
paramsF = [ ' quality ' , ' anidb_file_name ' , ' crc32 ' ] ,
paramsA = [ ' epno ' , ' english_name ' , ' short_name_list ' , ' other_name ' , ' synonym_list ' ] )
return ep
def _add_to_anidb_mylist ( self , filepath ) :
if helpers . set_up_anidb_connection ( ) :
if not self . anidbEpisode : # seams like we could parse the name before, now lets build the anidb object
self . anidbEpisode = self . _build_anidb_episode ( sickbeard . ADBA_CONNECTION , filepath )
self . _log ( u ' Adding the file to the anidb mylist ' , logger . DEBUG )
try :
self . anidbEpisode . add_to_mylist ( status = 1 ) # status = 1 sets the status of the file to "internal HDD"
2015-06-08 12:47:01 +00:00
except Exception as e :
2015-05-08 02:46:54 +00:00
self . _log ( u ' exception msg: ' + str ( e ) )