diff --git a/CHANGES.md b/CHANGES.md
index 9aab7b63..ea6311d7 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -31,8 +31,21 @@
* Add General Config/Interface/"Group show list shows into:"... to divide shows into groups on the Show List page
* Change Show List progress bar code, smaller page load, efficient use of js render engine
* Change values used for date sorting on home page and episode view for improved compatibility with posix systems
-* Change response handling in downloaders to simplify logic.
+* Change response handling in downloaders to simplify logic
* Change reduce html payload across page template files
+* Change to post process files ordered largest to smallest and tidied PP logging output
+* Add "then trash subdirs and files" to the Process method "Move" on the manual post process page
+* Add using show scene exceptions with post processing
+* Change overhaul processTV into a thread safe class
+* Change postProcessor and processTV to PEP8 standards
+* Change overhaul Manual Post-Processing page in line with layout style and improve texts
+* Change Force Processes enabled, only the largest video file of many will be processed instead of all files
+* Change visual ui of Postprocessing results to match the logs and errors view
+* Change remove ugly printing of episode object during PP seen in external apps like sabnzbd
+* Change to streamline output toward actual work done instead of showing all vars
+* Change pp report items from describing actions about to happen to instead detail the actual outcome of actions
+* Add clarity to the output of a successful post process that had some issues, rather than just reporting "there were problems"
+* Add a conclusive bottom line to the pp result report
[develop changelog]
Fix issue changing a custom show list group name that is in use. The bug resulted in the db containing stale group names
diff --git a/gui/slick/css/style.css b/gui/slick/css/style.css
index 268b70a4..4f0ec971 100644
--- a/gui/slick/css/style.css
+++ b/gui/slick/css/style.css
@@ -126,7 +126,7 @@ fonts
font-style:italic
}
-/* SickGear Icons */
+/* Droid Sans */
@font-face{
font-family:'sgicons';
src:url('fonts/sgicons.eot');
@@ -836,14 +836,14 @@ td.tvShow a:hover{
#rename th, #rename td,
#failed th, #failed td,
#backlog th, #backlog td,
-#show-list th, #show-list td {
+#show-list th, #show-list td{
text-align:center
}
#rename th.text-left, #rename td.text-left,
#failed th.text-left, #failed td.text-left,
#backlog th.text-left, #backlog td.text-left,
-#show-list th.text-left, #show-list td.text-left {
+#show-list th.text-left, #show-list td.text-left{
text-align:left
}
@@ -1097,17 +1097,16 @@ home_postprocess.tmpl
========================================================================== */
#postProcess{
- width:800px;
- padding-top:10px;
+ width:650px;
+ padding-top:30px;
margin-right:auto;
margin-left:auto
}
-
/* =======================================================================
displayShow.tmpl
========================================================================== */
-body#display-show.back-art {
+body#display-show.back-art{
color:#fff
}
@@ -1342,11 +1341,11 @@ span.imdbstars, span.imdbstars > *{
text-shadow:5px 5px 10px #000
}
-.qtip-maxwidth {
+.qtip-maxwidth{
max-width:1000px
}
-.season-status {
+.season-status{
font-size:12px;
vertical-align:middle
}
@@ -1385,9 +1384,9 @@ a.service img{
border-color:rgba(0,0,0,0.5)
}
-.back-art.translucent.pro .tvshowImg {
- opacity: 0.85;
- filter: alpha(opacity=85)
+.back-art.translucent.pro .tvshowImg{
+ opacity:0.85;
+ filter:alpha(opacity=85)
}
#checkboxControls{
@@ -1646,8 +1645,8 @@ td.col-search{
}
#testRenameTable tbody td.col-checkbox,
-#testRenameTable tbody td.col-ep {width:1%;vertical-align:middle}
-#testRenameTable tbody td.col-name {width:49%}
+#testRenameTable tbody td.col-ep{width:1%;vertical-align:middle}
+#testRenameTable tbody td.col-name{width:49%}
.input-scene{
height:20px;
@@ -2727,7 +2726,7 @@ div.blackwhitelist.pool input{
margin:5px 0 !important
}
div.blackwhitelist select{
- margin :0 !important
+ margin:0 !important
}
div.blackwhitelist .inuse{
diff --git a/gui/slick/interfaces/default/home_postprocess.tmpl b/gui/slick/interfaces/default/home_postprocess.tmpl
index 8eb074c6..30fcb060 100644
--- a/gui/slick/interfaces/default/home_postprocess.tmpl
+++ b/gui/slick/interfaces/default/home_postprocess.tmpl
@@ -1,86 +1,105 @@
#import sickbeard
##
-#set global $header="Post Processing"
-#set global $title="Post Processing"
-#set global $sbPath="../.."
-#set global $topmenu="home"
+#set global $header = 'Post Processing'
+#set global $title = $header
+#set global $topmenu = 'home'
+#set global $sbPath = '../..'
##
#import os.path
-#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl")
+#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')
-
#if $varExists('header')
#else
$title
#end if
-
-#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
\ No newline at end of file
+#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')
\ No newline at end of file
diff --git a/gui/slick/interfaces/default/inc_top.tmpl b/gui/slick/interfaces/default/inc_top.tmpl
index 3912e07b..cd0175ba 100644
--- a/gui/slick/interfaces/default/inc_top.tmpl
+++ b/gui/slick/interfaces/default/inc_top.tmpl
@@ -1,5 +1,6 @@
#import sickbeard
#import urllib
+#slurp
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index f4f97b83..fb20b0c4 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -232,35 +232,32 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
t = sickbeard.indexerApi(i).indexer(**lINDEXER_API_PARMS)
for name in showNames:
- logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(i).name, logger.DEBUG)
+ logger.log(u'Trying to find ' + name + ' on ' + sickbeard.indexerApi(i).name, logger.DEBUG)
try:
- search = t[indexer_id] if indexer_id else t[name]
+ result = t[indexer_id] if indexer_id else t[name]
except:
continue
- try:
- seriesname = search.seriesname
- except:
- seriesname = None
-
- try:
- series_id = search.id
- except:
- series_id = None
+ seriesname = series_id = False
+ for search in result:
+ seriesname = search['seriesname']
+ series_id = search['id']
+ if seriesname and series_id:
+ break
if not (seriesname and series_id):
continue
- if str(name).lower() == str(seriesname).lower and not indexer_id:
- return (seriesname, i, int(series_id))
- elif int(indexer_id) == int(series_id):
- return (seriesname, i, int(indexer_id))
+ if None is indexer_id and str(name).lower() == str(seriesname).lower():
+ return seriesname, i, int(series_id)
+ elif None is not indexer_id and int(indexer_id) == int(series_id):
+ return seriesname, i, int(indexer_id)
if indexer:
break
- return (None, None, None)
+ return None, None, None
def sizeof_fmt(num):
@@ -909,31 +906,35 @@ def full_sanitizeSceneName(name):
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()
-def get_show(name, tryIndexers=False):
+def get_show(name, try_indexers=False, try_scene_exceptions=False):
if not sickbeard.showList or None is name:
return
- showObj = None
- fromCache = False
+ show_obj = None
+ from_cache = False
try:
- # check cache for show
cache = sickbeard.name_cache.retrieveNameFromCache(name)
if cache:
- fromCache = True
- showObj = findCertainShow(sickbeard.showList, int(cache))
+ from_cache = True
+ show_obj = findCertainShow(sickbeard.showList, cache)
- if not showObj and tryIndexers:
- showObj = findCertainShow(sickbeard.showList,
- searchIndexerForShowID(full_sanitizeSceneName(name), ui=classes.ShowListUI)[2])
+ if not show_obj and try_scene_exceptions:
+ indexer_id = sickbeard.scene_exceptions.get_scene_exception_by_name(name)[0]
+ if indexer_id:
+ show_obj = findCertainShow(sickbeard.showList, indexer_id)
+
+ if not show_obj and try_indexers:
+ show_obj = findCertainShow(sickbeard.showList,
+ searchIndexerForShowID(full_sanitizeSceneName(name), ui=classes.ShowListUI)[2])
# add show to cache
- if showObj and not fromCache:
- sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
+ if show_obj and not from_cache:
+ sickbeard.name_cache.addNameToCache(name, show_obj.indexerid)
except Exception as e:
- logger.log(u"Error when attempting to find show: " + name + " in SickGear: " + str(e), logger.DEBUG)
+ logger.log(u'Error when attempting to find show: ' + name + ' in SickGear: ' + str(e), logger.DEBUG)
- return showObj
+ return show_obj
def is_hidden_folder(folder):
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index dc665ca0..d0d90171 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -36,12 +36,13 @@ class NameParser(object):
NORMAL_REGEX = 1
ANIME_REGEX = 2
- def __init__(self, file_name=True, showObj=None, tryIndexers=False, convert=False,
+ def __init__(self, file_name=True, showObj=None, try_indexers=False, try_scene_exceptions=False, convert=False,
naming_pattern=False, testing=False):
self.file_name = file_name
self.showObj = showObj
- self.tryIndexers = tryIndexers
+ self.try_indexers = try_indexers
+ self.try_scene_exceptions = try_scene_exceptions
self.convert = convert
self.naming_pattern = naming_pattern
self.testing = testing
@@ -201,7 +202,7 @@ class NameParser(object):
show = None
if not self.naming_pattern:
# try and create a show object for this result
- show = helpers.get_show(bestResult.series_name, self.tryIndexers)
+ show = helpers.get_show(bestResult.series_name, self.try_indexers, self.try_scene_exceptions)
# confirm passed in show object indexer id matches result show object indexer id
if show and not self.testing:
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index 900ce674..a6ede856 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -35,8 +35,6 @@ from sickbeard import logger
from sickbeard import notifiers
from sickbeard import show_name_helpers
from sickbeard import failed_history
-from sickbeard import name_cache
-
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
@@ -44,6 +42,11 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException, Inval
from lib import adba
+try:
+ from lib.send2trash import send2trash
+except ImportError:
+ pass
+
class PostProcessor(object):
"""
@@ -55,9 +58,9 @@ class PostProcessor(object):
EXISTS_SMALLER = 3
DOESNT_EXIST = 4
- IGNORED_FILESTRINGS = ["/.AppleDouble/", ".DS_Store"]
+ IGNORED_FILESTRINGS = ['/.AppleDouble/', '.DS_Store']
- def __init__(self, file_path, nzb_name=None, process_method=None, is_priority=None):
+ def __init__(self, file_path, nzb_name=None, process_method=None, force_replace=None, use_trash=None):
"""
Creates a new post processor with the given file path and optionally an NZB name.
@@ -79,7 +82,9 @@ class PostProcessor(object):
# name of the NZB that resulted in this folder
self.nzb_name = nzb_name
- self.process_method = process_method if process_method else sickbeard.PROCESS_METHOD
+ self.force_replace = force_replace
+
+ self.use_trash = use_trash
self.in_history = False
@@ -89,11 +94,13 @@ class PostProcessor(object):
self.is_proper = False
- self.is_priority = is_priority
-
self.log = ''
-
- self.version = None
+
+ self.process_method = process_method if process_method else sickbeard.PROCESS_METHOD
+
+ self.anime_version = None # anime equivalent of is_proper
+
+ self.anidbEpisode = None
def _log(self, message, level=logger.MESSAGE):
"""
@@ -102,10 +109,12 @@ class PostProcessor(object):
message: The string to log (unicode)
level: The log level to use (optional)
"""
- logger.log(message, level)
+        logger_msg = re.sub(r'(?i)<br(?:[\s/]+)>\.*', '', message)
+        logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
+ logger.log(u'%s' % logger_msg, level)
self.log += message + '\n'
- def _checkForExistingFile(self, existing_file):
+ def _check_for_existing_file(self, existing_file):
"""
Checks if a file exists already and if it does whether it's bigger or smaller than
the file we are post processing
@@ -120,31 +129,29 @@ class PostProcessor(object):
"""
if not existing_file:
- self._log(u"There is no existing file so there's no worries about replacing it", logger.DEBUG)
+ self._log(u'There is no existing file', logger.DEBUG)
return PostProcessor.DOESNT_EXIST
# if the new file exists, return the appropriate code depending on the size
if ek.ek(os.path.isfile, existing_file):
-
- # see if it's bigger than our old file
- if ek.ek(os.path.getsize, existing_file) > ek.ek(os.path.getsize, self.file_path):
- self._log(u"File " + existing_file + " is larger than " + self.file_path, logger.DEBUG)
- return PostProcessor.EXISTS_LARGER
-
- elif ek.ek(os.path.getsize, existing_file) == ek.ek(os.path.getsize, self.file_path):
- self._log(u"File " + existing_file + " is the same size as " + self.file_path, logger.DEBUG)
+            new_file = u'New file %s<br />.. is ' % self.file_path
+ if ek.ek(os.path.getsize, self.file_path) == ek.ek(os.path.getsize, existing_file):
+ self._log(u'%sthe same size as %s' % (new_file, existing_file), logger.DEBUG)
return PostProcessor.EXISTS_SAME
-
+ elif ek.ek(os.path.getsize, self.file_path) < ek.ek(os.path.getsize, existing_file):
+ self._log(u'%ssmaller than %s' % (new_file, existing_file), logger.DEBUG)
+ return PostProcessor.EXISTS_LARGER
else:
- self._log(u"File " + existing_file + " is smaller than " + self.file_path, logger.DEBUG)
+ self._log(u'%slarger than %s' % (new_file, existing_file), logger.DEBUG)
return PostProcessor.EXISTS_SMALLER
else:
- self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it",
+ self._log(u'File doesn\'t exist %s' % existing_file,
logger.DEBUG)
return PostProcessor.DOESNT_EXIST
- def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False):
+ @staticmethod
+ def list_associated_files(file_path, base_name_only=False, subtitles_only=False):
"""
For a given file path searches for files with the same name but different extension and returns their absolute paths
@@ -163,7 +170,7 @@ class PostProcessor(object):
base_name = file_path.rpartition('.')[0]
if not base_name_only:
- base_name = base_name + '.'
+ base_name += '.'
# don't strip it all and use cwd by accident
if not base_name:
@@ -206,30 +213,38 @@ class PostProcessor(object):
file_list = file_list + self.list_associated_files(file_path)
if not file_list:
- self._log(u"There were no files associated with " + file_path + ", not deleting anything", logger.DEBUG)
+ self._log(u'Not deleting anything because there are no files associated with %s' % file_path, logger.DEBUG)
return
# delete the file and any other files which we want to delete
for cur_file in file_list:
if ek.ek(os.path.isfile, cur_file):
- self._log(u"Deleting file " + cur_file, logger.DEBUG)
# check first the read-only attribute
file_attribute = ek.ek(os.stat, cur_file)[0]
- if (not file_attribute & stat.S_IWRITE):
+ if not file_attribute & stat.S_IWRITE:
# File is read-only, so make it writeable
- self._log('Read only mode on file ' + cur_file + ' Will try to make it writeable', logger.DEBUG)
try:
ek.ek(os.chmod, cur_file, stat.S_IWRITE)
+ self._log(u'Changed read only permissions to writeable to delete file %s' % cur_file, logger.DEBUG)
except:
- self._log(u'Cannot change permissions of ' + cur_file, logger.WARNING)
+ self._log(u'Cannot change permissions to writeable to delete file: %s' % cur_file, logger.WARNING)
- ek.ek(os.remove, cur_file)
+ try:
+ if self.use_trash:
+ ek.ek(send2trash, cur_file)
+ else:
+ ek.ek(os.remove, cur_file)
+ except OSError, e:
+ self._log(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)), logger.DEBUG)
+
+ if True is not ek.ek(os.path.isfile, cur_file):
+ self._log(u'Deleted file ' + cur_file, logger.DEBUG)
# do the library update for synoindex
notifiers.synoindex_notifier.deleteFile(cur_file)
def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None,
- subtitles=False):
+ subtitles=False, action_tmpl=None):
"""
Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
and optionally move associated files too.
@@ -242,7 +257,7 @@ class PostProcessor(object):
"""
if not action:
- self._log(u"Must provide an action for the combined file operation", logger.ERROR)
+ self._log(u'Must provide an action for the combined file operation', logger.ERROR)
return
file_list = [file_path]
@@ -252,7 +267,7 @@ class PostProcessor(object):
file_list = file_list + self.list_associated_files(file_path, subtitles_only=True)
if not file_list:
- self._log(u"There were no files associated with " + file_path + ", not moving anything", logger.DEBUG)
+ self._log(u'Not moving anything because there are no files associated with %s' % file_path, logger.DEBUG)
return
# create base name with file_path (media_file without .extension)
@@ -267,16 +282,16 @@ class PostProcessor(object):
# get the extension without .
cur_extension = cur_file_path[old_base_name_length + 1:]
+ # replace .nfo with .nfo-orig to avoid conflicts
+ if 'nfo' == cur_extension and True is sickbeard.NFO_RENAME:
+ cur_extension = 'nfo-orig'
+
# check if file have subtitles language
if os.path.splitext(cur_extension)[1][1:] in common.subtitleExtensions:
cur_lang = os.path.splitext(cur_extension)[0]
if cur_lang in sickbeard.SUBTITLES_LANGUAGES:
cur_extension = cur_lang + os.path.splitext(cur_extension)[1]
- # replace .nfo with .nfo-orig to avoid conflicts
- if cur_extension == 'nfo' and sickbeard.NFO_RENAME == True:
- cur_extension = 'nfo-orig'
-
# If new base name then convert name
if new_base_name:
new_file_name = new_base_name + '.' + cur_extension
@@ -288,16 +303,19 @@ class PostProcessor(object):
subs_new_path = ek.ek(os.path.join, new_path, sickbeard.SUBTITLES_DIR)
dir_exists = helpers.makeDir(subs_new_path)
if not dir_exists:
- logger.log(u"Unable to create subtitles folder " + subs_new_path, logger.ERROR)
+ logger.log(u'Unable to create subtitles folder ' + subs_new_path, logger.ERROR)
else:
helpers.chmodAsParent(subs_new_path)
new_file_path = ek.ek(os.path.join, subs_new_path, new_file_name)
else:
new_file_path = ek.ek(os.path.join, new_path, new_file_name)
- action(cur_file_path, new_file_path)
+ if None is action_tmpl:
+ action(cur_file_path, new_file_path)
+ else:
+ action(cur_file_path, new_file_path, action_tmpl)
- def _move(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
+ def _move(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False, action_tmpl=None):
"""
file_path: The full path of the media file to move
new_path: Destination path where we want to move the file to
@@ -305,20 +323,20 @@ class PostProcessor(object):
associated_files: Boolean, whether we should move similarly-named files too
"""
- def _int_move(cur_file_path, new_file_path):
+ def _int_move(cur_file_path, new_file_path, success_tmpl=u' %s to %s'):
- self._log(u"Moving file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
try:
helpers.moveFile(cur_file_path, new_file_path)
helpers.chmodAsParent(new_file_path)
+ self._log(u'Moved file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
except (IOError, OSError), e:
- self._log("Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e), logger.ERROR)
+                self._log(u'Unable to move file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
raise e
- self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move,
- subtitles=subtitles)
+ self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move,
+ subtitles=subtitles, action_tmpl=action_tmpl)
- def _copy(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
+ def _copy(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False, action_tmpl=None):
"""
file_path: The full path of the media file to copy
new_path: Destination path where we want to copy the file to
@@ -326,21 +344,20 @@ class PostProcessor(object):
associated_files: Boolean, whether we should copy similarly-named files too
"""
- def _int_copy(cur_file_path, new_file_path):
+ def _int_copy(cur_file_path, new_file_path, success_tmpl=u' %s to %s'):
- self._log(u"Copying file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
try:
helpers.copyFile(cur_file_path, new_file_path)
helpers.chmodAsParent(new_file_path)
+ self._log(u'Copied file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
except (IOError, OSError), e:
- logger.log("Unable to copy file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
+                self._log(u'Unable to copy %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
raise e
- self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy,
- subtitles=subtitles)
+ self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_copy,
+ subtitles=subtitles, action_tmpl=action_tmpl)
-
- def _hardlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
+ def _hardlink(self, file_path, new_path, new_base_name, associated_files=False, action_tmpl=None):
"""
file_path: The full path of the media file to move
new_path: Destination path where we want to create a hard linked file
@@ -348,19 +365,20 @@ class PostProcessor(object):
associated_files: Boolean, whether we should move similarly-named files too
"""
- def _int_hard_link(cur_file_path, new_file_path):
+ def _int_hard_link(cur_file_path, new_file_path, success_tmpl=u' %s to %s'):
- self._log(u"Hard linking file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
try:
helpers.hardlinkFile(cur_file_path, new_file_path)
helpers.chmodAsParent(new_file_path)
+ self._log(u'Hard linked file from' + (success_tmpl % (cur_file_path, new_file_path)), logger.DEBUG)
except (IOError, OSError), e:
- self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
+                self._log(u'Unable to link file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
raise e
- self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link)
+ self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_hard_link,
+ action_tmpl=action_tmpl)
- def _moveAndSymlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
+ def _move_and_symlink(self, file_path, new_path, new_base_name, associated_files=False, action_tmpl=None):
"""
file_path: The full path of the media file to move
new_path: Destination path where we want to move the file to create a symbolic link to
@@ -368,31 +386,32 @@ class PostProcessor(object):
associated_files: Boolean, whether we should move similarly-named files too
"""
- def _int_move_and_sym_link(cur_file_path, new_file_path):
+ def _int_move_and_sym_link(cur_file_path, new_file_path, success_tmpl=u' %s to %s'):
- self._log(u"Moving then symbolic linking file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
try:
helpers.moveAndSymlinkFile(cur_file_path, new_file_path)
helpers.chmodAsParent(new_file_path)
+ self._log(u'Moved then symbolic linked file from' + (success_tmpl % (cur_file_path, new_file_path)),
+ logger.DEBUG)
except (IOError, OSError), e:
- self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
+                self._log(u'Unable to link file %s<br />.. %s' % (success_tmpl % (cur_file_path, new_file_path), str(e)), logger.ERROR)
raise e
- self._combined_file_operation(file_path, new_path, new_base_name, associated_files,
- action=_int_move_and_sym_link)
+ self._combined_file_operation(file_path, new_path, new_base_name, associated_files, _int_move_and_sym_link,
+ action_tmpl=action_tmpl)
def _history_lookup(self):
"""
Look up the NZB name in the history and see if it contains a record for self.nzb_name
- Returns a (indexer_id, season, [], quality, version) tuple. The first two may be None if none were found.
+ Returns a (indexer_id, season, [], quality) tuple. indexer_id, season, quality may be None and episodes may be [].
"""
- to_return = (None, None, [], None, None)
+ to_return = (None, None, [], None)
+ self.in_history = False
# if we don't have either of these then there's nothing to use to search the history for anyway
if not self.nzb_name and not self.folder_name:
- self.in_history = False
return to_return
# make a list of possible names to use in the search
@@ -400,65 +419,38 @@ class PostProcessor(object):
if self.nzb_name:
names.append(self.nzb_name)
if '.' in self.nzb_name:
- names.append(self.nzb_name.rpartition(".")[0])
+ names.append(self.nzb_name.rpartition('.')[0])
if self.folder_name:
names.append(self.folder_name)
- # search the database for a possible match and return immediately if we find one
- myDB = db.DBConnection()
- for curName in names:
- search_name = re.sub("[\.\-\ ]", "_", curName)
- sql_results = myDB.select("SELECT * FROM history WHERE resource LIKE ?", [search_name])
+ my_db = db.DBConnection()
- if len(sql_results) == 0:
+ # search the database for a possible match and return immediately if we find one
+ for curName in names:
+            # in a SQL LIKE pattern, the underscore character ( _ ) matches any single character
+ search_name = re.sub('[ \.\-]', '_', curName)
+ sql_results = my_db.select('SELECT * FROM history WHERE resource LIKE ?', [search_name])
+
+ if 0 == len(sql_results):
continue
- indexer_id = int(sql_results[0]["showid"])
- season = int(sql_results[0]["season"])
- quality = int(sql_results[0]["quality"])
- version = int(sql_results[0]["version"])
+ indexer_id = int(sql_results[0]['showid'])
+ season = int(sql_results[0]['season'])
+ quality = int(sql_results[0]['quality'])
+ self.anime_version = int(sql_results[0]['version'])
- if quality == common.Quality.UNKNOWN:
+ if common.Quality.UNKNOWN == quality:
quality = None
- show = helpers.findCertainShow(sickbeard.showList, indexer_id)
-
self.in_history = True
- self.version = version
- to_return = (show, season, [], quality, version)
- self._log("Found result in history: " + str(to_return), logger.DEBUG)
+ show = helpers.findCertainShow(sickbeard.showList, indexer_id)
+ to_return = (show, season, [], quality)
+ self._log(u'Found a match in history for %s' % show.name, logger.DEBUG)
+ break
- return to_return
-
- self.in_history = False
return to_return
- def _finalize(self, parse_result):
- self.release_group = parse_result.release_group
-
- # remember whether it's a proper
- if parse_result.extra_info:
- self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) != None
-
- # if the result is complete then remember that for later
- # if the result is complete then set release name
- if parse_result.series_name and ((parse_result.season_number is not None and parse_result.episode_numbers)
- or parse_result.air_date) and parse_result.release_group:
-
- if not self.release_name:
- self.release_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name))
-
- else:
- logger.log(u"Parse result not sufficient (all following have to be set). will not save release name",
- logger.DEBUG)
- logger.log(u"Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
- logger.log(u"Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
- logger.log(u"Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
- logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
- logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
-
-
- def _analyze_name(self, name, file=True):
+ def _analyze_name(self, name, resource=True):
"""
Takes a name and tries to figure out a show, season, and episode from it.
@@ -468,9 +460,9 @@ class PostProcessor(object):
if none were found.
"""
- logger.log(u"Analyzing name " + repr(name))
+ logger.log(u'Analyzing name ' + repr(name))
- to_return = (None, None, [], None, None)
+ to_return = (None, None, [], None)
if not name:
return to_return
@@ -478,11 +470,9 @@ class PostProcessor(object):
name = helpers.remove_non_release_groups(helpers.remove_extension(name))
# parse the name to break it into show name, season, and episode
- np = NameParser(file, tryIndexers=True, convert=True)
+ np = NameParser(resource, try_indexers=True, try_scene_exceptions=True, convert=True)
parse_result = np.parse(name)
-
- # show object
- show = parse_result.show
+        self._log(u'Parsed %s<br />.. into %s' % (name, str(parse_result).decode('utf-8', 'xmlcharrefreplace')), logger.DEBUG)
if parse_result.is_air_by_date:
season = -1
@@ -491,35 +481,43 @@ class PostProcessor(object):
season = parse_result.season_number
episodes = parse_result.episode_numbers
- to_return = (show, season, episodes, parse_result.quality, None)
+ # show object
+ show = parse_result.show
+ to_return = (show, season, episodes, parse_result.quality)
self._finalize(parse_result)
return to_return
- def _build_anidb_episode(self, connection, filePath):
- ep = adba.Episode(connection, filePath=filePath,
- paramsF=["quality", "anidb_file_name", "crc32"],
- paramsA=["epno", "english_name", "short_name_list", "other_name", "synonym_list"])
+ def _finalize(self, parse_result):
- return ep
+ self.release_group = parse_result.release_group
- def _add_to_anidb_mylist(self, filePath):
- if helpers.set_up_anidb_connection():
- if not self.anidbEpisode: # seams like we could parse the name before, now lets build the anidb object
- self.anidbEpisode = self._build_anidb_episode(sickbeard.ADBA_CONNECTION, filePath)
+ # remember whether it's a proper
+ if parse_result.extra_info:
+ self.is_proper = None is not re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I)
- self._log(u"Adding the file to the anidb mylist", logger.DEBUG)
- try:
- self.anidbEpisode.add_to_mylist(status=1) # status = 1 sets the status of the file to "internal HDD"
- except Exception, e:
- self._log(u"exception msg: " + str(e))
+ # if the result is complete then set release name
+ if parse_result.series_name and\
+ ((None is not parse_result.season_number and parse_result.episode_numbers) or parse_result.air_date)\
+ and parse_result.release_group:
+
+ if not self.release_name:
+ self.release_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name))
+
+ else:
+ logger.log(u'Parse result not sufficient (all following have to be set). will not save release name', logger.DEBUG)
+ logger.log(u'Parse result(series_name): ' + str(parse_result.series_name), logger.DEBUG)
+ logger.log(u'Parse result(season_number): ' + str(parse_result.season_number), logger.DEBUG)
+ logger.log(u'Parse result(episode_numbers): ' + str(parse_result.episode_numbers), logger.DEBUG)
+ logger.log(u' or Parse result(air_date): ' + str(parse_result.air_date), logger.DEBUG)
+ logger.log(u'Parse result(release_group): ' + str(parse_result.release_group), logger.DEBUG)
def _find_info(self):
"""
For a given file try to find the showid, season, and episode.
"""
- show = season = quality = version = None
+ show = season = quality = None
episodes = []
# try to look up the nzb in history
@@ -538,72 +536,69 @@ class PostProcessor(object):
lambda: self._analyze_name(self.file_path),
# try to analyze the dir + file name together as one name
- lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)
- ]
+ lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)]
# attempt every possible method to get our info
for cur_attempt in attempt_list:
try:
- (cur_show, cur_season, cur_episodes, cur_quality, cur_version) = cur_attempt()
+ (cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
except (InvalidNameException, InvalidShowException), e:
- logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
+ logger.log(u'Unable to parse, skipping: ' + ex(e), logger.DEBUG)
continue
if not cur_show:
continue
- else:
- show = cur_show
+
+ # if we already did a successful history lookup then keep that show value
+ show = cur_show
if cur_quality and not (self.in_history and quality):
quality = cur_quality
- # we only get current version for animes from history to prevent issues with old database entries
- if cur_version is not None:
- version = cur_version
-
- if cur_season != None:
+ if None is not cur_season:
season = cur_season
+
if cur_episodes:
episodes = cur_episodes
# for air-by-date shows we need to look up the season/episode from database
- if season == -1 and show and episodes:
+ if -1 == season and show and episodes:
self._log(
- u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
+ u'Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode',
logger.DEBUG)
airdate = episodes[0].toordinal()
- myDB = db.DBConnection()
- sql_result = myDB.select(
- "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
+ my_db = db.DBConnection()
+ sql_result = my_db.select(
+ 'SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?',
[show.indexerid, show.indexer, airdate])
if sql_result:
season = int(sql_result[0][0])
episodes = [int(sql_result[0][1])]
else:
- self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
- show.indexerid) + u", skipping", logger.DEBUG)
+ self._log(u'Unable to find episode with date ' + str(episodes[0]) + u' for show ' + str(
+ show.indexerid) + u', skipping', logger.DEBUG)
# we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
episodes = []
continue
# if there's no season then we can hopefully just use 1 automatically
- elif season == None and show:
- myDB = db.DBConnection()
- numseasonsSQlResult = myDB.select(
- "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
+ elif None is season and show:
+ my_db = db.DBConnection()
+ num_seasons_sql_result = my_db.select(
+ 'SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0',
[show.indexerid, show.indexer])
- if int(numseasonsSQlResult[0][0]) == 1 and season == None:
+ if 1 == int(num_seasons_sql_result[0][0]) and None is season:
self._log(
- u"Don't have a season number, but this show appears to only have 1 season, setting season number to 1...",
+ u'No season number found, but this show appears to only have 1 season, setting season number to 1...',
logger.DEBUG)
season = 1
if show and season and episodes:
- return (show, season, episodes, quality, version)
+ break
- return (show, season, episodes, quality, version)
+ return show, season, episodes, quality
def _get_ep_obj(self, show, season, episodes):
"""
@@ -619,21 +614,23 @@ class PostProcessor(object):
root_ep = None
for cur_episode in episodes:
- self._log(u"Retrieving episode object for " + str(season) + "x" + str(cur_episode), logger.DEBUG)
+ episode = int(cur_episode)
+
+ self._log(u'Retrieving episode object for %sx%s' % (str(season), str(episode)), logger.DEBUG)
# now that we've figured out which episode this file is just load it manually
try:
- curEp = show.getEpisode(season, cur_episode)
+ cur_ep = show.getEpisode(season, episode)
except exceptions.EpisodeNotFoundException, e:
- self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
+ self._log(u'Unable to create episode: ' + ex(e), logger.DEBUG)
raise exceptions.PostProcessingFailed()
# associate all the episodes together under a single root episode
- if root_ep == None:
- root_ep = curEp
+ if None is root_ep:
+ root_ep = cur_ep
root_ep.relatedEps = []
- elif curEp not in root_ep.relatedEps:
- root_ep.relatedEps.append(curEp)
+ elif cur_ep not in root_ep.relatedEps:
+ root_ep.relatedEps.append(cur_ep)
return root_ep
@@ -647,58 +644,38 @@ class PostProcessor(object):
Returns: A quality value found in common.Quality
"""
- ep_quality = common.Quality.UNKNOWN
-
# if there is a quality available in the status then we don't need to bother guessing from the filename
if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
- oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable
- if ep_quality != common.Quality.UNKNOWN:
+ old_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable
+ if common.Quality.UNKNOWN != ep_quality:
self._log(
- u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
+ u'Using "%s" quality from the old status' % common.Quality.qualityStrings[ep_quality],
logger.DEBUG)
return ep_quality
- # nzb name is the most reliable if it exists, followed by folder name and lastly file name
- name_list = [self.nzb_name, self.folder_name, self.file_name]
-
# search all possible names for our new quality, in case the file or dir doesn't have it
- for cur_name in name_list:
+ # nzb name is the most reliable if it exists, followed by folder name and lastly file name
+ for thing, cur_name in {'nzb name': self.nzb_name, 'folder name': self.folder_name, 'file name': self.file_name}.items():
# some stuff might be None at this point still
if not cur_name:
continue
ep_quality = common.Quality.nameQuality(cur_name, ep_obj.show.is_anime)
- self._log(
- u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality],
- logger.DEBUG)
+ quality_log = u' "%s" quality from the %s %s' % (common.Quality.qualityStrings[ep_quality], thing, cur_name)
# if we find a good one then use it
- if ep_quality != common.Quality.UNKNOWN:
- logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[
- ep_quality] + ", using that", logger.DEBUG)
- return ep_quality
-
- # Try getting quality from the episode (snatched) status
- if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER:
- oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable
- if ep_quality != common.Quality.UNKNOWN:
- self._log(
- u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
- logger.DEBUG)
+ if common.Quality.UNKNOWN != ep_quality:
+ self._log(u'Using' + quality_log, logger.DEBUG)
return ep_quality
+ else:
+ self._log(u'Found' + quality_log, logger.DEBUG)
# Try guessing quality from the file name
ep_quality = common.Quality.assumeQuality(self.file_name)
- self._log(
- u"Guessing quality for name " + self.file_name + u", got " + common.Quality.qualityStrings[ep_quality],
- logger.DEBUG)
- if ep_quality != common.Quality.UNKNOWN:
- logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[
- ep_quality] + ", using that", logger.DEBUG)
- return ep_quality
+ self._log(u'Using guessed "%s" quality from the file name %s'
+ % (common.Quality.qualityStrings[ep_quality], self.file_name), logger.DEBUG)
- test = str(ep_quality)
return ep_quality
def _run_extra_scripts(self, ep_obj):
@@ -712,59 +689,120 @@ class PostProcessor(object):
# generate a safe command line string to execute the script and provide all the parameters
script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", curScriptName) if piece.strip()]
script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
- self._log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)
+ self._log(u'Absolute path to script: ' + script_cmd[0], logger.DEBUG)
- script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season),
- str(ep_obj.episode), str(ep_obj.airdate)]
+ script_cmd = script_cmd + [ep_obj.location.encode(sickbeard.SYS_ENCODING),
+ self.file_path.encode(sickbeard.SYS_ENCODING),
+ str(ep_obj.show.indexerid),
+ str(ep_obj.season),
+ str(ep_obj.episode),
+ str(ep_obj.airdate)]
# use subprocess to run the command and capture output
- self._log(u"Executing command " + str(script_cmd))
+ self._log(u'Executing command ' + str(script_cmd))
try:
p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
out, err = p.communicate() # @UnusedVariable
- self._log(u"Script result: " + str(out), logger.DEBUG)
+ self._log(u'Script result: ' + str(out), logger.DEBUG)
except OSError, e:
- self._log(u"Unable to run extra_script: " + ex(e))
+ self._log(u'Unable to run extra_script: ' + ex(e))
except Exception, e:
- self._log(u"Unable to run extra_script: " + ex(e))
+ self._log(u'Unable to run extra_script: ' + ex(e))
- def _is_priority(self, ep_obj, new_ep_quality):
+ def _safe_replace(self, ep_obj, new_ep_quality):
"""
- Determines if the episode is a priority download or not (if it is expected). Episodes which are expected
- (snatched) or larger than the existing episode are priority, others are not.
+ Determines if the new episode can safely replace old episode.
+ Episodes which are expected (snatched) or larger than the existing episode are priority, others are not.
ep_obj: The TVEpisode object in question
new_ep_quality: The quality of the episode that is being processed
- Returns: True if the episode is priority, False otherwise.
+ Returns: True if the episode can safely replace old episode, False otherwise.
"""
- if self.is_priority:
- return True
-
- # if SB downloaded this on purpose then this is a priority download
- if self.in_history or ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
- self._log(u"SB snatched this episode so I'm marking it as priority", logger.DEBUG)
+ # if SickGear snatched this then assume it's safe
+ if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER\
+ + common.Quality.SNATCHED_BEST or self.in_history:
+ self._log(u'SickGear snatched this episode, marking it safe to replace', logger.DEBUG)
return True
old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
- # if the user downloaded it manually and it's higher quality than the existing episode then it's priority
- if new_ep_quality > old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
- self._log(
- u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority",
- logger.DEBUG)
+ # if old episode is not downloaded/archived then it's safe
+ if common.DOWNLOADED != old_ep_status and common.ARCHIVED != old_ep_status:
+ self._log(u'Existing episode status is not downloaded/archived, marking it safe to replace', logger.DEBUG)
return True
- # if the user downloaded it manually and it appears to be a PROPER/REPACK then it's priority
- if self.is_proper and new_ep_quality >= old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
- self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority",
- logger.DEBUG)
+ if common.ARCHIVED == old_ep_status:
+ self._log(u'Marking it unsafe to replace because the existing episode status is archived', logger.DEBUG)
+ return False
+
+ # Status downloaded. Quality/ size checks
+
+ # if manual post process option is set to force_replace then it's safe
+ if self.force_replace:
+ self._log(u'Force replace existing episode option is enabled, marking it safe to replace', logger.DEBUG)
return True
+ # if the file processed is higher quality than the existing episode then it's safe
+ if new_ep_quality > old_ep_quality:
+ if common.Quality.UNKNOWN != new_ep_quality:
+ self._log(u'Existing episode status is not snatched but the episode to process appears to be better quality than existing episode, marking it safe to replace', logger.DEBUG)
+ return True
+
+ else:
+ self._log(u'Marking it unsafe to replace because an existing episode exists in the database and the episode to process has unknown quality', logger.DEBUG)
+ return False
+
+ # if there's an existing downloaded file with same quality, check filesize to decide
+ if new_ep_quality == old_ep_quality:
+ self._log(u'An episode exists in the database with the same quality as the episode to process', logger.DEBUG)
+
+ existing_file_status = self._check_for_existing_file(ep_obj.location)
+
+ # check for an existing file
+ if PostProcessor.DOESNT_EXIST == existing_file_status:
+ if not ek.ek(os.path.isdir, ep_obj.show.location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
+ # File and show location does not exist, marking it unsafe to replace
+ self._log(u'.. marking it unsafe to replace because show location does not exist', logger.DEBUG)
+ return False
+ else:
+ # File does not exist, marking it safe to replace
+ self._log(u'.. there is no file to replace, marking it safe to continue', logger.DEBUG)
+ return True
+
+ self._log(u'Checking size of existing file ' + ep_obj.location, logger.DEBUG)
+
+ if PostProcessor.EXISTS_SMALLER == existing_file_status:
+ # File exists and new file is larger, marking it safe to replace
+ self._log(u'.. the existing smaller file will be replaced', logger.DEBUG)
+ return True
+
+ elif PostProcessor.EXISTS_LARGER == existing_file_status:
+ # File exists and new file is smaller, marking it unsafe to replace
+ self._log(u'.. marking it unsafe to replace the existing larger file', logger.DEBUG)
+ return False
+
+ elif PostProcessor.EXISTS_SAME == existing_file_status:
+ # File exists and new file is same size, marking it unsafe to replace
+ self._log(u'.. marking it unsafe to replace the existing same size file', logger.DEBUG)
+ return False
+
+ else:
+ self._log(u'Unknown file status for: %s This should never happen, please log this as a bug.' % ep_obj.location, logger.ERROR)
+ return False
+
+ # if there's an existing file with better quality
+ if old_ep_quality > new_ep_quality and old_ep_quality != common.Quality.UNKNOWN:
+ # Episode already exists in database and processed episode has lower quality, marking it unsafe to replace
+ self._log(u'Marking it unsafe to replace the episode that already exists in database with a file of lower quality', logger.DEBUG)
+ return False
+
+ # None of the conditions were met, marking it unsafe to replace
+ self._log(u'Marking it unsafe to replace because no positive condition is met, you may force replace but it would be better to examine the files', logger.DEBUG)
return False
def process(self):
@@ -772,86 +810,47 @@ class PostProcessor(object):
Post-process a given file
"""
- self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")
+ self._log(u'Processing %s%s' % (self.file_path, (u'<br />.. from nzb %s' % str(self.nzb_name), u'')[None is self.nzb_name]))
if ek.ek(os.path.isdir, self.file_path):
- self._log(u"File " + self.file_path + " seems to be a directory")
+ self._log(u'File %s<br />.. seems to be a directory' % self.file_path)
return False
for ignore_file in self.IGNORED_FILESTRINGS:
if ignore_file in self.file_path:
- self._log(u"File " + self.file_path + " is ignored type, skipping")
+ self._log(u'File %s<br />.. is ignored type, skipping' % self.file_path)
return False
# reset per-file stuff
self.in_history = False
-
- # reset the anidb episode object
self.anidbEpisode = None
# try to find the file info
- (show, season, episodes, quality, version) = self._find_info()
+ (show, season, episodes, quality) = self._find_info()
+
+ # if we don't have it then give up
if not show:
- self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
- logger.WARNING)
+ self._log(u'Please add the show to your SickGear then try to post process an episode', logger.WARNING)
raise exceptions.PostProcessingFailed()
- elif season == None or not episodes:
- self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
- self._log(u"Quitting post-processing", logger.DEBUG)
+ elif None is season or not episodes:
+ self._log(u'Quitting this post process, could not determine what episode this is', logger.DEBUG)
return False
# retrieve/create the corresponding TVEpisode objects
ep_obj = self._get_ep_obj(show, season, episodes)
# get the quality of the episode we're processing
- if quality:
- self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality],
- logger.DEBUG)
- new_ep_quality = quality
- else:
+ if common.Quality.UNKNOWN == quality:
new_ep_quality = self._get_quality(ep_obj)
-
- logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)
-
- # see if this is a priority download (is it snatched, in history, PROPER, or BEST)
- priority_download = self._is_priority(ep_obj, new_ep_quality)
- self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)
-
- # get the version of the episode we're processing
- if version:
- self._log(u"Snatch history had a version in it, using that: v" + str(version),
- logger.DEBUG)
- new_ep_version = version
else:
- new_ep_version = -1
+ new_ep_quality = quality
+ self._log(u'Using "%s" quality from the snatch history' % common.Quality.qualityStrings[new_ep_quality], logger.DEBUG)
- # check for an existing file
- existing_file_status = self._checkForExistingFile(ep_obj.location)
-
- # if it's not priority then we don't want to replace smaller files in case it was a mistake
- if not priority_download:
-
- # if there's an existing file that we don't want to replace stop here
- if existing_file_status == PostProcessor.EXISTS_LARGER:
- if self.is_proper:
- self._log(
- u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace",
- logger.DEBUG)
- return True
-
- else:
- self._log(u"File exists and new file is smaller, marking it unsafe to replace", logger.DEBUG)
- return False
-
- elif existing_file_status == PostProcessor.EXISTS_SAME:
- self._log(u"File exists and new file is same size, marking it unsafe to replace", logger.DEBUG)
- return False
-
- # if the file is priority then we're going to replace it even if it exists
- else:
- self._log(
- u"This download is marked a priority download so I'm going to replace an existing file if I find one",
- logger.DEBUG)
+ # see if it's safe to replace existing episode (is download snatched, PROPER, better quality)
+ if not self._safe_replace(ep_obj, new_ep_quality):
+ # if it's not safe to replace, stop here
+ self._log(u'Quitting this post process', logger.DEBUG)
+ return False
# delete the existing file (and company)
for cur_ep in [ep_obj] + ep_obj.relatedEps:
@@ -861,42 +860,50 @@ class PostProcessor(object):
# clean up any left over folders
if cur_ep.location:
helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
- keep_dir=ep_obj.show._location)
+ keep_dir=ep_obj.show.location)
except (OSError, IOError):
- raise exceptions.PostProcessingFailed("Unable to delete the existing files")
+ raise exceptions.PostProcessingFailed(u'Unable to delete the existing files')
# set the status of the episodes
# for curEp in [ep_obj] + ep_obj.relatedEps:
# curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# if the show directory doesn't exist then make it if allowed
- if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
- self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
+ if not ek.ek(os.path.isdir, ep_obj.show.location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
+ self._log(u'Show directory does not exist, creating it', logger.DEBUG)
try:
- ek.ek(os.mkdir, ep_obj.show._location)
+ ek.ek(os.mkdir, ep_obj.show.location)
# do the library update for synoindex
- notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
+ notifiers.synoindex_notifier.addFolder(ep_obj.show.location)
except (OSError, IOError):
- raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)
+ raise exceptions.PostProcessingFailed(u'Unable to create show directory: ' + ep_obj.show.location)
# get metadata for the show (but not episode because it hasn't been fully processed)
ep_obj.show.writeMetadata(True)
+ # if we're processing an episode of type anime, get the anime version
+ anime_version = (-1, self.anime_version)[ep_obj.show.is_anime and None is not self.anime_version and self.anime_version]
+
# update the ep info before we rename so the quality & release name go into the name properly
sql_l = []
for cur_ep in [ep_obj] + ep_obj.relatedEps:
with cur_ep.lock:
if self.release_name:
- self._log("Found release name " + self.release_name, logger.DEBUG)
- cur_ep.release_name = self.release_name
- else:
- cur_ep.release_name = ""
+ self._log(u'Found release name ' + self.release_name, logger.DEBUG)
- if ep_obj.status in common.Quality.SNATCHED_BEST:
- cur_ep.status = common.Quality.compositeStatus(common.ARCHIVED, new_ep_quality)
- else:
- cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
+ cur_ep.release_name = self.release_name or ''
+
+ cur_ep.status = common.Quality.compositeStatus(
+ **({'status': common.DOWNLOADED, 'quality': new_ep_quality},
+ {'status': common.ARCHIVED, 'quality': new_ep_quality})
+ [ep_obj.status in common.Quality.SNATCHED_BEST])
+
+ cur_ep.release_group = self.release_group or ''
+
+ cur_ep.is_proper = self.is_proper
+
+ cur_ep.version = anime_version
cur_ep.subtitles = []
@@ -904,48 +911,41 @@ class PostProcessor(object):
cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'
- cur_ep.is_proper = self.is_proper
+ sql = cur_ep.get_sql()
+ if None is not sql:
+ sql_l.append(sql)
- cur_ep.version = new_ep_version
-
- if self.release_group:
- cur_ep.release_group = self.release_group
- else:
- cur_ep.release_group = ""
-
- sql_l.append(cur_ep.get_sql())
-
- if len(sql_l) > 0:
- myDB = db.DBConnection()
- myDB.mass_action(sql_l)
+ if 0 < len(sql_l):
+ my_db = db.DBConnection()
+ my_db.mass_action(sql_l)
# Just want to keep this consistent for failed handling right now
- releaseName = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
- if releaseName is not None:
- failed_history.logSuccess(releaseName)
+ release_name = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
+ if None is not release_name:
+ failed_history.logSuccess(release_name)
else:
- self._log(u"Couldn't find release in snatch history", logger.WARNING)
+ self._log(u'No release found in snatch history', logger.WARNING)
# find the destination folder
try:
proper_path = ep_obj.proper_path()
proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
-
dest_path = ek.ek(os.path.dirname, proper_absolute_path)
+
except exceptions.ShowDirNotFoundException:
raise exceptions.PostProcessingFailed(
- u"Unable to post-process an episode if the show dir doesn't exist, quitting")
+ u'Unable to post process an episode because the show dir does not exist, quitting')
- self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)
+ self._log(u'Destination folder for this episode is ' + dest_path, logger.DEBUG)
# create any folders we need
- helpers.make_dirs(dest_path)
+ if not helpers.make_dirs(dest_path):
+ raise exceptions.PostProcessingFailed(u'Unable to create destination folder: ' + dest_path)
# figure out the base name of the resulting episode file
if sickbeard.RENAME_EPISODES:
- orig_extension = self.file_name.rpartition('.')[-1]
new_base_name = ek.ek(os.path.basename, proper_path)
- new_file_name = new_base_name + '.' + orig_extension
+ new_file_name = new_base_name + '.' + self.file_name.rpartition('.')[-1]
else:
# if we're not renaming then there's no new base name, we'll just use the existing name
@@ -953,58 +953,57 @@ class PostProcessor(object):
new_file_name = self.file_name
# add to anidb
- if ep_obj.show.is_anime and sickbeard.ANIDB_USE_MYLIST:
+ if sickbeard.ANIDB_USE_MYLIST and ep_obj.show.is_anime:
self._add_to_anidb_mylist(self.file_path)
try:
# move the episode and associated files to the show dir
- if self.process_method == "copy":
- self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
- sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
- elif self.process_method == "move":
- self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
- sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
- elif self.process_method == "hardlink":
- self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
- sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
- elif self.process_method == "symlink":
- self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
- sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
+ args_link = {'file_path': self.file_path, 'new_path': dest_path,
+ 'new_base_name': new_base_name,
+ 'associated_files': sickbeard.MOVE_ASSOCIATED_FILES}
+ args_cpmv = {'subtitles': sickbeard.USE_SUBTITLES and ep_obj.show.subtitles,
+ 'action_tmpl': u' %s<br />.. to %s'}
+ args_cpmv.update(args_link)
+ if 'copy' == self.process_method:
+ self._copy(**args_cpmv)
+ elif 'move' == self.process_method:
+ self._move(**args_cpmv)
+ elif 'hardlink' == self.process_method:
+ self._hardlink(**args_link)
+ elif 'symlink' == self.process_method:
+ self._move_and_symlink(**args_link)
else:
- logger.log(u"Unknown process method: " + str(self.process_method), logger.ERROR)
- raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
+ logger.log(u'Unknown process method: ' + str(self.process_method), logger.ERROR)
+ raise exceptions.PostProcessingFailed(u'Unable to move the files to the new location')
except (OSError, IOError):
- raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
+ raise exceptions.PostProcessingFailed(u'Unable to move the files to the new location')
# download subtitles
- if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
- for cur_ep in [ep_obj] + ep_obj.relatedEps:
- with cur_ep.lock:
- cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
- cur_ep.downloadSubtitles(force=True)
+ dosubs = sickbeard.USE_SUBTITLES and ep_obj.show.subtitles
# put the new location in the database
sql_l = []
for cur_ep in [ep_obj] + ep_obj.relatedEps:
with cur_ep.lock:
cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
- sql_l.append(cur_ep.get_sql())
-
- if len(sql_l) > 0:
- myDB = db.DBConnection()
- myDB.mass_action(sql_l)
-
- # set file modify stamp to show airdate
- if sickbeard.AIRDATE_EPISODES:
- for cur_ep in [ep_obj] + ep_obj.relatedEps:
- with cur_ep.lock:
+ if dosubs:
+ cur_ep.downloadSubtitles(force=True)
+ # set file modify stamp to show airdate
+ if sickbeard.AIRDATE_EPISODES:
cur_ep.airdateModifyStamp()
+ sql = cur_ep.get_sql()
+ if None is not sql:
+ sql_l.append(sql)
+
+ if 0 < len(sql_l):
+ my_db = db.DBConnection()
+ my_db.mass_action(sql_l)
# generate nfo/tbn
ep_obj.createMetaFiles()
# log it to history
- history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version)
+ history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, anime_version)
# send notifications
notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
@@ -1033,3 +1032,21 @@ class PostProcessor(object):
self._run_extra_scripts(ep_obj)
return True
+
+ @staticmethod
+ def _build_anidb_episode(connection, filepath):
+ ep = adba.Episode(connection, filePath=filepath,
+ paramsF=['quality', 'anidb_file_name', 'crc32'],
+ paramsA=['epno', 'english_name', 'short_name_list', 'other_name', 'synonym_list'])
+ return ep
+
+ def _add_to_anidb_mylist(self, filepath):
+ if helpers.set_up_anidb_connection():
+ if not self.anidbEpisode: # seams like we could parse the name before, now lets build the anidb object
+ self.anidbEpisode = self._build_anidb_episode(sickbeard.ADBA_CONNECTION, filepath)
+
+ self._log(u'Adding the file to the anidb mylist', logger.DEBUG)
+ try:
+ self.anidbEpisode.add_to_mylist(status=1) # status = 1 sets the status of the file to "internal HDD"
+ except Exception, e:
+ self._log(u'exception msg: ' + str(e))
diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py
index 58b8284d..6a205e02 100644
--- a/sickbeard/processTV.py
+++ b/sickbeard/processTV.py
@@ -21,6 +21,7 @@ from __future__ import with_statement
import os
import shutil
import stat
+import re
import sickbeard
from sickbeard import postProcessor
@@ -33,457 +34,545 @@ from sickbeard import common
from sickbeard import failedProcessor
-from lib.unrar2 import RarFile, RarInfo
-from lib.unrar2.rar_exceptions import *
+from lib.unrar2 import RarFile
+
+try:
+ from lib.send2trash import send2trash
+except ImportError:
+ pass
-def delete_folder(folder, check_empty=True):
+# noinspection PyArgumentList
+class ProcessTVShow(object):
+ """ Process a TV Show """
- # check if it's a folder
- if not ek.ek(os.path.isdir, folder):
- return False
+ def __init__(self):
+ self.files_passed = 0
+ self.files_failed = 0
+ self._output = []
- # check if it isn't TV_DOWNLOAD_DIR
- if sickbeard.TV_DOWNLOAD_DIR:
- if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
- return False
+ @property
+ def any_vid_processed(self):
+ return 0 < self.files_passed
- # check if it's empty folder when wanted checked
- if check_empty:
- check_files = ek.ek(os.listdir, folder)
- if check_files:
- return False
+ @property
+ def result(self, pre=True):
+ return (('<br />', u'\n')[pre]).join(self._output)
- # try deleting folder
- try:
- logger.log(u"Deleting folder: " + folder)
- shutil.rmtree(folder)
- except (OSError, IOError), e:
- logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
- return False
+ def _buffer(self, text=None):
+ if None is not text:
+ self._output.append(text)
- return True
-
-def delete_files(processPath, notwantedFiles):
- global returnStr, process_result
-
- if not process_result:
+ def _log_helper(self, message, log_level=logger.DEBUG):
+ logger_msg = re.sub(r'(?i)<br[\s/]+>\.*', '', message)
+ logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
+ logger.log(u'%s' % logger_msg, log_level)
+ self._buffer(message)
return
- #Delete all file not needed
- for cur_file in notwantedFiles:
+ def _set_process_success(self, state=True, reset=False):
+ if state:
+ self.files_passed += 1
+ else:
+ self.files_failed += 1
+ if reset:
+ self.files_passed = 0
+ self.files_failed = 0
- cur_file_path = ek.ek(os.path.join, processPath, cur_file)
+ def _delete_folder(self, folder, check_empty=True):
- if not ek.ek(os.path.isfile, cur_file_path):
- continue #Prevent error when a notwantedfiles is an associated files
-
- returnStr += logHelper(u"Deleting file " + cur_file, logger.DEBUG)
-
- #check first the read-only attribute
- file_attribute = ek.ek(os.stat, cur_file_path)[0]
- if (not file_attribute & stat.S_IWRITE):
- # File is read-only, so make it writeable
- returnStr += logHelper(u"Changing ReadOnly Flag for file " + cur_file, logger.DEBUG)
- try:
- ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
- except OSError, e:
- returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + str(e.strerror),
- logger.DEBUG)
- try:
- ek.ek(os.remove, cur_file_path)
- except OSError, e:
- returnStr += logHelper(u"Unable to delete file " + cur_file + ': ' + str(e.strerror), logger.DEBUG)
-
-def logHelper(logMessage, logLevel=logger.MESSAGE):
- logger.log(logMessage, logLevel)
- return logMessage + u"\n"
-
-
-def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto"):
- """
- Scans through the files in dirName and processes whatever media files it finds
-
- dirName: The folder name to look in
- nzbName: The NZB name which resulted in this folder being downloaded
- force: True to postprocess already postprocessed files
- failed: Boolean for whether or not the download failed
- type: Type of postprocessing auto or manual
- """
-
- global process_result, returnStr
-
- returnStr = ''
-
- returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)
-
- returnStr += logHelper(u"TV_DOWNLOAD_DIR: " + sickbeard.TV_DOWNLOAD_DIR, logger.DEBUG)
-
- # if they passed us a real dir then assume it's the one we want
- if ek.ek(os.path.isdir, dirName):
- dirName = ek.ek(os.path.realpath, dirName)
-
- # if the client and SickGear are not on the same machine translate the Dir in a network dir
- elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \
- and ek.ek(os.path.normpath, dirName) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
- dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1])
- returnStr += logHelper(u"Trying to use folder " + dirName, logger.DEBUG)
-
- # if we didn't find a real dir then quit
- if not ek.ek(os.path.isdir, dirName):
- returnStr += logHelper(
- u"Unable to figure out what folder to process. If your downloader and SickGear aren't on the same PC make sure you fill out your TV download dir in the config.",
- logger.DEBUG)
- return returnStr
-
- path, dirs, files = get_path_dir_files(dirName, nzbName, type)
-
- SyncFiles = filter(helpers.isSyncFile, files)
-
- # Don't post process if files are still being synced and option is activated
- if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
- returnStr += logHelper(u"Found temporary sync files, skipping post processing", logger.ERROR)
- return returnStr
-
- returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
- returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)
-
- rarFiles = filter(helpers.isRarFile, files)
- rarContent = unRAR(path, rarFiles, force)
- files += rarContent
- videoFiles = filter(helpers.isMediaFile, files)
- videoInRar = filter(helpers.isMediaFile, rarContent)
-
- returnStr += logHelper(u"PostProcessing Files: " + str(files), logger.DEBUG)
- returnStr += logHelper(u"PostProcessing VideoFiles: " + str(videoFiles), logger.DEBUG)
- returnStr += logHelper(u"PostProcessing RarContent: " + str(rarContent), logger.DEBUG)
- returnStr += logHelper(u"PostProcessing VideoInRar: " + str(videoInRar), logger.DEBUG)
-
- # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten).
- nzbNameOriginal = nzbName
- if len(videoFiles) >= 2:
- nzbName = None
-
- if not process_method:
- process_method = sickbeard.PROCESS_METHOD
-
- process_result = True
-
- #Don't Link media when the media is extracted from a rar in the same path
- if process_method in ('hardlink', 'symlink') and videoInRar:
- process_result = process_media(path, videoInRar, nzbName, 'move', force, is_priority)
- delete_files(path, rarContent)
- for video in set(videoFiles) - set(videoInRar):
- process_result = process_media(path, [video], nzbName, process_method, force, is_priority)
- else:
- for video in videoFiles:
- process_result = process_media(path, [video], nzbName, process_method, force, is_priority)
-
- #Process Video File in all TV Subdir
- for dir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed)]:
-
- process_result = True
-
- for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False):
-
- SyncFiles = filter(helpers.isSyncFile, fileList)
-
- # Don't post process if files are still being synced and option is activated
- if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
- returnStr += logHelper(u"Found temporary sync files, skipping post processing", logger.ERROR)
- return returnStr
-
- rarFiles = filter(helpers.isRarFile, fileList)
- rarContent = unRAR(processPath, rarFiles, force)
- fileList = set(fileList + rarContent)
- videoFiles = filter(helpers.isMediaFile, fileList)
- videoInRar = filter(helpers.isMediaFile, rarContent)
- notwantedFiles = [x for x in fileList if x not in videoFiles]
-
- #Don't Link media when the media is extracted from a rar in the same path
- if process_method in ('hardlink', 'symlink') and videoInRar:
- process_media(processPath, videoInRar, nzbName, 'move', force, is_priority)
- process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
- is_priority)
- delete_files(processPath, rarContent)
- else:
- process_media(processPath, videoFiles, nzbName, process_method, force, is_priority)
-
- #Delete all file not needed
- if process_method != "move" or not process_result \
- or type == "manual": #Avoid to delete files if is Manual PostProcessing
- continue
-
- delete_files(processPath, notwantedFiles)
-
- if process_method == "move" and \
- ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath,
- sickbeard.TV_DOWNLOAD_DIR):
- if delete_folder(processPath, check_empty=False):
- returnStr += logHelper(u"Deleted folder: " + processPath, logger.DEBUG)
-
- if process_result:
- returnStr += logHelper(u"Successfully processed")
- else:
- returnStr += logHelper(u"Problem(s) during processing", logger.WARNING)
-
- return returnStr
-
-
-def validateDir(path, dirName, nzbNameOriginal, failed):
- global process_result, returnStr
-
- returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)
-
- if ek.ek(os.path.basename, dirName).startswith('_FAILED_'):
- returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
- failed = True
- elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
- returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.",
- logger.DEBUG)
- failed = True
- elif ek.ek(os.path.basename, dirName).upper().startswith('_UNPACK'):
- returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.",
- logger.DEBUG)
- return False
-
- if failed:
- process_failed(os.path.join(path, dirName), nzbNameOriginal)
- return False
-
- if helpers.is_hidden_folder(dirName):
- returnStr += logHelper(u"Ignoring hidden folder: " + dirName, logger.DEBUG)
- return False
-
- # make sure the dir isn't inside a show dir
- myDB = db.DBConnection()
- sqlResults = myDB.select("SELECT * FROM tv_shows")
-
- for sqlShow in sqlResults:
- if dirName.lower().startswith(
- ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(
- os.path.realpath, sqlShow["location"]).lower():
- returnStr += logHelper(
- u"You're trying to post process an episode that's already been moved to its show dir, skipping",
- logger.ERROR)
+ # check if it's a folder
+ if not ek.ek(os.path.isdir, folder):
return False
- # Get the videofile list for the next checks
- allFiles = []
- allDirs = []
- for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dirName), topdown=False):
- allDirs += processDir
- allFiles += fileList
+ # make sure it isn't TV_DOWNLOAD_DIR
+ if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) == helpers.real_path(folder):
+ return False
- videoFiles = filter(helpers.isMediaFile, allFiles)
- allDirs.append(dirName)
+ # check if it's empty folder when wanted checked
+ if check_empty and ek.ek(os.listdir, folder):
+ return False
- #check if the dir have at least one tv video file
- for video in videoFiles:
+ # try deleting folder
try:
- NameParser().parse(video, cache_result=False)
- return True
- except (InvalidNameException, InvalidShowException):
- pass
+ shutil.rmtree(folder)
+ except (OSError, IOError), e:
+ logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
+ return False
- for dir in allDirs:
- try:
- NameParser().parse(dir, cache_result=False)
- return True
- except (InvalidNameException, InvalidShowException):
- pass
+ if ek.ek(os.path.isdir, folder):
+ logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING)
+ return False
- if sickbeard.UNPACK:
- #Search for packed release
- packedFiles = filter(helpers.isRarFile, allFiles)
+ self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE)
+ return True
- for packed in packedFiles:
+ def _delete_files(self, process_path, notwanted_files, use_trash=False):
+
+ if not self.any_vid_processed:
+ return
+
+ # Delete all file not needed
+ for cur_file in notwanted_files:
+
+ cur_file_path = ek.ek(os.path.join, process_path, cur_file)
+
+ if not ek.ek(os.path.isfile, cur_file_path):
+ continue # Prevent error when a notwantedfiles is an associated files
+
+ # check first the read-only attribute
+ file_attribute = ek.ek(os.stat, cur_file_path)[0]
+ if not file_attribute & stat.S_IWRITE:
+ # File is read-only, so make it writeable
+ self._log_helper(u'Changing ReadOnly flag for file ' + cur_file)
+ try:
+ ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
+ except OSError, e:
+ self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, str(e.strerror)))
try:
- NameParser().parse(packed, cache_result=False)
+ if use_trash:
+ ek.ek(send2trash, cur_file_path)
+ else:
+ ek.ek(os.remove, cur_file_path)
+ except OSError, e:
+ self._log_helper(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)))
+
+ if True is not ek.ek(os.path.isfile, cur_file_path):
+ self._log_helper(u'Deleted file ' + cur_file)
+
+ def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, pp_type='auto', cleanup=False):
+ """
+ Scans through the files in dir_name and processes whatever media files it finds
+
+ dir_name: The folder name to look in
+ nzb_name: The NZB name which resulted in this folder being downloaded
+ force: True to postprocess already postprocessed files
+ failed: Boolean for whether or not the download failed
+ pp_type: Type of postprocessing auto or manual
+ """
+
+ # if they passed us a real directory then assume it's the one we want
+ if ek.ek(os.path.isdir, dir_name):
+ self._log_helper(u'Processing folder... ' + dir_name)
+ dir_name = ek.ek(os.path.realpath, dir_name)
+
+ # if the client and SickGear are not on the same machine translate the directory in a network directory
+ elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR)\
+ and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
+ dir_name = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
+ self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickbeard.TV_DOWNLOAD_DIR)
+ self._log_helper(u'Trying to use folder... ' + dir_name)
+
+ # if we didn't find a real directory then quit
+ if not ek.ek(os.path.isdir, dir_name):
+ self._log_helper(
+ u'Unable to figure out what folder to process. If your downloader and SickGear aren\'t on the same PC then make sure you fill out your completed TV download folder in the PP config.')
+ return self.result
+
+ path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
+
+ sync_files = filter(helpers.isSyncFile, files)
+
+ # Don't post process if files are still being synced and option is activated
+ if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
+ self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
+ return self.result
+
+ self._log_helper(u'Process path: ' + path)
+ if 0 < len(dirs):
+ self._log_helper(u'Process dir%s: %s' % (('', 's')[1 < len(dirs)], str(dirs)))
+
+ rar_files = filter(helpers.isRarFile, files)
+ rar_content = self._unrar(path, rar_files, force)
+ files += rar_content
+ video_files = filter(helpers.isMediaFile, files)
+ video_in_rar = filter(helpers.isMediaFile, rar_content)
+
+ if 0 < len(files):
+ self._log_helper(u'Process file%s: %s' % (('', 's')[1 < len(files)], str(files)))
+ if 0 < len(video_files):
+ self._log_helper(u'Process video file%s: %s' % (('', 's')[1 < len(video_files)], str(video_files)))
+ if 0 < len(rar_content):
+ self._log_helper(u'Process rar content: ' + str(rar_content))
+ if 0 < len(video_in_rar):
+ self._log_helper(u'Process video in rar: ' + str(video_in_rar))
+
+ # If nzb_name is set and there's more than one videofile in the folder, files will be lost (overwritten).
+ nzb_name_original = nzb_name
+ if 2 <= len(video_files):
+ nzb_name = None
+
+ if not process_method:
+ process_method = sickbeard.PROCESS_METHOD
+
+ # self._set_process_success()
+
+ # Don't Link media when the media is extracted from a rar in the same path
+ if process_method in ('hardlink', 'symlink') and video_in_rar:
+ self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace)
+ self._delete_files(path, rar_content)
+ video_batch = set(video_files) - set(video_in_rar)
+ else:
+ video_batch = video_files
+
+ while 0 < len(video_batch):
+ video_pick = ['']
+ video_size = 0
+ for cur_video_file in video_batch:
+ cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file))
+ if 0 == video_size or cur_video_size > video_size:
+ video_size = cur_video_size
+ video_pick = [cur_video_file]
+
+ video_batch = set(video_batch) - set(video_pick)
+
+ self._process_media(path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup)
+
+ # Process video files in TV subdirectories
+ for directory in [x for x in dirs if self._validate_dir(path, x, nzb_name_original, failed)]:
+
+ self._set_process_success(reset=True)
+
+ for process_path, process_dir, file_list in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):
+
+ sync_files = filter(helpers.isSyncFile, file_list)
+
+ # Don't post process if files are still being synced and option is activated
+ if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
+ self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
+ return self.result
+
+ rar_files = filter(helpers.isRarFile, file_list)
+ rar_content = self._unrar(process_path, rar_files, force)
+ file_list = set(file_list + rar_content)
+ video_files = filter(helpers.isMediaFile, file_list)
+ video_in_rar = filter(helpers.isMediaFile, rar_content)
+ notwanted_files = [x for x in file_list if x not in video_files]
+
+ # Don't Link media when the media is extracted from a rar in the same path
+ if process_method in ('hardlink', 'symlink') and video_in_rar:
+ self._process_media(process_path, video_in_rar, nzb_name, 'move', force, force_replace)
+ video_batch = set(video_files) - set(video_in_rar)
+ else:
+ video_batch = video_files
+
+ while 0 < len(video_batch):
+ video_pick = ['']
+ video_size = 0
+ for cur_video_file in video_batch:
+ cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, process_path, cur_video_file))
+ if 0 == video_size or cur_video_size > video_size:
+ video_size = cur_video_size
+ video_pick = [cur_video_file]
+
+ video_batch = set(video_batch) - set(video_pick)
+
+ self._process_media(process_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup)
+
+ if process_method in ('hardlink', 'symlink') and video_in_rar:
+ self._delete_files(process_path, rar_content)
+ else:
+ # Delete all file not needed
+ if not self.any_vid_processed\
+ or 'move' != process_method\
+ or ('manual' == pp_type and not cleanup): # Avoid deleting files if Manual Postprocessing
+ continue
+
+ self._delete_files(process_path, notwanted_files, use_trash=cleanup)
+
+ if 'move' == process_method\
+ and ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR) != ek.ek(os.path.normpath, process_path):
+ self._delete_folder(process_path, check_empty=False)
+
+ def _bottom_line(text, log_level=logger.DEBUG):
+ self._buffer('-' * len(text))
+ self._log_helper(text, log_level)
+
+ if self.any_vid_processed:
+ if not self.files_failed:
+ _bottom_line(u'Successfully processed.', logger.MESSAGE)
+ else:
+ _bottom_line(u'Successfully processed at least one video file %s.' % (', others were skipped', 'and skipped another')[1 == self.files_failed], logger.MESSAGE)
+ else:
+ _bottom_line(u'Failed! Did not process any files.', logger.WARNING)
+
+ return self.result
+
+ def _validate_dir(self, path, dir_name, nzb_name_original, failed):
+
+ self._log_helper(u'Processing dir: ' + dir_name)
+
+ if ek.ek(os.path.basename, dir_name).startswith('_FAILED_'):
+ self._log_helper(u'The directory name indicates it failed to extract.')
+ failed = True
+ elif ek.ek(os.path.basename, dir_name).startswith('_UNDERSIZED_'):
+ self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.')
+ failed = True
+ elif ek.ek(os.path.basename, dir_name).upper().startswith('_UNPACK'):
+ self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.')
+ return False
+
+ if failed:
+ self._process_failed(os.path.join(path, dir_name), nzb_name_original)
+ return False
+
+ if helpers.is_hidden_folder(dir_name):
+ self._log_helper(u'Ignoring hidden folder: ' + dir_name)
+ return False
+
+ # make sure the directory isn't inside a show directory
+ my_db = db.DBConnection()
+ sql_results = my_db.select('SELECT * FROM tv_shows')
+
+ for sqlShow in sql_results:
+ if dir_name.lower().startswith(ek.ek(os.path.realpath, sqlShow['location']).lower() + os.sep)\
+ or dir_name.lower() == ek.ek(os.path.realpath, sqlShow['location']).lower():
+ self._log_helper(
+ u'Found an episode that has already been moved to its show dir, skipping',
+ logger.ERROR)
+ return False
+
+ # Get the videofile list for the next checks
+ all_files = []
+ all_dirs = []
+ for process_path, process_dir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir_name), topdown=False):
+ all_dirs += process_dir
+ all_files += fileList
+
+ video_files = filter(helpers.isMediaFile, all_files)
+ all_dirs.append(dir_name)
+
+ # check if the directory have at least one tv video file
+ for video in video_files:
+ try:
+ NameParser().parse(video, cache_result=False)
return True
except (InvalidNameException, InvalidShowException):
pass
- return False
-
-def unRAR(path, rarFiles, force):
- global process_result, returnStr
-
- unpacked_files = []
-
- if sickbeard.UNPACK and rarFiles:
-
- returnStr += logHelper(u"Packed Releases detected: " + str(rarFiles), logger.DEBUG)
-
- for archive in rarFiles:
-
- returnStr += logHelper(u"Unpacking archive: " + archive, logger.DEBUG)
-
+ for directory in all_dirs:
try:
- rar_handle = RarFile(os.path.join(path, archive))
+ NameParser().parse(directory, cache_result=False)
+ return True
+ except (InvalidNameException, InvalidShowException):
+ pass
- # Skip extraction if any file in archive has previously been extracted
- skip_file = False
- for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]:
- if already_postprocessed(path, file_in_archive, force):
- returnStr += logHelper(
- u"Archive file already post-processed, extraction skipped: " + file_in_archive,
- logger.DEBUG)
- skip_file = True
- break
+ if sickbeard.UNPACK:
+ # Search for packed release
+ packed_files = filter(helpers.isRarFile, all_files)
- if skip_file:
- continue
+ for packed in packed_files:
+ try:
+ NameParser().parse(packed, cache_result=False)
+ return True
+ except (InvalidNameException, InvalidShowException):
+ pass
- rar_handle.extract(path=path, withSubpath=False, overwrite=False)
- unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
- del rar_handle
- except Exception, e:
- returnStr += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR)
- process_result = False
- continue
-
- returnStr += logHelper(u"UnRar content: " + str(unpacked_files), logger.DEBUG)
-
- return unpacked_files
-
-
-def already_postprocessed(dirName, videofile, force):
- global returnStr
-
- if force:
return False
- #Needed for accessing DB with a unicode DirName
- if not isinstance(dirName, unicode):
- dirName = unicode(dirName, 'utf_8')
+ def _unrar(self, path, rar_files, force):
- # Avoid processing the same dir again if we use a process method <> move
- myDB = db.DBConnection()
- sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
- if sqlResult:
- returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
- logger.DEBUG)
- return True
+ unpacked_files = []
- else:
- # This is needed for video whose name differ from dirName
- if not isinstance(videofile, unicode):
- videofile = unicode(videofile, 'utf_8')
+ if sickbeard.UNPACK and rar_files:
- sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
- if sqlResult:
- returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
- logger.DEBUG)
- return True
+ self._log_helper(u'Packed releases detected: ' + str(rar_files))
- #Needed if we have downloaded the same episode @ different quality
- search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
- search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
+ for archive in rar_files:
- np = NameParser(dirName, tryIndexers=True, convert=True)
+ self._log_helper(u'Unpacking archive: ' + archive)
+
+ try:
+ rar_handle = RarFile(os.path.join(path, archive))
+
+ # Skip extraction if any file in archive has previously been extracted
+ skip_file = False
+ for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]:
+ if self._already_postprocessed(path, file_in_archive, force):
+ self._log_helper(
+ u'Archive file already processed, extraction skipped: ' + file_in_archive)
+ skip_file = True
+ break
+
+ if skip_file:
+ continue
+
+ rar_handle.extract(path=path, withSubpath=False, overwrite=False)
+ unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
+ del rar_handle
+ except Exception, e:
+ self._log_helper(u'Failed to unpack archive %s: %s' % (archive, ex(e)), logger.ERROR)
+ self._set_process_success(False)
+ continue
+
+ self._log_helper(u'Unpacked content: ' + str(unpacked_files))
+
+ return unpacked_files
+
+ def _already_postprocessed(self, dir_name, videofile, force):
+
+ if force and not self.any_vid_processed:
+ return False
+
+ # Needed for accessing DB with a unicode dir_name
+ if not isinstance(dir_name, unicode):
+ dir_name = unicode(dir_name, 'utf_8')
+
+ parse_result = None
try:
- parse_result = np.parse(dirName)
- except:
- parse_result = False
+ parse_result = NameParser(try_indexers=True, try_scene_exceptions=True, convert=True).parse(videofile, cache_result=False)
+ except (InvalidNameException, InvalidShowException):
pass
+ if None is parse_result:
+ try:
+ parse_result = NameParser(try_indexers=True, try_scene_exceptions=True, convert=True).parse(dir_name, cache_result=False)
+ except (InvalidNameException, InvalidShowException):
+ pass
- if parse_result and (parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
- search_sql += " and tv_episodes.showid = '" + str(parse_result.show.indexerid)\
- + "' and tv_episodes.season = '" + str(parse_result.season_number)\
- + "' and tv_episodes.episode = '" + str(parse_result.episode_numbers[0]) + "'"
+ showlink = ''
+ ep_detail_sql = ''
+ undo_status = None
+ if parse_result:
+ showlink = (' for "<a href="/home/displayShow?show=%s">%s</a>"' % (parse_result.show.indexerid, parse_result.show.name),
+ parse_result.show.name)[self.any_vid_processed]
- search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
- search_sql += " and history.resource LIKE ?"
- sqlResult = myDB.select(search_sql, [u'%' + videofile])
- if sqlResult:
- returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
- logger.DEBUG)
+ if parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number:
+ ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
+ % (str(parse_result.show.indexerid),
+ str(parse_result.season_number),
+ str(parse_result.episode_numbers[0]))
+ undo_status = "UPDATE `tv_episodes` SET status="\
+ + "(SELECT h.action FROM `history` as h INNER JOIN `tv_episodes` as t on h.showid=t.showid"\
+ + " where t.showid='%s' and t.season='%s' and t.episode='%s'"\
+ % (str(parse_result.show.indexerid), str(parse_result.season_number), str(parse_result.episode_numbers[0]))\
+ + " and (h.action is not t.status) group by h.action order by h.date DESC LIMIT 1)"\
+ + " where showid='%s' and season='%s' and episode='%s'"\
+ % (str(parse_result.show.indexerid), str(parse_result.season_number), str(parse_result.episode_numbers[0]))
+
+ # Avoid processing the same directory again if we use a process method <> move
+ my_db = db.DBConnection()
+ sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name])
+ if sql_result:
+ self._log_helper(u'Found a release directory%s that has already been processed,<br />.. skipping: %s'
+ % (showlink, dir_name))
+ my_db.action(undo_status)
return True
- return False
-
-
-def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority):
- global process_result, returnStr
-
- processor = None
- for cur_video_file in videoFiles:
-
- if already_postprocessed(processPath, cur_video_file, force):
- continue
-
- cur_video_file_path = ek.ek(os.path.join, processPath, cur_video_file)
-
- try:
- processor = postProcessor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority)
- process_result = processor.process()
- process_fail_message = ""
- except exceptions.PostProcessingFailed, e:
- process_result = False
- process_fail_message = ex(e)
-
- if processor:
- returnStr += processor.log
-
- if process_result:
- returnStr += logHelper(u"Processing succeeded for " + cur_video_file_path)
else:
- returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message,
- logger.WARNING)
+ # This is needed for video whose name differ from dir_name
+ if not isinstance(videofile, unicode):
+ videofile = unicode(videofile, 'utf_8')
- #If something fail abort the processing on dir
- if not process_result:
- break
+ sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
+ if sql_result:
+ self._log_helper(u'Found a video, but that release%s was already processed,<br />.. skipping: %s'
+ % (showlink, videofile))
+ my_db.action(undo_status)
+ return True
-def get_path_dir_files(dirName, nzbName, type):
- path = ""
- dirs = []
- files = []
+ # Needed if we have downloaded the same episode @ different quality
+ search_sql = 'SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history'\
+ + ' ON history.showid=tv_episodes.showid'\
+ + ' WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode'\
+ + ep_detail_sql\
+ + ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\
+ + ' and history.resource LIKE ?'
- if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type == "manual": #Scheduled Post Processing Active
- #Get at first all the subdir in the dirName
- for path, dirs, files in ek.ek(os.walk, dirName):
- break
- else:
- path, dirs = ek.ek(os.path.split, dirName) #Script Post Processing
- if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(
- os.path.join(dirName, nzbName)): #For single torrent file without Dir
- dirs = []
- files = [os.path.join(dirName, nzbName)]
- else:
- dirs = [dirs]
- files = []
+ sql_result = my_db.select(search_sql, [u'%' + videofile])
+ if sql_result:
+ self._log_helper(u'Found a video, but the episode%s is already processed,<br />.. skipping: %s'
+ % (showlink, videofile))
+ my_db.action(undo_status)
+ return True
- return path, dirs, files
+ return False
+ def _process_media(self, process_path, video_files, nzb_name, process_method, force, force_replace, use_trash=False):
-def process_failed(dirName, nzbName):
- """Process a download that did not complete correctly"""
-
- global returnStr
-
- if sickbeard.USE_FAILED_DOWNLOADS:
processor = None
+ for cur_video_file in video_files:
- try:
- processor = failedProcessor.FailedProcessor(dirName, nzbName)
- process_result = processor.process()
- process_fail_message = ""
- except exceptions.FailedProcessingFailed, e:
- process_result = False
- process_fail_message = ex(e)
+ if self._already_postprocessed(process_path, cur_video_file, force):
+ self._set_process_success(False)
+ continue
- if processor:
- returnStr += processor.log
+ cur_video_file_path = ek.ek(os.path.join, process_path, cur_video_file)
- if sickbeard.DELETE_FAILED and process_result:
- if delete_folder(dirName, check_empty=False):
- returnStr += logHelper(u"Deleted folder: " + dirName, logger.DEBUG)
+ try:
+ processor = postProcessor.PostProcessor(cur_video_file_path, nzb_name, process_method, force_replace, use_trash=use_trash)
+ file_success = processor.process()
+ process_fail_message = ''
+ except exceptions.PostProcessingFailed, e:
+ file_success = False
+ process_fail_message = '<br />.. ' + ex(e)
- if process_result:
- returnStr += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
+ self._set_process_success(file_success)
+
+ if processor:
+ self._buffer(processor.log.strip('\n'))
+
+ if file_success:
+ self._log_helper(u'Successfully processed ' + cur_video_file, logger.MESSAGE)
+ elif self.any_vid_processed:
+ self._log_helper(u'Warning fail for %s%s' % (cur_video_file_path, process_fail_message),
+ logger.WARNING)
+ else:
+ self._log_helper(u'Did not use file %s%s' % (cur_video_file_path, process_fail_message),
+ logger.WARNING)
+
+ @staticmethod
+ def _get_path_dir_files(dir_name, nzb_name, pp_type):
+ path = ''
+ dirs = []
+ files = []
+
+ if dir_name == sickbeard.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type: # Scheduled Post Processing Active
+ # Get at first all the subdir in the dir_name
+ for path, dirs, files in ek.ek(os.walk, dir_name):
+ break
else:
- returnStr += logHelper(
- u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
- logger.WARNING)
+ path, dirs = ek.ek(os.path.split, dir_name) # Script Post Processing
+ if None is not nzb_name and not nzb_name.endswith('.nzb') and os.path.isfile(
+ os.path.join(dir_name, nzb_name)): # For single torrent file without directory
+ dirs = []
+ files = [os.path.join(dir_name, nzb_name)]
+ else:
+ dirs = [dirs]
+ files = []
+
+ return path, dirs, files
+
+ # noinspection PyArgumentList
+ def _process_failed(self, dir_name, nzb_name):
+ """ Process a download that did not complete correctly """
+
+ if sickbeard.USE_FAILED_DOWNLOADS:
+ processor = None
+
+ try:
+ processor = failedProcessor.FailedProcessor(dir_name, nzb_name)
+ self._set_process_success(processor.process())
+ process_fail_message = ''
+ except exceptions.FailedProcessingFailed, e:
+ self._set_process_success(False)
+ process_fail_message = ex(e)
+
+ if processor:
+ self._buffer(processor.log.strip('\n'))
+
+ if sickbeard.DELETE_FAILED and self.any_vid_processed:
+ self._delete_folder(dir_name, check_empty=False)
+
+ task = u'Failed download processing'
+ if self.any_vid_processed:
+ self._log_helper(u'Successful %s: (%s, %s)'
+ % (task.lower(), str(nzb_name), dir_name), logger.MESSAGE)
+ else:
+ self._log_helper(u'%s failed: (%s, %s): %s'
+ % (task, str(nzb_name), dir_name, process_fail_message), logger.WARNING)
+
+
+# backward compatibility prevents the case of this function name from being updated to PEP8
+def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, type='auto', cleanup=False):
+ # backward compatibility prevents the case of this function name from being updated to PEP8
+ return ProcessTVShow().process_dir(dir_name, nzb_name, process_method, force, force_replace, failed, type, cleanup)
diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py
index 99908a85..4efe02d0 100644
--- a/sickbeard/show_name_helpers.py
+++ b/sickbeard/show_name_helpers.py
@@ -237,7 +237,7 @@ def determineReleaseName(dir_name=None, nzb_name=None):
"""Determine a release name from an nzb and/or folder name"""
if nzb_name is not None:
- logger.log(u"Using nzb_name for release name.")
+ logger.log(u'Using nzb name for release name.')
return nzb_name.rpartition('.')[0]
if dir_name is None:
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index 6003f7e0..ebd08b9e 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -435,7 +435,7 @@ class TVShow(object):
try:
parse_result = None
- np = NameParser(False, showObj=self, tryIndexers=True)
+ np = NameParser(False, showObj=self, try_indexers=True)
parse_result = np.parse(ep_file_name)
except (InvalidNameException, InvalidShowException):
pass
@@ -627,7 +627,7 @@ class TVShow(object):
logger.log(str(self.indexerid) + u": Creating episode object from " + file, logger.DEBUG)
try:
- myParser = NameParser(showObj=self, tryIndexers=True)
+ myParser = NameParser(showObj=self, try_indexers=True)
parse_result = myParser.parse(file)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + file + " into a valid episode", logger.DEBUG)
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index 9fd8afcc..63ba6183 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -1312,7 +1312,7 @@ class CMD_PostProcess(ApiCall):
if not self.type:
self.type = 'manual'
- data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, is_priority=self.is_priority, failed=False, type=self.type)
+ data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, force_replace=self.is_priority, failed=False, type=self.type)
if not self.return_data:
data = ""
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 76e3d3f7..7ea2332b 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -1911,72 +1911,49 @@ class Home(MainHandler):
sceneEpisode=None, sceneAbsolute=None):
# sanitize:
- if forSeason in ['null', '']: forSeason = None
- if forEpisode in ['null', '']: forEpisode = None
- if forAbsolute in ['null', '']: forAbsolute = None
- if sceneSeason in ['null', '']: sceneSeason = None
- if sceneEpisode in ['null', '']: sceneEpisode = None
- if sceneAbsolute in ['null', '']: sceneAbsolute = None
+ show = None if show in [None, 'null', ''] else int(show)
+ indexer = None if indexer in [None, 'null', ''] else int(indexer)
- showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
+ show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, show)
- if showObj.is_anime:
- result = {
- 'success': True,
- 'forAbsolute': forAbsolute,
- }
+ if not show_obj.is_anime:
+ for_season = None if forSeason in [None, 'null', ''] else int(forSeason)
+ for_episode = None if forEpisode in [None, 'null', ''] else int(forEpisode)
+ scene_season = None if sceneSeason in [None, 'null', ''] else int(sceneSeason)
+ scene_episode = None if sceneEpisode in [None, 'null', ''] else int(sceneEpisode)
+ action_log = u'Set episode scene numbering to %sx%s for episode %sx%s of "%s"'\
+ % (scene_season, scene_episode, for_season, for_episode, show_obj.name)
+ ep_args = {'show': show, 'season': for_season, 'episode': for_episode}
+ scene_args = {'indexer_id': show, 'indexer': indexer, 'season': for_season, 'episode': for_episode,
+ 'sceneSeason': scene_season, 'sceneEpisode': scene_episode}
+ result = {'forSeason': for_season, 'forEpisode': for_episode, 'sceneSeason': None, 'sceneEpisode': None}
else:
- result = {
- 'success': True,
- 'forSeason': forSeason,
- 'forEpisode': forEpisode,
- }
+ for_absolute = None if forAbsolute in [None, 'null', ''] else int(forAbsolute)
+ scene_absolute = None if sceneAbsolute in [None, 'null', ''] else int(sceneAbsolute)
+ action_log = u'Set absolute scene numbering to %s for episode %s of "%s"'\
+ % (scene_absolute, for_absolute, show_obj.name)
+ ep_args = {'show': show, 'absolute': for_absolute}
+ scene_args = {'indexer_id': show, 'indexer': indexer, 'absolute_number': for_absolute,
+ 'sceneAbsolute': scene_absolute}
+ result = {'forAbsolute': for_absolute, 'sceneAbsolute': None}
- # retrieve the episode object and fail if we can't get one
- if showObj.is_anime:
- ep_obj = self._getEpisode(show, absolute=forAbsolute)
+ ep_obj = self._getEpisode(**ep_args)
+ result['success'] = not isinstance(ep_obj, str)
+ if result['success']:
+ logger.log(action_log, logger.DEBUG)
+ set_scene_numbering(**scene_args)
+ show_obj.flushEpisodes()
else:
- ep_obj = self._getEpisode(show, forSeason, forEpisode)
-
- if isinstance(ep_obj, str):
- result['success'] = False
result['errorMessage'] = ep_obj
- elif showObj.is_anime:
- logger.log(u'setAbsoluteSceneNumbering for %s from %s to %s' %
- (show, forAbsolute, sceneAbsolute), logger.DEBUG)
- show = int(show)
- indexer = int(indexer)
- forAbsolute = int(forAbsolute)
- if sceneAbsolute is not None: sceneAbsolute = int(sceneAbsolute)
-
- set_scene_numbering(show, indexer, absolute_number=forAbsolute, sceneAbsolute=sceneAbsolute)
+ if not show_obj.is_anime:
+ scene_numbering = get_scene_numbering(show, indexer, for_season, for_episode)
+ if scene_numbering:
+ (result['sceneSeason'], result['sceneEpisode']) = scene_numbering
else:
- logger.log(u'setEpisodeSceneNumbering for %s from %sx%s to %sx%s' %
- (show, forSeason, forEpisode, sceneSeason, sceneEpisode), logger.DEBUG)
-
- show = int(show)
- indexer = int(indexer)
- forSeason = int(forSeason)
- forEpisode = int(forEpisode)
- if sceneSeason is not None: sceneSeason = int(sceneSeason)
- if sceneEpisode is not None: sceneEpisode = int(sceneEpisode)
-
- set_scene_numbering(show, indexer, season=forSeason, episode=forEpisode, sceneSeason=sceneSeason,
- sceneEpisode=sceneEpisode)
-
- if showObj.is_anime:
- sn = get_scene_absolute_numbering(show, indexer, forAbsolute)
- if sn:
- result['sceneAbsolute'] = sn
- else:
- result['sceneAbsolute'] = None
- else:
- sn = get_scene_numbering(show, indexer, forSeason, forEpisode)
- if sn:
- (result['sceneSeason'], result['sceneEpisode']) = sn
- else:
- (result['sceneSeason'], result['sceneEpisode']) = (None, None)
+ scene_numbering = get_scene_absolute_numbering(show, indexer, for_absolute)
+ if scene_numbering:
+ result['sceneAbsolute'] = scene_numbering
return json.dumps(result)
@@ -2024,33 +2001,22 @@ class HomePostProcess(Home):
return t.respond()
def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None,
- is_priority=None, failed='0', type='auto', *args, **kwargs):
-
- if failed == '0':
- failed = False
- else:
- failed = True
-
- if force in ['on', '1']:
- force = True
- else:
- force = False
-
- if is_priority in ['on', '1']:
- is_priority = True
- else:
- is_priority = False
+ force_replace=None, failed='0', type='auto', **kwargs):
if not dir:
self.redirect('/home/postprocess/')
else:
- result = processTV.processDir(dir, nzbName, process_method=process_method, force=force,
- is_priority=is_priority, failed=failed, type=type)
- if quiet is not None and int(quiet) == 1:
- return result
+ result = processTV.processDir(dir, nzbName, process_method=process_method, type=type,
+ cleanup='cleanup' in kwargs and kwargs['cleanup'] in ['on', '1'],
+ force=force in ['on', '1'],
+ force_replace=force_replace in ['on', '1'],
+ failed=not '0' == failed)
- result = result.replace('\n', '
\n')
- return self._genericMessage('Postprocessing results', result)
+ result = re.sub(r'(?i)
', '\n', result)
+ if None is not quiet and 1 == int(quiet):
+ return u'%s' % re.sub('(?i)
]+>([^<]+)<[/]a>', r'\1', result)
+
+ return self._genericMessage('Postprocessing results', u'%s
' % result)
class NewHomeAddShows(Home):