Mirror of https://github.com/SickGear/SickGear.git (synced 2025-03-15 17:17:43 +00:00)

Merge pull request #247 from JackDandy/feature/ChangePPFileOrder
Change to post processing files ordered largest to smallest

Commit af85e4230c: 12 changed files with 1057 additions and 951 deletions

CHANGES.md (15 changes)
@@ -31,8 +31,21 @@
 * Add General Config/Interface/"Group show list shows into:"... to divide shows into groups on the Show List page
 * Change Show List progress bar code, smaller page load, efficient use of js render engine
 * Change values used for date sorting on home page and episode view for improved compatibility with posix systems
-* Change response handling in downloaders to simplify logic.
+* Change response handling in downloaders to simplify logic
 * Change reduce html payload across page template files
+* Change to post process files ordered largest to smallest and tidied PP logging output
+* Add "then trash subdirs and files" to the Process method "Move" on the manual post process page
+* Add using show scene exceptions with post processing
+* Change overhaul processTV into a thread safe class
+* Change postProcessor and processTV to PEP8 standards
+* Change overhaul Manual Post-Processing page in line with layout style and improve texts
+* Change Force Processes enabled, only the largest video file of many will be processed instead of all files
+* Change visual ui of Postprocessing results to match the logs and errors view
+* Change remove ugly printing of episode object during PP seen in external apps like sabnzbd
+* Change to streamline output toward actual work done instead of showing all vars
+* Change pp report items from describing actions about to happen to instead detail the actual outcome of actions
+* Add clarity to the output of a successful post process but with some issues rather than "there were problems"
+* Add a conclusive bottom line to the pp result report
 
 [develop changelog]
 Fix issue changing a custom show list group name that is in use. The bug resulted in the db containing stale group names
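The headline entry above, post process files ordered largest to smallest, is implemented later in this diff by draining a video_batch set one biggest file at a time. A minimal standalone sketch of that selection idea, using only the standard library; the folder and file names in the usage comment are made up:

import os

def largest_first(path, video_files):
    """Yield video file names from `path` ordered largest to smallest.

    Illustrates the batch-selection idea used by the reworked process_dir():
    repeatedly pick the single biggest remaining file, then process it.
    """
    remaining = set(video_files)
    while remaining:
        biggest = max(remaining, key=lambda name: os.path.getsize(os.path.join(path, name)))
        remaining.discard(biggest)
        yield biggest

# Hypothetical usage: process the real episode before any sample files
# for video in largest_first('/downloads/Show.S01E01', ['episode.mkv', 'sample.mkv']):
#     post_process(video)  # post_process() stands in for the real PostProcessor call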
@@ -126,7 +126,7 @@ fonts
 font-style:italic
 }
 
-/* SickGear Icons */
+/* Droid Sans */
 @font-face{
 font-family:'sgicons';
 src:url('fonts/sgicons.eot');
@@ -1097,13 +1097,12 @@ home_postprocess.tmpl
 ========================================================================== */
 
 #postProcess{
-width:800px;
-padding-top:10px;
+width:650px;
+padding-top:30px;
 margin-right:auto;
 margin-left:auto
 }
 
 
 /* =======================================================================
 displayShow.tmpl
 ========================================================================== */
@ -1,86 +1,105 @@
|
||||||
#import sickbeard
|
#import sickbeard
|
||||||
##
|
##
|
||||||
#set global $header="Post Processing"
|
#set global $header = 'Post Processing'
|
||||||
#set global $title="Post Processing"
|
#set global $title = $header
|
||||||
#set global $sbPath="../.."
|
#set global $topmenu = 'home'
|
||||||
#set global $topmenu="home"
|
#set global $sbPath = '../..'
|
||||||
##
|
##
|
||||||
#import os.path
|
#import os.path
|
||||||
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl")
|
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')
|
||||||
|
|
||||||
<div id="content800">
|
|
||||||
#if $varExists('header')
|
#if $varExists('header')
|
||||||
<h1 class="header">$header</h1>
|
<h1 class="header">$header</h1>
|
||||||
#else
|
#else
|
||||||
<h1 class="title">$title</h1>
|
<h1 class="title">$title</h1>
|
||||||
#end if
|
#end if
|
||||||
|
|
||||||
<div id="postProcess">
|
#set selected = ' selected="selected"'
|
||||||
<form name="processForm" method="post" action="processEpisode" style="line-height: 40px;">
|
|
||||||
<table>
|
<form name="processForm" method="post" action="processEpisode">
|
||||||
<input type="hidden" id="type" name="type" value="manual">
|
<input type="hidden" id="type" name="type" value="manual">
|
||||||
<tr>
|
|
||||||
<td style="padding-right:10px;">
|
<div id="postProcess" class="stepDiv">
|
||||||
<b>Enter the folder containing the episode:</b>
|
|
||||||
</td>
|
<div class="field-pair">
|
||||||
<td>
|
<label for="episodeDir">
|
||||||
<input type="text" name="dir" id="episodeDir" class="form-control form-control-inline input-sm input350" />
|
<span class="component-title input">Folder to process</span>
|
||||||
</td>
|
<span class="component-desc">
|
||||||
</tr>
|
<input id="episodeDir" name="dir" type="text" class="form-control form-control-inline input-sm input350">
|
||||||
<tr>
|
<p class="clear-left">select a folder containing episode files</p>
|
||||||
<td>
|
</span>
|
||||||
<b>Process Method to be used:</b>
|
</label>
|
||||||
</td>
|
</div>
|
||||||
<td>
|
|
||||||
<select name="process_method" id="process_method" class="form-control form-control-inline input-sm" >
|
<div class="field-pair">
|
||||||
#set $process_method_text = {'copy': "Copy", 'move': "Move", 'hardlink': "Hard Link", 'symlink' : "Symbolic Link"}
|
<label for="process_method">
|
||||||
#for $curAction in ('copy', 'move', 'hardlink', 'symlink'):
|
<span class="component-title input">Process method to use</span>
|
||||||
#if $sickbeard.PROCESS_METHOD == $curAction:
|
<span class="component-desc">
|
||||||
#set $process_method = "selected=\"selected\""
|
<select id="process_method" name="process_method" class="form-control form-control-inline input-sm">
|
||||||
#else
|
#set $process_methods = {'copy': 'Copy', 'move': 'Move', 'hardlink': 'Hard Link', 'symlink': 'Symbolic Link'}
|
||||||
#set $process_method = ""
|
#for $method in ('copy', 'move', 'hardlink', 'symlink'):
|
||||||
#end if
|
<option value="$method"#echo ('', $selected)[$method == $sickbeard.PROCESS_METHOD]#>$process_methods[$method]</option>
|
||||||
<option value="$curAction" $process_method>$process_method_text[$curAction]</option>
|
|
||||||
#end for
|
#end for
|
||||||
</select>
|
</select>
|
||||||
</td>
|
<label for="cleanup">
|
||||||
</tr>
|
<span id="move-and-trash" style="display:#echo ('none', 'block')['move' == $sickbeard.PROCESS_METHOD]#">
|
||||||
<tr>
|
then trash left over subdirs/files<input id="cleanup" name="cleanup" type="checkbox" style="float:none;margin-left:10px">
|
||||||
<td>
|
</span>
|
||||||
<b>Force already Post Processed Dir/Files:</b>
|
</label>
|
||||||
</td>
|
</span>
|
||||||
<td>
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="field-pair">
|
||||||
|
<label for="force">
|
||||||
|
<span class="component-title">Force already processed</span>
|
||||||
|
<span class="component-desc">
|
||||||
<input id="force" name="force" type="checkbox">
|
<input id="force" name="force" type="checkbox">
|
||||||
</td>
|
<span>allow known post processed dirs/files to be processed again</span>
|
||||||
</tr>
|
</span>
|
||||||
<tr>
|
</label>
|
||||||
<td>
|
</div>
|
||||||
<b>Mark Dir/Files as priority download:</b>
|
|
||||||
</td>
|
<div class="field-pair">
|
||||||
<td>
|
<label for="force_replace">
|
||||||
<input id="is_priority" name="is_priority" type="checkbox">
|
<span class="component-title">Force replace existing</span>
|
||||||
<span style="line-height: 0; font-size: 12px;"><i> (Check it to replace the file even if it exists at higher quality)</i></span>
|
<span class="component-desc">
|
||||||
</td>
|
<input id="force_replace" name="force_replace" type="checkbox">
|
||||||
</tr>
|
<span>replace existing files ignoring file size and video quality differences</span>
|
||||||
|
</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
#if $sickbeard.USE_FAILED_DOWNLOADS:
|
#if $sickbeard.USE_FAILED_DOWNLOADS:
|
||||||
<tr>
|
<div class="field-pair">
|
||||||
<td>
|
<label for="failed">
|
||||||
<b>Mark download as failed:</b>
|
<span class="component-title">Mark as failed</span>
|
||||||
</td>
|
<span class="component-desc">
|
||||||
<td>
|
|
||||||
<input id="failed" name="failed" type="checkbox">
|
<input id="failed" name="failed" type="checkbox">
|
||||||
</td>
|
<span>queue a search to try find a different release for each processed item</span>
|
||||||
</tr>
|
</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
#end if
|
#end if
|
||||||
</table>
|
|
||||||
<input id="submit" class="btn" type="submit" value="Process" />
|
<div class="field-pair" style="margin-top:12px">
|
||||||
|
<span class="component-title input"> </span>
|
||||||
|
<span class="component-desc">
|
||||||
|
<input id="submit" class="btn" type="submit" value="Process">
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div><!-- /postProcess -->
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
<script type="text/javascript" charset="utf-8">
|
<script type="text/javascript" charset="utf-8">
|
||||||
<!--
|
<!--
|
||||||
|
#raw
|
||||||
jQuery('#episodeDir').fileBrowser({title:'Select Unprocessed Episode Folder', key:'postprocessPath'});
|
jQuery('#episodeDir').fileBrowser({title:'Select Unprocessed Episode Folder', key:'postprocessPath'});
|
||||||
|
$('#process_method').change(function(){
|
||||||
|
$('#move-and-trash').css({'display':('move' == $(this).attr('value') ? 'block' : 'none')})
|
||||||
|
});
|
||||||
|
#end raw
|
||||||
//-->
|
//-->
|
||||||
</script>
|
</script>
|
||||||
</div>
|
|
||||||
|
|
||||||
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')
|
|
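Both the reworked template above and the Python further down rely on the same idiom: indexing a two-item tuple with a boolean test, as in `('', $selected)[$method == $sickbeard.PROCESS_METHOD]` or `('', 's')[1 < len(files)]`. A short plain-Python illustration of how that evaluates (the sample values are invented):

# True and False act as indexes 1 and 0, so the tuple picks the second or
# first element; it reads like a compact conditional expression.
files = ['a.mkv', 'b.mkv']
plural = ('', 's')[1 < len(files)]   # -> 's', because the test is True (index 1)
print('Process file%s: %s' % (plural, files))

# The same trick emits the selected attribute in the template's option list.
process_method = 'move'
for method in ('copy', 'move', 'hardlink', 'symlink'):
    attr = ('', ' selected="selected"')[method == process_method]
    print('<option value="%s"%s>%s</option>' % (method, attr, method.title()))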
@ -1,5 +1,6 @@
|
||||||
#import sickbeard
|
#import sickbeard
|
||||||
#import urllib
|
#import urllib
|
||||||
|
#slurp
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
|
|
|
@ -232,35 +232,32 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
|
||||||
t = sickbeard.indexerApi(i).indexer(**lINDEXER_API_PARMS)
|
t = sickbeard.indexerApi(i).indexer(**lINDEXER_API_PARMS)
|
||||||
|
|
||||||
for name in showNames:
|
for name in showNames:
|
||||||
logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(i).name, logger.DEBUG)
|
logger.log(u'Trying to find ' + name + ' on ' + sickbeard.indexerApi(i).name, logger.DEBUG)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
search = t[indexer_id] if indexer_id else t[name]
|
result = t[indexer_id] if indexer_id else t[name]
|
||||||
except:
|
except:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
seriesname = series_id = False
|
||||||
seriesname = search.seriesname
|
for search in result:
|
||||||
except:
|
seriesname = search['seriesname']
|
||||||
seriesname = None
|
series_id = search['id']
|
||||||
|
if seriesname and series_id:
|
||||||
try:
|
break
|
||||||
series_id = search.id
|
|
||||||
except:
|
|
||||||
series_id = None
|
|
||||||
|
|
||||||
if not (seriesname and series_id):
|
if not (seriesname and series_id):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if str(name).lower() == str(seriesname).lower and not indexer_id:
|
if None is indexer_id and str(name).lower() == str(seriesname).lower():
|
||||||
return (seriesname, i, int(series_id))
|
return seriesname, i, int(series_id)
|
||||||
elif int(indexer_id) == int(series_id):
|
elif None is not indexer_id and int(indexer_id) == int(series_id):
|
||||||
return (seriesname, i, int(indexer_id))
|
return seriesname, i, int(indexer_id)
|
||||||
|
|
||||||
if indexer:
|
if indexer:
|
||||||
break
|
break
|
||||||
|
|
||||||
return (None, None, None)
|
return None, None, None
|
||||||
|
|
||||||
|
|
||||||
def sizeof_fmt(num):
|
def sizeof_fmt(num):
|
||||||
|
@ -909,31 +906,35 @@ def full_sanitizeSceneName(name):
|
||||||
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()
|
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()
|
||||||
|
|
||||||
|
|
||||||
def get_show(name, tryIndexers=False):
|
def get_show(name, try_indexers=False, try_scene_exceptions=False):
|
||||||
if not sickbeard.showList or None is name:
|
if not sickbeard.showList or None is name:
|
||||||
return
|
return
|
||||||
|
|
||||||
showObj = None
|
show_obj = None
|
||||||
fromCache = False
|
from_cache = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# check cache for show
|
|
||||||
cache = sickbeard.name_cache.retrieveNameFromCache(name)
|
cache = sickbeard.name_cache.retrieveNameFromCache(name)
|
||||||
if cache:
|
if cache:
|
||||||
fromCache = True
|
from_cache = True
|
||||||
showObj = findCertainShow(sickbeard.showList, int(cache))
|
show_obj = findCertainShow(sickbeard.showList, cache)
|
||||||
|
|
||||||
if not showObj and tryIndexers:
|
if not show_obj and try_scene_exceptions:
|
||||||
showObj = findCertainShow(sickbeard.showList,
|
indexer_id = sickbeard.scene_exceptions.get_scene_exception_by_name(name)[0]
|
||||||
|
if indexer_id:
|
||||||
|
show_obj = findCertainShow(sickbeard.showList, indexer_id)
|
||||||
|
|
||||||
|
if not show_obj and try_indexers:
|
||||||
|
show_obj = findCertainShow(sickbeard.showList,
|
||||||
searchIndexerForShowID(full_sanitizeSceneName(name), ui=classes.ShowListUI)[2])
|
searchIndexerForShowID(full_sanitizeSceneName(name), ui=classes.ShowListUI)[2])
|
||||||
|
|
||||||
# add show to cache
|
# add show to cache
|
||||||
if showObj and not fromCache:
|
if show_obj and not from_cache:
|
||||||
sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
|
sickbeard.name_cache.addNameToCache(name, show_obj.indexerid)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.log(u"Error when attempting to find show: " + name + " in SickGear: " + str(e), logger.DEBUG)
|
logger.log(u'Error when attempting to find show: ' + name + ' in SickGear: ' + str(e), logger.DEBUG)
|
||||||
|
|
||||||
return showObj
|
return show_obj
|
||||||
|
|
||||||
|
|
||||||
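The reworked get_show() above consults the name cache first, then (new in this commit) show scene exceptions, and only then a remote indexer search, caching any hit for next time. A rough, dependency-free sketch of that lookup order; the dict/callable arguments are stand-ins for the real sickbeard objects:

def find_show(name, name_cache, scene_exceptions, indexer_search, show_list):
    """Sketch of the lookup chain: 1) name cache, 2) scene exceptions, 3) indexers."""
    if not show_list or name is None:
        return None

    indexer_id = name_cache.get(name)            # 1. cached name -> indexer id
    from_cache = indexer_id is not None
    if indexer_id is None:
        indexer_id = scene_exceptions.get(name)  # 2. show scene exceptions (new)
    if indexer_id is None:
        indexer_id = indexer_search(name)        # 3. ask the indexers (slowest path)

    show = show_list.get(indexer_id) if indexer_id else None
    if show and not from_cache:
        name_cache[name] = indexer_id            # remember the hit, like addNameToCache()
    return show

# Toy usage: find_show('The Show', {}, {'The Show': 123}, lambda n: None, {123: object()})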
def is_hidden_folder(folder):
|
def is_hidden_folder(folder):
|
||||||
|
|
|
@ -36,12 +36,13 @@ class NameParser(object):
|
||||||
NORMAL_REGEX = 1
|
NORMAL_REGEX = 1
|
||||||
ANIME_REGEX = 2
|
ANIME_REGEX = 2
|
||||||
|
|
||||||
def __init__(self, file_name=True, showObj=None, tryIndexers=False, convert=False,
|
def __init__(self, file_name=True, showObj=None, try_indexers=False, try_scene_exceptions=False, convert=False,
|
||||||
naming_pattern=False, testing=False):
|
naming_pattern=False, testing=False):
|
||||||
|
|
||||||
self.file_name = file_name
|
self.file_name = file_name
|
||||||
self.showObj = showObj
|
self.showObj = showObj
|
||||||
self.tryIndexers = tryIndexers
|
self.try_indexers = try_indexers
|
||||||
|
self.try_scene_exceptions = try_scene_exceptions
|
||||||
self.convert = convert
|
self.convert = convert
|
||||||
self.naming_pattern = naming_pattern
|
self.naming_pattern = naming_pattern
|
||||||
self.testing = testing
|
self.testing = testing
|
||||||
|
@ -201,7 +202,7 @@ class NameParser(object):
|
||||||
show = None
|
show = None
|
||||||
if not self.naming_pattern:
|
if not self.naming_pattern:
|
||||||
# try and create a show object for this result
|
# try and create a show object for this result
|
||||||
show = helpers.get_show(bestResult.series_name, self.tryIndexers)
|
show = helpers.get_show(bestResult.series_name, self.try_indexers, self.try_scene_exceptions)
|
||||||
|
|
||||||
# confirm passed in show object indexer id matches result show object indexer id
|
# confirm passed in show object indexer id matches result show object indexer id
|
||||||
if show and not self.testing:
|
if show and not self.testing:
|
||||||
|
|
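For callers, the visible change in the parser is the renamed keyword arguments and the extra scene-exceptions switch. A hedged usage sketch; the import path assumes the layout of a SickGear checkout and the release name is invented:

from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException

def parse_release(name):
    """Parse a release name, letting the parser consult scene exceptions and,
    failing that, the indexers - the flags added/renamed in this commit."""
    try:
        # before: NameParser(tryIndexers=True)
        # after:  NameParser(try_indexers=True, try_scene_exceptions=True)
        return NameParser(try_indexers=True, try_scene_exceptions=True).parse(name, cache_result=False)
    except (InvalidNameException, InvalidShowException):
        return None

# parse_release('Some.Show.S01E01.720p.HDTV.x264-GRP')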
File diff suppressed because it is too large
|
@ -21,6 +21,7 @@ from __future__ import with_statement
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import stat
|
import stat
|
||||||
|
import re
|
||||||
|
|
||||||
import sickbeard
|
import sickbeard
|
||||||
from sickbeard import postProcessor
|
from sickbeard import postProcessor
|
||||||
|
@ -33,271 +34,334 @@ from sickbeard import common
|
||||||
|
|
||||||
from sickbeard import failedProcessor
|
from sickbeard import failedProcessor
|
||||||
|
|
||||||
from lib.unrar2 import RarFile, RarInfo
|
from lib.unrar2 import RarFile
|
||||||
from lib.unrar2.rar_exceptions import *
|
|
||||||
|
try:
|
||||||
|
from lib.send2trash import send2trash
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def delete_folder(folder, check_empty=True):
|
# noinspection PyArgumentList
|
||||||
|
class ProcessTVShow(object):
|
||||||
|
""" Process a TV Show """
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.files_passed = 0
|
||||||
|
self.files_failed = 0
|
||||||
|
self._output = []
|
||||||
|
|
||||||
|
@property
|
||||||
|
def any_vid_processed(self):
|
||||||
|
return 0 < self.files_passed
|
||||||
|
|
||||||
|
@property
|
||||||
|
def result(self, pre=True):
|
||||||
|
return (('<br />', u'\n')[pre]).join(self._output)
|
||||||
|
|
||||||
|
def _buffer(self, text=None):
|
||||||
|
if None is not text:
|
||||||
|
self._output.append(text)
|
||||||
|
|
||||||
|
def _log_helper(self, message, log_level=logger.DEBUG):
|
||||||
|
logger_msg = re.sub(r'(?i)<br(?:[\s/]+)>\.*', '', message)
|
||||||
|
logger_msg = re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', logger_msg)
|
||||||
|
logger.log(u'%s' % logger_msg, log_level)
|
||||||
|
self._buffer(message)
|
||||||
|
return
|
||||||
|
|
||||||
|
def _set_process_success(self, state=True, reset=False):
|
||||||
|
if state:
|
||||||
|
self.files_passed += 1
|
||||||
|
else:
|
||||||
|
self.files_failed += 1
|
||||||
|
if reset:
|
||||||
|
self.files_passed = 0
|
||||||
|
self.files_failed = 0
|
||||||
|
|
||||||
|
def _delete_folder(self, folder, check_empty=True):
|
||||||
|
|
||||||
# check if it's a folder
|
# check if it's a folder
|
||||||
if not ek.ek(os.path.isdir, folder):
|
if not ek.ek(os.path.isdir, folder):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# check if it isn't TV_DOWNLOAD_DIR
|
# make sure it isn't TV_DOWNLOAD_DIR
|
||||||
if sickbeard.TV_DOWNLOAD_DIR:
|
if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) == helpers.real_path(folder):
|
||||||
if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# check if it's empty folder when wanted checked
|
# check if it's empty folder when wanted checked
|
||||||
if check_empty:
|
if check_empty and ek.ek(os.listdir, folder):
|
||||||
check_files = ek.ek(os.listdir, folder)
|
|
||||||
if check_files:
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# try deleting folder
|
# try deleting folder
|
||||||
try:
|
try:
|
||||||
logger.log(u"Deleting folder: " + folder)
|
|
||||||
shutil.rmtree(folder)
|
shutil.rmtree(folder)
|
||||||
except (OSError, IOError), e:
|
except (OSError, IOError), e:
|
||||||
logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
|
logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
if ek.ek(os.path.isdir, folder):
|
||||||
|
logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING)
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
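The _delete_folder() rewrite above keeps the same guard rails as the old delete_folder(): refuse non-folders, never touch the completed downloads root, optionally require the folder to be empty, and verify the tree is really gone before reporting success. A generic standalone sketch of those checks; download_dir stands in for sickbeard.TV_DOWNLOAD_DIR:

import os
import shutil

def delete_folder_safely(folder, download_dir, check_empty=True):
    """Delete `folder` with the guards used by ProcessTVShow._delete_folder()."""
    if not os.path.isdir(folder):
        return False
    # never remove the configured completed-downloads root itself
    if download_dir and os.path.realpath(download_dir) == os.path.realpath(folder):
        return False
    # optionally refuse to delete a folder that still has content
    if check_empty and os.listdir(folder):
        return False
    try:
        shutil.rmtree(folder)
    except (OSError, IOError):
        return False
    # rmtree can partially fail (e.g. locked files), so confirm the folder is gone
    return not os.path.isdir(folder)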
def delete_files(processPath, notwantedFiles):
|
def _delete_files(self, process_path, notwanted_files, use_trash=False):
|
||||||
global returnStr, process_result
|
|
||||||
|
|
||||||
if not process_result:
|
if not self.any_vid_processed:
|
||||||
return
|
return
|
||||||
|
|
||||||
# Delete all file not needed
|
# Delete all file not needed
|
||||||
for cur_file in notwantedFiles:
|
for cur_file in notwanted_files:
|
||||||
|
|
||||||
cur_file_path = ek.ek(os.path.join, processPath, cur_file)
|
cur_file_path = ek.ek(os.path.join, process_path, cur_file)
|
||||||
|
|
||||||
if not ek.ek(os.path.isfile, cur_file_path):
|
if not ek.ek(os.path.isfile, cur_file_path):
|
||||||
continue # Prevent error when a notwantedfiles is an associated files
|
continue # Prevent error when a notwantedfiles is an associated files
|
||||||
|
|
||||||
returnStr += logHelper(u"Deleting file " + cur_file, logger.DEBUG)
|
|
||||||
|
|
||||||
# check first the read-only attribute
|
# check first the read-only attribute
|
||||||
file_attribute = ek.ek(os.stat, cur_file_path)[0]
|
file_attribute = ek.ek(os.stat, cur_file_path)[0]
|
||||||
if (not file_attribute & stat.S_IWRITE):
|
if not file_attribute & stat.S_IWRITE:
|
||||||
# File is read-only, so make it writeable
|
# File is read-only, so make it writeable
|
||||||
returnStr += logHelper(u"Changing ReadOnly Flag for file " + cur_file, logger.DEBUG)
|
self._log_helper(u'Changing ReadOnly flag for file ' + cur_file)
|
||||||
try:
|
try:
|
||||||
ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
|
ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
|
||||||
except OSError, e:
|
except OSError, e:
|
||||||
returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + str(e.strerror),
|
self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, str(e.strerror)))
|
||||||
logger.DEBUG)
|
|
||||||
try:
|
try:
|
||||||
|
if use_trash:
|
||||||
|
ek.ek(send2trash, cur_file_path)
|
||||||
|
else:
|
||||||
ek.ek(os.remove, cur_file_path)
|
ek.ek(os.remove, cur_file_path)
|
||||||
except OSError, e:
|
except OSError, e:
|
||||||
returnStr += logHelper(u"Unable to delete file " + cur_file + ': ' + str(e.strerror), logger.DEBUG)
|
self._log_helper(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)))
|
||||||
|
|
||||||
def logHelper(logMessage, logLevel=logger.MESSAGE):
|
if True is not ek.ek(os.path.isfile, cur_file_path):
|
||||||
logger.log(logMessage, logLevel)
|
self._log_helper(u'Deleted file ' + cur_file)
|
||||||
return logMessage + u"\n"
|
|
||||||
|
|
||||||
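_delete_files() gains a use_trash switch backed by the optional send2trash import added at the top of the module, and it still clears a read-only attribute before removing. A reduced sketch of that per-file path; falling back to a hard delete when send2trash is unavailable is an assumption of this sketch, not something the diff spells out:

import os
import stat

try:
    from send2trash import send2trash  # optional dependency, bundled as lib.send2trash upstream
except ImportError:
    send2trash = None

def remove_file(path, use_trash=False):
    """Delete one file, optionally via the recycle bin/trash.

    Mirrors the new _delete_files() steps: make a read-only file writable,
    then trash it or unlink it, and report whether it is actually gone.
    """
    if not os.path.isfile(path):
        return True  # nothing to do
    if not os.stat(path).st_mode & stat.S_IWRITE:
        os.chmod(path, stat.S_IWRITE)  # clear the read-only attribute first
    try:
        if use_trash and send2trash is not None:
            send2trash(path)
        else:
            os.remove(path)
    except OSError:
        return False
    return not os.path.isfile(path)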
|
def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, pp_type='auto', cleanup=False):
|
||||||
def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto"):
|
|
||||||
"""
|
"""
|
||||||
Scans through the files in dirName and processes whatever media files it finds
|
Scans through the files in dir_name and processes whatever media files it finds
|
||||||
|
|
||||||
dirName: The folder name to look in
|
dir_name: The folder name to look in
|
||||||
nzbName: The NZB name which resulted in this folder being downloaded
|
nzb_name: The NZB name which resulted in this folder being downloaded
|
||||||
force: True to postprocess already postprocessed files
|
force: True to postprocess already postprocessed files
|
||||||
failed: Boolean for whether or not the download failed
|
failed: Boolean for whether or not the download failed
|
||||||
type: Type of postprocessing auto or manual
|
pp_type: Type of postprocessing auto or manual
|
||||||
"""
|
"""
|
||||||
|
|
||||||
global process_result, returnStr
|
# if they passed us a real directory then assume it's the one we want
|
||||||
|
if ek.ek(os.path.isdir, dir_name):
|
||||||
|
self._log_helper(u'Processing folder... ' + dir_name)
|
||||||
|
dir_name = ek.ek(os.path.realpath, dir_name)
|
||||||
|
|
||||||
returnStr = ''
|
# if the client and SickGear are not on the same machine translate the directory in a network directory
|
||||||
|
|
||||||
returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)
|
|
||||||
|
|
||||||
returnStr += logHelper(u"TV_DOWNLOAD_DIR: " + sickbeard.TV_DOWNLOAD_DIR, logger.DEBUG)
|
|
||||||
|
|
||||||
# if they passed us a real dir then assume it's the one we want
|
|
||||||
if ek.ek(os.path.isdir, dirName):
|
|
||||||
dirName = ek.ek(os.path.realpath, dirName)
|
|
||||||
|
|
||||||
# if the client and SickGear are not on the same machine translate the Dir in a network dir
|
|
||||||
elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR)\
|
elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR)\
|
||||||
and ek.ek(os.path.normpath, dirName) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
|
and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
|
||||||
dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1])
|
dir_name = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
|
||||||
returnStr += logHelper(u"Trying to use folder " + dirName, logger.DEBUG)
|
self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickbeard.TV_DOWNLOAD_DIR)
|
||||||
|
self._log_helper(u'Trying to use folder... ' + dir_name)
|
||||||
|
|
||||||
# if we didn't find a real dir then quit
|
# if we didn't find a real directory then quit
|
||||||
if not ek.ek(os.path.isdir, dirName):
|
if not ek.ek(os.path.isdir, dir_name):
|
||||||
returnStr += logHelper(
|
self._log_helper(
|
||||||
u"Unable to figure out what folder to process. If your downloader and SickGear aren't on the same PC make sure you fill out your TV download dir in the config.",
|
u'Unable to figure out what folder to process. If your downloader and SickGear aren\'t on the same PC then make sure you fill out your completed TV download folder in the PP config.')
|
||||||
logger.DEBUG)
|
return self.result
|
||||||
return returnStr
|
|
||||||
|
|
||||||
path, dirs, files = get_path_dir_files(dirName, nzbName, type)
|
path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
|
||||||
|
|
||||||
SyncFiles = filter(helpers.isSyncFile, files)
|
sync_files = filter(helpers.isSyncFile, files)
|
||||||
|
|
||||||
# Don't post process if files are still being synced and option is activated
|
# Don't post process if files are still being synced and option is activated
|
||||||
if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
|
if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
|
||||||
returnStr += logHelper(u"Found temporary sync files, skipping post processing", logger.ERROR)
|
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
|
||||||
return returnStr
|
return self.result
|
||||||
|
|
||||||
returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
|
self._log_helper(u'Process path: ' + path)
|
||||||
returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)
|
if 0 < len(dirs):
|
||||||
|
self._log_helper(u'Process dir%s: %s' % (('', 's')[1 < len(dirs)], str(dirs)))
|
||||||
|
|
||||||
rarFiles = filter(helpers.isRarFile, files)
|
rar_files = filter(helpers.isRarFile, files)
|
||||||
rarContent = unRAR(path, rarFiles, force)
|
rar_content = self._unrar(path, rar_files, force)
|
||||||
files += rarContent
|
files += rar_content
|
||||||
videoFiles = filter(helpers.isMediaFile, files)
|
video_files = filter(helpers.isMediaFile, files)
|
||||||
videoInRar = filter(helpers.isMediaFile, rarContent)
|
video_in_rar = filter(helpers.isMediaFile, rar_content)
|
||||||
|
|
||||||
returnStr += logHelper(u"PostProcessing Files: " + str(files), logger.DEBUG)
|
if 0 < len(files):
|
||||||
returnStr += logHelper(u"PostProcessing VideoFiles: " + str(videoFiles), logger.DEBUG)
|
self._log_helper(u'Process file%s: %s' % (('', 's')[1 < len(files)], str(files)))
|
||||||
returnStr += logHelper(u"PostProcessing RarContent: " + str(rarContent), logger.DEBUG)
|
if 0 < len(video_files):
|
||||||
returnStr += logHelper(u"PostProcessing VideoInRar: " + str(videoInRar), logger.DEBUG)
|
self._log_helper(u'Process video file%s: %s' % (('', 's')[1 < len(video_files)], str(video_files)))
|
||||||
|
if 0 < len(rar_content):
|
||||||
|
self._log_helper(u'Process rar content: ' + str(rar_content))
|
||||||
|
if 0 < len(video_in_rar):
|
||||||
|
self._log_helper(u'Process video in rar: ' + str(video_in_rar))
|
||||||
|
|
||||||
# If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten).
|
# If nzb_name is set and there's more than one videofile in the folder, files will be lost (overwritten).
|
||||||
nzbNameOriginal = nzbName
|
nzb_name_original = nzb_name
|
||||||
if len(videoFiles) >= 2:
|
if 2 <= len(video_files):
|
||||||
nzbName = None
|
nzb_name = None
|
||||||
|
|
||||||
if not process_method:
|
if not process_method:
|
||||||
process_method = sickbeard.PROCESS_METHOD
|
process_method = sickbeard.PROCESS_METHOD
|
||||||
|
|
||||||
process_result = True
|
# self._set_process_success()
|
||||||
|
|
||||||
# Don't Link media when the media is extracted from a rar in the same path
|
# Don't Link media when the media is extracted from a rar in the same path
|
||||||
if process_method in ('hardlink', 'symlink') and videoInRar:
|
if process_method in ('hardlink', 'symlink') and video_in_rar:
|
||||||
process_result = process_media(path, videoInRar, nzbName, 'move', force, is_priority)
|
self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace)
|
||||||
delete_files(path, rarContent)
|
self._delete_files(path, rar_content)
|
||||||
for video in set(videoFiles) - set(videoInRar):
|
video_batch = set(video_files) - set(video_in_rar)
|
||||||
process_result = process_media(path, [video], nzbName, process_method, force, is_priority)
|
|
||||||
else:
|
else:
|
||||||
for video in videoFiles:
|
video_batch = video_files
|
||||||
process_result = process_media(path, [video], nzbName, process_method, force, is_priority)
|
|
||||||
|
|
||||||
#Process Video File in all TV Subdir
|
while 0 < len(video_batch):
|
||||||
for dir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed)]:
|
video_pick = ['']
|
||||||
|
video_size = 0
|
||||||
|
for cur_video_file in video_batch:
|
||||||
|
cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file))
|
||||||
|
if 0 == video_size or cur_video_size > video_size:
|
||||||
|
video_size = cur_video_size
|
||||||
|
video_pick = [cur_video_file]
|
||||||
|
|
||||||
process_result = True
|
video_batch = set(video_batch) - set(video_pick)
|
||||||
|
|
||||||
for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False):
|
self._process_media(path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup)
|
||||||
|
|
||||||
SyncFiles = filter(helpers.isSyncFile, fileList)
|
# Process video files in TV subdirectories
|
||||||
|
for directory in [x for x in dirs if self._validate_dir(path, x, nzb_name_original, failed)]:
|
||||||
|
|
||||||
|
self._set_process_success(reset=True)
|
||||||
|
|
||||||
|
for process_path, process_dir, file_list in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):
|
||||||
|
|
||||||
|
sync_files = filter(helpers.isSyncFile, file_list)
|
||||||
|
|
||||||
# Don't post process if files are still being synced and option is activated
|
# Don't post process if files are still being synced and option is activated
|
||||||
if SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES:
|
if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
|
||||||
returnStr += logHelper(u"Found temporary sync files, skipping post processing", logger.ERROR)
|
self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
|
||||||
return returnStr
|
return self.result
|
||||||
|
|
||||||
rarFiles = filter(helpers.isRarFile, fileList)
|
rar_files = filter(helpers.isRarFile, file_list)
|
||||||
rarContent = unRAR(processPath, rarFiles, force)
|
rar_content = self._unrar(process_path, rar_files, force)
|
||||||
fileList = set(fileList + rarContent)
|
file_list = set(file_list + rar_content)
|
||||||
videoFiles = filter(helpers.isMediaFile, fileList)
|
video_files = filter(helpers.isMediaFile, file_list)
|
||||||
videoInRar = filter(helpers.isMediaFile, rarContent)
|
video_in_rar = filter(helpers.isMediaFile, rar_content)
|
||||||
notwantedFiles = [x for x in fileList if x not in videoFiles]
|
notwanted_files = [x for x in file_list if x not in video_files]
|
||||||
|
|
||||||
# Don't Link media when the media is extracted from a rar in the same path
|
# Don't Link media when the media is extracted from a rar in the same path
|
||||||
if process_method in ('hardlink', 'symlink') and videoInRar:
|
if process_method in ('hardlink', 'symlink') and video_in_rar:
|
||||||
process_media(processPath, videoInRar, nzbName, 'move', force, is_priority)
|
self._process_media(process_path, video_in_rar, nzb_name, 'move', force, force_replace)
|
||||||
process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
|
video_batch = set(video_files) - set(video_in_rar)
|
||||||
is_priority)
|
|
||||||
delete_files(processPath, rarContent)
|
|
||||||
else:
|
else:
|
||||||
process_media(processPath, videoFiles, nzbName, process_method, force, is_priority)
|
video_batch = video_files
|
||||||
|
|
||||||
|
while 0 < len(video_batch):
|
||||||
|
video_pick = ['']
|
||||||
|
video_size = 0
|
||||||
|
for cur_video_file in video_batch:
|
||||||
|
cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, process_path, cur_video_file))
|
||||||
|
if 0 == video_size or cur_video_size > video_size:
|
||||||
|
video_size = cur_video_size
|
||||||
|
video_pick = [cur_video_file]
|
||||||
|
|
||||||
|
video_batch = set(video_batch) - set(video_pick)
|
||||||
|
|
||||||
|
self._process_media(process_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup)
|
||||||
|
|
||||||
|
if process_method in ('hardlink', 'symlink') and video_in_rar:
|
||||||
|
self._delete_files(process_path, rar_content)
|
||||||
|
else:
|
||||||
# Delete all file not needed
|
# Delete all file not needed
|
||||||
if process_method != "move" or not process_result \
|
if not self.any_vid_processed\
|
||||||
or type == "manual": #Avoid to delete files if is Manual PostProcessing
|
or 'move' != process_method\
|
||||||
|
or ('manual' == pp_type and not cleanup): # Avoid deleting files if Manual Postprocessing
|
||||||
continue
|
continue
|
||||||
|
|
||||||
delete_files(processPath, notwantedFiles)
|
self._delete_files(process_path, notwanted_files, use_trash=cleanup)
|
||||||
|
|
||||||
if process_method == "move" and \
|
if 'move' == process_method\
|
||||||
ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath,
|
and ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR) != ek.ek(os.path.normpath, process_path):
|
||||||
sickbeard.TV_DOWNLOAD_DIR):
|
self._delete_folder(process_path, check_empty=False)
|
||||||
if delete_folder(processPath, check_empty=False):
|
|
||||||
returnStr += logHelper(u"Deleted folder: " + processPath, logger.DEBUG)
|
|
||||||
|
|
||||||
if process_result:
|
def _bottom_line(text, log_level=logger.DEBUG):
|
||||||
returnStr += logHelper(u"Successfully processed")
|
self._buffer('-' * len(text))
|
||||||
|
self._log_helper(text, log_level)
|
||||||
|
|
||||||
|
if self.any_vid_processed:
|
||||||
|
if not self.files_failed:
|
||||||
|
_bottom_line(u'Successfully processed.', logger.MESSAGE)
|
||||||
else:
|
else:
|
||||||
returnStr += logHelper(u"Problem(s) during processing", logger.WARNING)
|
_bottom_line(u'Successfully processed at least one video file %s.' % (', others were skipped', 'and skipped another')[1 == self.files_failed], logger.MESSAGE)
|
||||||
|
else:
|
||||||
|
_bottom_line(u'Failed! Did not process any files.', logger.WARNING)
|
||||||
|
|
||||||
return returnStr
|
return self.result
|
||||||
|
|
||||||
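The nested _bottom_line() helper above underlines the final verdict inside the buffered report, so both the log and the web UI output end with one conclusive summary line. A tiny illustration of the formatting it produces, using a plain list in place of the instance buffer:

def bottom_line(report_lines, text):
    """Append an underline then the closing summary, as _bottom_line() does."""
    report_lines.append('-' * len(text))
    report_lines.append(text)
    return report_lines

report = ['Process file: example.mkv', 'Successfully processed example.mkv']
print('\n'.join(bottom_line(report, 'Successfully processed.')))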
|
def _validate_dir(self, path, dir_name, nzb_name_original, failed):
|
||||||
|
|
||||||
def validateDir(path, dirName, nzbNameOriginal, failed):
|
self._log_helper(u'Processing dir: ' + dir_name)
|
||||||
global process_result, returnStr
|
|
||||||
|
|
||||||
returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)
|
if ek.ek(os.path.basename, dir_name).startswith('_FAILED_'):
|
||||||
|
self._log_helper(u'The directory name indicates it failed to extract.')
|
||||||
if ek.ek(os.path.basename, dirName).startswith('_FAILED_'):
|
|
||||||
returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
|
|
||||||
failed = True
|
failed = True
|
||||||
elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
|
elif ek.ek(os.path.basename, dir_name).startswith('_UNDERSIZED_'):
|
||||||
returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.",
|
self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.')
|
||||||
logger.DEBUG)
|
|
||||||
failed = True
|
failed = True
|
||||||
elif ek.ek(os.path.basename, dirName).upper().startswith('_UNPACK'):
|
elif ek.ek(os.path.basename, dir_name).upper().startswith('_UNPACK'):
|
||||||
returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.",
|
self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.')
|
||||||
logger.DEBUG)
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if failed:
|
if failed:
|
||||||
process_failed(os.path.join(path, dirName), nzbNameOriginal)
|
self._process_failed(os.path.join(path, dir_name), nzb_name_original)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if helpers.is_hidden_folder(dirName):
|
if helpers.is_hidden_folder(dir_name):
|
||||||
returnStr += logHelper(u"Ignoring hidden folder: " + dirName, logger.DEBUG)
|
self._log_helper(u'Ignoring hidden folder: ' + dir_name)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# make sure the dir isn't inside a show dir
|
# make sure the directory isn't inside a show directory
|
||||||
myDB = db.DBConnection()
|
my_db = db.DBConnection()
|
||||||
sqlResults = myDB.select("SELECT * FROM tv_shows")
|
sql_results = my_db.select('SELECT * FROM tv_shows')
|
||||||
|
|
||||||
for sqlShow in sqlResults:
|
for sqlShow in sql_results:
|
||||||
if dirName.lower().startswith(
|
if dir_name.lower().startswith(ek.ek(os.path.realpath, sqlShow['location']).lower() + os.sep)\
|
||||||
ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(
|
or dir_name.lower() == ek.ek(os.path.realpath, sqlShow['location']).lower():
|
||||||
os.path.realpath, sqlShow["location"]).lower():
|
self._log_helper(
|
||||||
returnStr += logHelper(
|
u'Found an episode that has already been moved to its show dir, skipping',
|
||||||
u"You're trying to post process an episode that's already been moved to its show dir, skipping",
|
|
||||||
logger.ERROR)
|
logger.ERROR)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# Get the videofile list for the next checks
|
# Get the videofile list for the next checks
|
||||||
allFiles = []
|
all_files = []
|
||||||
allDirs = []
|
all_dirs = []
|
||||||
for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dirName), topdown=False):
|
for process_path, process_dir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir_name), topdown=False):
|
||||||
allDirs += processDir
|
all_dirs += process_dir
|
||||||
allFiles += fileList
|
all_files += fileList
|
||||||
|
|
||||||
videoFiles = filter(helpers.isMediaFile, allFiles)
|
video_files = filter(helpers.isMediaFile, all_files)
|
||||||
allDirs.append(dirName)
|
all_dirs.append(dir_name)
|
||||||
|
|
||||||
#check if the dir have at least one tv video file
|
# check if the directory have at least one tv video file
|
||||||
for video in videoFiles:
|
for video in video_files:
|
||||||
try:
|
try:
|
||||||
NameParser().parse(video, cache_result=False)
|
NameParser().parse(video, cache_result=False)
|
||||||
return True
|
return True
|
||||||
except (InvalidNameException, InvalidShowException):
|
except (InvalidNameException, InvalidShowException):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
for dir in allDirs:
|
for directory in all_dirs:
|
||||||
try:
|
try:
|
||||||
NameParser().parse(dir, cache_result=False)
|
NameParser().parse(directory, cache_result=False)
|
||||||
return True
|
return True
|
||||||
except (InvalidNameException, InvalidShowException):
|
except (InvalidNameException, InvalidShowException):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if sickbeard.UNPACK:
|
if sickbeard.UNPACK:
|
||||||
# Search for packed release
|
# Search for packed release
|
||||||
packedFiles = filter(helpers.isRarFile, allFiles)
|
packed_files = filter(helpers.isRarFile, all_files)
|
||||||
|
|
||||||
for packed in packedFiles:
|
for packed in packed_files:
|
||||||
try:
|
try:
|
||||||
NameParser().parse(packed, cache_result=False)
|
NameParser().parse(packed, cache_result=False)
|
||||||
return True
|
return True
|
||||||
|
@ -306,18 +370,17 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def unRAR(path, rarFiles, force):
|
def _unrar(self, path, rar_files, force):
|
||||||
global process_result, returnStr
|
|
||||||
|
|
||||||
unpacked_files = []
|
unpacked_files = []
|
||||||
|
|
||||||
if sickbeard.UNPACK and rarFiles:
|
if sickbeard.UNPACK and rar_files:
|
||||||
|
|
||||||
returnStr += logHelper(u"Packed Releases detected: " + str(rarFiles), logger.DEBUG)
|
self._log_helper(u'Packed releases detected: ' + str(rar_files))
|
||||||
|
|
||||||
for archive in rarFiles:
|
for archive in rar_files:
|
||||||
|
|
||||||
returnStr += logHelper(u"Unpacking archive: " + archive, logger.DEBUG)
|
self._log_helper(u'Unpacking archive: ' + archive)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
rar_handle = RarFile(os.path.join(path, archive))
|
rar_handle = RarFile(os.path.join(path, archive))
|
||||||
|
@ -325,10 +388,9 @@ def unRAR(path, rarFiles, force):
|
||||||
# Skip extraction if any file in archive has previously been extracted
|
# Skip extraction if any file in archive has previously been extracted
|
||||||
skip_file = False
|
skip_file = False
|
||||||
for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]:
|
for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]:
|
||||||
if already_postprocessed(path, file_in_archive, force):
|
if self._already_postprocessed(path, file_in_archive, force):
|
||||||
returnStr += logHelper(
|
self._log_helper(
|
||||||
u"Archive file already post-processed, extraction skipped: " + file_in_archive,
|
u'Archive file already processed, extraction skipped: ' + file_in_archive)
|
||||||
logger.DEBUG)
|
|
||||||
skip_file = True
|
skip_file = True
|
||||||
break
|
break
|
||||||
|
|
||||||
|
@ -339,151 +401,178 @@ def unRAR(path, rarFiles, force):
|
||||||
unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
|
unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
|
||||||
del rar_handle
|
del rar_handle
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
returnStr += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR)
|
self._log_helper(u'Failed to unpack archive %s: %s' % (archive, ex(e)), logger.ERROR)
|
||||||
process_result = False
|
self._set_process_success(False)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
returnStr += logHelper(u"UnRar content: " + str(unpacked_files), logger.DEBUG)
|
self._log_helper(u'Unpacked content: ' + str(unpacked_files))
|
||||||
|
|
||||||
return unpacked_files
|
return unpacked_files
|
||||||
|
|
||||||
|
def _already_postprocessed(self, dir_name, videofile, force):
|
||||||
|
|
||||||
def already_postprocessed(dirName, videofile, force):
|
if force and not self.any_vid_processed:
|
||||||
global returnStr
|
|
||||||
|
|
||||||
if force:
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
#Needed for accessing DB with a unicode DirName
|
# Needed for accessing DB with a unicode dir_name
|
||||||
if not isinstance(dirName, unicode):
|
if not isinstance(dir_name, unicode):
|
||||||
dirName = unicode(dirName, 'utf_8')
|
dir_name = unicode(dir_name, 'utf_8')
|
||||||
|
|
||||||
# Avoid processing the same dir again if we use a process method <> move
|
parse_result = None
|
||||||
myDB = db.DBConnection()
|
try:
|
||||||
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
|
parse_result = NameParser(try_indexers=True, try_scene_exceptions=True, convert=True).parse(videofile, cache_result=False)
|
||||||
if sqlResult:
|
except (InvalidNameException, InvalidShowException):
|
||||||
returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
|
pass
|
||||||
logger.DEBUG)
|
if None is parse_result:
|
||||||
|
try:
|
||||||
|
parse_result = NameParser(try_indexers=True, try_scene_exceptions=True, convert=True).parse(dir_name, cache_result=False)
|
||||||
|
except (InvalidNameException, InvalidShowException):
|
||||||
|
pass
|
||||||
|
|
||||||
|
showlink = ''
|
||||||
|
ep_detail_sql = ''
|
||||||
|
undo_status = None
|
||||||
|
if parse_result:
|
||||||
|
showlink = (' for "<a href="/home/displayShow?show=%s" target="_blank">%s</a>"' % (parse_result.show.indexerid, parse_result.show.name),
|
||||||
|
parse_result.show.name)[self.any_vid_processed]
|
||||||
|
|
||||||
|
if parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number:
|
||||||
|
ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
|
||||||
|
% (str(parse_result.show.indexerid),
|
||||||
|
str(parse_result.season_number),
|
||||||
|
str(parse_result.episode_numbers[0]))
|
||||||
|
undo_status = "UPDATE `tv_episodes` SET status="\
|
||||||
|
+ "(SELECT h.action FROM `history` as h INNER JOIN `tv_episodes` as t on h.showid=t.showid"\
|
||||||
|
+ " where t.showid='%s' and t.season='%s' and t.episode='%s'"\
|
||||||
|
% (str(parse_result.show.indexerid), str(parse_result.season_number), str(parse_result.episode_numbers[0]))\
|
||||||
|
+ " and (h.action is not t.status) group by h.action order by h.date DESC LIMIT 1)"\
|
||||||
|
+ " where showid='%s' and season='%s' and episode='%s'"\
|
||||||
|
% (str(parse_result.show.indexerid), str(parse_result.season_number), str(parse_result.episode_numbers[0]))
|
||||||
|
|
||||||
|
# Avoid processing the same directory again if we use a process method <> move
|
||||||
|
my_db = db.DBConnection()
|
||||||
|
sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name])
|
||||||
|
if sql_result:
|
||||||
|
self._log_helper(u'Found a release directory%s that has already been processed,<br />.. skipping: %s'
|
||||||
|
% (showlink, dir_name))
|
||||||
|
my_db.action(undo_status)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# This is needed for video whose name differ from dirName
|
# This is needed for video whose name differ from dir_name
|
||||||
if not isinstance(videofile, unicode):
|
if not isinstance(videofile, unicode):
|
||||||
videofile = unicode(videofile, 'utf_8')
|
videofile = unicode(videofile, 'utf_8')
|
||||||
|
|
||||||
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
|
sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
|
||||||
if sqlResult:
|
if sql_result:
|
||||||
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
|
self._log_helper(u'Found a video, but that release%s was already processed,<br />.. skipping: %s'
|
||||||
logger.DEBUG)
|
% (showlink, videofile))
|
||||||
|
my_db.action(undo_status)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# Needed if we have downloaded the same episode @ different quality
|
# Needed if we have downloaded the same episode @ different quality
|
||||||
search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
|
search_sql = 'SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history'\
|
||||||
search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
|
+ ' ON history.showid=tv_episodes.showid'\
|
||||||
|
+ ' WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode'\
|
||||||
|
+ ep_detail_sql\
|
||||||
|
+ ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\
|
||||||
|
+ ' and history.resource LIKE ?'
|
||||||
|
|
||||||
np = NameParser(dirName, tryIndexers=True, convert=True)
|
sql_result = my_db.select(search_sql, [u'%' + videofile])
|
||||||
try:
|
if sql_result:
|
||||||
parse_result = np.parse(dirName)
|
self._log_helper(u'Found a video, but the episode%s is already processed,<br />.. skipping: %s'
|
||||||
except:
|
% (showlink, videofile))
|
||||||
parse_result = False
|
my_db.action(undo_status)
|
||||||
pass
|
|
||||||
|
|
||||||
if parse_result and (parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
|
|
||||||
search_sql += " and tv_episodes.showid = '" + str(parse_result.show.indexerid)\
|
|
||||||
+ "' and tv_episodes.season = '" + str(parse_result.season_number)\
|
|
||||||
+ "' and tv_episodes.episode = '" + str(parse_result.episode_numbers[0]) + "'"
|
|
||||||
|
|
||||||
search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
|
|
||||||
search_sql += " and history.resource LIKE ?"
|
|
||||||
sqlResult = myDB.select(search_sql, [u'%' + videofile])
|
|
||||||
if sqlResult:
|
|
||||||
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
|
|
||||||
logger.DEBUG)
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def _process_media(self, process_path, video_files, nzb_name, process_method, force, force_replace, use_trash=False):
|
||||||
def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority):
|
|
||||||
global process_result, returnStr
|
|
||||||
|
|
||||||
processor = None
|
processor = None
|
||||||
for cur_video_file in videoFiles:
|
for cur_video_file in video_files:
|
||||||
|
|
||||||
if already_postprocessed(processPath, cur_video_file, force):
|
if self._already_postprocessed(process_path, cur_video_file, force):
|
||||||
|
self._set_process_success(False)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
cur_video_file_path = ek.ek(os.path.join, processPath, cur_video_file)
|
cur_video_file_path = ek.ek(os.path.join, process_path, cur_video_file)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
processor = postProcessor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority)
|
processor = postProcessor.PostProcessor(cur_video_file_path, nzb_name, process_method, force_replace, use_trash=use_trash)
|
||||||
process_result = processor.process()
|
file_success = processor.process()
|
||||||
process_fail_message = ""
|
process_fail_message = ''
|
||||||
except exceptions.PostProcessingFailed, e:
|
except exceptions.PostProcessingFailed, e:
|
||||||
process_result = False
|
file_success = False
|
||||||
process_fail_message = ex(e)
|
process_fail_message = '<br />.. ' + ex(e)
|
||||||
|
|
||||||
|
self._set_process_success(file_success)
|
||||||
|
|
||||||
if processor:
|
if processor:
|
||||||
returnStr += processor.log
|
self._buffer(processor.log.strip('\n'))
|
||||||
|
|
||||||
if process_result:
|
if file_success:
|
||||||
returnStr += logHelper(u"Processing succeeded for " + cur_video_file_path)
|
self._log_helper(u'Successfully processed ' + cur_video_file, logger.MESSAGE)
|
||||||
|
elif self.any_vid_processed:
|
||||||
|
self._log_helper(u'Warning fail for %s%s' % (cur_video_file_path, process_fail_message),
|
||||||
|
logger.WARNING)
|
||||||
else:
|
else:
|
||||||
returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message,
|
-                                    logger.WARNING)
+                    self._log_helper(u'Did not use file %s%s' % (cur_video_file_path, process_fail_message),
+                                     logger.WARNING)

-            #If something fail abort the processing on dir
-            if not process_result:
-                break
-
-
-def get_path_dir_files(dirName, nzbName, type):
-    path = ""
-    dirs = []
-    files = []
-
-    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type == "manual": #Scheduled Post Processing Active
-        #Get at first all the subdir in the dirName
-        for path, dirs, files in ek.ek(os.walk, dirName):
-            break
-    else:
-        path, dirs = ek.ek(os.path.split, dirName) #Script Post Processing
-        if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(
-                os.path.join(dirName, nzbName)): #For single torrent file without Dir
-            dirs = []
-            files = [os.path.join(dirName, nzbName)]
-        else:
-            dirs = [dirs]
-            files = []
-
-    return path, dirs, files
-
-
-def process_failed(dirName, nzbName):
-    """ Process a download that did not complete correctly """
-
-    global returnStr
-
-    if sickbeard.USE_FAILED_DOWNLOADS:
-        processor = None
-
-        try:
-            processor = failedProcessor.FailedProcessor(dirName, nzbName)
-            process_result = processor.process()
-            process_fail_message = ""
-        except exceptions.FailedProcessingFailed, e:
-            process_result = False
-            process_fail_message = ex(e)
-
-        if processor:
-            returnStr += processor.log
-
-        if sickbeard.DELETE_FAILED and process_result:
-            if delete_folder(dirName, check_empty=False):
-                returnStr += logHelper(u"Deleted folder: " + dirName, logger.DEBUG)
-
-        if process_result:
-            returnStr += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
-        else:
-            returnStr += logHelper(
-                u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
-                logger.WARNING)
+
+    @staticmethod
+    def _get_path_dir_files(dir_name, nzb_name, pp_type):
+        path = ''
+        dirs = []
+        files = []
+
+        if dir_name == sickbeard.TV_DOWNLOAD_DIR and not nzb_name or 'manual' == pp_type:  # Scheduled Post Processing Active
+            # Get at first all the subdir in the dir_name
+            for path, dirs, files in ek.ek(os.walk, dir_name):
+                break
+        else:
+            path, dirs = ek.ek(os.path.split, dir_name)  # Script Post Processing
+            if None is not nzb_name and not nzb_name.endswith('.nzb') and os.path.isfile(
+                    os.path.join(dir_name, nzb_name)):  # For single torrent file without directory
+                dirs = []
+                files = [os.path.join(dir_name, nzb_name)]
+            else:
+                dirs = [dirs]
+                files = []
+
+        return path, dirs, files
+
+    # noinspection PyArgumentList
+    def _process_failed(self, dir_name, nzb_name):
+        """ Process a download that did not complete correctly """
+
+        if sickbeard.USE_FAILED_DOWNLOADS:
+            processor = None
+
+            try:
+                processor = failedProcessor.FailedProcessor(dir_name, nzb_name)
+                self._set_process_success(processor.process())
+                process_fail_message = ''
+            except exceptions.FailedProcessingFailed, e:
+                self._set_process_success(False)
+                process_fail_message = ex(e)
+
+            if processor:
+                self._buffer(processor.log.strip('\n'))
+
+            if sickbeard.DELETE_FAILED and self.any_vid_processed:
+                self._delete_folder(dir_name, check_empty=False)
+
+            task = u'Failed download processing'
+            if self.any_vid_processed:
+                self._log_helper(u'Successful %s: (%s, %s)'
+                                 % (task.lower(), str(nzb_name), dir_name), logger.MESSAGE)
+            else:
+                self._log_helper(u'%s failed: (%s, %s): %s'
+                                 % (task, str(nzb_name), dir_name, process_fail_message), logger.WARNING)
+
+
+# backward compatibility prevents the case of this function name from being updated to PEP8
+def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, type='auto', cleanup=False):
+    return ProcessTVShow().process_dir(dir_name, nzb_name, process_method, force, force_replace, failed, type, cleanup)
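Note: the module-level processDir kept above is only a thin backward-compatibility shim that forwards every argument to the new thread-safe ProcessTVShow class. A minimal standalone sketch of that delegation pattern follows; the class and function names in the sketch are simplified stand-ins, not the actual SickGear API.

# Illustrative sketch only (simplified names, not the real SickGear classes):
# the old module-level entry point survives as a shim that forwards to the
# new class-based implementation.
class ProcessShow(object):
    def process_dir(self, dir_name, nzb_name=None, force=False):
        # a real implementation would walk dir_name and post-process its files
        return 'processed %s (nzb=%s, force=%s)' % (dir_name, nzb_name, force)


def processDirLegacy(dir_name, nzb_name=None, force=False):
    # kept only so existing callers of the old function name keep working
    return ProcessShow().process_dir(dir_name, nzb_name, force)


print(processDirLegacy('/downloads/show.s01e01', 'show.s01e01.nzb'))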
@@ -237,7 +237,7 @@ def determineReleaseName(dir_name=None, nzb_name=None):
     """Determine a release name from an nzb and/or folder name"""

     if nzb_name is not None:
-        logger.log(u"Using nzb_name for release name.")
+        logger.log(u'Using nzb name for release name.')
         return nzb_name.rpartition('.')[0]

     if dir_name is None:
@@ -435,7 +435,7 @@ class TVShow(object):

         try:
             parse_result = None
-            np = NameParser(False, showObj=self, tryIndexers=True)
+            np = NameParser(False, showObj=self, try_indexers=True)
             parse_result = np.parse(ep_file_name)
         except (InvalidNameException, InvalidShowException):
             pass
@@ -627,7 +627,7 @@ class TVShow(object):
         logger.log(str(self.indexerid) + u": Creating episode object from " + file, logger.DEBUG)

         try:
-            myParser = NameParser(showObj=self, tryIndexers=True)
+            myParser = NameParser(showObj=self, try_indexers=True)
             parse_result = myParser.parse(file)
         except InvalidNameException:
             logger.log(u"Unable to parse the filename " + file + " into a valid episode", logger.DEBUG)
@@ -1312,7 +1312,7 @@ class CMD_PostProcess(ApiCall):
         if not self.type:
             self.type = 'manual'

-        data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, is_priority=self.is_priority, failed=False, type=self.type)
+        data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, force_replace=self.is_priority, failed=False, type=self.type)

         if not self.return_data:
             data = ""
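Note: the only change in this hunk is the call site — the API's is_priority value is now passed as force_replace=, while the API's force_replace continues to feed processDir's force parameter. A small hypothetical sketch (the helper below is invented, not SickGear code) of keeping two naming schemes straight with explicit keyword arguments:

# Hypothetical, simplified sketch of the mapping done by CMD_PostProcess above.
def process_dir(path, force=False, force_replace=False, failed=False, type='auto'):
    # echo the arguments back so the mapping is visible when run
    return (path, force, force_replace, failed, type)


api_params = {'force_replace': True, 'is_priority': False}
print(process_dir('/downloads',
                  force=api_params['force_replace'],
                  force_replace=api_params['is_priority']))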
@@ -1911,72 +1911,49 @@ class Home(MainHandler):
                            sceneEpisode=None, sceneAbsolute=None):

         # sanitize:
-        if forSeason in ['null', '']: forSeason = None
-        if forEpisode in ['null', '']: forEpisode = None
-        if forAbsolute in ['null', '']: forAbsolute = None
-        if sceneSeason in ['null', '']: sceneSeason = None
-        if sceneEpisode in ['null', '']: sceneEpisode = None
-        if sceneAbsolute in ['null', '']: sceneAbsolute = None
-
-        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
-
-        if showObj.is_anime:
-            result = {
-                'success': True,
-                'forAbsolute': forAbsolute,
-            }
-        else:
-            result = {
-                'success': True,
-                'forSeason': forSeason,
-                'forEpisode': forEpisode,
-            }
-
-        # retrieve the episode object and fail if we can't get one
-        if showObj.is_anime:
-            ep_obj = self._getEpisode(show, absolute=forAbsolute)
-        else:
-            ep_obj = self._getEpisode(show, forSeason, forEpisode)
-
-        if isinstance(ep_obj, str):
-            result['success'] = False
-            result['errorMessage'] = ep_obj
-        elif showObj.is_anime:
-            logger.log(u'setAbsoluteSceneNumbering for %s from %s to %s' %
-                       (show, forAbsolute, sceneAbsolute), logger.DEBUG)
-
-            show = int(show)
-            indexer = int(indexer)
-            forAbsolute = int(forAbsolute)
-            if sceneAbsolute is not None: sceneAbsolute = int(sceneAbsolute)
-
-            set_scene_numbering(show, indexer, absolute_number=forAbsolute, sceneAbsolute=sceneAbsolute)
-        else:
-            logger.log(u'setEpisodeSceneNumbering for %s from %sx%s to %sx%s' %
-                       (show, forSeason, forEpisode, sceneSeason, sceneEpisode), logger.DEBUG)
-
-            show = int(show)
-            indexer = int(indexer)
-            forSeason = int(forSeason)
-            forEpisode = int(forEpisode)
-            if sceneSeason is not None: sceneSeason = int(sceneSeason)
-            if sceneEpisode is not None: sceneEpisode = int(sceneEpisode)
-
-            set_scene_numbering(show, indexer, season=forSeason, episode=forEpisode, sceneSeason=sceneSeason,
-                                sceneEpisode=sceneEpisode)
-
-        if showObj.is_anime:
-            sn = get_scene_absolute_numbering(show, indexer, forAbsolute)
-            if sn:
-                result['sceneAbsolute'] = sn
-            else:
-                result['sceneAbsolute'] = None
-        else:
-            sn = get_scene_numbering(show, indexer, forSeason, forEpisode)
-            if sn:
-                (result['sceneSeason'], result['sceneEpisode']) = sn
-            else:
-                (result['sceneSeason'], result['sceneEpisode']) = (None, None)
+        show = None if show in [None, 'null', ''] else int(show)
+        indexer = None if indexer in [None, 'null', ''] else int(indexer)
+
+        show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, show)
+
+        if not show_obj.is_anime:
+            for_season = None if forSeason in [None, 'null', ''] else int(forSeason)
+            for_episode = None if forEpisode in [None, 'null', ''] else int(forEpisode)
+            scene_season = None if sceneSeason in [None, 'null', ''] else int(sceneSeason)
+            scene_episode = None if sceneEpisode in [None, 'null', ''] else int(sceneEpisode)
+            action_log = u'Set episode scene numbering to %sx%s for episode %sx%s of "%s"'\
+                         % (scene_season, scene_episode, for_season, for_episode, show_obj.name)
+            ep_args = {'show': show, 'season': for_season, 'episode': for_episode}
+            scene_args = {'indexer_id': show, 'indexer': indexer, 'season': for_season, 'episode': for_episode,
+                          'sceneSeason': scene_season, 'sceneEpisode': scene_episode}
+            result = {'forSeason': for_season, 'forEpisode': for_episode, 'sceneSeason': None, 'sceneEpisode': None}
+        else:
+            for_absolute = None if forAbsolute in [None, 'null', ''] else int(forAbsolute)
+            scene_absolute = None if sceneAbsolute in [None, 'null', ''] else int(sceneAbsolute)
+            action_log = u'Set absolute scene numbering to %s for episode %s of "%s"'\
+                         % (scene_absolute, for_absolute, show_obj.name)
+            ep_args = {'show': show, 'absolute': for_absolute}
+            scene_args = {'indexer_id': show, 'indexer': indexer, 'absolute_number': for_absolute,
+                          'sceneAbsolute': scene_absolute}
+            result = {'forAbsolute': for_absolute, 'sceneAbsolute': None}
+
+        ep_obj = self._getEpisode(**ep_args)
+        result['success'] = not isinstance(ep_obj, str)
+        if result['success']:
+            logger.log(action_log, logger.DEBUG)
+            set_scene_numbering(**scene_args)
+            show_obj.flushEpisodes()
+        else:
+            result['errorMessage'] = ep_obj
+
+        if not show_obj.is_anime:
+            scene_numbering = get_scene_numbering(show, indexer, for_season, for_episode)
+            if scene_numbering:
+                (result['sceneSeason'], result['sceneEpisode']) = scene_numbering
+        else:
+            scene_numbering = get_scene_absolute_numbering(show, indexer, for_absolute)
+            if scene_numbering:
+                result['sceneAbsolute'] = scene_numbering

         return json.dumps(result)
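Note: the rewritten setSceneNumbering builds ep_args and scene_args once per branch and then calls self._getEpisode(**ep_args) and set_scene_numbering(**scene_args) at a single call site, instead of repeating the calls in every anime/non-anime branch. A minimal standalone sketch of that dict-unpacking dispatch, using invented names rather than the real functions, could look like this:

# Minimal sketch (not SickGear code): each branch builds keyword arguments,
# and one shared call site applies them with ** unpacking.
def set_numbering(show, season=None, episode=None, absolute=None):
    return 'show %s -> season=%s episode=%s absolute=%s' % (show, season, episode, absolute)


is_anime = False
if not is_anime:
    scene_args = {'show': 101, 'season': 2, 'episode': 5}
else:
    scene_args = {'show': 101, 'absolute': 17}

print(set_numbering(**scene_args))  # one call regardless of branch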
@@ -2024,33 +2001,22 @@ class HomePostProcess(Home):
         return t.respond()

     def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None,
-                       is_priority=None, failed='0', type='auto', *args, **kwargs):
-
-        if failed == '0':
-            failed = False
-        else:
-            failed = True
-
-        if force in ['on', '1']:
-            force = True
-        else:
-            force = False
-
-        if is_priority in ['on', '1']:
-            is_priority = True
-        else:
-            is_priority = False
+                       force_replace=None, failed='0', type='auto', **kwargs):

         if not dir:
             self.redirect('/home/postprocess/')
         else:
-            result = processTV.processDir(dir, nzbName, process_method=process_method, force=force,
-                                          is_priority=is_priority, failed=failed, type=type)
-            if quiet is not None and int(quiet) == 1:
-                return result
-
-            result = result.replace('\n', '<br />\n')
-            return self._genericMessage('Postprocessing results', result)
+            result = processTV.processDir(dir, nzbName, process_method=process_method, type=type,
+                                          cleanup='cleanup' in kwargs and kwargs['cleanup'] in ['on', '1'],
+                                          force=force in ['on', '1'],
+                                          force_replace=force_replace in ['on', '1'],
+                                          failed=not '0' == failed)
+
+            result = re.sub(r'(?i)<br(?:[\s/]+)>', '\n', result)
+            if None is not quiet and 1 == int(quiet):
+                return u'%s' % re.sub('(?i)<a[^>]+>([^<]+)<[/]a>', r'\1', result)
+
+            return self._genericMessage('Postprocessing results', u'<pre>%s</pre>' % result)


 class NewHomeAddShows(Home):
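Note: the new processEpisode normalises checkbox-style form values inline with membership tests such as force in ['on', '1'] rather than the old if/else ladders. A tiny illustrative helper (hypothetical, not part of the codebase) showing the same idea:

# Web form checkbox values arrive as strings such as 'on', '1', '0' or None;
# a membership test turns them into booleans in one expression.
def as_bool(value):
    return value in ['on', '1']


for raw in ('on', '1', '0', None):
    print('%r -> %r' % (raw, as_bool(raw)))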