Change unpack files once only in auto post processing copy mode.

JackDandy 2016-08-24 03:03:06 +01:00
parent 24afe020d8
commit 5a2d3fbb85
4 changed files with 57 additions and 15 deletions

CHANGES.md

@@ -1,6 +1,6 @@
 ### 0.12.0 (2016-xx-xx xx:xx:xx UTC)
-* Add strict Python version check (equal or higher than 2.7.9 and less than 3.0), ** exit ** if incorrect version
+* Add strict Python version check (equal to, or higher than 2.7.9 and less than 3.0), **exit** if incorrect version
 * Update unidecode library 0.04.11 to 0.04.18 (fd57cbf)
 * Update xmltodict library 0.9.2 (579a005) to 0.9.2 (eac0031)
 * Update Tornado Web Server 4.3.dev1 (1b6157d) to 4.4.dev1 (c2b4d05)
@@ -127,6 +127,7 @@
 * Fix Nyaa and TT torrent providers
 * Change PrivateHD torrent provider
 * Fix Add from Trakt
+* Change unpack files once only in auto post processing copy mode
 
 ### 0.11.14 (2016-07-25 03:10:00 UTC)

gui/slick/js/config.js

@@ -302,6 +302,7 @@ function fetch_branches() {
             $.each(branches, function (i, text) {
                 add_option_to_branches(text);
             });
+            $('#branchVersion').find('option[value=' + data['current'] + ']').attr('selected','selected');
             $('#branchCheckout').removeAttr('disabled');
         } else {
             add_option_to_branches('No branches available');

sickbeard/processTV.py

@@ -19,11 +19,13 @@
 from __future__ import with_statement
 from functools import partial
+import datetime
 import os
 import re
 import shutil
 import stat
 import sys
+import time
 
 import sickbeard
 from sickbeard import postProcessor
@@ -39,6 +41,11 @@ from sickbeard import failedProcessor
 import lib.rarfile.rarfile as rarfile
 
+try:
+    import json
+except ImportError:
+    from lib import simplejson as json
+
 try:
     from lib.send2trash import send2trash
 except ImportError:
@@ -190,13 +197,13 @@ class ProcessTVShow(object):
         path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
 
-        sync_files = filter(helpers.isSyncFile, files)
-
-        # Don't post process if files are still being synced and option is activated
-        if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
+        if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
             self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
             return self.result
 
+        if not process_method:
+            process_method = sickbeard.PROCESS_METHOD
+
         self._log_helper(u'Processing folder... %s' % path)
 
         work_files = []
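An aside on the guard above: the old code built a sync_files list and tested it later, while the new form folds both steps into one condition. A minimal standalone sketch of the same check, with an illustrative stand-in for helpers.isSyncFile (the real extension list lives in the helpers module):

POSTPONE_IF_SYNC_FILES = True  # stand-in for the sickbeard.POSTPONE_IF_SYNC_FILES setting

def is_sync_file(name):
    # illustrative stand-in for helpers.isSyncFile
    return name.endswith(('.!sync', '.lftp-pget-status', '.part'))

files = ['episode.mkv', 'episode.mkv.!sync']
if POSTPONE_IF_SYNC_FILES and any(filter(is_sync_file, files)):
    print('Found temporary sync files, skipping post process')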
@@ -204,7 +211,8 @@ class ProcessTVShow(object):
         if joined:
             work_files += [joined]
 
-        rar_files = filter(helpers.is_first_rar_volume, files)
+        rar_files, rarfile_history = self.unused_archives(
+            path, filter(helpers.is_first_rar_volume, files), pp_type, process_method)
         rar_content = self._unrar(path, rar_files, force)
         if self.fail_detected:
             self._process_failed(dir_name, nzb_name)
@@ -228,9 +236,6 @@ class ProcessTVShow(object):
         if 2 <= len(video_files):
             nzb_name = None
 
-        if not process_method:
-            process_method = sickbeard.PROCESS_METHOD
-
         # self._set_process_success()
 
         # Don't Link media when the media is extracted from a rar in the same path
@@ -266,14 +271,12 @@ class ProcessTVShow(object):
             for walk_path, walk_dir, files in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):
 
-                sync_files = filter(helpers.isSyncFile, files)
-
-                # Don't post process if files are still being synced and option is activated
-                if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
+                if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
                     self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
                     return self.result
 
-                rar_files = filter(helpers.is_first_rar_volume, files)
+                rar_files, rarfile_history = self.unused_archives(
+                    walk_path, filter(helpers.is_first_rar_volume, files), pp_type, process_method, rarfile_history)
                 rar_content = self._unrar(walk_path, rar_files, force)
                 work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content]
                 if self.fail_detected:
@@ -346,6 +349,43 @@ class ProcessTVShow(object):
             return self.result
 
+    @staticmethod
+    def unused_archives(path, archives, pp_type, process_method, archive_history=None):
+
+        archive_history = (archive_history, {})[not archive_history]
+
+        if ('auto' == pp_type and sickbeard.PROCESS_AUTOMATICALLY
+                and 'copy' == process_method and sickbeard.UNPACK):
+
+            archive_history_file = ek.ek(os.path.join, sickbeard.DATA_DIR, 'archive_history.txt')
+
+            if not archive_history:
+                try:
+                    with open(archive_history_file, 'r') as fh:
+                        archive_history = json.loads(fh.read(10 * 1024 * 1024))
+                except (IOError, ValueError, Exception):
+                    pass
+
+            init_history_cnt = len(archive_history)
+
+            for archive in archive_history.keys():
+                if not ek.ek(os.path.isfile, archive):
+                    del archive_history[archive]
+
+            unused_files = list(set([ek.ek(os.path.join, path, x) for x in archives]) - set(archive_history.keys()))
+            archives = [ek.ek(os.path.basename, x) for x in unused_files]
+            if unused_files:
+                for f in unused_files:
+                    archive_history.setdefault(f, time.mktime(datetime.datetime.utcnow().timetuple()))
+
+            if init_history_cnt != len(archive_history):
+                try:
+                    with open(archive_history_file, 'w') as fh:
+                        fh.write(json.dumps(archive_history))
+                except (IOError, Exception):
+                    pass
+
+        return archives, archive_history
+
     def _validate_dir(self, path, dir_name, nzb_name_original, failed):
         self._log_helper(u'Processing sub dir: ' + dir_name)
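The new unused_archives method above is the heart of the commit: when post processing runs automatically with the 'copy' method and unpacking enabled, it keeps a JSON map of already-seen archive paths in archive_history.txt under the data directory, prunes entries whose archives have since been deleted, and returns only archives not yet recorded, so a copied-in rar is unpacked once rather than on every scheduled pass. A minimal standalone sketch of that bookkeeping (simplified: it writes the history on every call and omits the auto/copy/UNPACK gate; the function name and file layout here are illustrative, not part of the commit):

import json
import os
import tempfile
import time

def unused_archives_sketch(history_file, path, archives):
    # Load the persisted {archive path: first-seen timestamp} map, if any.
    history = {}
    if os.path.isfile(history_file):
        try:
            with open(history_file, 'r') as fh:
                history = json.loads(fh.read(10 * 1024 * 1024))
        except (IOError, ValueError):
            history = {}
    # Prune entries whose archive vanished, so a re-downloaded file processes again.
    history = dict((k, v) for (k, v) in history.items() if os.path.isfile(k))
    # Only archives never seen before are returned for unpacking.
    unused = sorted(set(os.path.join(path, x) for x in archives) - set(history))
    for f in unused:
        history.setdefault(f, time.time())
    with open(history_file, 'w') as fh:
        fh.write(json.dumps(history))
    return [os.path.basename(f) for f in unused]

work_dir = tempfile.mkdtemp()
for name in ('show.part01.rar', 'film.part01.rar'):
    open(os.path.join(work_dir, name), 'w').close()
hist = os.path.join(work_dir, 'archive_history.txt')
print(unused_archives_sketch(hist, work_dir, ['show.part01.rar', 'film.part01.rar']))
# first pass: both archives are returned for unpacking
print(unused_archives_sketch(hist, work_dir, ['show.part01.rar', 'film.part01.rar']))
# second pass: [] - both are in the history, so nothing is unpacked again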

sickbeard/webserve.py

@@ -4330,7 +4330,7 @@ class ConfigGeneral(Config):
     def fetch_branches():
         try:
             branches = sickbeard.versionCheckScheduler.action.list_remote_branches()
-            return json.dumps({'result': 'success', 'branches': branches})
+            return json.dumps({'result': 'success', 'branches': branches, 'current': sickbeard.BRANCH or 'master'})
         except Exception as e:
             logger.log(u'exception msg: ' + str(e), logger.DEBUG)
             return json.dumps({'result': 'fail'})
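This server-side change pairs with the config.js change above: fetch_branches now also reports which branch is running, so the page can preselect it in the branch dropdown. A hedged sketch of the payload shape, with made-up branch names standing in for list_remote_branches() and sickbeard.BRANCH:

import json

branches = ['master', 'develop']  # hypothetical result of list_remote_branches()
current = 'develop'               # stand-in for sickbeard.BRANCH or 'master'
print(json.dumps({'result': 'success', 'branches': branches, 'current': current}))
# {"result": "success", "branches": ["master", "develop"], "current": "develop"}
# The page JS then marks the matching <option> in #branchVersion as selected.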