Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-05 17:43:37 +00:00)
Change unpack files once only in auto post processing copy mode.
commit 5a2d3fbb85 (parent 24afe020d8)
4 changed files with 57 additions and 15 deletions
@@ -1,6 +1,6 @@
 ### 0.12.0 (2016-xx-xx xx:xx:xx UTC)

-* Add strict Python version check (equal or higher than 2.7.9 and less than 3.0), ** exit ** if incorrect version
+* Add strict Python version check (equal to, or higher than 2.7.9 and less than 3.0), **exit** if incorrect version
 * Update unidecode library 0.04.11 to 0.04.18 (fd57cbf)
 * Update xmltodict library 0.9.2 (579a005) to 0.9.2 (eac0031)
 * Update Tornado Web Server 4.3.dev1 (1b6157d) to 4.4.dev1 (c2b4d05)
@@ -127,6 +127,7 @@
 * Fix Nyaa and TT torrent providers
 * Change PrivateHD torrent provider
 * Fix Add from Trakt
+* Change unpack files once only in auto post processing copy mode


 ### 0.11.14 (2016-07-25 03:10:00 UTC)
@@ -302,6 +302,7 @@ function fetch_branches() {
                 $.each(branches, function (i, text) {
                     add_option_to_branches(text);
                 });
+                $('#branchVersion').find('option[value=' + data['current'] + ']').attr('selected','selected');
                 $('#branchCheckout').removeAttr('disabled');
             } else {
                 add_option_to_branches('No branches available');
@@ -19,11 +19,13 @@
 from __future__ import with_statement

 from functools import partial
+import datetime
 import os
 import re
 import shutil
 import stat
 import sys
+import time

 import sickbeard
 from sickbeard import postProcessor
@@ -39,6 +41,11 @@ from sickbeard import failedProcessor

 import lib.rarfile.rarfile as rarfile

+try:
+    import json
+except ImportError:
+    from lib import simplejson as json
+
 try:
     from lib.send2trash import send2trash
 except ImportError:
@@ -190,13 +197,13 @@ class ProcessTVShow(object):

         path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)

-        sync_files = filter(helpers.isSyncFile, files)
-
         # Don't post process if files are still being synced and option is activated
-        if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
+        if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
             self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
             return self.result

+        if not process_method:
+            process_method = sickbeard.PROCESS_METHOD
+
         self._log_helper(u'Processing folder... %s' % path)

         work_files = []
@@ -204,7 +211,8 @@ class ProcessTVShow(object):
         if joined:
             work_files += [joined]

-        rar_files = filter(helpers.is_first_rar_volume, files)
+        rar_files, rarfile_history = self.unused_archives(
+            path, filter(helpers.is_first_rar_volume, files), pp_type, process_method)
         rar_content = self._unrar(path, rar_files, force)
         if self.fail_detected:
             self._process_failed(dir_name, nzb_name)
@@ -228,9 +236,6 @@ class ProcessTVShow(object):
         if 2 <= len(video_files):
             nzb_name = None

-        if not process_method:
-            process_method = sickbeard.PROCESS_METHOD
-
         # self._set_process_success()

         # Don't Link media when the media is extracted from a rar in the same path
@@ -266,14 +271,12 @@ class ProcessTVShow(object):

             for walk_path, walk_dir, files in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):

-                sync_files = filter(helpers.isSyncFile, files)
-
                 # Don't post process if files are still being synced and option is activated
-                if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
+                if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
                     self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
                     return self.result

-                rar_files = filter(helpers.is_first_rar_volume, files)
+                rar_files, rarfile_history = self.unused_archives(
+                    walk_path, filter(helpers.is_first_rar_volume, files), pp_type, process_method, rarfile_history)
                 rar_content = self._unrar(walk_path, rar_files, force)
                 work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content]
                 if self.fail_detected:
@@ -346,6 +349,43 @@ class ProcessTVShow(object):

         return self.result

+    @staticmethod
+    def unused_archives(path, archives, pp_type, process_method, archive_history=None):
+
+        archive_history = (archive_history, {})[not archive_history]
+        if ('auto' == pp_type and sickbeard.PROCESS_AUTOMATICALLY
+                and 'copy' == process_method and sickbeard.UNPACK):
+
+            archive_history_file = ek.ek(os.path.join, sickbeard.DATA_DIR, 'archive_history.txt')
+
+            if not archive_history:
+                try:
+                    with open(archive_history_file, 'r') as fh:
+                        archive_history = json.loads(fh.read(10 * 1024 * 1024))
+                except (IOError, ValueError, Exception):
+                    pass
+
+            init_history_cnt = len(archive_history)
+
+            for archive in archive_history.keys():
+                if not ek.ek(os.path.isfile, archive):
+                    del archive_history[archive]
+
+            unused_files = list(set([ek.ek(os.path.join, path, x) for x in archives]) - set(archive_history.keys()))
+            archives = [ek.ek(os.path.basename, x) for x in unused_files]
+            if unused_files:
+                for f in unused_files:
+                    archive_history.setdefault(f, time.mktime(datetime.datetime.utcnow().timetuple()))
+
+            if init_history_cnt != len(archive_history):
+                try:
+                    with open(archive_history_file, 'w') as fh:
+                        fh.write(json.dumps(archive_history))
+                except (IOError, Exception):
+                    pass
+
+        return archives, archive_history
+
     def _validate_dir(self, path, dir_name, nzb_name_original, failed):

         self._log_helper(u'Processing sub dir: ' + dir_name)
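The new ProcessTVShow.unused_archives helper above keeps a small JSON ledger (archive_history.txt in the SickGear data dir) that maps each archive path to the time it was first seen, prunes entries whose files have since disappeared, and hands back only archives that have not been unpacked before. It only engages when the run is an automatic post process ('auto' type with PROCESS_AUTOMATICALLY on), the process method is 'copy' and UNPACK is enabled, so 'move' and manual runs are unaffected. The sketch below shows that bookkeeping in isolation, using plain os/json/time calls instead of SickGear's ek wrappers; the names filter_new_archives and ledger_path are illustrative only, not part of the codebase.

import json
import os
import time


def filter_new_archives(path, archives, ledger_path):
    """Return only archive names not yet recorded in the ledger, and persist any changes."""
    # Ledger format: {absolute archive path: first-seen unix timestamp}
    try:
        with open(ledger_path, 'r') as fh:
            history = json.loads(fh.read(10 * 1024 * 1024))
    except (IOError, ValueError):
        history = {}

    before = len(history)

    # Drop entries whose archive files no longer exist on disk
    for archive in list(history):
        if not os.path.isfile(archive):
            del history[archive]

    # Anything not already recorded still needs to be unpacked
    unused = [os.path.join(path, name) for name in archives
              if os.path.join(path, name) not in history]
    for full_path in unused:
        history.setdefault(full_path, time.time())

    # Write the ledger back only when it actually changed
    if before != len(history):
        try:
            with open(ledger_path, 'w') as fh:
                fh.write(json.dumps(history))
        except IOError:
            pass

    return [os.path.basename(x) for x in unused]

On a second copy-mode pass over the same folder the archives are already in the ledger, so the returned list is empty and nothing is re-extracted, which is the behaviour the changelog entry describes.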
@@ -4330,7 +4330,7 @@ class ConfigGeneral(Config):
     def fetch_branches():
         try:
             branches = sickbeard.versionCheckScheduler.action.list_remote_branches()
-            return json.dumps({'result': 'success', 'branches': branches})
+            return json.dumps({'result': 'success', 'branches': branches, 'current': sickbeard.BRANCH or 'master'})
         except Exception as e:
             logger.log(u'exception msg: ' + str(e), logger.DEBUG)
             return json.dumps({'result': 'fail'})
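For reference, this server-side change pairs with the fetch_branches() JavaScript hunk earlier in the diff: the endpoint now reports which branch is currently checked out, and the UI pre-selects that option in the #branchVersion dropdown. A minimal sketch of the round trip, with made-up branch names purely for illustration:

import json

# Shape of the reply after this change; only the keys mirror the diff above,
# the branch values here are invented for the example.
reply = json.dumps({'result': 'success',
                    'branches': ['master', 'develop'],
                    'current': 'master'})

data = json.loads(reply)
if 'success' == data['result']:
    # the fetch_branches() callback marks data['current'] as the selected option in #branchVersion
    print('pre-select branch: %s' % data['current'])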