# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function
from __future__ import with_statement

import base64
import datetime
import getpass
import hashlib
import os
import re
import shutil
import socket
import stat
import tempfile
import time
import traceback
import urlparse
import uuid
import subprocess
import sys

import adba
import requests
import requests.exceptions
from cfscrape import CloudflareScraper
import sickbeard
import subliminal

try:
    import json
except ImportError:
    from lib import simplejson as json

try:
    import xml.etree.cElementTree as etree
except ImportError:
    import elementtree.ElementTree as etree

from sickbeard.exceptions import MultipleShowObjectsException, ex
from sickbeard import logger, db, notifiers, clients
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, cpu_presets, statusStrings, \
    SNATCHED_ANY, DOWNLOADED, ARCHIVED, IGNORED, Quality
from sickbeard import encodingKludge as ek

from lib.cachecontrol import CacheControl, caches
from lib.scandir.scandir import scandir
from itertools import izip, cycle


def indentXML(elem, level=0):
    '''
    Does our pretty printing, makes Matt very happy
    '''
    i = "\n" + level * " "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + " "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for elem in elem:
            indentXML(elem, level + 1)
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
    else:
        # Strip out the newlines from text
        if elem.text:
            elem.text = elem.text.replace('\n', ' ')
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i


def remove_extension(name):
    """
    Remove download or media extension from name (if any)
    """
    if name and "." in name:
        base_name, sep, extension = name.rpartition('.')  # @UnusedVariable
        if base_name and extension.lower() in ['nzb', 'torrent'] + mediaExtensions:
            name = base_name

    return name
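
# A quick sketch of remove_extension behaviour (assumes 'mkv' is listed in
# mediaExtensions, as defined in sickbeard.common):
#     remove_extension('Show.Name.S01E01.mkv')  # -> 'Show.Name.S01E01'
#     remove_extension('Show.Name.S01E01.nzb')  # -> 'Show.Name.S01E01'
#     remove_extension('.hidden')               # -> '.hidden' (no base name, left alone)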


def remove_non_release_groups(name, is_anime=False):
    """
    Remove non release groups from name
    """
    if name:
        rc = [re.compile(r'(?i)' + v) for v in [
            r'([\s\.\-_\[\{\(]*(no-rar|nzbgeek|ripsalot|siklopentan)[\s\.\-_\]\}\)]*)$',
            r'([\s\.\-_\[\{\(]rp[\s\.\-_\]\}\)]*)$',
            r'(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*(www\.\w+.\w+)[\s\.\-_]*[\]\}\)][\s\.\-_]*)$',
            r'(?<=\w)([\s\.\-_]*[\[\{\(]\s*(rar(bg|tv)|((e[tz]|v)tv))[\s\.\-_]*[\]\}\)][\s\.\-_]*)$'] +
            ([r'(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)$',
              r'^([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)(?=\w)'], [])[is_anime]]

        rename = name = remove_extension(name)
        while rename:
            for regex in rc:
                name = regex.sub('', name)
            rename = (name, False)[name == rename]

    return name
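
# Sketch of intended behaviour (hypothetical release names; exact results
# depend on the pattern list above):
#     remove_non_release_groups('Show.S01E01 [eztv]')  # -> 'Show.S01E01'
#     remove_non_release_groups('[HorribleSubs] Show - 01', is_anime=True)
#         # leading bracketed tag is kept for anime, where it is the release group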


def replaceExtension(filename, newExt):
    sepFile = filename.rpartition(".")
    if sepFile[0] == "":
        return filename
    else:
        return sepFile[0] + "." + newExt
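
# For instance (illustrative values):
#     replaceExtension('movie.avi', 'srt')  # -> 'movie.srt'
#     replaceExtension('noext', 'srt')      # -> 'noext' (no '.' to split on)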


def isSyncFile(filename):
    extension = filename.rpartition(".")[2].lower()
    return extension in ('!sync', 'lftp-pget-status')


def has_media_ext(filename):
    # ignore samples
    if re.search(r'(^|[\W_])(sample\d*)[\W_]', filename, re.I) \
            or filename.startswith('._'):  # and MAC OS's 'resource fork' files
        return False

    sep_file = filename.rpartition('.')
    return (None is re.search('extras?$', sep_file[0], re.I)) and (sep_file[2].lower() in mediaExtensions)
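
# Illustrative checks (assuming 'mkv' is in mediaExtensions):
#     has_media_ext('Show.S01E01.mkv')         # -> True
#     has_media_ext('Show.S01E01.sample.mkv')  # -> False (sample file)
#     has_media_ext('._Show.S01E01.mkv')       # -> False (resource fork)
#     has_media_ext('Show.S01E01.extras.mkv')  # -> False ('extras' stem)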


def has_image_ext(filename):
    try:
        if ek.ek(os.path.splitext, filename)[1].lower() in ['.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']:
            return True
    except (StandardError, Exception):
        pass
    return False


def is_first_rar_volume(filename):
    return None is not re.search(r'(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)', filename)
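
# For example (hypothetical archive names):
#     is_first_rar_volume('show.rar')         # -> True
#     is_first_rar_volume('show.part01.rar')  # -> True
#     is_first_rar_volume('show.part02.rar')  # -> False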


def sanitizeFileName(name):
    # remove bad chars from the filename
    name = re.sub(r'[\\/\*]', '-', name)
    name = re.sub(r'[:"<>|?]', '', name)

    # remove leading/trailing periods and spaces
    name = name.strip(' .')

    for char in sickbeard.REMOVE_FILENAME_CHARS or []:
        name = name.replace(char, '')

    return name
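
# Example of the combined effect (illustrative; REMOVE_FILENAME_CHARS empty):
#     sanitizeFileName('Show: A/B <Pilot>?')  # -> 'Show A-B Pilot'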


def remove_file_failed(filename):
    try:
        ek.ek(os.remove, filename)
    except (StandardError, Exception):
        pass


def findCertainShow(showList, indexerid):
    results = []
    if showList and indexerid:
        results = filter(lambda x: int(x.indexerid) == int(indexerid), showList)

    if len(results) == 1:
        return results[0]
    elif len(results) > 1:
        raise MultipleShowObjectsException()


def find_show_by_id(show_list, id_dict, no_mapped_ids=True):
    """
    :param show_list: list of shows to search
    :type show_list: list
    :param id_dict: {indexer: id}
    :type id_dict: dict
    :param no_mapped_ids: if True, match only the show's primary indexer id, otherwise match mapped ids too
    :type no_mapped_ids: bool
    :return: showObj, or None if not found; raises MultipleShowObjectsException on multiple matches
    """
    results = []
    if show_list and id_dict and isinstance(id_dict, dict):
        id_dict = {k: v for k, v in id_dict.items() if v > 0}
        if no_mapped_ids:
            results = list(set([s for k, v in id_dict.iteritems() for s in show_list
                                if k == s.indexer and v == s.indexerid]))
        else:
            results = list(set([s for k, v in id_dict.iteritems() for s in show_list
                                if v == s.ids.get(k, {'id': 0})['id']]))

    if len(results) == 1:
        return results[0]
    elif len(results) > 1:
        raise MultipleShowObjectsException()
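
# Hedged usage sketch (indexer constant and id are hypothetical values):
#     show = find_show_by_id(sickbeard.showList, {1: 71663})
# looks for a show whose primary indexer is 1 with indexerid 71663; passing
# no_mapped_ids=False also matches ids mapped from other indexers.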


def makeDir(path):
    if not ek.ek(os.path.isdir, path):
        try:
            ek.ek(os.makedirs, path)
            # do the library update for synoindex
            notifiers.NotifierFactory().get('SYNOINDEX').addFolder(path)
        except OSError:
            return False
    return True


def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
    showNames = [re.sub('[. -]', ' ', regShowName)]

    # Query Indexers for each search term and build the list of results
    for i in sickbeard.indexerApi().indexers if not indexer else [int(indexer)]:
        lINDEXER_API_PARMS = sickbeard.indexerApi(i).api_params.copy()
        if ui is not None:
            lINDEXER_API_PARMS['custom_ui'] = ui
        t = sickbeard.indexerApi(i).indexer(**lINDEXER_API_PARMS)

        for name in showNames:
            logger.log('Trying to find %s on %s' % (name, sickbeard.indexerApi(i).name), logger.DEBUG)

            try:
                result = t[indexer_id] if indexer_id else t[name]
            except (StandardError, Exception):
                continue

            seriesname = series_id = None
            for search in result if isinstance(result, list) else [result]:
                try:
                    seriesname = search['seriesname']
                    series_id = search['id']
                except (StandardError, Exception):
                    series_id = seriesname = None
                    continue
                if seriesname and series_id:
                    break

            if not (seriesname and series_id):
                continue

            if None is indexer_id and str(name).lower() == str(seriesname).lower():
                return seriesname, i, int(series_id)
            elif None is not indexer_id and int(indexer_id) == int(series_id):
                return seriesname, i, int(indexer_id)

        if indexer:
            break

    return None, None, None


def sizeof_fmt(num):
    for x in ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']:
        if num < 1024.0:
            return "%3.1f %s" % (num, x)
        num /= 1024.0
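
# For instance:
#     sizeof_fmt(1)        # -> '1.0 bytes'
#     sizeof_fmt(1048576)  # -> '1.0 MB'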


def listMediaFiles(path):
    if not path or not ek.ek(os.path.isdir, path):
        return []

    files = []
    for curFile in ek.ek(os.listdir, path):
        fullCurFile = ek.ek(os.path.join, path, curFile)

        # if it's a folder do it recursively
        if ek.ek(os.path.isdir, fullCurFile) and not curFile.startswith('.') and not curFile == 'Extras':
            files += listMediaFiles(fullCurFile)

        elif has_media_ext(curFile):
            files.append(fullCurFile)

    return files


def copyFile(srcFile, destFile):
    if os.name.startswith('posix'):
        ek.ek(subprocess.call, ['cp', srcFile, destFile])
    else:
        ek.ek(shutil.copyfile, srcFile, destFile)

    try:
        ek.ek(shutil.copymode, srcFile, destFile)
    except OSError:
        pass


def moveFile(srcFile, destFile):
    try:
        ek.ek(shutil.move, srcFile, destFile)
        fixSetGroupID(destFile)
    except OSError:
        copyFile(srcFile, destFile)
        ek.ek(os.unlink, srcFile)


def link(src, dst):
    if os.name == 'nt':
        import ctypes

        if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0:
            raise ctypes.WinError()
    else:
        ek.ek(os.link, src, dst)


def hardlinkFile(srcFile, destFile):
    try:
        ek.ek(link, srcFile, destFile)
        fixSetGroupID(destFile)
    except Exception as e:
        logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ": " + ex(e) + ". Copying instead",
                   logger.ERROR)
        copyFile(srcFile, destFile)


def symlink(src, dst):
    if os.name == 'nt':
        import ctypes

        if ctypes.windll.kernel32.CreateSymbolicLinkW(
                unicode(dst), unicode(src), 1 if ek.ek(os.path.isdir, src) else 0) in [0, 1280]:
            raise ctypes.WinError()
    else:
        ek.ek(os.symlink, src, dst)


def moveAndSymlinkFile(srcFile, destFile):
    try:
        ek.ek(shutil.move, srcFile, destFile)
        fixSetGroupID(destFile)
        ek.ek(symlink, destFile, srcFile)
    except (StandardError, Exception):
        logger.log(u"Failed to create symlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
        copyFile(srcFile, destFile)


def make_dirs(path):
    """
    Creates any folders that are missing and assigns them the permissions of their
    parents
    """

    if not ek.ek(os.path.isdir, path):
        # Windows, create all missing folders
        if os.name in ('nt', 'ce'):
            try:
                logger.log(u'Path %s doesn\'t exist, creating it' % path, logger.DEBUG)
                ek.ek(os.makedirs, path)
            except (OSError, IOError) as e:
                logger.log(u'Failed creating %s : %s' % (path, ex(e)), logger.ERROR)
                return False

        # not Windows, create all missing folders and set permissions
        else:
            sofar = ''
            folder_list = path.split(os.path.sep)

            # look through each subfolder and make sure they all exist
            for cur_folder in folder_list:
                sofar += cur_folder + os.path.sep

                # if it exists then just keep walking down the line
                if ek.ek(os.path.isdir, sofar):
                    continue

                try:
                    logger.log(u'Path %s doesn\'t exist, creating it' % sofar, logger.DEBUG)
                    ek.ek(os.mkdir, sofar)
                    # use normpath to remove end separator, otherwise checks permissions against itself
                    chmodAsParent(ek.ek(os.path.normpath, sofar))
                    # do the library update for synoindex
                    notifiers.NotifierFactory().get('SYNOINDEX').addFolder(sofar)
                except (OSError, IOError) as e:
                    logger.log(u'Failed creating %s : %s' % (sofar, ex(e)), logger.ERROR)
                    return False

    return True
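
# Minimal usage sketch (hypothetical path): make_dirs('/data/tv/Show/Season 1')
# creates each missing path component, copying permissions from its parent on
# POSIX and notifying synoindex for every folder it creates.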


def rename_ep_file(cur_path, new_path, old_path_length=0):
    """
    Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
    left that are now empty.

    cur_path: The absolute path to the file you want to move/rename
    new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
    old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
    """

    new_dest_dir, new_dest_name = ek.ek(os.path.split, new_path)  # @UnusedVariable

    if old_path_length == 0 or old_path_length > len(cur_path):
        # approach from the right
        cur_file_name, cur_file_ext = ek.ek(os.path.splitext, cur_path)  # @UnusedVariable
    else:
        # approach from the left
        cur_file_ext = cur_path[old_path_length:]
        cur_file_name = cur_path[:old_path_length]

    if cur_file_ext[1:] in subtitleExtensions:
        # Extract subtitle language from filename
        sublang = ek.ek(os.path.splitext, cur_file_name)[1][1:]

        # Check if the language extracted from filename is a valid language
        try:
            language = subliminal.language.Language(sublang, strict=True)
            cur_file_ext = '.' + sublang + cur_file_ext
        except ValueError:
            pass

    # put the extension on the incoming file
    new_path += cur_file_ext

    make_dirs(ek.ek(os.path.dirname, new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(shutil.move, cur_path, new_path)
    except (OSError, IOError) as e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False

    # clean up any old folders that are empty
    delete_empty_folders(ek.ek(os.path.dirname, cur_path))

    return True


def delete_empty_folders(check_empty_dir, keep_dir=None):
    """
    Walks backwards up the path and deletes any empty folders found.

    check_empty_dir: The path to clean (absolute path to a folder)
    keep_dir: Clean until this path is reached
    """

    # treat check_empty_dir as empty when it only contains these items
    ignore_items = []

    logger.log(u"Trying to clean any empty folders under " + check_empty_dir)

    # as long as the folder exists and doesn't contain any files, delete it
    while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
        check_files = ek.ek(os.listdir, check_empty_dir)

        if not check_files or (len(check_files) <= len(ignore_items) and all(
                [check_file in ignore_items for check_file in check_files])):
            # directory is empty or contains only ignore_items
            try:
                logger.log(u"Deleting empty folder: " + check_empty_dir)
                # need shutil.rmtree when ignore_items is really implemented
                ek.ek(os.rmdir, check_empty_dir)
                # do the library update for synoindex
                notifiers.NotifierFactory().get('SYNOINDEX').deleteFolder(check_empty_dir)
            except OSError as e:
                logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING)
                break
            check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)
        else:
            break


def fileBitFilter(mode):
    for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
        if mode & bit:
            mode -= bit

    return mode
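
# e.g. a parent directory mode of 0o755 maps to a file mode of 0o644:
#     fileBitFilter(0o755)  # -> 0o644 (execute and set-id bits removed)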


def chmodAsParent(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)

    if not parentPath:
        logger.log(u"No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
        return

    parentPathStat = ek.ek(os.stat, parentPath)
    parentMode = stat.S_IMODE(parentPathStat[stat.ST_MODE])

    childPathStat = ek.ek(os.stat, childPath)
    childPath_mode = stat.S_IMODE(childPathStat[stat.ST_MODE])

    if ek.ek(os.path.isfile, childPath):
        childMode = fileBitFilter(parentMode)
    else:
        childMode = parentMode

    if childPath_mode == childMode:
        return

    childPath_owner = childPathStat.st_uid
    user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

    if user_id != 0 and user_id != childPath_owner:
        logger.log(u"Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
        return

    try:
        ek.ek(os.chmod, childPath, childMode)
        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode),
                   logger.DEBUG)
    except OSError:
        logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)


def fixSetGroupID(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)
    parentStat = ek.ek(os.stat, parentPath)
    parentMode = stat.S_IMODE(parentStat[stat.ST_MODE])

    if parentMode & stat.S_ISGID:
        parentGID = parentStat[stat.ST_GID]
        childStat = ek.ek(os.stat, childPath)
        childGID = childStat[stat.ST_GID]

        if childGID == parentGID:
            return

        childPath_owner = childStat.st_uid
        user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

        if user_id != 0 and user_id != childPath_owner:
            logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID",
                       logger.DEBUG)
            return

        try:
            ek.ek(os.chown, childPath, -1, parentGID)  # @UndefinedVariable - only available on UNIX
            logger.log(u"Respecting the set-group-ID bit on the parent directory for %s" % (childPath), logger.DEBUG)
        except OSError:
            logger.log(
                u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (
                    childPath, parentGID), logger.ERROR)


def get_absolute_number_from_season_and_episode(show, season, episode):
    absolute_number = None

    if season and episode:
        myDB = db.DBConnection()
        sql = 'SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?'
        sqlResults = myDB.select(sql, [show.indexerid, season, episode])

        if len(sqlResults) == 1:
            absolute_number = int(sqlResults[0]["absolute_number"])
            logger.log(
                "Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode),
                logger.DEBUG)
        else:
            logger.log(
                "No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(
                    episode),
                logger.DEBUG)

    return absolute_number


def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=None):
    episodes = []
    season = None

    if len(absolute_numbers):
        if not show and indexer_id:
            show = findCertainShow(sickbeard.showList, indexer_id)

        if show:
            for absolute_number in absolute_numbers:
                ep = show.getEpisode(None, None, absolute_number=absolute_number)
                if ep:
                    episodes.append(ep.episode)
                    season = ep.season  # this will always take the last found season so eps that cross the season
                    # border are not handled well

    return (season, episodes)


def sanitizeSceneName(name):
    """
    Takes a show name and returns the "scenified" version of it.

    Returns: A string containing the scene version of the show name given.
    """
    if name:
        bad_chars = u",:()£'!?\u2019"

        # strip out any bad chars
        for x in bad_chars:
            name = name.replace(x, "")

        # tidy up stuff that doesn't belong in scene names
        name = name.replace("- ", ".").replace(" ", ".").replace("&", "and").replace('/', '.')
        name = re.sub(r"\.\.*", ".", name)

        if name.endswith('.'):
            name = name[:-1]

        return name
    else:
        return ''
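
# Illustrative transformation:
#     sanitizeSceneName("Mr. Show & Friends")  # -> 'Mr.Show.and.Friends'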


def create_https_certificates(ssl_cert, ssl_key):
    """
    Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'
    """
    try:
        from OpenSSL import crypto  # @UnresolvedImport
        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
            serial  # @UnresolvedImport
    except Exception:
        logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
        return False

    # Create the CA Certificate
    cakey = createKeyPair(TYPE_RSA, 4096)
    careq = createCertRequest(cakey, CN='Certificate Authority')
    cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    cname = 'SickGear'
    pkey = createKeyPair(TYPE_RSA, 4096)
    req = createCertRequest(pkey, CN=cname)
    cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    # Save the key and certificate to disk
    try:
        open(ssl_key, 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
        open(ssl_cert, 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    except (StandardError, Exception):
        logger.log(u"Error creating SSL key and certificate", logger.ERROR)
        return False

    return True


if __name__ == '__main__':
    import doctest

    doctest.testmod()


def parse_xml(data, del_xmlns=False):
    """
    Parse data into an xml elementtree.ElementTree

    data: data string containing xml
    del_xmlns: if True, removes xmlns namespace from data before parsing

    Returns: parsed data as elementtree or None
    """

    if del_xmlns:
        data = re.sub(' xmlns="[^"]+"', '', data)

    try:
        parsedXML = etree.fromstring(data)
    except Exception as e:
        logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG)
        parsedXML = None

    return parsedXML
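
# Quick sketch (hypothetical markup):
#     parse_xml('<root><item id="1"/></root>')  # -> Element 'root', or None on bad xml
#     parse_xml('<r xmlns="urn:x"><i/></r>', del_xmlns=True).find('i')
#         # namespace stripped first, so plain tag lookups work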


def backupVersionedFile(old_file, version):
    num_tries = 0

    new_file = '%s.v%s' % (old_file, version)

    while not ek.ek(os.path.isfile, new_file):
        if not ek.ek(os.path.isfile, old_file) or 0 == get_size(old_file):
            logger.log(u'No need to create backup', logger.DEBUG)
            break

        try:
            logger.log(u'Trying to back up %s to %s' % (old_file, new_file), logger.DEBUG)
            shutil.copy(old_file, new_file)
            logger.log(u'Backup done', logger.DEBUG)
            break
        except Exception as e:
            logger.log(u'Error while trying to back up %s to %s : %s' % (old_file, new_file, ex(e)), logger.WARNING)
            num_tries += 1
            time.sleep(3)
            logger.log(u'Trying again.', logger.DEBUG)

        if 3 <= num_tries:
            logger.log(u'Unable to back up %s to %s please do it manually.' % (old_file, new_file), logger.ERROR)
            return False

    return True


def restoreVersionedFile(backup_file, version):
    numTries = 0

    new_file, backup_version = ek.ek(os.path.splitext, backup_file)
    restore_file = new_file + '.' + 'v' + str(version)

    if not ek.ek(os.path.isfile, new_file):
        logger.log(u"Not restoring, " + new_file + " doesn't exist", logger.DEBUG)
        return False

    try:
        logger.log(
            u"Trying to backup " + new_file + " to " + new_file + "." + "r" + str(version) + " before restoring backup",
            logger.DEBUG)
        shutil.move(new_file, new_file + '.' + 'r' + str(version))
    except Exception as e:
        logger.log(
            u"Error while trying to backup DB file " + restore_file + " before proceeding with restore: " + ex(e),
            logger.WARNING)
        return False

    while not ek.ek(os.path.isfile, new_file):
        if not ek.ek(os.path.isfile, restore_file):
            logger.log(u"Not restoring, " + restore_file + " doesn't exist", logger.DEBUG)
            break

        try:
            logger.log(u"Trying to restore " + restore_file + " to " + new_file, logger.DEBUG)
            shutil.copy(restore_file, new_file)
            logger.log(u"Restore done", logger.DEBUG)
            break
        except Exception as e:
            logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING)
            numTries += 1
            time.sleep(1)
            logger.log(u"Trying again.", logger.DEBUG)

        if numTries >= 10:
            logger.log(u"Unable to restore " + restore_file + " to " + new_file + " please do it manually.",
                       logger.ERROR)
            return False

    return True


# try to convert to int, if it fails the default will be returned
def tryInt(s, s_default=0):
    try:
        return int(s)
    except (StandardError, Exception):
        return s_default


# try to convert to float, return default on failure
def tryFloat(s, s_default=0.0):
    try:
        return float(s)
    except (StandardError, Exception):
        return float(s_default)
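
# For example:
#     tryInt('8')          # -> 8
#     tryInt('8x', 0)      # -> 0
#     tryFloat('1.5')      # -> 1.5
#     tryFloat(None, 2.0)  # -> 2.0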


# generates a md5 hash of a file
def md5_for_file(filename, block_size=2 ** 16):
    try:
        with open(filename, 'rb') as f:
            md5 = hashlib.md5()
            while True:
                data = f.read(block_size)
                if not data:
                    break
                md5.update(data)
            return md5.hexdigest()
    except Exception:
        return None
def md5_for_text(text):
|
|
|
|
|
result = None
|
|
|
|
|
try:
|
|
|
|
|
md5 = hashlib.md5()
|
|
|
|
|
md5.update(str(text))
|
|
|
|
|
raw_md5 = md5.hexdigest()
|
|
|
|
|
result = raw_md5[17:] + raw_md5[9:17] + raw_md5[0:9]
|
|
|
|
|
except (StandardError, Exception):
|
|
|
|
|
pass
|
|
|
|
|
return result
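# Usage sketch (illustrative, not from the original source): the 32-char md5
# hexdigest is returned with its segments rearranged as chars 17..31 + 9..16 + 0..8,
# or None on failure, e.g.
#   scrambled = md5_for_text('SickGear')
#   assert scrambled is None or 32 == len(scrambled)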
|
|
|
|
|
|
|
|
|
|
|
2014-08-09 00:19:29 +00:00
|
|
|
|
def get_lan_ip():
|
|
|
|
|
"""
|
2014-11-23 20:45:50 +00:00
|
|
|
|
Simple function to get the LAN IP of the local host
|
2014-08-09 00:19:29 +00:00
|
|
|
|
http://stackoverflow.com/questions/11735821/python-get-localhost-ip
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
if os.name != "nt":
|
|
|
|
|
import fcntl
|
|
|
|
|
import struct
|
|
|
|
|
|
|
|
|
|
def get_interface_ip(ifname):
|
|
|
|
|
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
|
|
|
|
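# 0x8915 == SIOCGIFADDR: ask the kernel for this interface's IPv4 address via ioctl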
return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s',
|
|
|
|
|
ifname[:15]))[20:24])
|
|
|
|
|
|
|
|
|
|
ip = socket.gethostbyname(socket.gethostname())
|
|
|
|
|
if ip.startswith("127.") and os.name != "nt":
|
|
|
|
|
interfaces = [
|
|
|
|
|
"eth0",
|
|
|
|
|
"eth1",
|
|
|
|
|
"eth2",
|
|
|
|
|
"wlan0",
|
|
|
|
|
"wlan1",
|
|
|
|
|
"wifi0",
|
|
|
|
|
"ath0",
|
|
|
|
|
"ath1",
|
|
|
|
|
"ppp0",
|
|
|
|
|
]
|
|
|
|
|
for ifname in interfaces:
|
|
|
|
|
try:
|
|
|
|
|
ip = get_interface_ip(ifname)
|
2015-06-09 11:13:00 +00:00
|
|
|
|
print(ifname, ip)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
break
|
|
|
|
|
except IOError:
|
|
|
|
|
pass
|
|
|
|
|
return ip
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def check_url(url):
|
|
|
|
|
"""
|
|
|
|
|
Check if a URL exists without downloading the whole file.
|
|
|
|
|
"""
|
|
|
|
|
try:
|
2015-06-13 11:26:09 +00:00
|
|
|
|
return requests.head(url).ok
|
|
|
|
|
except (StandardError, Exception):
|
|
|
|
|
return False
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
|
2014-11-09 02:49:38 +00:00
|
|
|
|
def anon_url(*url):
|
|
|
|
|
"""
|
|
|
|
|
Return a URL string consisting of the Anonymous redirect URL and an arbitrary number of values appended.
|
|
|
|
|
"""
|
|
|
|
|
return '' if None in url else '%s%s' % (sickbeard.ANON_REDIRECT, ''.join(str(s) for s in url))
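# Usage sketch (example.com is an illustrative value): parts are joined, with no
# separator, onto sickbeard.ANON_REDIRECT; any None part voids the whole result.
#   anon_url('https://example.com/', 'path')  # -> ANON_REDIRECT + 'https://example.com/path'
#   anon_url('https://example.com/', None)    # -> ''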
|
|
|
|
|
|
|
|
|
|
|
2015-02-24 13:37:27 +00:00
|
|
|
|
def starify(text, verify=False):
|
|
|
|
|
"""
|
|
|
|
|
Return text input string with either its latter half or its centre area (if 12 chars or more)
|
|
|
|
|
replaced with asterisks. Useful for securely presenting api keys to a ui.
|
|
|
|
|
|
|
|
|
|
If verify is true, return true if text is a star block created text else return false.
|
|
|
|
|
"""
|
2015-06-19 23:34:56 +00:00
|
|
|
|
return '' if not text\
|
|
|
|
|
else ((('%s%s' % (text[:len(text) / 2], '*' * (len(text) / 2))),
|
|
|
|
|
('%s%s%s' % (text[:4], '*' * (len(text) - 8), text[-4:])))[12 <= len(text)],
|
|
|
|
|
set('*') == set((text[len(text) / 2:], text[4:-4])[12 <= len(text)]))[verify]
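# Behaviour sketch (illustrative values):
#   starify('0123456789abcdef')        # -> '0123********cdef' (12+ chars keep both ends)
#   starify('abcdef')                  # -> 'abc***' (latter half starred)
#   starify('0123********cdef', True)  # -> True (text is a star block)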
|
2015-02-24 13:37:27 +00:00
|
|
|
|
|
2015-05-09 12:37:50 +00:00
|
|
|
|
|
2014-08-09 00:19:29 +00:00
|
|
|
|
"""
|
|
|
|
|
Encryption
|
|
|
|
|
==========
|
|
|
|
|
By Pedro Jose Pereira Vieito <pvieito@gmail.com> (@pvieito)
|
|
|
|
|
|
|
|
|
|
* If encryption_version==0 then return data without encryption
|
|
|
|
|
* The keys should be unique for each device
|
|
|
|
|
|
|
|
|
|
To add a new encryption_version:
|
2014-11-23 20:45:50 +00:00
|
|
|
|
1) Code your new encryption_version
|
2014-08-09 00:19:29 +00:00
|
|
|
|
2) Update the last encryption_version available in webserve.py
|
|
|
|
|
3) Remember to maintain old encryption versions and key generators for backwards compatibility
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
# Key Generators
|
|
|
|
|
unique_key1 = hex(uuid.getnode() ** 2) # Used in encryption v1
|
|
|
|
|
|
2015-03-16 15:13:20 +00:00
|
|
|
|
|
2014-08-09 00:19:29 +00:00
|
|
|
|
# Encryption Functions
|
|
|
|
|
def encrypt(data, encryption_version=0, decrypt=False):
|
|
|
|
|
# Version 1: Simple XOR encryption (this is not very secure, but works)
|
|
|
|
|
if encryption_version == 1:
|
|
|
|
|
if decrypt:
|
|
|
|
|
return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
|
|
|
|
|
else:
|
|
|
|
|
return base64.encodestring(
|
|
|
|
|
''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()
|
|
|
|
|
# Version 0: Plain text
|
|
|
|
|
else:
|
|
|
|
|
return data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def decrypt(data, encryption_version=0):
|
|
|
|
|
return encrypt(data, encryption_version, decrypt=True)
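# Round-trip sketch (illustrative): v1 XORs the input with the device-derived
# unique_key1 and base64 encodes it, so decryption is symmetric; v0 is a no-op.
#   token = encrypt('secret', encryption_version=1)
#   assert 'secret' == decrypt(token, encryption_version=1)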
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def full_sanitizeSceneName(name):
|
|
|
|
|
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()
|
|
|
|
|
|
|
|
|
|
|
2016-02-07 17:57:48 +00:00
|
|
|
|
def get_show(name, try_scene_exceptions=False, use_cache=True):
|
2015-03-10 00:26:46 +00:00
|
|
|
|
if not sickbeard.showList or None is name:
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return
|
|
|
|
|
|
2015-05-08 02:46:54 +00:00
|
|
|
|
show_obj = None
|
|
|
|
|
from_cache = False
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
cache = sickbeard.name_cache.retrieveNameFromCache(name)
|
|
|
|
|
if cache:
|
2015-05-08 02:46:54 +00:00
|
|
|
|
from_cache = True
|
|
|
|
|
show_obj = findCertainShow(sickbeard.showList, cache)
|
|
|
|
|
|
|
|
|
|
if not show_obj and try_scene_exceptions:
|
|
|
|
|
indexer_id = sickbeard.scene_exceptions.get_scene_exception_by_name(name)[0]
|
|
|
|
|
if indexer_id:
|
|
|
|
|
show_obj = findCertainShow(sickbeard.showList, indexer_id)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
# add show to cache
|
2016-02-07 17:57:48 +00:00
|
|
|
|
if use_cache and show_obj and not from_cache:
|
2015-05-08 02:46:54 +00:00
|
|
|
|
sickbeard.name_cache.addNameToCache(name, show_obj.indexerid)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
except Exception as e:
|
2015-05-08 02:46:54 +00:00
|
|
|
|
logger.log(u'Error when attempting to find show: ' + name + ' in SickGear: ' + str(e), logger.DEBUG)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2015-05-08 02:46:54 +00:00
|
|
|
|
return show_obj
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_hidden_folder(folder):
|
|
|
|
|
"""
|
|
|
|
|
Returns True if folder is hidden.
|
|
|
|
|
On Linux-based systems hidden folders start with . (dot)
|
|
|
|
|
folder: Full path of folder to check
|
|
|
|
|
"""
|
|
|
|
|
if ek.ek(os.path.isdir, folder):
|
|
|
|
|
if ek.ek(os.path.basename, folder).startswith('.'):
|
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def real_path(path):
|
|
|
|
|
"""
|
|
|
|
|
Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
|
|
|
|
|
"""
|
|
|
|
|
return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, path)))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def validateShow(show, season=None, episode=None):
|
|
|
|
|
indexer_lang = show.lang
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
lINDEXER_API_PARMS = sickbeard.indexerApi(show.indexer).api_params.copy()
|
2016-02-28 23:43:40 +00:00
|
|
|
|
lINDEXER_API_PARMS['dvdorder'] = 0 != show.dvdorder
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
if indexer_lang and not indexer_lang == 'en':
|
|
|
|
|
lINDEXER_API_PARMS['language'] = indexer_lang
|
|
|
|
|
|
|
|
|
|
t = sickbeard.indexerApi(show.indexer).indexer(**lINDEXER_API_PARMS)
|
|
|
|
|
if season is None and episode is None:
|
|
|
|
|
return t
|
|
|
|
|
|
|
|
|
|
return t[show.indexerid][season][episode]
|
2016-09-22 11:43:21 +00:00
|
|
|
|
except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound, TypeError):
|
2014-08-09 00:19:29 +00:00
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def set_up_anidb_connection():
|
|
|
|
|
if not sickbeard.USE_ANIDB:
|
2015-02-16 03:17:56 +00:00
|
|
|
|
logger.log(u'Usage of anidb disabled. Skipping', logger.DEBUG)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
if not sickbeard.ANIDB_USERNAME and not sickbeard.ANIDB_PASSWORD:
|
2015-02-16 03:17:56 +00:00
|
|
|
|
logger.log(u'anidb username and/or password are not set. Aborting anidb lookup.', logger.DEBUG)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
if not sickbeard.ADBA_CONNECTION:
|
2015-02-16 03:17:56 +00:00
|
|
|
|
anidb_logger = lambda x: logger.log('ANIDB: ' + str(x), logger.DEBUG)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
sickbeard.ADBA_CONNECTION = adba.Connection(keepAlive=True, log=anidb_logger)
|
|
|
|
|
|
2015-02-16 03:17:56 +00:00
|
|
|
|
auth = False
|
|
|
|
|
try:
|
|
|
|
|
auth = sickbeard.ADBA_CONNECTION.authed()
|
2015-06-08 12:47:01 +00:00
|
|
|
|
except Exception as e:
|
2015-02-16 03:17:56 +00:00
|
|
|
|
logger.log(u'exception msg: ' + str(e))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if not auth:
|
2014-08-09 00:19:29 +00:00
|
|
|
|
try:
|
|
|
|
|
sickbeard.ADBA_CONNECTION.auth(sickbeard.ANIDB_USERNAME, sickbeard.ANIDB_PASSWORD)
|
2015-06-08 12:47:01 +00:00
|
|
|
|
except Exception as e:
|
2015-02-16 03:17:56 +00:00
|
|
|
|
logger.log(u'exception msg: ' + str(e))
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return False
|
|
|
|
|
else:
|
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
return sickbeard.ADBA_CONNECTION.authed()
|
|
|
|
|
|
|
|
|
|
|
2017-08-20 23:11:54 +00:00
|
|
|
|
def touch_file(fname, atime=None):
|
|
|
|
|
if None is not atime:
|
2014-08-09 00:19:29 +00:00
|
|
|
|
try:
|
2015-06-14 03:49:23 +00:00
|
|
|
|
with open(fname, 'a'):
|
2016-11-14 21:33:15 +00:00
|
|
|
|
ek.ek(os.utime, fname, (atime, atime))
|
2017-08-20 23:11:54 +00:00
|
|
|
|
return True
|
|
|
|
|
except (StandardError, Exception):
|
|
|
|
|
logger.log('File air date stamping not available on your OS', logger.DEBUG)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _getTempDir():
|
|
|
|
|
"""Returns the [system temp dir]/tvdb_api-u501 (or
|
|
|
|
|
tvdb_api-myuser)
|
|
|
|
|
"""
|
|
|
|
|
if hasattr(os, 'getuid'):
|
|
|
|
|
uid = "u%d" % (os.getuid())
|
|
|
|
|
else:
|
|
|
|
|
# For Windows
|
|
|
|
|
try:
|
|
|
|
|
uid = getpass.getuser()
|
|
|
|
|
except ImportError:
|
2016-11-14 21:33:15 +00:00
|
|
|
|
return ek.ek(os.path.join, tempfile.gettempdir(), "SickGear")
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2016-11-14 21:33:15 +00:00
|
|
|
|
return ek.ek(os.path.join, tempfile.gettempdir(), "SickGear-%s" % (uid))
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2015-02-10 04:57:44 +00:00
|
|
|
|
|
|
|
|
|
def proxy_setting(proxy_setting, request_url, force=False):
|
|
|
|
|
"""
|
|
|
|
|
Returns a tuple of a) the proxy_setting address value, or an address parsed from PAC data that is fetched when
|
|
|
|
|
proxy_setting starts with "PAC:" (case-insensitive), and b) True/False whether "PAC" is found in proxy_setting.
|
|
|
|
|
|
|
|
|
|
The PAC data parser is crude; javascript is not eval'd. The first "PROXY URL" found is extracted from a list
|
|
|
|
|
of "url_a_part.url_remaining", "url_b_part.url_remaining", "url_n_part.url_remaining" and so on.
|
|
|
|
|
Also, PAC data items are escaped for matching therefore regular expression items will not match a request_url.
|
|
|
|
|
|
|
|
|
|
If force is True or request_url contains a PAC parsed data item then the PAC proxy address is returned else False.
|
|
|
|
|
None is returned in the event of an error fetching PAC data.
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
# check for "PAC" usage
|
|
|
|
|
match = re.search(r'^\s*PAC:\s*(.*)', proxy_setting, re.I)
|
|
|
|
|
if not match:
|
|
|
|
|
return proxy_setting, False
|
|
|
|
|
pac_url = match.group(1)
|
|
|
|
|
|
|
|
|
|
# prevent a recursive test with existing proxy setting when fetching PAC url
|
|
|
|
|
proxy_setting_backup = sickbeard.PROXY_SETTING
|
|
|
|
|
sickbeard.PROXY_SETTING = ''
|
|
|
|
|
|
|
|
|
|
resp = ''
|
|
|
|
|
try:
|
|
|
|
|
resp = getURL(pac_url)
|
|
|
|
|
except:
|
|
|
|
|
pass
|
|
|
|
|
sickbeard.PROXY_SETTING = proxy_setting_backup
|
|
|
|
|
|
|
|
|
|
if not resp:
|
|
|
|
|
return None, False
|
|
|
|
|
|
|
|
|
|
proxy_address = None
|
|
|
|
|
request_url_match = False
|
2015-02-10 22:09:25 +00:00
|
|
|
|
parsed_url = urlparse.urlparse(request_url)
|
|
|
|
|
netloc = (parsed_url.path, parsed_url.netloc)['' != parsed_url.netloc]
|
2015-02-10 04:57:44 +00:00
|
|
|
|
for pac_data in re.finditer(r"""(?:[^'"]*['"])([^\.]+\.[^'"]*)(?:['"])""", resp, re.I):
|
|
|
|
|
data = re.search(r"""PROXY\s+([^'"]+)""", pac_data.group(1), re.I)
|
|
|
|
|
if data:
|
|
|
|
|
if force:
|
|
|
|
|
return data.group(1), True
|
|
|
|
|
proxy_address = (proxy_address, data.group(1))[None is proxy_address]
|
2015-02-10 22:09:25 +00:00
|
|
|
|
elif re.search(re.escape(pac_data.group(1)), netloc, re.I):
|
2015-02-10 04:57:44 +00:00
|
|
|
|
request_url_match = True
|
|
|
|
|
if None is not proxy_address:
|
|
|
|
|
break
|
|
|
|
|
|
|
|
|
|
if None is proxy_address:
|
|
|
|
|
return None, True
|
|
|
|
|
|
|
|
|
|
return (False, proxy_address)[request_url_match], True
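# PAC sketch (all values illustrative): with proxy_setting 'PAC:http://host/proxy.pac'
# whose data contains 'if (dnsDomainIs(host, ".example.com")) return "PROXY 10.0.0.1:8118";',
# a request_url under example.com yields ('10.0.0.1:8118', True), a non-matching
# request_url yields (False, True), and a failed PAC fetch yields (None, False).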
|
|
|
|
|
|
|
|
|
|
|
2017-07-19 14:58:03 +00:00
|
|
|
|
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False,
|
|
|
|
|
raise_status_code=False, raise_exceptions=False, **kwargs):
|
2014-08-09 00:19:29 +00:00
|
|
|
|
"""
|
2017-06-23 22:13:58 +00:00
|
|
|
|
Either
|
|
|
|
|
1) Return a byte-string retrieved from the url provider.
|
|
|
|
|
2) Return True/False for success after using kwarg 'savename' set to a file pathname.
|
2014-08-09 00:19:29 +00:00
|
|
|
|
"""
|
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
# selectively mute some errors
|
|
|
|
|
mute = []
|
|
|
|
|
for muted in filter(
|
|
|
|
|
lambda x: kwargs.get(x, False), ['mute_connect_err', 'mute_read_timeout', 'mute_connect_timeout']):
|
|
|
|
|
mute += [muted]
|
|
|
|
|
del kwargs[muted]
|
|
|
|
|
|
|
|
|
|
# reuse or instantiate request session
|
2015-04-28 17:32:37 +00:00
|
|
|
|
if None is session:
|
2017-02-17 03:16:51 +00:00
|
|
|
|
session = CloudflareScraper.create_scraper()
|
2018-01-15 17:35:27 +00:00
|
|
|
|
session.headers.update({'User-Agent': USER_AGENT})
|
2016-04-19 22:28:44 +00:00
|
|
|
|
|
2017-08-27 16:33:32 +00:00
|
|
|
|
# download and save file or simply fetch url
|
|
|
|
|
savename = None
|
|
|
|
|
if 'savename' in kwargs:
|
|
|
|
|
# session streaming
|
|
|
|
|
session.stream = True
|
|
|
|
|
savename = kwargs.pop('savename')
|
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
if 'nocache' in kwargs:
|
|
|
|
|
del kwargs['nocache']
|
|
|
|
|
else:
|
2016-04-19 22:28:44 +00:00
|
|
|
|
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
|
2016-11-14 21:33:15 +00:00
|
|
|
|
session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
# session master headers
|
|
|
|
|
req_headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
2018-01-15 17:35:27 +00:00
|
|
|
|
'Accept-Encoding': 'gzip,deflate'}
|
2014-08-09 00:19:29 +00:00
|
|
|
|
if headers:
|
|
|
|
|
req_headers.update(headers)
|
2017-02-17 03:16:51 +00:00
|
|
|
|
if hasattr(session, 'reserved') and 'headers' in session.reserved:
|
|
|
|
|
req_headers.update(session.reserved['headers'] or {})
|
2014-08-09 00:19:29 +00:00
|
|
|
|
session.headers.update(req_headers)
|
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
# session parameters
|
|
|
|
|
session.params = params
|
2016-02-19 17:38:38 +00:00
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
# session ssl verify
|
2014-08-09 00:19:29 +00:00
|
|
|
|
session.verify = False
|
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
response = None
|
2014-08-09 00:19:29 +00:00
|
|
|
|
try:
|
2017-06-23 22:13:58 +00:00
|
|
|
|
# sanitise url
|
2014-08-09 00:19:29 +00:00
|
|
|
|
parsed = list(urlparse.urlparse(url))
|
2017-06-23 22:13:58 +00:00
|
|
|
|
parsed[2] = re.sub('/{2,}', '/', parsed[2]) # replace two or more / with one
|
2014-08-09 00:19:29 +00:00
|
|
|
|
url = urlparse.urlunparse(parsed)
|
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
# session proxies
|
2014-08-09 00:19:29 +00:00
|
|
|
|
if sickbeard.PROXY_SETTING:
|
2015-02-10 04:57:44 +00:00
|
|
|
|
(proxy_address, pac_found) = proxy_setting(sickbeard.PROXY_SETTING, url)
|
|
|
|
|
msg = '%sproxy for url: %s' % (('', 'PAC parsed ')[pac_found], url)
|
|
|
|
|
if None is proxy_address:
|
|
|
|
|
logger.log('Proxy error, aborted the request using %s' % msg, logger.DEBUG)
|
|
|
|
|
return
|
|
|
|
|
elif proxy_address:
|
|
|
|
|
logger.log('Using %s' % msg, logger.DEBUG)
|
2017-06-23 22:13:58 +00:00
|
|
|
|
session.proxies = {'http': proxy_address, 'https': proxy_address}
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2014-08-09 00:45:21 +00:00
|
|
|
|
# decide if we get or post data to server
|
2015-11-20 22:52:19 +00:00
|
|
|
|
if 'post_json' in kwargs:
|
2017-06-23 22:13:58 +00:00
|
|
|
|
kwargs.setdefault('json', kwargs.pop('post_json'))
|
|
|
|
|
|
2014-08-09 00:45:21 +00:00
|
|
|
|
if post_data:
|
2015-11-20 22:52:19 +00:00
|
|
|
|
kwargs.setdefault('data', post_data)
|
2017-06-23 22:13:58 +00:00
|
|
|
|
|
2015-11-20 22:52:19 +00:00
|
|
|
|
if 'data' in kwargs or 'json' in kwargs:
|
2017-06-23 22:13:58 +00:00
|
|
|
|
response = session.post(url, timeout=timeout, **kwargs)
|
2014-08-09 00:45:21 +00:00
|
|
|
|
else:
|
2017-06-23 22:13:58 +00:00
|
|
|
|
response = session.get(url, timeout=timeout, **kwargs)
|
|
|
|
|
if response.ok and not response.content and 'url=' in response.headers.get('Refresh', '').lower():
|
|
|
|
|
url = response.headers.get('Refresh').lower().split('url=')[1].strip('/')
|
2016-06-13 23:10:43 +00:00
|
|
|
|
if not url.startswith('http'):
|
|
|
|
|
parsed[2] = '/%s' % url
|
|
|
|
|
url = urlparse.urlunparse(parsed)
|
2017-06-23 22:13:58 +00:00
|
|
|
|
response = session.get(url, timeout=timeout, **kwargs)
|
2014-08-09 00:45:21 +00:00
|
|
|
|
|
2016-09-04 20:00:44 +00:00
|
|
|
|
if raise_status_code:
|
2017-06-23 22:13:58 +00:00
|
|
|
|
response.raise_for_status()
|
|
|
|
|
|
|
|
|
|
if not response.ok:
|
|
|
|
|
http_err_text = 'CloudFlare Ray ID' in response.content and \
|
|
|
|
|
'CloudFlare reports, "Website is offline"; ' or ''
|
|
|
|
|
if response.status_code in clients.http_error_code:
|
|
|
|
|
http_err_text += clients.http_error_code[response.status_code]
|
|
|
|
|
elif response.status_code in range(520, 527):
|
2016-08-10 10:37:34 +00:00
|
|
|
|
http_err_text += 'Origin server connection failure'
|
2015-06-04 00:30:42 +00:00
|
|
|
|
else:
|
|
|
|
|
http_err_text = 'Custom HTTP error code'
|
2016-08-10 10:37:34 +00:00
|
|
|
|
logger.log(u'Response not ok. %s: %s from requested url %s'
|
2017-06-23 22:13:58 +00:00
|
|
|
|
% (response.status_code, http_err_text, url), logger.DEBUG)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return
|
|
|
|
|
|
2015-06-08 12:47:01 +00:00
|
|
|
|
except requests.exceptions.HTTPError as e:
|
2016-09-04 20:00:44 +00:00
|
|
|
|
if raise_status_code:
|
2017-06-23 22:13:58 +00:00
|
|
|
|
response.raise_for_status()
|
2016-06-10 00:28:37 +00:00
|
|
|
|
logger.log(u'HTTP error %s while loading URL%s' % (
|
|
|
|
|
e.errno, _maybe_request_url(e)), logger.WARNING)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return
|
2015-06-08 12:47:01 +00:00
|
|
|
|
except requests.exceptions.ConnectionError as e:
|
2016-10-18 21:55:17 +00:00
|
|
|
|
if 'mute_connect_err' not in mute:
|
2016-06-10 00:28:37 +00:00
|
|
|
|
logger.log(u'Connection error msg:%s while loading URL%s' % (
|
|
|
|
|
e.message, _maybe_request_url(e)), logger.WARNING)
|
2017-07-19 14:58:03 +00:00
|
|
|
|
if raise_exceptions:
|
|
|
|
|
raise e
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return
|
2015-06-08 12:47:01 +00:00
|
|
|
|
except requests.exceptions.ReadTimeout as e:
|
2016-10-18 21:55:17 +00:00
|
|
|
|
if 'mute_read_timeout' not in mute:
|
|
|
|
|
logger.log(u'Read timed out msg:%s while loading URL%s' % (
|
|
|
|
|
e.message, _maybe_request_url(e)), logger.WARNING)
|
2017-07-19 14:58:03 +00:00
|
|
|
|
if raise_exceptions:
|
|
|
|
|
raise e
|
2015-02-24 13:37:27 +00:00
|
|
|
|
return
|
2015-11-12 00:18:19 +00:00
|
|
|
|
except (requests.exceptions.Timeout, socket.timeout) as e:
|
2016-10-18 21:55:17 +00:00
|
|
|
|
if 'mute_connect_timeout' not in mute:
|
|
|
|
|
logger.log(u'Connection timed out msg:%s while loading URL %s' % (
|
|
|
|
|
e.message, _maybe_request_url(e, url)), logger.WARNING)
|
2017-07-19 14:58:03 +00:00
|
|
|
|
if raise_exceptions:
|
|
|
|
|
raise e
|
2015-06-19 23:34:56 +00:00
|
|
|
|
return
|
2015-09-18 00:06:34 +00:00
|
|
|
|
except Exception as e:
|
|
|
|
|
if e.message:
|
2015-11-20 22:52:19 +00:00
|
|
|
|
logger.log(u'Exception caught while loading URL %s\r\nDetail... %s\r\n%s'
|
|
|
|
|
% (url, e.message, traceback.format_exc()), logger.WARNING)
|
2015-09-18 00:06:34 +00:00
|
|
|
|
else:
|
2015-11-20 22:52:19 +00:00
|
|
|
|
logger.log(u'Unknown exception while loading URL %s\r\nDetail... %s'
|
|
|
|
|
% (url, traceback.format_exc()), logger.WARNING)
|
2017-07-19 14:58:03 +00:00
|
|
|
|
if raise_exceptions:
|
|
|
|
|
raise e
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return
|
|
|
|
|
|
|
|
|
|
if json:
|
2015-11-20 22:52:19 +00:00
|
|
|
|
try:
|
2017-06-23 22:13:58 +00:00
|
|
|
|
data_json = response.json()
|
2017-02-17 14:58:04 +00:00
|
|
|
|
return ({}, data_json)[isinstance(data_json, (dict, list))]
|
2015-11-20 22:52:19 +00:00
|
|
|
|
except (TypeError, Exception) as e:
|
|
|
|
|
logger.log(u'JSON data issue from URL %s\r\nDetail... %s' % (url, e.message), logger.WARNING)
|
2017-07-19 14:58:03 +00:00
|
|
|
|
if raise_exceptions:
|
|
|
|
|
raise e
|
2015-11-20 22:52:19 +00:00
|
|
|
|
return None
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
if savename:
|
|
|
|
|
try:
|
|
|
|
|
with open(savename, 'wb') as fp:
|
|
|
|
|
for chunk in response.iter_content(chunk_size=1024):
|
|
|
|
|
if chunk:
|
|
|
|
|
fp.write(chunk)
|
|
|
|
|
fp.flush()
|
|
|
|
|
ek.ek(os.fsync, fp.fileno())
|
2016-06-10 00:28:37 +00:00
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
chmodAsParent(savename)
|
2016-06-10 00:28:37 +00:00
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
except EnvironmentError as e:
|
|
|
|
|
logger.log(u'Unable to save the file: ' + ex(e), logger.ERROR)
|
|
|
|
|
if raise_exceptions:
|
|
|
|
|
raise e
|
|
|
|
|
return
|
|
|
|
|
return True
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
return response.content
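# Typical calls (urls are illustrative):
#   html = getURL('https://example.com/page')                     # byte-string, or None on failure
#   data = getURL('https://example.com/api', json=True)           # parsed json ({} if not a dict/list)
#   done = getURL('https://example.com/a.zip', savename='a.zip')  # True/False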
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
def _maybe_request_url(e, def_url=''):
|
|
|
|
|
return hasattr(e, 'request') and hasattr(e.request, 'url') and ' ' + e.request.url or def_url
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
def download_file(url, filename, session=None, **kwargs):
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
2017-06-23 22:13:58 +00:00
|
|
|
|
if None is getURL(url, session=session, savename=filename, **kwargs):
|
2016-11-14 21:33:15 +00:00
|
|
|
|
remove_file_failed(filename)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
return False
|
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def clearCache(force=False):
|
|
|
|
|
|
|
|
|
|
# clean out cache directory, remove everything > 12 hours old
|
|
|
|
|
if sickbeard.CACHE_DIR:
|
2016-10-08 22:37:27 +00:00
|
|
|
|
logger.log(u'Trying to clean cache folder %s' % sickbeard.CACHE_DIR)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
|
|
|
|
|
# Does our cache_dir exist
|
|
|
|
|
if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
|
2016-10-08 22:37:27 +00:00
|
|
|
|
logger.log(u'Skipping clean of non-existing folder: %s' % sickbeard.CACHE_DIR, logger.WARNING)
|
2014-08-09 00:19:29 +00:00
|
|
|
|
else:
|
2016-10-08 22:37:27 +00:00
|
|
|
|
exclude = ['rss', 'images', 'zoneinfo']
|
|
|
|
|
del_time = time.mktime((datetime.datetime.now() - datetime.timedelta(hours=12)).timetuple())
|
|
|
|
|
for f in scantree(sickbeard.CACHE_DIR, exclude, follow_symlinks=True):
|
|
|
|
|
if f.is_file(follow_symlinks=False) and (force or del_time > f.stat(follow_symlinks=False).st_mtime):
|
|
|
|
|
try:
|
|
|
|
|
ek.ek(os.remove, f.path)
|
|
|
|
|
except OSError as e:
|
|
|
|
|
logger.log('Unable to delete %s: %r / %s' % (f.path, e, str(e)), logger.WARNING)
|
|
|
|
|
elif f.is_dir(follow_symlinks=False) and f.name not in ['cheetah', 'sessions', 'indexers']:
|
|
|
|
|
try:
|
|
|
|
|
ek.ek(os.rmdir, f.path)
|
|
|
|
|
except OSError:
|
|
|
|
|
pass
|
2014-10-02 08:37:08 +00:00
|
|
|
|
|
2015-03-16 15:13:20 +00:00
|
|
|
|
|
2014-10-02 08:37:08 +00:00
|
|
|
|
def human(size):
|
|
|
|
|
"""
|
|
|
|
|
format a size in bytes into a 'human' file size, e.g. bytes, KB, MB, GB, TB, PB
|
|
|
|
|
Note that bytes/KB will be reported in whole numbers but MB and above will have greater precision
|
|
|
|
|
e.g. 1 byte, 43 bytes, 443 KB, 4.3 MB, 4.43 GB, etc
|
|
|
|
|
"""
|
|
|
|
|
if size == 1:
|
|
|
|
|
# because I really hate unnecessary plurals
|
|
|
|
|
return "1 byte"
|
|
|
|
|
|
2015-05-09 12:37:50 +00:00
|
|
|
|
suffixes_table = [('bytes', 0), ('KB', 0), ('MB', 1), ('GB', 2), ('TB', 2), ('PB', 2)]
|
2014-10-02 08:37:08 +00:00
|
|
|
|
|
|
|
|
|
num = float(size)
|
|
|
|
|
for suffix, precision in suffixes_table:
|
|
|
|
|
if num < 1024.0:
|
|
|
|
|
break
|
|
|
|
|
num /= 1024.0
|
|
|
|
|
|
|
|
|
|
if precision == 0:
|
|
|
|
|
formatted_size = "%d" % num
|
|
|
|
|
else:
|
|
|
|
|
formatted_size = str(round(num, ndigits=precision))
|
|
|
|
|
|
|
|
|
|
return "%s %s" % (formatted_size, suffix)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_size(start_path='.'):
|
2016-01-07 13:15:52 +00:00
|
|
|
|
if ek.ek(os.path.isfile, start_path):
|
|
|
|
|
return ek.ek(os.path.getsize, start_path)
|
2017-10-31 00:30:58 +00:00
|
|
|
|
try:
|
|
|
|
|
return sum(map((lambda x: x.stat(follow_symlinks=False).st_size), scantree(start_path)))
|
|
|
|
|
except OSError:
|
|
|
|
|
return 0
|
2014-10-02 08:37:08 +00:00
|
|
|
|
|
2014-12-13 05:04:21 +00:00
|
|
|
|
|
2014-11-27 03:30:00 +00:00
|
|
|
|
def remove_article(text=''):
|
2014-12-13 05:04:21 +00:00
|
|
|
|
return re.sub(r'(?i)^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text)
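# Examples (illustrative): leading articles are stripped, but 'A to Z' keeps
# its 'A' via the (?!\s+to) lookahead:
#   remove_article('The Office')  # -> 'Office'
#   remove_article('A to Z')      # -> 'A to Z'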
|
|
|
|
|
|
2015-03-12 23:23:32 +00:00
|
|
|
|
|
|
|
|
|
def maybe_plural(number=1):
|
|
|
|
|
return ('s', '')[1 == number]
|
|
|
|
|
|
|
|
|
|
|
2014-11-16 15:00:05 +00:00
|
|
|
|
def build_dict(seq, key):
|
|
|
|
|
return dict((d[key], dict(d, index=index)) for (index, d) in enumerate(seq))
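# Example (illustrative): index a sequence of dicts by a key, recording position:
#   build_dict([{'id': 7}, {'id': 9}], 'id')
#   # -> {7: {'id': 7, 'index': 0}, 9: {'id': 9, 'index': 1}}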
|
2014-12-13 05:04:21 +00:00
|
|
|
|
|
2015-03-16 15:13:20 +00:00
|
|
|
|
|
2014-12-13 05:04:21 +00:00
|
|
|
|
def client_host(server_host):
|
|
|
|
|
'''Extracted from cherrypy libs
|
|
|
|
|
Return the host on which a client can connect to the given listener.'''
|
|
|
|
|
if server_host == '0.0.0.0':
|
|
|
|
|
# 0.0.0.0 is INADDR_ANY, which should answer on localhost.
|
|
|
|
|
return '127.0.0.1'
|
|
|
|
|
if server_host in ('::', '::0', '::0.0.0.0'):
|
|
|
|
|
# :: is IN6ADDR_ANY, which should answer on localhost.
|
|
|
|
|
# ::0 and ::0.0.0.0 are non-canonical but common ways to write
|
|
|
|
|
# IN6ADDR_ANY.
|
|
|
|
|
return '::1'
|
|
|
|
|
return server_host
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def wait_for_free_port(host, port):
|
|
|
|
|
'''Extracted from cherrypy libs
|
|
|
|
|
Wait for the specified port to become free (drop requests).'''
|
|
|
|
|
if not host:
|
|
|
|
|
raise ValueError("Host values of '' or None are not allowed.")
|
|
|
|
|
for trial in range(50):
|
|
|
|
|
try:
|
|
|
|
|
# we are expecting a free port, so reduce the timeout
|
|
|
|
|
check_port(host, port, timeout=0.1)
|
|
|
|
|
except IOError:
|
|
|
|
|
# Give the old server thread time to free the port.
|
|
|
|
|
time.sleep(0.1)
|
|
|
|
|
else:
|
|
|
|
|
return
|
|
|
|
|
|
2015-02-08 02:59:10 +00:00
|
|
|
|
raise IOError("Port %r is not free on %r" % (port, host))
|
2014-12-13 05:04:21 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def check_port(host, port, timeout=1.0):
|
|
|
|
|
'''Extracted from cherrypy libs
|
|
|
|
|
Raise an error if the given port is not free on the given host.'''
|
|
|
|
|
if not host:
|
|
|
|
|
raise ValueError("Host values of '' or None are not allowed.")
|
|
|
|
|
host = client_host(host)
|
|
|
|
|
port = int(port)
|
|
|
|
|
|
|
|
|
|
import socket
|
|
|
|
|
|
|
|
|
|
# AF_INET or AF_INET6 socket
|
|
|
|
|
# Get the correct address family for our host (allows IPv6 addresses)
|
|
|
|
|
try:
|
|
|
|
|
info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
|
|
|
|
|
socket.SOCK_STREAM)
|
|
|
|
|
except socket.gaierror:
|
|
|
|
|
if ':' in host:
|
|
|
|
|
info = [(socket.AF_INET6, socket.SOCK_STREAM, 0, "", (host, port, 0, 0))]
|
|
|
|
|
else:
|
|
|
|
|
info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", (host, port))]
|
|
|
|
|
|
|
|
|
|
for res in info:
|
|
|
|
|
af, socktype, proto, canonname, sa = res
|
|
|
|
|
s = None
|
|
|
|
|
try:
|
|
|
|
|
s = socket.socket(af, socktype, proto)
|
|
|
|
|
# See http://groups.google.com/group/cherrypy-users/
|
|
|
|
|
# browse_frm/thread/bbfe5eb39c904fe0
|
|
|
|
|
s.settimeout(timeout)
|
|
|
|
|
s.connect((host, port))
|
|
|
|
|
s.close()
|
|
|
|
|
raise IOError("Port %s is in use on %s; perhaps the previous "
|
|
|
|
|
"httpserver did not shut down properly." %
|
|
|
|
|
(repr(port), repr(host)))
|
|
|
|
|
except socket.error:
|
|
|
|
|
if s:
|
|
|
|
|
s.close()
|
2015-04-23 10:02:21 +00:00
|
|
|
|
|
2015-05-09 12:37:50 +00:00
|
|
|
|
|
2015-04-23 10:02:21 +00:00
|
|
|
|
def clear_unused_providers():
|
2015-07-13 09:39:20 +00:00
|
|
|
|
providers = [x.cache.providerID for x in sickbeard.providers.sortedProviderList() if x.is_active()]
|
2015-04-23 10:02:21 +00:00
|
|
|
|
|
|
|
|
|
if providers:
|
|
|
|
|
myDB = db.DBConnection('cache.db')
|
2015-05-09 12:37:50 +00:00
|
|
|
|
myDB.action('DELETE FROM provider_cache WHERE provider NOT IN (%s)' % ','.join(['?'] * len(providers)), providers)
|
2015-05-04 19:14:29 +00:00
|
|
|
|
|
2015-09-18 00:06:34 +00:00
|
|
|
|
|
2015-05-04 19:14:29 +00:00
|
|
|
|
def make_search_segment_html_string(segment, max_eps=5):
|
|
|
|
|
seg_str = ''
|
|
|
|
|
if segment and not isinstance(segment, list):
|
|
|
|
|
segment = [segment]
|
|
|
|
|
if segment and len(segment) > max_eps:
|
|
|
|
|
seasons = list(set([x.season for x in segment]))
|
|
|
|
|
seg_str = u'Season' + maybe_plural(len(seasons)) + ': '
|
|
|
|
|
first_run = True
|
|
|
|
|
for x in seasons:
|
|
|
|
|
eps = [str(s.episode) for s in segment if s.season == x]
|
|
|
|
|
ep_c = len(eps)
|
|
|
|
|
seg_str += ('' if first_run else ', ') + str(x) + ' <span title="Episode' + maybe_plural(ep_c) + ': ' + ', '.join(eps) + '">(' + str(ep_c) + ' Ep' + maybe_plural(ep_c) + ')</span>'
|
|
|
|
|
first_run = False
|
|
|
|
|
elif segment:
|
|
|
|
|
episodes = ['S' + str(x.season).zfill(2) + 'E' + str(x.episode).zfill(2) for x in segment]
|
|
|
|
|
seg_str = u'Episode' + maybe_plural(len(episodes)) + ': ' + ', '.join(episodes)
|
|
|
|
|
return seg_str
|
2015-09-18 00:06:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def has_anime():
|
|
|
|
|
return False if not sickbeard.showList else any(filter(lambda show: show.is_anime, sickbeard.showList))
|
2016-02-11 16:25:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def cpu_sleep():
|
|
|
|
|
if cpu_presets[sickbeard.CPU_PRESET]:
|
|
|
|
|
time.sleep(cpu_presets[sickbeard.CPU_PRESET])
|
2016-09-04 20:00:44 +00:00
|
|
|
|
|
|
|
|
|
|
2016-10-08 22:37:27 +00:00
|
|
|
|
def scantree(path, exclude=None, follow_symlinks=False):
|
2016-10-02 01:04:02 +00:00
|
|
|
|
"""Recursively yield DirEntry objects for given directory."""
|
2016-10-08 22:37:27 +00:00
|
|
|
|
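# normalise exclude: None -> [], a single name -> [name], a list passes through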
exclude = (exclude, ([exclude], [])[None is exclude])[not isinstance(exclude, list)]
|
2016-10-02 01:04:02 +00:00
|
|
|
|
for entry in ek.ek(scandir, path):
|
2016-10-08 22:37:27 +00:00
|
|
|
|
if entry.is_dir(follow_symlinks=follow_symlinks):
|
|
|
|
|
if entry.name not in exclude:
|
|
|
|
|
for subentry in scantree(entry.path, exclude, follow_symlinks):
|
|
|
|
|
yield subentry
|
2016-10-02 01:04:02 +00:00
|
|
|
|
yield entry
|
|
|
|
|
else:
|
|
|
|
|
yield entry
|
|
|
|
|
|
|
|
|
|
|
2016-10-03 18:31:54 +00:00
|
|
|
|
def cleanup_cache():
|
|
|
|
|
"""
|
|
|
|
|
Delete old cached files
|
|
|
|
|
"""
|
2017-08-23 17:39:30 +00:00
|
|
|
|
delete_not_changed_in([ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images', 'browse', 'thumb', x) for x in [
|
|
|
|
|
'anidb', 'imdb', 'trakt', 'tvdb']])
|
2016-10-03 18:31:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def delete_not_changed_in(paths, days=30, minutes=0):
|
|
|
|
|
"""
|
|
|
|
|
Delete files under paths not changed in n days and/or n minutes.
|
|
|
|
|
If a file was modified more recently than the given days and/or minutes, it is not deleted.
|
|
|
|
|
|
2016-10-08 22:37:27 +00:00
|
|
|
|
:param paths: Path(s) to scan for files to delete
|
|
|
|
|
:type paths: String or List of strings
|
2016-10-03 18:31:54 +00:00
|
|
|
|
:param days: Purge files not modified in this number of days (default: 30 days)
|
|
|
|
|
:param minutes: Purge files not modified in this number of minutes (default: 0 minutes)
|
|
|
|
|
:return: tuple; number of files that qualify for deletion, number of qualifying files that failed to be deleted
|
|
|
|
|
"""
|
|
|
|
|
del_time = time.mktime((datetime.datetime.now() - datetime.timedelta(days=days, minutes=minutes)).timetuple())
|
|
|
|
|
errors = 0
|
|
|
|
|
qualified = 0
|
2016-10-08 22:37:27 +00:00
|
|
|
|
for c in (paths, [paths])[not isinstance(paths, list)]:
|
2016-10-02 01:04:02 +00:00
|
|
|
|
try:
|
|
|
|
|
for f in scantree(c):
|
2016-10-03 18:31:54 +00:00
|
|
|
|
if f.is_file(follow_symlinks=False) and del_time > f.stat(follow_symlinks=False).st_mtime:
|
2016-10-02 01:04:02 +00:00
|
|
|
|
try:
|
|
|
|
|
ek.ek(os.remove, f.path)
|
2016-10-03 18:31:54 +00:00
|
|
|
|
except (StandardError, Exception):
|
|
|
|
|
errors += 1
|
|
|
|
|
qualified += 1
|
|
|
|
|
except (StandardError, Exception):
|
2016-10-02 01:04:02 +00:00
|
|
|
|
pass
|
2016-10-03 18:31:54 +00:00
|
|
|
|
return qualified, errors
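# Example call (path is illustrative): purge files untouched for 30+ days,
# returning (how many files qualified, how many deletions failed).
#   qualified, errors = delete_not_changed_in('/opt/sickgear/cache/images', days=30)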
|
2016-10-02 01:04:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def set_file_timestamp(filename, min_age=3, new_time=None):
|
|
|
|
|
min_time = time.mktime((datetime.datetime.now() - datetime.timedelta(days=min_age)).timetuple())
|
|
|
|
|
try:
|
|
|
|
|
if ek.ek(os.path.isfile, filename) and ek.ek(os.path.getmtime, filename) < min_time:
|
|
|
|
|
ek.ek(os.utime, filename, new_time)
|
|
|
|
|
except (StandardError, Exception):
|
|
|
|
|
pass
|
2017-07-17 19:29:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def should_delete_episode(status):
|
2018-01-17 07:37:27 +00:00
|
|
|
|
s = Quality.splitCompositeStatus(status)[0]
|
2017-11-27 19:35:20 +00:00
|
|
|
|
if s not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED, IGNORED]:
|
2017-07-17 19:29:32 +00:00
|
|
|
|
return True
|
|
|
|
|
logger.log('not safe to delete episode from db because of status: %s' % statusStrings[s], logger.DEBUG)
|
2017-08-20 13:59:37 +00:00
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def is_link(filepath):
|
|
|
|
|
"""
|
|
|
|
|
Check if a given file/pathname is a symbolic link
|
|
|
|
|
|
|
|
|
|
:param filepath: file or path to check
|
|
|
|
|
:return: True or False
|
|
|
|
|
"""
|
|
|
|
|
if 'win32' == sys.platform:
|
|
|
|
|
if not ek.ek(os.path.exists, filepath):
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
import ctypes
|
|
|
|
|
invalid_file_attributes = 0xFFFFFFFF
|
|
|
|
|
file_attribute_reparse_point = 0x0400
|
|
|
|
|
|
|
|
|
|
attr = ctypes.windll.kernel32.GetFileAttributesW(unicode(filepath))
|
|
|
|
|
return invalid_file_attributes != attr and 0 != attr & file_attribute_reparse_point
|
|
|
|
|
|
|
|
|
|
return ek.ek(os.path.islink, filepath)
|
2017-08-20 23:11:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def datetime_to_epoch(dt):
|
|
|
|
|
""" convert a datetime to seconds after (or possibly before) 1970-1-1 """
|
|
|
|
|
""" can raise an error with dates pre 1970-1-1 """
|
|
|
|
|
if not isinstance(getattr(dt, 'tzinfo'), datetime.tzinfo):
|
|
|
|
|
from sickbeard.network_timezones import sb_timezone
|
|
|
|
|
dt = dt.replace(tzinfo=sb_timezone)
|
|
|
|
|
utc_naive = dt.replace(tzinfo=None) - dt.utcoffset()
|
|
|
|
|
return int((utc_naive - datetime.datetime(1970, 1, 1)).total_seconds())
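# Example (illustrative): a naive datetime is assumed to be in sb_timezone,
# then converted to whole UTC epoch seconds.
#   secs = datetime_to_epoch(datetime.datetime(2018, 1, 1))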
|
2018-01-09 23:47:36 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def df():
|
|
|
|
|
"""
|
|
|
|
|
Return disk free space at known parent locations
|
|
|
|
|
|
|
|
|
|
:return: string path, string value that is formatted size
|
|
|
|
|
:rtype: list of tuples
|
|
|
|
|
"""
|
|
|
|
|
result = []
|
|
|
|
|
min_output = True
|
2018-01-23 01:02:13 +00:00
|
|
|
|
if sickbeard.ROOT_DIRS and sickbeard.DISPLAY_FREESPACE:
|
2018-01-09 23:47:36 +00:00
|
|
|
|
targets = []
|
|
|
|
|
for path in sickbeard.ROOT_DIRS.split('|')[1:]:
|
|
|
|
|
location_parts = os.path.splitdrive(path)
|
|
|
|
|
target = location_parts[0]
|
|
|
|
|
if 'win32' == sys.platform:
|
|
|
|
|
if not re.match('(?i)[a-z]:(?:\\\\)?$', target):
|
|
|
|
|
# simple drive letter not found, fallback to full path
|
|
|
|
|
target = path
|
|
|
|
|
min_output = False
|
|
|
|
|
elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
|
|
|
|
|
target = path
|
|
|
|
|
min_output = False
|
|
|
|
|
if target and target not in targets:
|
|
|
|
|
targets += [target]
|
|
|
|
|
free = freespace(path)
|
|
|
|
|
if None is not free:
|
|
|
|
|
result += [(target, sizeof_fmt(free).replace(' ', ''))]
|
|
|
|
|
return result, min_output
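# Example output shape (values illustrative): on Windows with drive-letter roots,
#   df()  # -> ([('C:', '120.5GB'), ('D:', '2.1TB')], True)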
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def freespace(path=None):
|
|
|
|
|
"""
|
|
|
|
|
Return free space available at path location
|
|
|
|
|
|
|
|
|
|
:param path: Example paths (Windows) = '\\\\192.168.0.1\\sharename\\existing_path', 'd:\\existing_path'
|
|
|
|
|
Untested with mount points under linux
|
|
|
|
|
:type path: basestring
|
|
|
|
|
:return: Size in bytes
|
|
|
|
|
:rtype: long
|
|
|
|
|
"""
|
|
|
|
|
result = None
|
|
|
|
|
|
|
|
|
|
if 'win32' == sys.platform:
|
|
|
|
|
try:
|
|
|
|
|
import ctypes
|
|
|
|
|
if None is not ctypes:
|
|
|
|
|
max_val = (2 ** 64) - 1
|
|
|
|
|
storage = ctypes.c_ulonglong(max_val)
|
|
|
|
|
ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(path), None, None, ctypes.pointer(storage))
|
|
|
|
|
result = (storage.value, None)[max_val == storage.value]
|
|
|
|
|
except (StandardError, Exception):
|
|
|
|
|
pass
|
|
|
|
|
elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
|
2018-02-07 15:59:55 +00:00
|
|
|
|
try:
|
|
|
|
|
storage = os.statvfs(path) # perms errors can result
|
|
|
|
|
result = storage.f_bavail * storage.f_frsize
|
|
|
|
|
except OSError:
|
|
|
|
|
pass
|
2018-01-09 23:47:36 +00:00
|
|
|
|
|
|
|
|
|
return result
|