# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function
from __future__ import with_statement

import base64
import datetime
import getpass
import hashlib
import os
import re
import shutil
import socket
import stat
import subprocess
import tempfile
import time
import traceback
import urlparse
import uuid

import adba
import requests
import requests.exceptions
import sickbeard
import subliminal

try:
    import json
except ImportError:
    from lib import simplejson as json

try:
    import xml.etree.cElementTree as etree
except ImportError:
    import elementtree.ElementTree as etree

from sickbeard.exceptions import MultipleShowObjectsException, ex
from sickbeard import logger, db, notifiers, clients
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, cpu_presets
from sickbeard import encodingKludge as ek

from lib.cachecontrol import CacheControl, caches
from lib.scandir.scandir import scandir

from itertools import izip, cycle

def indentXML(elem, level=0):
    '''
    Does our pretty printing, makes Matt very happy
    '''
    i = "\n" + level * " "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + " "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for elem in elem:
            indentXML(elem, level + 1)
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
    else:
        # Strip out the newlines from text
        if elem.text:
            elem.text = elem.text.replace('\n', ' ')
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i


def remove_extension(name):
    """
    Remove download or media extension from name (if any)
    """

    if name and "." in name:
        base_name, sep, extension = name.rpartition('.')  # @UnusedVariable
        if base_name and extension.lower() in ['nzb', 'torrent'] + mediaExtensions:
            name = base_name

    return name


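# Illustrative usage of remove_extension (above); assumes 'mkv' is listed in
# sickbeard.common.mediaExtensions:
#   remove_extension('Show.Name.S01E01.mkv')  # -> 'Show.Name.S01E01'
#   remove_extension('Show.Name.S01E01.nzb')  # -> 'Show.Name.S01E01'
#   remove_extension('Show.Name.S01E01')      # unchanged, no known extension

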
def remove_non_release_groups(name, is_anime=False):
    """
    Remove non release groups from name
    """

    if name:
        rc = [re.compile(r'(?i)' + v) for v in [
            '([\s\.\-_\[\{\(]*(no-rar|nzbgeek|ripsalot|rp|siklopentan)[\s\.\-_\]\}\)]*)$',
            '(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*(www\.\w+.\w+)[\s\.\-_]*[\]\}\)][\s\.\-_]*)$',
            '(?<=\w)([\s\.\-_]*[\[\{\(]\s*(rar(bg|tv)|((e[tz]|v)tv))[\s\.\-_]*[\]\}\)][\s\.\-_]*)$'] +
            (['(?<=\w)([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)$',
              '^([\s\.\-_]*[\[\{\(][\s\.\-_]*[\w\s\.\-\_]+[\s\.\-_]*[\]\}\)][\s\.\-_]*)(?=\w)'], [])[is_anime]]

        rename = name = remove_extension(name)
        while rename:
            for regex in rc:
                name = regex.sub('', name)
            rename = (name, False)[name == rename]

    return name


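# Illustrative usage of remove_non_release_groups (above); with is_anime False the
# generic trailing "[...]" patterns also apply, while anime names keep their
# bracketed group tags:
#   remove_non_release_groups('Show.S01E01.720p-RP')  # -> 'Show.S01E01.720p'
#   remove_non_release_groups('Show.S01E01[eztv]')    # -> 'Show.S01E01'

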
def replaceExtension(filename, newExt):
    sepFile = filename.rpartition(".")
    if sepFile[0] == "":
        return filename
    else:
        return sepFile[0] + "." + newExt


def isSyncFile(filename):
    extension = filename.rpartition(".")[2].lower()
    return extension in ('!sync', 'lftp-pget-status')


def has_media_ext(filename):
    # ignore samples and Mac OS's 'resource fork' files
    if re.search('(^|[\W_])(sample\d*)[\W_]', filename, re.I) \
            or filename.startswith('._'):
        return False

    sep_file = filename.rpartition('.')
    return (None is re.search('extras?$', sep_file[0], re.I)) and (sep_file[2].lower() in mediaExtensions)


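# Illustrative usage of has_media_ext (above); assumes 'mkv' is in mediaExtensions:
#   has_media_ext('Show.S01E01.mkv')         # -> True
#   has_media_ext('sample.Show.S01E01.mkv')  # -> False, samples are ignored
#   has_media_ext('Show.S01E01.extras.mkv')  # -> False, 'extras' base names are ignored

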
def is_first_rar_volume(filename):

    return None is not re.search('(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)', filename)


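# Illustrative usage of is_first_rar_volume (above):
#   is_first_rar_volume('archive.rar')         # -> True
#   is_first_rar_volume('archive.part01.rar')  # -> True
#   is_first_rar_volume('archive.part02.rar')  # -> False, not the first volume

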
def sanitizeFileName(name):
    # remove bad chars from the filename
    name = re.sub(r'[\\/\*]', '-', name)
    name = re.sub(r'[:"<>|?]', '', name)

    # remove leading/trailing periods and spaces
    name = name.strip(' .')

    for char in sickbeard.REMOVE_FILENAME_CHARS or []:
        name = name.replace(char, '')

    return name


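# Illustrative usage of sanitizeFileName (above), with REMOVE_FILENAME_CHARS unset:
#   sanitizeFileName('Show/Name: part 1?')  # -> 'Show-Name part 1'

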
def _remove_file_failed(file):
    try:
        ek.ek(os.remove, file)
    except:
        pass


def findCertainShow(showList, indexerid):
    results = []
    if showList and indexerid:
        results = filter(lambda x: int(x.indexerid) == int(indexerid), showList)

    if len(results) == 1:
        return results[0]
    elif len(results) > 1:
        raise MultipleShowObjectsException()


def find_show_by_id(show_list, id_dict, no_mapped_ids=True):
    """

    :param show_list:
    :type show_list: list
    :param id_dict: {indexer: id}
    :type id_dict: dict
    :param no_mapped_ids:
    :type no_mapped_ids: bool
    :return: showObj or MultipleShowObjectsException
    """
    results = []
    if show_list and id_dict and isinstance(id_dict, dict):
        id_dict = {k: v for k, v in id_dict.items() if v > 0}
        if no_mapped_ids:
            results = list(set([s for k, v in id_dict.iteritems() for s in show_list
                                if k == s.indexer and v == s.indexerid]))
        else:
            results = list(set([s for k, v in id_dict.iteritems() for s in show_list
                                if v == s.ids.get(k, {'id': 0})['id']]))

    if len(results) == 1:
        return results[0]
    elif len(results) > 1:
        raise MultipleShowObjectsException()


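# Illustrative usage of find_show_by_id (above); the indexer key and id here are
# hypothetical values, not real constants:
#   find_show_by_id(sickbeard.showList, {1: 123456})         # match on native indexer id
#   find_show_by_id(sickbeard.showList, {1: 123456}, False)  # match against mapped ids instead

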
def makeDir(path):
    if not ek.ek(os.path.isdir, path):
        try:
            ek.ek(os.makedirs, path)
            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(path)
        except OSError:
            return False
    return True


def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
    showNames = [re.sub('[. -]', ' ', regShowName)]

    # Query Indexers for each search term and build the list of results
    for i in sickbeard.indexerApi().indexers if not indexer else [int(indexer)]:
        lINDEXER_API_PARMS = sickbeard.indexerApi(i).api_params.copy()
        if ui is not None:
            lINDEXER_API_PARMS['custom_ui'] = ui
        t = sickbeard.indexerApi(i).indexer(**lINDEXER_API_PARMS)

        for name in showNames:
            logger.log('Trying to find %s on %s' % (name, sickbeard.indexerApi(i).name), logger.DEBUG)

            try:
                result = t[indexer_id] if indexer_id else t[name]
            except:
                continue

            seriesname = series_id = None
            for search in result if isinstance(result, list) else [result]:
                try:
                    seriesname = search['seriesname']
                    series_id = search['id']
                except:
                    series_id = seriesname = None
                    continue
                if seriesname and series_id:
                    break

            if not (seriesname and series_id):
                continue

            if None is indexer_id and str(name).lower() == str(seriesname).lower():
                return seriesname, i, int(series_id)
            elif None is not indexer_id and int(indexer_id) == int(series_id):
                return seriesname, i, int(indexer_id)

        if indexer:
            break

    return None, None, None


def sizeof_fmt(num):
    for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f %s" % (num, x)
        num /= 1024.0


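# Illustrative usage of sizeof_fmt (above):
#   sizeof_fmt(512)      # -> '512.0 bytes'
#   sizeof_fmt(1234567)  # -> '1.2 MB'

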
def listMediaFiles(path):
    if not path or not ek.ek(os.path.isdir, path):
        return []

    files = []
    for curFile in ek.ek(os.listdir, path):
        fullCurFile = ek.ek(os.path.join, path, curFile)

        # if it's a folder do it recursively
        if ek.ek(os.path.isdir, fullCurFile) and not curFile.startswith('.') and not curFile == 'Extras':
            files += listMediaFiles(fullCurFile)

        elif has_media_ext(curFile):
            files.append(fullCurFile)

    return files


def copyFile(srcFile, destFile):
    if os.name.startswith('posix'):
        subprocess.call(['cp', srcFile, destFile])
    else:
        ek.ek(shutil.copyfile, srcFile, destFile)

    try:
        ek.ek(shutil.copymode, srcFile, destFile)
    except OSError:
        pass


def moveFile(srcFile, destFile):
    try:
        ek.ek(shutil.move, srcFile, destFile)
        fixSetGroupID(destFile)
    except OSError:
        copyFile(srcFile, destFile)
        ek.ek(os.unlink, srcFile)


def link(src, dst):
    if os.name == 'nt':
        import ctypes

        if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0:
            raise ctypes.WinError()
    else:
        os.link(src, dst)


def hardlinkFile(srcFile, destFile):
    try:
        ek.ek(link, srcFile, destFile)
        fixSetGroupID(destFile)
    except Exception as e:
        logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ": " + ex(e) + ". Copying instead",
                   logger.ERROR)
        copyFile(srcFile, destFile)


def symlink(src, dst):
    if os.name == 'nt':
        import ctypes

        if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src),
                                                      1 if os.path.isdir(src) else 0) in [0, 1280]:
            raise ctypes.WinError()
    else:
        os.symlink(src, dst)


def moveAndSymlinkFile(srcFile, destFile):
    try:
        ek.ek(shutil.move, srcFile, destFile)
        fixSetGroupID(destFile)
        ek.ek(symlink, destFile, srcFile)
    except:
        logger.log(u"Failed to create symlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
        copyFile(srcFile, destFile)


def make_dirs(path):
    """
    Creates any folders that are missing and assigns them the permissions of their
    parents
    """

    logger.log(u"Checking if the path " + path + " already exists", logger.DEBUG)

    if not ek.ek(os.path.isdir, path):
        # Windows, create all missing folders
        if os.name == 'nt' or os.name == 'ce':
            try:
                logger.log(u"Folder " + path + " doesn't exist, creating it", logger.DEBUG)
                ek.ek(os.makedirs, path)
            except (OSError, IOError) as e:
                logger.log(u"Failed creating " + path + " : " + ex(e), logger.ERROR)
                return False

        # not Windows, create all missing folders and set permissions
        else:
            sofar = ''
            folder_list = path.split(os.path.sep)

            # look through each subfolder and make sure they all exist
            for cur_folder in folder_list:
                sofar += cur_folder + os.path.sep

                # if it exists then just keep walking down the line
                if ek.ek(os.path.isdir, sofar):
                    continue

                try:
                    logger.log(u"Folder " + sofar + " doesn't exist, creating it", logger.DEBUG)
                    ek.ek(os.mkdir, sofar)
                    # use normpath to remove end separator, otherwise checks permissions against itself
                    chmodAsParent(ek.ek(os.path.normpath, sofar))
                    # do the library update for synoindex
                    notifiers.synoindex_notifier.addFolder(sofar)
                except (OSError, IOError) as e:
                    logger.log(u"Failed creating " + sofar + " : " + ex(e), logger.ERROR)
                    return False

    return True


def rename_ep_file(cur_path, new_path, old_path_length=0):
    """
    Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
    left that are now empty.

    cur_path: The absolute path to the file you want to move/rename
    new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
    old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
    """

    new_dest_dir, new_dest_name = os.path.split(new_path)  # @UnusedVariable

    if old_path_length == 0 or old_path_length > len(cur_path):
        # approach from the right
        cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable
    else:
        # approach from the left
        cur_file_ext = cur_path[old_path_length:]
        cur_file_name = cur_path[:old_path_length]

    if cur_file_ext[1:] in subtitleExtensions:
        # Extract subtitle language from filename
        sublang = os.path.splitext(cur_file_name)[1][1:]

        # Check if the language extracted from filename is a valid language
        try:
            language = subliminal.language.Language(sublang, strict=True)
            cur_file_ext = '.' + sublang + cur_file_ext
        except ValueError:
            pass

    # put the extension on the incoming file
    new_path += cur_file_ext

    make_dirs(os.path.dirname(new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(shutil.move, cur_path, new_path)
    except (OSError, IOError) as e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False

    # clean up any old folders that are empty
    delete_empty_folders(ek.ek(os.path.dirname, cur_path))

    return True


def delete_empty_folders(check_empty_dir, keep_dir=None):
    """
    Walks backwards up the path and deletes any empty folders found.

    check_empty_dir: The path to clean (absolute path to a folder)
    keep_dir: Clean until this path is reached
    """

    # treat check_empty_dir as empty when it only contains these items
    ignore_items = []

    logger.log(u"Trying to clean any empty folders under " + check_empty_dir)

    # as long as the folder exists and doesn't contain any files, delete it
    while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
        check_files = ek.ek(os.listdir, check_empty_dir)

        if not check_files or (len(check_files) <= len(ignore_items) and all(
                [check_file in ignore_items for check_file in check_files])):
            # directory is empty or contains only ignore_items
            try:
                logger.log(u"Deleting empty folder: " + check_empty_dir)
                # need shutil.rmtree when ignore_items is really implemented
                ek.ek(os.rmdir, check_empty_dir)
                # do the library update for synoindex
                notifiers.synoindex_notifier.deleteFolder(check_empty_dir)
            except OSError as e:
                logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING)
                break
            check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)
        else:
            break


def fileBitFilter(mode):
    for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
        if mode & bit:
            mode -= bit

    return mode


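# Illustrative usage of fileBitFilter (above): strips execute and set-uid/gid bits
# so plain files don't inherit them from their parent directory's mode:
#   fileBitFilter(0755)  # -> 0644 (rwxr-xr-x becomes rw-r--r--)

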
def chmodAsParent(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)

    if not parentPath:
        logger.log(u"No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
        return

    parentPathStat = ek.ek(os.stat, parentPath)
    parentMode = stat.S_IMODE(parentPathStat[stat.ST_MODE])

    childPathStat = ek.ek(os.stat, childPath)
    childPath_mode = stat.S_IMODE(childPathStat[stat.ST_MODE])

    if ek.ek(os.path.isfile, childPath):
        childMode = fileBitFilter(parentMode)
    else:
        childMode = parentMode

    if childPath_mode == childMode:
        return

    childPath_owner = childPathStat.st_uid
    user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

    if user_id != 0 and user_id != childPath_owner:
        logger.log(u"Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
        return

    try:
        ek.ek(os.chmod, childPath, childMode)
        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode),
                   logger.DEBUG)
    except OSError:
        logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)


def fixSetGroupID(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)
    parentStat = ek.ek(os.stat, parentPath)
    parentMode = stat.S_IMODE(parentStat[stat.ST_MODE])

    if parentMode & stat.S_ISGID:
        parentGID = parentStat[stat.ST_GID]
        childStat = ek.ek(os.stat, childPath)
        childGID = childStat[stat.ST_GID]

        if childGID == parentGID:
            return

        childPath_owner = childStat.st_uid
        user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

        if user_id != 0 and user_id != childPath_owner:
            logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID",
                       logger.DEBUG)
            return

        try:
            ek.ek(os.chown, childPath, -1, parentGID)  # @UndefinedVariable - only available on UNIX
            logger.log(u"Respecting the set-group-ID bit on the parent directory for %s" % childPath, logger.DEBUG)
        except OSError:
            logger.log(
                u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (
                    childPath, parentGID), logger.ERROR)


def get_absolute_number_from_season_and_episode(show, season, episode):
    absolute_number = None

    if season and episode:
        myDB = db.DBConnection()
        sql = 'SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?'
        sqlResults = myDB.select(sql, [show.indexerid, season, episode])

        if len(sqlResults) == 1:
            absolute_number = int(sqlResults[0]["absolute_number"])
            logger.log(
                "Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode),
                logger.DEBUG)
        else:
            logger.log(
                "No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(
                    episode),
                logger.DEBUG)

    return absolute_number


def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=None):
    episodes = []
    season = None

    if len(absolute_numbers):
        if not show and indexer_id:
            show = findCertainShow(sickbeard.showList, indexer_id)

        if show:
            for absolute_number in absolute_numbers:
                ep = show.getEpisode(None, None, absolute_number=absolute_number)
                if ep:
                    episodes.append(ep.episode)
                    # this will always take the last found season so eps that
                    # cross the season border are not handled well
                    season = ep.season

    return (season, episodes)


def sanitizeSceneName(name):
    """
    Takes a show name and returns the "scenified" version of it.

    Returns: A string containing the scene version of the show name given.
    """

    if name:
        bad_chars = u",:()'!?\u2019"

        # strip out any bad chars
        for x in bad_chars:
            name = name.replace(x, "")

        # tidy up stuff that doesn't belong in scene names
        name = name.replace("- ", ".").replace(" ", ".").replace("&", "and").replace('/', '.')
        name = re.sub("\.\.*", ".", name)

        if name.endswith('.'):
            name = name[:-1]

        return name
    else:
        return ''


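# Illustrative usage of sanitizeSceneName (above):
#   sanitizeSceneName("Marvel's Show Name (2010)!")  # -> 'Marvels.Show.Name.2010'

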
def create_https_certificates(ssl_cert, ssl_key):
    """
    Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'
    """
    try:
        from OpenSSL import crypto  # @UnresolvedImport
        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
            serial  # @UnresolvedImport
    except Exception:
        logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
        return False

    # Create the CA Certificate
    cakey = createKeyPair(TYPE_RSA, 1024)
    careq = createCertRequest(cakey, CN='Certificate Authority')
    cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    cname = 'SickGear'
    pkey = createKeyPair(TYPE_RSA, 1024)
    req = createCertRequest(pkey, CN=cname)
    cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    # Save the key and certificate to disk
    try:
        open(ssl_key, 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
        open(ssl_cert, 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    except:
        logger.log(u"Error creating SSL key and certificate", logger.ERROR)
        return False

    return True


if __name__ == '__main__':
    import doctest

    doctest.testmod()


def parse_xml(data, del_xmlns=False):
    """
    Parse data into an xml elementtree.ElementTree

    data: data string containing xml
    del_xmlns: if True, removes xmlns namespace from data before parsing

    Returns: parsed data as elementtree or None
    """

    if del_xmlns:
        data = re.sub(' xmlns="[^"]+"', '', data)

    try:
        parsedXML = etree.fromstring(data)
    except Exception as e:
        logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG)
        parsedXML = None

    return parsedXML


def backupVersionedFile(old_file, version):
    num_tries = 0

    new_file = '%s.v%s' % (old_file, version)

    while not ek.ek(os.path.isfile, new_file):
        if not ek.ek(os.path.isfile, old_file) or 0 == get_size(old_file):
            logger.log(u'No need to create backup', logger.DEBUG)
            break

        try:
            logger.log(u'Trying to back up %s to %s' % (old_file, new_file), logger.DEBUG)
            shutil.copy(old_file, new_file)
            logger.log(u'Backup done', logger.DEBUG)
            break
        except Exception as e:
            logger.log(u'Error while trying to back up %s to %s : %s' % (old_file, new_file, ex(e)), logger.WARNING)
            num_tries += 1
            time.sleep(3)
            logger.log(u'Trying again.', logger.DEBUG)

        if 3 <= num_tries:
            logger.log(u'Unable to back up %s to %s please do it manually.' % (old_file, new_file), logger.ERROR)
            return False

    return True


def restoreVersionedFile(backup_file, version):
    numTries = 0

    new_file, backup_version = os.path.splitext(backup_file)
    restore_file = new_file + '.' + 'v' + str(version)

    if not ek.ek(os.path.isfile, new_file):
        logger.log(u"Not restoring, " + new_file + " doesn't exist", logger.DEBUG)
        return False

    try:
        logger.log(
            u"Trying to backup " + new_file + " to " + new_file + "." + "r" + str(version) + " before restoring backup",
            logger.DEBUG)
        shutil.move(new_file, new_file + '.' + 'r' + str(version))
    except Exception as e:
        logger.log(
            u"Error while trying to backup DB file " + restore_file + " before proceeding with restore: " + ex(e),
            logger.WARNING)
        return False

    while not ek.ek(os.path.isfile, new_file):
        if not ek.ek(os.path.isfile, restore_file):
            logger.log(u"Not restoring, " + restore_file + " doesn't exist", logger.DEBUG)
            break

        try:
            logger.log(u"Trying to restore " + restore_file + " to " + new_file, logger.DEBUG)
            shutil.copy(restore_file, new_file)
            logger.log(u"Restore done", logger.DEBUG)
            break
        except Exception as e:
            logger.log(u"Error while trying to restore " + restore_file + ": " + ex(e), logger.WARNING)
            numTries += 1
            time.sleep(1)
            logger.log(u"Trying again.", logger.DEBUG)

        if numTries >= 10:
            logger.log(u"Unable to restore " + restore_file + " to " + new_file + " please do it manually.",
                       logger.ERROR)
            return False

    return True


# try to convert to int, if it fails the default will be returned
def tryInt(s, s_default=0):
    try:
        return int(s)
    except:
        return s_default


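# Illustrative usage of tryInt (above):
#   tryInt('123')      # -> 123
#   tryInt('abc', -1)  # -> -1

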
# try to convert to float, return default on failure
def tryFloat(s, s_default=0.0):
    try:
        return float(s)
    except:
        return float(s_default)


# generates an md5 hash of a file
def md5_for_file(filename, block_size=2 ** 16):
    try:
        with open(filename, 'rb') as f:
            md5 = hashlib.md5()
            while True:
                data = f.read(block_size)
                if not data:
                    break
                md5.update(data)
            return md5.hexdigest()
    except Exception:
        return None


def get_lan_ip():
    """
    Simple function to get LAN localhost_ip
    http://stackoverflow.com/questions/11735821/python-get-localhost-ip
    """

    if os.name != "nt":
        import fcntl
        import struct

        def get_interface_ip(ifname):
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s',
                                                                                ifname[:15]))[20:24])

    ip = socket.gethostbyname(socket.gethostname())
    if ip.startswith("127.") and os.name != "nt":
        interfaces = [
            "eth0",
            "eth1",
            "eth2",
            "wlan0",
            "wlan1",
            "wifi0",
            "ath0",
            "ath1",
            "ppp0",
        ]
        for ifname in interfaces:
            try:
                ip = get_interface_ip(ifname)
                print(ifname, ip)
                break
            except IOError:
                pass
    return ip


def check_url(url):
    """
    Check if a URL exists without downloading the whole file.
    """
    try:
        return requests.head(url).ok
    except:
        return False


def anon_url(*url):
    """
    Return a URL string consisting of the Anonymous redirect URL and an arbitrary number of values appended.
    """
    return '' if None in url else '%s%s' % (sickbeard.ANON_REDIRECT, ''.join(str(s) for s in url))


def starify(text, verify=False):
    """
    Return text input string with either its latter half or its centre area (if 12 chars or more)
    replaced with asterisks. Useful for securely presenting api keys to a ui.

    If verify is true, return true if text is a star block created text else return false.
    """
    return '' if not text\
        else ((('%s%s' % (text[:len(text) / 2], '*' * (len(text) / 2))),
               ('%s%s%s' % (text[:4], '*' * (len(text) - 8), text[-4:])))[12 <= len(text)],
              set('*') == set((text[len(text) / 2:], text[4:-4])[12 <= len(text)]))[verify]


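# Illustrative usage of starify (above):
#   starify('12345678')          # -> '1234****' (latter half hidden)
#   starify('0123456789abcdef')  # -> '0123********cdef' (centre hidden, 12+ chars)
#   starify('1234****', True)    # -> True (verifies a previously starified value)

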
"""
|
|
|
|
|
Encryption
|
|
|
|
|
==========
|
|
|
|
|
By Pedro Jose Pereira Vieito <pvieito@gmail.com> (@pvieito)
|
|
|
|
|
|
|
|
|
|
* If encryption_version==0 then return data without encryption
|
|
|
|
|
* The keys should be unique for each device
|
|
|
|
|
|
|
|
|
|
To add a new encryption_version:
|
2014-11-23 20:45:50 +00:00
|
|
|
|
1) Code your new encryption_version
|
2014-08-09 00:19:29 +00:00
|
|
|
|
2) Update the last encryption_version available in webserve.py
|
|
|
|
|
3) Remember to maintain old encryption versions and key generators for retrocompatibility
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
# Key Generators
|
|
|
|
|
unique_key1 = hex(uuid.getnode() ** 2) # Used in encryption v1
|
|
|
|
|
|
2015-03-16 15:13:20 +00:00
|
|
|
|
|
2014-08-09 00:19:29 +00:00
|
|
|
|
# Encryption Functions
def encrypt(data, encryption_version=0, decrypt=False):
    # Version 1: Simple XOR encryption (this is not very secure, but works)
    if encryption_version == 1:
        if decrypt:
            return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
        else:
            return base64.encodestring(
                ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()
    # Version 0: Plain text
    else:
        return data


def decrypt(data, encryption_version=0):
    return encrypt(data, encryption_version, decrypt=True)


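# Illustrative round trip for encrypt/decrypt (above), version 1 XOR + base64;
# note unique_key1 is machine-specific, so tokens are not portable:
#   token = encrypt('my secret', 1)
#   decrypt(token, 1)  # -> 'my secret'

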
def full_sanitizeSceneName(name):
    return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()


def get_show(name, try_scene_exceptions=False, use_cache=True):
    if not sickbeard.showList or None is name:
        return

    show_obj = None
    from_cache = False

    try:
        cache = sickbeard.name_cache.retrieveNameFromCache(name)
        if cache:
            from_cache = True
            show_obj = findCertainShow(sickbeard.showList, cache)

        if not show_obj and try_scene_exceptions:
            indexer_id = sickbeard.scene_exceptions.get_scene_exception_by_name(name)[0]
            if indexer_id:
                show_obj = findCertainShow(sickbeard.showList, indexer_id)

        # add show to cache
        if use_cache and show_obj and not from_cache:
            sickbeard.name_cache.addNameToCache(name, show_obj.indexerid)
    except Exception as e:
        logger.log(u'Error when attempting to find show: ' + name + ' in SickGear: ' + str(e), logger.DEBUG)

    return show_obj


def is_hidden_folder(folder):
    """
    Returns True if folder is hidden.
    On Linux based systems hidden folders start with . (dot)
    folder: Full path of folder to check
    """
    if ek.ek(os.path.isdir, folder):
        if ek.ek(os.path.basename, folder).startswith('.'):
            return True

    return False


def real_path(path):
    """
    Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
    """
    return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, path)))


def validateShow(show, season=None, episode=None):
    indexer_lang = show.lang

    try:
        lINDEXER_API_PARMS = sickbeard.indexerApi(show.indexer).api_params.copy()

        if indexer_lang and not indexer_lang == 'en':
            lINDEXER_API_PARMS['language'] = indexer_lang

        t = sickbeard.indexerApi(show.indexer).indexer(**lINDEXER_API_PARMS)
        if season is None and episode is None:
            return t

        return t[show.indexerid][season][episode]
    except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound, TypeError):
        pass


def set_up_anidb_connection():
    if not sickbeard.USE_ANIDB:
        logger.log(u'Usage of anidb disabled. Skipping', logger.DEBUG)
        return False

    if not sickbeard.ANIDB_USERNAME and not sickbeard.ANIDB_PASSWORD:
        logger.log(u'anidb username and/or password are not set. Aborting anidb lookup.', logger.DEBUG)
        return False

    if not sickbeard.ADBA_CONNECTION:
        anidb_logger = lambda x: logger.log('ANIDB: ' + str(x), logger.DEBUG)
        sickbeard.ADBA_CONNECTION = adba.Connection(keepAlive=True, log=anidb_logger)

    auth = False
    try:
        auth = sickbeard.ADBA_CONNECTION.authed()
    except Exception as e:
        logger.log(u'exception msg: ' + str(e))

    if not auth:
        try:
            sickbeard.ADBA_CONNECTION.auth(sickbeard.ANIDB_USERNAME, sickbeard.ANIDB_PASSWORD)
        except Exception as e:
            logger.log(u'exception msg: ' + str(e))
            return False
        else:
            return True

    return sickbeard.ADBA_CONNECTION.authed()


def touchFile(fname, atime=None):
    if None != atime:
        try:
            with open(fname, 'a'):
                os.utime(fname, (atime, atime))
            return True
        except:
            logger.log(u"File air date stamping not available on your OS", logger.DEBUG)

    return False


def _getTempDir():
    """Returns the [system temp dir]/tvdb_api-u501 (or
    tvdb_api-myuser)
    """
    if hasattr(os, 'getuid'):
        uid = "u%d" % (os.getuid())
    else:
        # For Windows
        try:
            uid = getpass.getuser()
        except ImportError:
            return os.path.join(tempfile.gettempdir(), "SickGear")

    return os.path.join(tempfile.gettempdir(), "SickGear-%s" % (uid))


def proxy_setting(proxy_setting, request_url, force=False):
    """
    Returns a tuple of a) proxy_setting address value or a PAC is fetched and parsed if proxy_setting
    starts with "PAC:" (case-insensitive) and b) True/False if "PAC" is found in the proxy_setting.

    The PAC data parser is crude, javascript is not eval'd. The first "PROXY URL" found is extracted with a list
    of "url_a_part.url_remaining", "url_b_part.url_remaining", "url_n_part.url_remaining" and so on.
    Also, PAC data items are escaped for matching therefore regular expression items will not match a request_url.

    If force is True or request_url contains a PAC parsed data item then the PAC proxy address is returned else False.
    None is returned in the event of an error fetching PAC data.
    """

    # check for "PAC" usage
    match = re.search(r'^\s*PAC:\s*(.*)', proxy_setting, re.I)
    if not match:
        return proxy_setting, False
    pac_url = match.group(1)

    # prevent a recursive test with existing proxy setting when fetching PAC url
    proxy_setting_backup = sickbeard.PROXY_SETTING
    sickbeard.PROXY_SETTING = ''

    resp = ''
    try:
        resp = getURL(pac_url)
    except:
        pass
    sickbeard.PROXY_SETTING = proxy_setting_backup

    if not resp:
        return None, False

    proxy_address = None
    request_url_match = False
    parsed_url = urlparse.urlparse(request_url)
    netloc = (parsed_url.path, parsed_url.netloc)['' != parsed_url.netloc]
    for pac_data in re.finditer(r"""(?:[^'"]*['"])([^\.]+\.[^'"]*)(?:['"])""", resp, re.I):
        data = re.search(r"""PROXY\s+([^'"]+)""", pac_data.group(1), re.I)
        if data:
            if force:
                return data.group(1), True
            proxy_address = (proxy_address, data.group(1))[None is proxy_address]
        elif re.search(re.escape(pac_data.group(1)), netloc, re.I):
            request_url_match = True
            if None is not proxy_address:
                break

    if None is proxy_address:
        return None, True

    return (False, proxy_address)[request_url_match], True


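# Illustrative usage of proxy_setting (above); hypothetical values, not real endpoints:
#   proxy_setting('127.0.0.1:8118', 'http://example.com/')  # -> ('127.0.0.1:8118', False)
#   proxy_setting('PAC:http://example.com/proxy.pac', url)  # fetches and crudely parses the PAC file

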
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False,
           raise_status_code=False, **kwargs):
    """
    Returns a byte-string retrieved from the url provider.
    """

    # request session
    if None is session:
        session = requests.session()

    if not kwargs.get('nocache'):
        cache_dir = sickbeard.CACHE_DIR or _getTempDir()
        session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
    else:
        del(kwargs['nocache'])

    # request session headers
    req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}
    if headers:
        req_headers.update(headers)
    session.headers.update(req_headers)

    mute_connect_err = kwargs.get('mute_connect_err')
    if mute_connect_err:
        del(kwargs['mute_connect_err'])

    # request session ssl verify
    session.verify = False

    # request session parameters
    session.params = params

    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)

        # request session proxies
        if sickbeard.PROXY_SETTING:
            (proxy_address, pac_found) = proxy_setting(sickbeard.PROXY_SETTING, url)
            msg = '%sproxy for url: %s' % (('', 'PAC parsed ')[pac_found], url)
            if None is proxy_address:
                logger.log('Proxy error, aborted the request using %s' % msg, logger.DEBUG)
                return
            elif proxy_address:
                logger.log('Using %s' % msg, logger.DEBUG)
                session.proxies = {
                    'http': proxy_address,
                    'https': proxy_address
                }

        # decide if we get or post data to server
        if 'post_json' in kwargs:
            kwargs.setdefault('json', kwargs.get('post_json'))
            del(kwargs['post_json'])
        if post_data:
            kwargs.setdefault('data', post_data)
        if 'data' in kwargs or 'json' in kwargs:
            resp = session.post(url, timeout=timeout, **kwargs)
        else:
            resp = session.get(url, timeout=timeout, **kwargs)
            if resp.ok and not resp.content and 'url=' in resp.headers.get('Refresh', '').lower():
                url = resp.headers.get('Refresh').lower().split('url=')[1].strip('/')
                if not url.startswith('http'):
                    parsed[2] = '/%s' % url
                    url = urlparse.urlunparse(parsed)
                resp = session.get(url, timeout=timeout, **kwargs)

        if raise_status_code:
            resp.raise_for_status()

        if not resp.ok:
            http_err_text = 'CloudFlare Ray ID' in resp.content and 'CloudFlare reports, "Website is offline"; ' or ''
            if resp.status_code in clients.http_error_code:
                http_err_text += clients.http_error_code[resp.status_code]
            elif resp.status_code in range(520, 527):
                http_err_text += 'Origin server connection failure'
            else:
                http_err_text = 'Custom HTTP error code'
            logger.log(u'Response not ok. %s: %s from requested url %s'
                       % (resp.status_code, http_err_text, url), logger.DEBUG)
            return

    except requests.exceptions.HTTPError as e:
        if raise_status_code:
            resp.raise_for_status()
        logger.log(u'HTTP error %s while loading URL%s' % (
            e.errno, _maybe_request_url(e)), logger.WARNING)
        return
    except requests.exceptions.ConnectionError as e:
        if not mute_connect_err:
            logger.log(u'Connection error msg:%s while loading URL%s' % (
                e.message, _maybe_request_url(e)), logger.WARNING)
        return
    except requests.exceptions.ReadTimeout as e:
        logger.log(u'Read timed out msg:%s while loading URL%s' % (
            e.message, _maybe_request_url(e)), logger.WARNING)
        return
    except (requests.exceptions.Timeout, socket.timeout) as e:
        logger.log(u'Connection timed out msg:%s while loading URL %s' % (
            e.message, _maybe_request_url(e, url)), logger.WARNING)
        return
    except Exception as e:
        if e.message:
            logger.log(u'Exception caught while loading URL %s\r\nDetail... %s\r\n%s'
                       % (url, e.message, traceback.format_exc()), logger.WARNING)
        else:
            logger.log(u'Unknown exception while loading URL %s\r\nDetail... %s'
                       % (url, traceback.format_exc()), logger.WARNING)
        return

    if json:
        try:
            return resp.json()
        except (TypeError, Exception) as e:
            logger.log(u'JSON data issue from URL %s\r\nDetail... %s' % (url, e.message), logger.WARNING)
            return None

    return resp.content


def _maybe_request_url(e, def_url=''):
    return hasattr(e, 'request') and hasattr(e.request, 'url') and ' ' + e.request.url or def_url


def download_file(url, filename, session=None):
    # create session
    if None is session:
        session = requests.session()
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))

    # request session headers
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})

    # request session ssl verify
    session.verify = False

    # request session streaming
    session.stream = True

    # request session proxies
    if sickbeard.PROXY_SETTING:
        (proxy_address, pac_found) = proxy_setting(sickbeard.PROXY_SETTING, url)
        msg = '%sproxy for url: %s' % (('', 'PAC parsed ')[pac_found], url)
        if None is proxy_address:
            logger.log('Proxy error, aborted the request using %s' % msg, logger.DEBUG)
            return
        elif proxy_address:
            logger.log('Using %s' % msg, logger.DEBUG)
            session.proxies = {
                'http': proxy_address,
                'https': proxy_address
            }

    try:
        resp = session.get(url)
        if not resp.ok:
            logger.log(u"Requested url " + url + " returned status code " + str(
                resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
            return False

        with open(filename, 'wb') as fp:
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()
                    os.fsync(fp.fileno())

        chmodAsParent(filename)
    except requests.exceptions.HTTPError as e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False
    except requests.exceptions.ConnectionError as e:
        _remove_file_failed(filename)
        logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return False
    except requests.exceptions.Timeout as e:
        _remove_file_failed(filename)
        logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return False
    except EnvironmentError as e:
        _remove_file_failed(filename)
        logger.log(u"Unable to save the file: " + ex(e), logger.ERROR)
        return False
    except Exception:
        _remove_file_failed(filename)
        logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
        return False

    return True


def clearCache(force=False):
    update_datetime = datetime.datetime.now()

    # clean out cache directory, remove everything > 12 hours old
    if sickbeard.CACHE_DIR:
        logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR)

        # Does our cache_dir exist
        if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
            logger.log(u"Can't clean " + sickbeard.CACHE_DIR + " if it doesn't exist", logger.WARNING)
        else:
            max_age = datetime.timedelta(hours=12)

            # Get all our cache files
            exclude = ['rss', 'images']
            for cache_root, cache_dirs, cache_files in os.walk(sickbeard.CACHE_DIR, topdown=True):
                cache_dirs[:] = [d for d in cache_dirs if d not in exclude]

                for file in cache_files:
                    cache_file = ek.ek(os.path.join, cache_root, file)

                    if ek.ek(os.path.isfile, cache_file):
                        cache_file_modified = datetime.datetime.fromtimestamp(
                            ek.ek(os.path.getmtime, cache_file))

                        if force or (update_datetime - cache_file_modified > max_age):
                            try:
                                ek.ek(os.remove, cache_file)
                            except OSError as e:
                                logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e),
                                           logger.WARNING)
                                break


def human(size):
    """
    Format a size in bytes into a 'human' file size, e.g. bytes, KB, MB, GB, TB, PB.
    Note that bytes/KB will be reported in whole numbers but MB and above will have
    greater precision, e.g. 1 byte, 43 bytes, 443 KB, 4.3 MB, 4.43 GB, etc.
    """
    if size == 1:
        # because I really hate unnecessary plurals
        return "1 byte"

    suffixes_table = [('bytes', 0), ('KB', 0), ('MB', 1), ('GB', 2), ('TB', 2), ('PB', 2)]

    num = float(size)
    for suffix, precision in suffixes_table:
        if num < 1024.0:
            break
        num /= 1024.0

    if precision == 0:
        formatted_size = "%d" % num
    else:
        formatted_size = str(round(num, ndigits=precision))

    return "%s %s" % (formatted_size, suffix)


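# A few hand-checked examples of the rules above (bytes/KB as whole numbers,
# MB and above with increasing precision):
#   human(1)       -> '1 byte'
#   human(43)      -> '43 bytes'
#   human(453632)  -> '443 KB'    (443 * 1024 bytes)
#   human(4521984) -> '4.3 MB'    (4.3125 MB rounded to 1 digit)
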
def get_size(start_path='.'):
    if ek.ek(os.path.isfile, start_path):
        return ek.ek(os.path.getsize, start_path)

    total_size = 0
    for dirpath, dirnames, filenames in ek.ek(os.walk, start_path):
        for f in filenames:
            fp = ek.ek(os.path.join, dirpath, f)
            total_size += ek.ek(os.path.getsize, fp)

    return total_size


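# get_size() pairs naturally with human() above for display purposes, e.g.
#   human(get_size(some_path))  # some_path is any hypothetical file or dir
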
def remove_article(text=''):
    return re.sub(r'(?i)^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text)


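# Illustrative only:
#   remove_article('The Walking Dead') -> 'Walking Dead'
#   remove_article('An Idiot Abroad')  -> 'Idiot Abroad'
#   remove_article('A to Z')           -> 'A to Z'  (kept: the (?!\s+to)
#   lookahead stops 'A to ...' titles losing their leading 'A')
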
def maybe_plural(number=1):
    return ('s', '')[1 == number]


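# Illustrative only: '%d file%s' % (count, maybe_plural(count)) gives
# '1 file' but '2 files'.
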
def build_dict(seq, key):
    return dict((d[key], dict(d, index=index)) for (index, d) in enumerate(seq))


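# Illustrative only: build_dict re-keys a list of dicts by one field and
# records each dict's original position, e.g.
#   build_dict([dict(id=7, name='x')], 'id') -> {7: dict(id=7, name='x', index=0)}
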
def client_host(server_host):
    '''Extracted from cherrypy libs
    Return the host on which a client can connect to the given listener.'''
    if server_host == '0.0.0.0':
        # 0.0.0.0 is INADDR_ANY, which should answer on localhost.
        return '127.0.0.1'
    if server_host in ('::', '::0', '::0.0.0.0'):
        # :: is IN6ADDR_ANY, which should answer on localhost.
        # ::0 and ::0.0.0.0 are non-canonical but common ways to write
        # IN6ADDR_ANY.
        return '::1'
    return server_host


def wait_for_free_port(host, port):
    '''Extracted from cherrypy libs
    Wait for the specified port to become free (drop requests).'''
    if not host:
        raise ValueError("Host values of '' or None are not allowed.")

    for trial in range(50):
        try:
            # we are expecting a free port, so reduce the timeout
            check_port(host, port, timeout=0.1)
        except IOError:
            # Give the old server thread time to free the port.
            time.sleep(0.1)
        else:
            return

    raise IOError("Port %r is not free on %r" % (port, host))


def check_port(host, port, timeout=1.0):
    '''Extracted from cherrypy libs
    Raise an error if the given port is not free on the given host.'''
    if not host:
        raise ValueError("Host values of '' or None are not allowed.")
    host = client_host(host)
    port = int(port)

    # AF_INET or AF_INET6 socket
    # Get the correct address family for our host (allows IPv6 addresses)
    try:
        info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
                                  socket.SOCK_STREAM)
    except socket.gaierror:
        if ':' in host:
            info = [(socket.AF_INET6, socket.SOCK_STREAM, 0, "", (host, port, 0, 0))]
        else:
            info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", (host, port))]

    for res in info:
        af, socktype, proto, canonname, sa = res
        s = None
        try:
            s = socket.socket(af, socktype, proto)
            # See http://groups.google.com/group/cherrypy-users/
            # browse_frm/thread/bbfe5eb39c904fe0
            s.settimeout(timeout)
            s.connect((host, port))
            s.close()
            raise IOError("Port %s is in use on %s; perhaps the previous "
                          "httpserver did not shut down properly." %
                          (repr(port), repr(host)))
        except socket.error:
            if s:
                s.close()


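# A sketch of how the two helpers above combine when restarting a listener
# (the host/port values are hypothetical):
#   check_port('localhost', 8081)          # raises IOError while the port is in use
#   wait_for_free_port('localhost', 8081)  # polls up to 50 times (roughly 5-10
#                                          # seconds) before giving up with IOError
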
def clear_unused_providers():
    providers = [x.cache.providerID for x in sickbeard.providers.sortedProviderList() if x.is_active()]

    if providers:
        myDB = db.DBConnection('cache.db')
        myDB.action('DELETE FROM provider_cache WHERE provider NOT IN (%s)' % ','.join(['?'] * len(providers)),
                    providers)


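# The '?' placeholders are expanded to one per active provider, so with three
# providers the statement becomes:
#   DELETE FROM provider_cache WHERE provider NOT IN (?,?,?)
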
def make_search_segment_html_string(segment, max_eps=5):
    seg_str = ''
    if segment and not isinstance(segment, list):
        segment = [segment]
    if segment and len(segment) > max_eps:
        seasons = list(set([x.season for x in segment]))
        seg_str = u'Season' + maybe_plural(len(seasons)) + ': '
        first_run = True
        for x in seasons:
            eps = [str(s.episode) for s in segment if s.season == x]
            ep_c = len(eps)
            seg_str += ('' if first_run else ', ') + str(x) \
                + ' <span title="Episode' + maybe_plural(ep_c) + ': ' + ', '.join(eps) + '">' \
                + '(' + str(ep_c) + ' Ep' + maybe_plural(ep_c) + ')</span>'
            first_run = False
    elif segment:
        episodes = ['S' + str(x.season).zfill(2) + 'E' + str(x.episode).zfill(2) for x in segment]
        seg_str = u'Episode' + maybe_plural(len(episodes)) + ': ' + ', '.join(episodes)
    return seg_str


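# Illustrative only: with three wanted episodes the result is plain text such
# as u'Episodes: S01E01, S01E02, S01E05'; once more than max_eps episodes are
# passed, output is condensed per season, e.g. for six episodes of one season:
#   u'Season: 1 <span title="Episodes: 1, 2, 3, 4, 5, 6">(6 Eps)</span>'
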
def has_anime():
    return bool(sickbeard.showList) and any(show.is_anime for show in sickbeard.showList)


def cpu_sleep():
    if cpu_presets[sickbeard.CPU_PRESET]:
        time.sleep(cpu_presets[sickbeard.CPU_PRESET])


def scantree(path):
    """Recursively yield DirEntry objects for given directory."""
    for entry in ek.ek(scandir, path):
        if entry.is_dir(follow_symlinks=False):
            for subentry in scantree(entry.path):
                yield subentry
        else:
            yield entry


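# Illustrative only: summing file sizes without building intermediate lists
# (some_dir is a hypothetical path):
#   total = sum(e.stat(follow_symlinks=False).st_size
#               for e in scantree(some_dir) if e.is_file(follow_symlinks=False))
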
def cleanup_cache():
    """
    Delete old cached files
    """
    delete_not_changed_in([ek.ek(os.path.join, sickbeard.CACHE_DIR, *x) for x in [
        ('images', 'trakt'), ('images', 'imdb'), ('images', 'anidb')]])


def delete_not_changed_in(paths, days=30, minutes=0):
    """
    Delete files under paths not changed in n days and/or n minutes.
    If a file was modified more recently than the given days and/or minutes, it is kept.

    :param paths: List of paths to scan for files to delete
    :param days: Purge files not modified in this number of days (default: 30 days)
    :param minutes: Purge files not modified in this number of minutes (default: 0 minutes)
    :return: tuple; number of files that qualify for deletion, number of qualifying files that failed to be deleted
    """
    del_time = time.mktime((datetime.datetime.now() - datetime.timedelta(days=days, minutes=minutes)).timetuple())
    errors = 0
    qualified = 0
    for c in paths:
        try:
            for f in scantree(c):
                if f.is_file(follow_symlinks=False) and del_time > f.stat(follow_symlinks=False).st_mtime:
                    try:
                        ek.ek(os.remove, f.path)
                    except (StandardError, Exception):
                        errors += 1
                    qualified += 1
        except (StandardError, Exception):
            pass
    return qualified, errors


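# Illustrative only: delete_not_changed_in(['/some/cache/path'], days=0, minutes=30)
# would purge files untouched for 30 minutes and return (qualified, errors);
# '/some/cache/path' is a hypothetical location.
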
def set_file_timestamp(filename, min_age=3, new_time=None):
    min_time = time.mktime((datetime.datetime.now() - datetime.timedelta(days=min_age)).timetuple())
    try:
        if ek.ek(os.path.isfile, filename) and ek.ek(os.path.getmtime, filename) < min_time:
            ek.ek(os.utime, filename, new_time)
    except (StandardError, Exception):
        pass
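
# Illustrative only: set_file_timestamp(some_flag_file) re-touches the file
# (os.utime with new_time=None sets atime/mtime to now), but only when its
# current mtime is more than min_age=3 days old; some_flag_file is a
# hypothetical path.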