# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os
import re
import shutil
import socket
import stat
import time
import traceback
import urllib
import hashlib
import httplib
import urlparse
import uuid
import base64
import string

from lib import requests
from lib.requests import exceptions
from itertools import izip, cycle

try:
    import json
except ImportError:
    from lib import simplejson as json

try:
    import xml.etree.cElementTree as etree
except ImportError:
    import elementtree.ElementTree as etree

from xml.dom.minidom import Node

import sickbeard

from sickbeard.exceptions import MultipleShowObjectsException, ex
from sickbeard import logger, classes
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP

from sickbeard import db
from sickbeard import encodingKludge as ek
from sickbeard import notifiers

from lib import subliminal
#from sickbeard.subtitles import EXTENSIONS

urllib._urlopener = classes.SickBeardURLopener()


def indentXML(elem, level=0):
    '''
    Does our pretty printing, makes Matt very happy
    '''
    i = "\n" + level * " "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + " "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for elem in elem:
            indentXML(elem, level + 1)
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
    else:
        # Strip out the newlines from text
        if elem.text:
            elem.text = elem.text.replace('\n', ' ')
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i

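# Example (illustrative, not executed here): indentXML rewrites the text/tail
# attributes in place so that serializing puts each nested element on its own
# indented line:
#
#     root = etree.fromstring('<root><child/></root>')
#     indentXML(root)
#     etree.tostring(root)  # '<root>\n <child />\n</root>\n'
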
def replaceExtension(filename, newExt):
    '''
    >>> replaceExtension('foo.avi', 'mkv')
    'foo.mkv'
    >>> replaceExtension('.vimrc', 'arglebargle')
    '.vimrc'
    >>> replaceExtension('a.b.c', 'd')
    'a.b.d'
    >>> replaceExtension('', 'a')
    ''
    >>> replaceExtension('foo.bar', '')
    'foo.'
    '''
    sepFile = filename.rpartition(".")
    if sepFile[0] == "":
        return filename
    else:
        return sepFile[0] + "." + newExt

def isMediaFile(filename):
    # ignore samples
    if re.search('(^|[\W_])(sample\d*)[\W_]', filename, re.I):
        return False

    # ignore Mac OS X's "resource fork" files
    if filename.startswith('._'):
        return False

    sepFile = filename.rpartition(".")

    if re.search('extras?$', sepFile[0], re.I):
        return False

    if sepFile[2].lower() in mediaExtensions:
        return True
    else:
        return False

def isRarFile(filename):
    archive_regex = '(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)'

    if re.search(archive_regex, filename):
        return True

    return False

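# Example (illustrative): the anchored regex matches plain .rar files and the
# first volume of a multi-part set, but not later volumes:
#
#     isRarFile('show.s01e01.rar')         # True
#     isRarFile('show.s01e01.part01.rar')  # True
#     isRarFile('show.s01e01.part02.rar')  # False
#     isRarFile('show.s01e01.mkv')         # False
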
def isBeingWritten(filepath):
    # Return True if the file was modified within the last 60 seconds; it might still be being written to.
    ctime = max(ek.ek(os.path.getctime, filepath), ek.ek(os.path.getmtime, filepath))
    if ctime > time.time() - 60:
        return True

    return False

def sanitizeFileName(name):
    '''
    >>> sanitizeFileName('a/b/c')
    'a-b-c'
    >>> sanitizeFileName('abc')
    'abc'
    >>> sanitizeFileName('a"b')
    'ab'
    >>> sanitizeFileName('.a.b..')
    'a.b'
    '''

    # remove bad chars from the filename
    name = re.sub(r'[\\/\*]', '-', name)
    name = re.sub(r'[:"<>|?]', '', name)

    # remove leading/trailing periods and spaces
    name = name.strip(' .')

    return name

def getURL(url, post_data=None, headers=None, params=None, json=False):
    """
    Returns a byte-string retrieved from the url provider, or the decoded
    JSON object when json=True.
    """

    req_headers = ['User-Agent', USER_AGENT, 'Accept-Encoding', 'gzip,deflate']
    if headers:
        for cur_header in headers:
            req_headers.append(cur_header)

    try:
        # Remove double-slashes from url
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)

        it = iter(req_headers)

        if sickbeard.PROXY_SETTING:
            proxies = {
                "http": sickbeard.PROXY_SETTING,
                "https": sickbeard.PROXY_SETTING,
            }

            resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)), proxies=proxies,
                                verify=False)
        else:
            resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)), verify=False)
    except requests.HTTPError, e:
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return None

    except requests.ConnectionError, e:
        logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return None

    except requests.Timeout, e:
        logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return None

    if json:
        return resp.json() if resp.ok else None

    return resp.content if resp.ok else None

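# Example usage (a sketch; the URL and header values are placeholders). Note
# that extra headers extend the flat name/value list that getURL zips into a
# dict, so they are passed as alternating strings rather than pairs:
#
#     page = getURL('http://example.com/rss', params={'limit': 50},
#                   headers=['Accept', 'application/rss+xml'])
#     if page is None:
#         ...  # the request failed; getURL already logged the reason
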
def _remove_file_failed(file):
    try:
        ek.ek(os.remove, file)
    except:
        pass

def download_file(url, filename):
    try:
        r = requests.get(url, stream=True, verify=False)
        with open(filename, 'wb') as fp:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:
                    fp.write(chunk)
                    fp.flush()

    except requests.HTTPError, e:
        _remove_file_failed(filename)
        logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
        return False

    except requests.ConnectionError, e:
        logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return False

    except requests.Timeout, e:
        logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
        return False

    except Exception:
        _remove_file_failed(filename)
        logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
        return False

    return True

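# Example usage (a sketch; the URL and path are placeholders):
#
#     if download_file('http://example.com/banner.jpg', '/tmp/banner.jpg'):
#         ...  # the file was streamed to disk in 1 KB chunks
#
# On HTTP errors or unknown exceptions the partially written file is removed;
# False is returned on any failure.
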
def findCertainShow(showList, indexerid):
    results = filter(lambda x: x.indexerid == indexerid, showList)
    if len(results) == 0:
        return None
    elif len(results) > 1:
        raise MultipleShowObjectsException()
    else:
        return results[0]

def makeDir(path):
    if not ek.ek(os.path.isdir, path):
        try:
            ek.ek(os.makedirs, path)
            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(path)
        except OSError:
            return False
    return True

def searchDBForShow(regShowName, indexer_id=None):
    showNames = [re.sub('[. -]', ' ', regShowName), regShowName]

    myDB = db.DBConnection()

    yearRegex = "([^()]+?)\s*(\()?(\d{4})(?(2)\))$"

    for showName in showNames:
        show = get_show_by_name(showName, sickbeard.showList)
        if show:
            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
                                     [show.name, show.name])
        else:
            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
                                     [showName, showName])

        if len(sqlResults) == 1:
            return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
        else:
            # if we didn't get exactly one result then try again with the year stripped off if possible
            match = re.match(yearRegex, showName)
            if match and match.group(1):
                logger.log(u"Unable to match original name but trying to manually strip and specify show year",
                           logger.DEBUG)
                sqlResults = myDB.select(
                    "SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?",
                    [match.group(1) + '%', match.group(1) + '%', match.group(3)])

            if len(sqlResults) == 0:
                logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
                continue
            elif len(sqlResults) > 1:
                logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
                continue
            else:
                return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])

    return None

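# Example (illustrative, with yearRegex as defined in searchDBForShow): the
# pattern splits a trailing year off a show name, with or without parentheses:
#
#     re.match(yearRegex, 'Show Name (2010)').groups()  # ('Show Name', '(', '2010')
#     re.match(yearRegex, 'Show Name 2010').groups()    # ('Show Name', None, '2010')
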
def searchIndexerForShowID(regShowName, indexer, indexer_id=None):
    showNames = [re.sub('[. -]', ' ', regShowName), regShowName]

    # Query Indexers for each search term and build the list of results
    lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
    lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
    t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)

    for name in showNames:
        logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(indexer).name, logger.DEBUG)
        try:
            if indexer_id:
                search = t[indexer_id]
            else:
                search = t[name]

            if isinstance(search, dict):
                search = [search]

            # check each search result for an exact match on name or indexer id
            for i in range(len(search)):
                part = search[i]
                seriesname = part['seriesname'].encode('UTF-8').lower()
                name = name.encode('UTF-8').lower()

                if (seriesname == name) or (indexer_id is not None and part['id'] == indexer_id):
                    return [sickbeard.indexerApi(indexer).config['id'], part['id']]

        except KeyError:
            break
        except Exception:
            continue

def sizeof_fmt(num):
    '''
    >>> sizeof_fmt(2)
    '2.0 bytes'
    >>> sizeof_fmt(1024)
    '1.0 KB'
    >>> sizeof_fmt(2048)
    '2.0 KB'
    >>> sizeof_fmt(2**20)
    '1.0 MB'
    >>> sizeof_fmt(1234567)
    '1.2 MB'
    '''
    for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f %s" % (num, x)
        num /= 1024.0

def listMediaFiles(path):
    # note: this used to test the builtin 'dir', which is always truthy;
    # the guard is meant to check the incoming path
    if not path or not ek.ek(os.path.isdir, path):
        return []

    files = []
    for curFile in ek.ek(os.listdir, path):
        fullCurFile = ek.ek(os.path.join, path, curFile)

        # if it's a folder do it recursively
        if ek.ek(os.path.isdir, fullCurFile) and not curFile.startswith('.') and not curFile == 'Extras':
            files += listMediaFiles(fullCurFile)

        elif isMediaFile(curFile):
            files.append(fullCurFile)

    return files

def copyFile(srcFile, destFile):
    ek.ek(shutil.copyfile, srcFile, destFile)
    try:
        ek.ek(shutil.copymode, srcFile, destFile)
    except OSError:
        pass

def moveFile(srcFile, destFile):
    try:
        ek.ek(os.rename, srcFile, destFile)
        fixSetGroupID(destFile)
    except OSError:
        copyFile(srcFile, destFile)
        ek.ek(os.unlink, srcFile)

def link(src, dst):
    if os.name == 'nt':
        import ctypes

        if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0:
            raise ctypes.WinError()
    else:
        os.link(src, dst)

def hardlinkFile(srcFile, destFile):
    try:
        ek.ek(link, srcFile, destFile)
        fixSetGroupID(destFile)
    except:
        logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
        copyFile(srcFile, destFile)

def symlink(src, dst):
    if os.name == 'nt':
        import ctypes

        if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]:
            raise ctypes.WinError()
    else:
        os.symlink(src, dst)

def moveAndSymlinkFile(srcFile, destFile):
    try:
        ek.ek(os.rename, srcFile, destFile)
        fixSetGroupID(destFile)
        ek.ek(symlink, destFile, srcFile)
    except:
        logger.log(u"Failed to create symlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
        copyFile(srcFile, destFile)

def make_dirs(path):
    """
    Creates any folders that are missing and assigns them the permissions of their
    parents
    """

    logger.log(u"Checking if the path " + path + " already exists", logger.DEBUG)

    if not ek.ek(os.path.isdir, path):
        # Windows, create all missing folders
        if os.name == 'nt' or os.name == 'ce':
            try:
                logger.log(u"Folder " + path + " didn't exist, creating it", logger.DEBUG)
                ek.ek(os.makedirs, path)
            except (OSError, IOError), e:
                logger.log(u"Failed creating " + path + " : " + ex(e), logger.ERROR)
                return False

        # not Windows, create all missing folders and set permissions
        else:
            sofar = ''
            folder_list = path.split(os.path.sep)

            # look through each subfolder and make sure they all exist
            for cur_folder in folder_list:
                sofar += cur_folder + os.path.sep

                # if it exists then just keep walking down the line
                if ek.ek(os.path.isdir, sofar):
                    continue

                try:
                    logger.log(u"Folder " + sofar + " didn't exist, creating it", logger.DEBUG)
                    ek.ek(os.mkdir, sofar)
                    # use normpath to remove end separator, otherwise checks permissions against itself
                    chmodAsParent(ek.ek(os.path.normpath, sofar))
                    # do the library update for synoindex
                    notifiers.synoindex_notifier.addFolder(sofar)
                except (OSError, IOError), e:
                    logger.log(u"Failed creating " + sofar + " : " + ex(e), logger.ERROR)
                    return False

    return True

def rename_ep_file(cur_path, new_path, old_path_length=0):
    """
    Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
    left that are now empty.

    cur_path: The absolute path to the file you want to move/rename
    new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
    old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
    """

    new_dest_dir, new_dest_name = os.path.split(new_path)  # @UnusedVariable

    if old_path_length == 0 or old_path_length > len(cur_path):
        # approach from the right
        cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable
    else:
        # approach from the left
        cur_file_ext = cur_path[old_path_length:]
        cur_file_name = cur_path[:old_path_length]

    if cur_file_ext[1:] in subtitleExtensions:
        # Extract subtitle language from filename
        sublang = os.path.splitext(cur_file_name)[1][1:]

        # Check if the language extracted from filename is a valid language
        try:
            language = subliminal.language.Language(sublang, strict=True)
            cur_file_ext = '.' + sublang + cur_file_ext
        except ValueError:
            pass

    # put the extension on the incoming file
    new_path += cur_file_ext

    make_dirs(os.path.dirname(new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(os.rename, cur_path, new_path)
    except (OSError, IOError), e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False

    # clean up any old folders that are empty
    delete_empty_folders(ek.ek(os.path.dirname, cur_path))

    return True

def delete_empty_folders(check_empty_dir, keep_dir=None):
    """
    Walks backwards up the path and deletes any empty folders found.

    check_empty_dir: The path to clean (absolute path to a folder)
    keep_dir: Clean until this path is reached
    """

    # treat check_empty_dir as empty when it only contains these items
    ignore_items = []

    logger.log(u"Trying to clean any empty folders under " + check_empty_dir)

    # as long as the folder exists and doesn't contain any files, delete it
    while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:

        check_files = ek.ek(os.listdir, check_empty_dir)

        if not check_files or (len(check_files) <= len(ignore_items) and all(
                [check_file in ignore_items for check_file in check_files])):
            # directory is empty or contains only ignore_items
            try:
                logger.log(u"Deleting empty folder: " + check_empty_dir)
                # need shutil.rmtree when ignore_items is really implemented
                ek.ek(os.rmdir, check_empty_dir)
                # do the library update for synoindex
                notifiers.synoindex_notifier.deleteFolder(check_empty_dir)
            except OSError, e:
                logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING)
                break
            check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)
        else:
            break

def chmodAsParent(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)

    if not parentPath:
        logger.log(u"No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
        return

    parentPathStat = ek.ek(os.stat, parentPath)
    parentMode = stat.S_IMODE(parentPathStat[stat.ST_MODE])

    childPathStat = ek.ek(os.stat, childPath)
    childPath_mode = stat.S_IMODE(childPathStat[stat.ST_MODE])

    if ek.ek(os.path.isfile, childPath):
        childMode = fileBitFilter(parentMode)
    else:
        childMode = parentMode

    if childPath_mode == childMode:
        return

    childPath_owner = childPathStat.st_uid
    user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

    if user_id != 0 and user_id != childPath_owner:
        logger.log(u"Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
        return

    try:
        ek.ek(os.chmod, childPath, childMode)
        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode),
                   logger.DEBUG)
    except OSError:
        logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)

def fileBitFilter(mode):
    for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
        if mode & bit:
            mode -= bit

    return mode

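# Example (illustrative): the execute, setuid, and setgid bits are stripped,
# so a parent directory mode of 0775 maps to a file mode of 0664:
#
#     fileBitFilter(0775)  # returns 0664
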
def fixSetGroupID(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)
    parentStat = ek.ek(os.stat, parentPath)
    parentMode = stat.S_IMODE(parentStat[stat.ST_MODE])

    if parentMode & stat.S_ISGID:
        parentGID = parentStat[stat.ST_GID]
        childStat = ek.ek(os.stat, childPath)
        childGID = childStat[stat.ST_GID]

        if childGID == parentGID:
            return

        childPath_owner = childStat.st_uid
        user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

        if user_id != 0 and user_id != childPath_owner:
            logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID",
                       logger.DEBUG)
            return

        try:
            ek.ek(os.chown, childPath, -1, parentGID)  # @UndefinedVariable - only available on UNIX
            logger.log(u"Respecting the set-group-ID bit on the parent directory for %s" % (childPath), logger.DEBUG)
        except OSError:
            logger.log(
                u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (
                    childPath, parentGID), logger.ERROR)

def sanitizeSceneName(name, ezrss=False):
    """
    Takes a show name and returns the "scenified" version of it.

    ezrss: If true the scenified version will follow EZRSS's naming rules as closely as possible

    Returns: A string containing the scene version of the show name given.
    """

    if not ezrss:
        bad_chars = u",:()'!?\u2019"
    # ezrss leaves : and ! in their show names as far as I can tell
    else:
        bad_chars = u",()'?\u2019"

    # strip out any bad chars
    for x in bad_chars:
        name = name.replace(x, "")

    # tidy up stuff that doesn't belong in scene names
    name = name.replace("- ", ".").replace(" ", ".").replace("&", "and").replace('/', '.')
    name = re.sub("\.\.*", ".", name)

    if name.endswith('.'):
        name = name[:-1]

    return name

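# Example (illustrative):
#
#     sanitizeSceneName("It's Always Sunny!")  # 'Its.Always.Sunny'
#     sanitizeSceneName('Show Name (2010)')    # 'Show.Name.2010'
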
def create_https_certificates(ssl_cert, ssl_key):
    """
    Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'
    """
    try:
        from lib.OpenSSL import crypto  # @UnresolvedImport
        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
            serial  # @UnresolvedImport
    except:
        logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
        return False

    # Create the CA Certificate
    cakey = createKeyPair(TYPE_RSA, 1024)
    careq = createCertRequest(cakey, CN='Certificate Authority')
    cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    cname = 'SickBeard'
    pkey = createKeyPair(TYPE_RSA, 1024)
    req = createCertRequest(pkey, CN=cname)
    cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    # Save the key and certificate to disk
    try:
        open(ssl_key, 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
        open(ssl_cert, 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    except:
        logger.log(u"Error creating SSL key and certificate", logger.ERROR)
        return False

    return True

if __name__ == '__main__':
    import doctest

    doctest.testmod()

def parse_json(data):
    """
    Parse json data into a python object

    data: data string containing json

    Returns: parsed data as json or None
    """

    try:
        parsedJSON = json.loads(data)
    except ValueError, e:
        logger.log(u"Error trying to decode json data. Error: " + ex(e), logger.DEBUG)
        return None

    return parsedJSON

def parse_xml(data, del_xmlns=False):
    """
    Parse data into an xml elementtree.ElementTree

    data: data string containing xml
    del_xmlns: if True, removes the xmlns namespace from data before parsing

    Returns: parsed data as elementtree or None
    """

    if del_xmlns:
        data = re.sub(' xmlns="[^"]+"', '', data)

    try:
        parsedXML = etree.fromstring(data)
    except Exception, e:
        logger.log(u"Error trying to parse xml data. Error: " + ex(e), logger.DEBUG)
        parsedXML = None

    return parsedXML

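# Example (illustrative): stripping the default namespace lets callers search
# the tree with plain tag names:
#
#     tree = parse_xml('<show xmlns="http://example.com/ns"><title>x</title></show>', del_xmlns=True)
#     if tree is not None:
#         tree.findtext('title')  # 'x'
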
def get_xml_text(element, mini_dom=False):
    """
    Get all text inside an xml element

    element: An xml element either created with elementtree.ElementTree or xml.dom.minidom
    mini_dom: Default False use elementtree, True use minidom

    Returns: text
    """

    text = ""

    if mini_dom:
        node = element
        for child in node.childNodes:
            if child.nodeType in (Node.CDATA_SECTION_NODE, Node.TEXT_NODE):
                text += child.data
    else:
        if element is not None:
            for child in [element] + element.findall('.//*'):
                if child.text:
                    text += child.text

    return text.strip()

def backupVersionedFile(old_file, version):
    numTries = 0

    new_file = old_file + '.' + 'v' + str(version)

    while not ek.ek(os.path.isfile, new_file):
        if not ek.ek(os.path.isfile, old_file):
            logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
            break

        try:
            logger.log(u"Trying to back up " + old_file + " to " + new_file, logger.DEBUG)
            shutil.copy(old_file, new_file)
            logger.log(u"Backup done", logger.DEBUG)
            break
        except Exception, e:
            logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + ex(e), logger.WARNING)
            numTries += 1
            time.sleep(1)
            logger.log(u"Trying again.", logger.DEBUG)

        if numTries >= 10:
            logger.log(u"Unable to back up " + old_file + " to " + new_file + " please do it manually.", logger.ERROR)
            return False

    return True

# try to convert to int, if it fails the default will be returned
def tryInt(s, s_default=0):
    try:
        return int(s)
    except:
        return s_default

# generates an md5 hash of a file
def md5_for_file(filename, block_size=2 ** 16):
    try:
        with open(filename, 'rb') as f:
            md5 = hashlib.md5()
            while True:
                data = f.read(block_size)
                if not data:
                    break
                md5.update(data)
            return md5.hexdigest()
    except Exception:
        return None

def get_lan_ip():
    """
    Simple function to get LAN localhost_ip
    http://stackoverflow.com/questions/11735821/python-get-localhost-ip
    """

    if os.name != "nt":
        import fcntl
        import struct

        def get_interface_ip(ifname):
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s',
                                                                                ifname[:15]))[20:24])

    ip = socket.gethostbyname(socket.gethostname())
    if ip.startswith("127.") and os.name != "nt":
        interfaces = [
            "eth0",
            "eth1",
            "eth2",
            "wlan0",
            "wlan1",
            "wifi0",
            "ath0",
            "ath1",
            "ppp0",
        ]
        for ifname in interfaces:
            try:
                ip = get_interface_ip(ifname)
                logger.log(u"Found IP " + ip + " on interface " + ifname, logger.DEBUG)
                break
            except IOError:
                pass
    return ip

def check_url(url):
    """
    Check if a URL exists without downloading the whole file.
    We only check the URL header.
    """
    # see also http://stackoverflow.com/questions/2924422
    # http://stackoverflow.com/questions/1140661
    good_codes = [httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY]

    host, path = urlparse.urlparse(url)[1:3]  # elems [1] and [2]
    try:
        conn = httplib.HTTPConnection(host)
        conn.request('HEAD', path)
        return conn.getresponse().status in good_codes
    except StandardError:
        return None

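# Example usage (a sketch; the URL is a placeholder):
#
#     if check_url('http://example.com/static/file.zip'):
#         ...  # the server answered the HEAD request with 200, 301 or 302
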
"""
Encryption
==========
By Pedro Jose Pereira Vieito <pvieito@gmail.com> (@pvieito)

* If encryption_version==0 then return data without encryption
* The keys should be unique for each device

To add a new encryption_version:
  1) Code your new encryption_version
  2) Update the last encryption_version available in webserve.py
  3) Remember to maintain old encryption versions and key generators for backwards compatibility
"""

# Key Generators
unique_key1 = hex(uuid.getnode() ** 2)  # Used in encryption v1

# Encryption Functions
def encrypt(data, encryption_version=0, decrypt=False):
    # Version 1: Simple XOR encryption (this is not very secure, but works)
    if encryption_version == 1:
        if decrypt:
            return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
        else:
            return base64.encodestring(
                ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()
    # Version 0: Plain text
    else:
        return data

def decrypt(data, encryption_version=0):
    return encrypt(data, encryption_version, decrypt=True)

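# Example round trip (illustrative; unique_key1 is derived from the machine's
# MAC address, so ciphertext is not portable between devices):
#
#     token = encrypt('secret', encryption_version=1)
#     decrypt(token, encryption_version=1)  # 'secret'
#     encrypt('secret')                     # version 0 returns the data unchanged
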
def full_sanitizeSceneName(name):
    return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()

def _check_against_names(name, show):
    nameInQuestion = full_sanitizeSceneName(name)

    showNames = [show.name]
    showNames.extend(sickbeard.scene_exceptions.get_scene_exceptions(show.indexerid))

    for showName in showNames:
        nameFromList = full_sanitizeSceneName(showName)
        #logger.log(u"Comparing names: '"+nameFromList+"' vs '"+nameInQuestion+"'", logger.DEBUG)
        if nameFromList == nameInQuestion:
            return True

    return False

def get_show_by_name(name, showList, useIndexer=False):
    logger.log(u"Trying to get the indexerid for " + name, logger.DEBUG)

    if showList:
        for show in showList:
            if _check_against_names(name, show):
                logger.log(u"Matched " + name + " in the showlist to the show " + show.name, logger.DEBUG)
                return show

    if useIndexer:
        for indexer in sickbeard.indexerApi().indexers:
            try:
                lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
                lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
                lINDEXER_API_PARMS['search_all_languages'] = True

                t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
                showObj = t[name]
            except:
                continue

            if showObj:
                showResult = findCertainShow(sickbeard.showList, int(showObj["id"]))
                if showResult is not None:
                    return showResult

    return None

def suffix(d):
    return 'th' if 11 <= d <= 13 else {1: 'st', 2: 'nd', 3: 'rd'}.get(d % 10, 'th')

def custom_strftime(format, t):
    return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))

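# Example (illustrative): '{S}' expands to the ordinal day of the month:
#
#     import datetime
#     custom_strftime('%A, %B {S}', datetime.date(2014, 3, 1))  # 'Saturday, March 1st'
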
def is_hidden_folder(folder):
    """
    Returns True if folder is hidden.
    On Linux based systems hidden folders start with . (dot)

    folder: Full path of folder to check
    """
    if ek.ek(os.path.isdir, folder):
        if ek.ek(os.path.basename, folder).startswith('.'):
            return True

    return False

def real_path(path):
    """
    Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
    """
    return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, path)))

def _copy(self, obj, objectmap=None):
    """
    <Purpose>
      Create a deep copy of an object without using the python 'copy' module.
      Using copy.deepcopy() doesn't work because builtins like id and hasattr
      aren't available when this is called.
    <Arguments>
      self
      obj
        The object to make a deep copy of.
      objectmap
        A mapping between original objects and the corresponding copy. This is
        used to handle circular references.
    <Exceptions>
      TypeError
        If an object is encountered that we don't know how to make a copy of.
      NamespaceViolationError
        If an unexpected error occurs while copying. This isn't the greatest
        solution, but in general the idea is we just need to abort the wrapped
        function call.
    <Side Effects>
      A new reference is created to every non-simple type of object. That is,
      everything except objects of type str, unicode, int, etc.
    <Returns>
      The deep copy of obj with circular/recursive references preserved.
    """
    try:
        # If this is a top-level call to _copy, create a new objectmap for use
        # by recursive calls to _copy.
        if objectmap is None:
            objectmap = {}
        # If this is a circular reference, use the copy we already made.
        elif _saved_id(obj) in objectmap:
            return objectmap[_saved_id(obj)]

        # types.InstanceType is included because the user can provide an instance
        # of a class of their own in the list of callback args to settimer.
        if _is_in(type(obj), [str, unicode, int, long, float, complex, bool, frozenset,
                              types.NoneType, types.FunctionType, types.LambdaType,
                              types.MethodType, types.InstanceType]):
            return obj

        elif type(obj) is list:
            temp_list = []
            # Need to save this in the objectmap before recursing because lists
            # might have circular references.
            objectmap[_saved_id(obj)] = temp_list

            for item in obj:
                temp_list.append(self._copy(item, objectmap))

            return temp_list

        elif type(obj) is tuple:
            temp_list = []

            for item in obj:
                temp_list.append(self._copy(item, objectmap))

            # I'm not 100% confident on my reasoning here, so feel free to point
            # out where I'm wrong: There's no way for a tuple to directly contain
            # a circular reference to itself. Instead, it has to contain, for
            # example, a dict which has the same tuple as a value. In that
            # situation, we can avoid infinite recursion and properly maintain
            # circular references in our copies by checking the objectmap right
            # after we do the copy of each item in the tuple. The existence of the
            # dictionary would keep the recursion from being infinite because those
            # are properly handled. That just leaves making sure we end up with
            # only one copy of the tuple. We do that here by checking to see if we
            # just made a copy as a result of copying the items above. If so, we
            # return the one that's already been made.
            if _saved_id(obj) in objectmap:
                return objectmap[_saved_id(obj)]

            retval = tuple(temp_list)
            objectmap[_saved_id(obj)] = retval
            return retval

        elif type(obj) is set:
            temp_list = []
            # We can't just store this list object in the objectmap because it isn't
            # a set yet. If it's possible to have a set contain a reference to
            # itself, this could result in infinite recursion. However, sets can
            # only contain hashable items so I believe this can't happen.

            for item in obj:
                temp_list.append(self._copy(item, objectmap))

            retval = set(temp_list)
            objectmap[_saved_id(obj)] = retval
            return retval

        elif type(obj) is dict:
            temp_dict = {}
            # Need to save this in the objectmap before recursing because dicts
            # might have circular references.
            objectmap[_saved_id(obj)] = temp_dict

            for key, value in obj.items():
                temp_key = self._copy(key, objectmap)
                temp_dict[temp_key] = self._copy(value, objectmap)

            return temp_dict

        # We don't copy certain objects. This is because copying an emulated file
        # object, for example, will cause the destructor of the original one to
        # be invoked, which will close the actual underlying file. As the object
        # is wrapped and the client does not have access to it, it's safe to not
        # wrap it.
        elif isinstance(obj, (NamespaceObjectWrapper, emulfile.emulated_file,
                              emulcomm.emulated_socket, thread.LockType,
                              virtual_namespace.VirtualNamespace)):
            return obj

        else:
            raise TypeError("_copy is not implemented for objects of type " + str(type(obj)))

    except Exception, e:
        self._handle_violation("_copy failed on " + str(obj) + " with message " + str(e))