Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-05 17:43:37 +00:00)

Commit 9a2ba72b38: Merge remote-tracking branch 'origin/nightly' into dev
61 changed files with 1176 additions and 1591 deletions
@@ -71,6 +71,7 @@ throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')
signal.signal(signal.SIGINT, sickbeard.sig_handler)
signal.signal(signal.SIGTERM, sickbeard.sig_handler)


class SickRage(object):
def __init__(self):
# system event callback for shutdown/restart

@@ -455,7 +456,8 @@ class SickRage(object):
sickbeard.showList.append(curShow)
except Exception, e:
logger.log(
u"There was an error creating the show in " + sqlShow["location"] + ": " + str(e).decode('utf-8', 'replace'),
u"There was an error creating the show in " + sqlShow["location"] + ": " + str(e).decode('utf-8',
'replace'),
logger.ERROR)

def restore(self, srcDir, dstDir):

@@ -519,6 +521,7 @@ class SickRage(object):
# system exit
os._exit(0)


if __name__ == "__main__":
if sys.hexversion >= 0x020600F0:
freeze_support()
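The startup guard above compares sys.hexversion against 0x020600F0 before calling freeze_support(); freeze_support here presumably comes from multiprocessing, which first shipped with Python 2.6, which is why the gate sits at 2.6.0. A short standalone sketch of how that hex constant decodes (the decoding is standard CPython behaviour, not part of this commit):

import sys

# sys.hexversion packs the interpreter version as 0xMMmmppRN:
# major, minor, micro, release level (0xA alpha, 0xB beta, 0xC rc, 0xF final), serial.
# 0x020600F0 therefore means "2.6.0 final or newer".
def decode_hexversion(hv):
    major = (hv >> 24) & 0xff
    minor = (hv >> 16) & 0xff
    micro = (hv >> 8) & 0xff
    return major, minor, micro

assert decode_hexversion(0x020600F0) == (2, 6, 0)
print(sys.hexversion >= 0x020600F0)  # True on any 2.6+ interpreter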
@@ -276,6 +276,24 @@

<fieldset class="component-group-list" style="width:670px">

<div class="field-pair">
<label class="nocheck clearfix">
<span class="component-title">Branch Version:</span>
<span class="component-desc">
<select id="branchVersion" name="branchVersion">
#for $cur_branch in $sickbeard.versionCheckScheduler.action.list_remote_branches():
<option value="$cur_branch" #if $cur_branch == $sickbeard.version.SICKBEARD_VERSION then "selected=\"selected\"" else ""#>$cur_branch.capitalize()</option>
#end for
</select>
<input class="btn" class="btn" type="button" id="branchCheckout" value="Checkout Branch">
</span>
</label>
<label class="nocheck clearfix">
<span class="component-title">&nbsp;</span>
<span class="component-desc">Select the branch you wish to use, changing this will require a restart.</span>
</label>
</div>

<div class="field-pair">
<label class="nocheck clearfix">
<span class="component-title">CPU Throttling:</span>
@ -1,27 +1,27 @@
|
|||
$(document).ready(function(){
|
||||
$(".enabler").each(function(){
|
||||
$(document).ready(function () {
|
||||
$(".enabler").each(function () {
|
||||
if (!$(this).prop('checked'))
|
||||
$('#content_'+$(this).attr('id')).hide();
|
||||
$('#content_' + $(this).attr('id')).hide();
|
||||
});
|
||||
|
||||
$(".enabler").click(function() {
|
||||
$(".enabler").click(function () {
|
||||
if ($(this).prop('checked'))
|
||||
$('#content_'+$(this).attr('id')).fadeIn("fast", "linear");
|
||||
$('#content_' + $(this).attr('id')).fadeIn("fast", "linear");
|
||||
else
|
||||
$('#content_'+$(this).attr('id')).fadeOut("fast", "linear");
|
||||
$('#content_' + $(this).attr('id')).fadeOut("fast", "linear");
|
||||
});
|
||||
|
||||
$(".viewIf").click(function() {
|
||||
$(".viewIf").click(function () {
|
||||
if ($(this).prop('checked')) {
|
||||
$('.hide_if_'+$(this).attr('id')).css('display','none');
|
||||
$('.show_if_'+$(this).attr('id')).fadeIn("fast", "linear");
|
||||
$('.hide_if_' + $(this).attr('id')).css('display', 'none');
|
||||
$('.show_if_' + $(this).attr('id')).fadeIn("fast", "linear");
|
||||
} else {
|
||||
$('.show_if_'+$(this).attr('id')).css('display','none');
|
||||
$('.hide_if_'+$(this).attr('id')).fadeIn("fast", "linear");
|
||||
$('.show_if_' + $(this).attr('id')).css('display', 'none');
|
||||
$('.hide_if_' + $(this).attr('id')).fadeIn("fast", "linear");
|
||||
}
|
||||
});
|
||||
|
||||
$(".datePresets").click(function() {
|
||||
$(".datePresets").click(function () {
|
||||
var def = $('#date_presets').val()
|
||||
if ($(this).prop('checked') && '%x' == def) {
|
||||
def = '%a, %b %d, %Y'
|
||||
|
@ -42,36 +42,42 @@ $(document).ready(function(){
|
|||
$('#date_presets').val(def)
|
||||
});
|
||||
|
||||
// bind 'myForm' and provide a simple callback function
|
||||
// bind 'myForm' and provide a simple callback function
|
||||
$('#configForm').ajaxForm({
|
||||
beforeSubmit: function(){
|
||||
$('.config_submitter').each(function(){
|
||||
beforeSubmit: function () {
|
||||
$('.config_submitter').each(function () {
|
||||
$(this).attr("disabled", "disabled");
|
||||
$(this).after('<span><img src="'+sbRoot+'/images/loading16.gif"> Saving...</span>');
|
||||
$(this).after('<span><img src="' + sbRoot + '/images/loading16.gif"> Saving...</span>');
|
||||
$(this).hide();
|
||||
});
|
||||
},
|
||||
success: function(){
|
||||
success: function () {
|
||||
setTimeout('config_success()', 2000)
|
||||
}
|
||||
});
|
||||
|
||||
$('#api_key').click(function(){ $('#api_key').select() });
|
||||
$("#generate_new_apikey").click(function(){
|
||||
$.get(sbRoot + '/config/general/generateKey',
|
||||
function(data){
|
||||
$('#api_key').click(function () {
|
||||
$('#api_key').select()
|
||||
});
|
||||
$("#generate_new_apikey").click(function () {
|
||||
$.get(sbRoot + '/config/general/generateKey',
|
||||
function (data) {
|
||||
if (data.error != undefined) {
|
||||
alert(data.error);
|
||||
return;
|
||||
}
|
||||
$('#api_key').val(data);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
$('#branchCheckout').click(function () {
|
||||
url = sbRoot+'/home/branchCheckout?branch='+$("#branchVersion").val();
|
||||
window.location.href = url;
|
||||
});
|
||||
});
|
||||
|
||||
function config_success(){
|
||||
$('.config_submitter').each(function(){
|
||||
function config_success() {
|
||||
$('.config_submitter').each(function () {
|
||||
$(this).removeAttr("disabled");
|
||||
$(this).next().remove();
|
||||
$(this).show();
|
||||
|
|
|
@ -117,10 +117,6 @@ $(document).ready(function () {
|
|||
$("#checkboxControls input").change(function (e) {
|
||||
var whichClass = $(this).attr('id');
|
||||
$(this).showHideRows(whichClass);
|
||||
|
||||
$('tr.' + whichClass).each(function (i) {
|
||||
$(this).toggle();
|
||||
});
|
||||
});
|
||||
|
||||
// initially show/hide all the rows according to the checkboxes
|
||||
|
@ -273,4 +269,4 @@ $(document).ready(function () {
|
|||
height:120
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -6,6 +6,7 @@
#license:unlicense (http://unlicense.org/)

from functools import wraps
import traceback

__author__ = "dbr/Ben"
__version__ = "1.9"

@@ -21,7 +22,7 @@ import logging
import zipfile
import datetime as dt
import requests
import cachecontrol
import requests.exceptions
import xmltodict

try:

@@ -35,7 +36,7 @@ except ImportError:
gzip = None

from lib.dateutil.parser import parse
from cachecontrol import caches
from lib.cachecontrol import CacheControl, caches

from tvdb_ui import BaseUI, ConsoleUI
from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,

@@ -366,7 +367,8 @@ class Tvdb:
apikey=None,
forceConnect=False,
useZip=False,
dvdorder=False):
dvdorder=False,
proxy=None):

"""interactive (True/False):
When True, uses built-in console UI is used to select the correct show.

@@ -464,16 +466,18 @@ class Tvdb:

self.config['dvdorder'] = dvdorder

self.config['proxy'] = proxy

if cache is True:
self.config['cache_enabled'] = True
self.config['cache_location'] = self._getTempDir()
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
self.config['cache_location'] = cache
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))

@@ -561,18 +565,24 @@ class Tvdb:

# get response from TVDB
if self.config['cache_enabled']:
if self.config['proxy']:
log().debug("Using proxy for URL: %s" % url)
self.sess.proxies = {
"http": self.config['proxy'],
"https": self.config['proxy'],
}

resp = self.sess.get(url, cache_auto=True, params=params)
else:
resp = requests.get(url, params=params)

except requests.HTTPError, e:
except requests.exceptions.HTTPError, e:
raise tvdb_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))

except requests.ConnectionError, e:
except requests.exceptions.ConnectionError, e:
raise tvdb_error("Connection error " + str(e.message) + " while loading URL " + str(url))

except requests.Timeout, e:
except requests.exceptions.Timeout, e:
raise tvdb_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
except Exception:
raise tvdb_error("Unknown exception while loading URL " + url + ": " + traceback.format_exc())

def process(path, key, value):
key = key.lower()

@@ -684,8 +694,7 @@ class Tvdb:
log().debug("Searching for show %s" % series)
self.config['params_getSeries']['seriesname'] = series
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])

return [seriesEt[item] for item in seriesEt][0]
return [seriesEt[item] for item in seriesEt][0] if seriesEt else []

def _getSeries(self, series):
"""This searches TheTVDB.com for the series name,

@@ -703,7 +712,8 @@ class Tvdb:

if self.config['custom_ui'] is not None:
log().debug("Using custom UI %s" % (repr(self.config['custom_ui'])))
ui = self.config['custom_ui'](config=self.config)
CustomUI = self.config['custom_ui']
ui = CustomUI(config=self.config)
else:
if not self.config['interactive']:
log().debug('Auto-selecting first search result using BaseUI')
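With the new proxy keyword, callers can route TheTVDB requests through an HTTP proxy while keeping the on-disk response cache. A minimal usage sketch; the import path is assumed from this repository's lib/ layout, the proxy URL and show name are placeholders, and the t['Show Name'] lookup is the usual tvdb_api access pattern rather than something this change introduces:

from lib.tvdb_api.tvdb_api import Tvdb  # import path assumed

# cache=True stores responses in a temp-dir FileCache via CacheControl;
# proxy is applied to both http and https requests made by the wrapped session.
t = Tvdb(cache=True, proxy='http://127.0.0.1:3128')

show = t['My Example Show']   # triggers the series search and fetch
print(show['seriesname'])     # 'seriesname' is a standard tvdb_api data key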
@ -10,6 +10,7 @@ Modified from http://github.com/dbr/tvrage_api
|
|||
Simple-to-use Python interface to The TVRage's API (tvrage.com)
|
||||
"""
|
||||
from functools import wraps
|
||||
import traceback
|
||||
|
||||
__author__ = "echel0n"
|
||||
__version__ = "1.0"
|
||||
|
@ -23,7 +24,7 @@ import warnings
|
|||
import logging
|
||||
import datetime as dt
|
||||
import requests
|
||||
import cachecontrol
|
||||
import requests.exceptions
|
||||
import xmltodict
|
||||
|
||||
try:
|
||||
|
@ -32,7 +33,7 @@ except ImportError:
|
|||
import xml.etree.ElementTree as ElementTree
|
||||
|
||||
from lib.dateutil.parser import parse
|
||||
from cachecontrol import caches
|
||||
from cachecontrol import CacheControl, caches
|
||||
|
||||
from tvrage_ui import BaseUI
|
||||
from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfound,
|
||||
|
@ -283,7 +284,8 @@ class TVRage:
|
|||
apikey=None,
|
||||
forceConnect=False,
|
||||
useZip=False,
|
||||
dvdorder=False):
|
||||
dvdorder=False,
|
||||
proxy=None):
|
||||
|
||||
"""
|
||||
cache (True/False/str/unicode/urllib2 opener):
|
||||
|
@ -316,16 +318,18 @@ class TVRage:
|
|||
|
||||
self.config['custom_ui'] = custom_ui
|
||||
|
||||
self.config['proxy'] = proxy
|
||||
|
||||
if cache is True:
|
||||
self.config['cache_enabled'] = True
|
||||
self.config['cache_location'] = self._getTempDir()
|
||||
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
|
||||
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
|
||||
elif cache is False:
|
||||
self.config['cache_enabled'] = False
|
||||
elif isinstance(cache, basestring):
|
||||
self.config['cache_enabled'] = True
|
||||
self.config['cache_location'] = cache
|
||||
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
|
||||
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
|
||||
else:
|
||||
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
|
||||
|
||||
|
@ -401,18 +405,25 @@ class TVRage:
|
|||
|
||||
# get response from TVRage
|
||||
if self.config['cache_enabled']:
|
||||
if self.config['proxy']:
|
||||
log().debug("Using proxy for URL: %s" % url)
|
||||
self.sess.proxies = {
|
||||
"http": self.config['proxy'],
|
||||
"https": self.config['proxy'],
|
||||
}
|
||||
|
||||
resp = self.sess.get(url.strip(), cache_auto=True, params=params)
|
||||
else:
|
||||
resp = requests.get(url.strip(), params=params)
|
||||
|
||||
except requests.HTTPError, e:
|
||||
except requests.exceptions.HTTPError, e:
|
||||
raise tvrage_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
|
||||
|
||||
except requests.ConnectionError, e:
|
||||
except requests.exceptions.ConnectionError, e:
|
||||
raise tvrage_error("Connection error " + str(e.message) + " while loading URL " + str(url))
|
||||
|
||||
except requests.Timeout, e:
|
||||
except requests.exceptions.Timeout, e:
|
||||
raise tvrage_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
|
||||
except Exception:
|
||||
raise tvrage_error("Unknown exception while loading URL " + url + ": " + traceback.format_exc())
|
||||
|
||||
def remap_keys(path, key, value):
|
||||
name_map = {
|
||||
|
@ -545,8 +556,7 @@ class TVRage:
|
|||
log().debug("Searching for show %s" % series)
|
||||
self.config['params_getSeries']['show'] = series
|
||||
seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
|
||||
|
||||
return [seriesEt[item] for item in seriesEt][0]
|
||||
return [seriesEt[item] for item in seriesEt][0] if seriesEt else []
|
||||
|
||||
def _getSeries(self, series):
|
||||
"""This searches tvrage.com for the series name,
|
||||
|
@ -564,7 +574,8 @@ class TVRage:
|
|||
|
||||
if self.config['custom_ui'] is not None:
|
||||
log().debug("Using custom UI %s" % (repr(self.config['custom_ui'])))
|
||||
ui = self.config['custom_ui'](config=self.config)
|
||||
CustomUI = self.config['custom_ui']
|
||||
ui = CustomUI(config=self.config)
|
||||
else:
|
||||
log().debug('Auto-selecting first search result using BaseUI')
|
||||
ui = BaseUI(config=self.config)
|
||||
|
|
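Both indexer libraries now build their HTTP session the same way: wrap a requests session in CacheControl with a FileCache, then point session.proxies at the configured proxy. A standalone sketch of that pattern using the public cachecontrol and requests APIs; the cache directory, proxy URL, and target URL are illustrative:

import requests
from cachecontrol import CacheControl
from cachecontrol.caches import FileCache

# CacheControl returns the same session, now transparently caching responses on disk.
sess = CacheControl(requests.Session(), cache=FileCache('/tmp/indexer_cache'))

# requests consults this mapping per scheme for every request made on the session.
sess.proxies = {
    'http': 'http://127.0.0.1:3128',
    'https': 'http://127.0.0.1:3128',
}

resp = sess.get('http://example.com/api', params={'seriesname': 'example'})
print(resp.status_code)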
|
@ -24,7 +24,6 @@ import socket
|
|||
import os
|
||||
import re
|
||||
|
||||
from urllib2 import getproxies
|
||||
from threading import Lock
|
||||
|
||||
# apparently py2exe won't build these unless they're imported somewhere
|
||||
|
@ -32,7 +31,8 @@ import sys
|
|||
from sickbeard import providers, metadata, config, webserveInit
|
||||
from sickbeard.providers.generic import GenericProvider
|
||||
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
|
||||
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, freshontv, bitsoup
|
||||
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, \
|
||||
freshontv, bitsoup
|
||||
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
|
||||
naming_ep_type
|
||||
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
|
||||
|
@ -98,9 +98,9 @@ metadata_provider_dict = {}
|
|||
|
||||
NEWEST_VERSION = None
|
||||
NEWEST_VERSION_STRING = None
|
||||
VERSION_NOTIFY = None
|
||||
AUTO_UPDATE = None
|
||||
NOTIFY_ON_UPDATE = None
|
||||
VERSION_NOTIFY = False
|
||||
AUTO_UPDATE = False
|
||||
NOTIFY_ON_UPDATE = False
|
||||
CUR_COMMIT_HASH = None
|
||||
|
||||
INIT_LOCK = Lock()
|
||||
|
@ -119,9 +119,9 @@ WEB_PASSWORD = None
|
|||
WEB_HOST = None
|
||||
WEB_IPV6 = None
|
||||
|
||||
PLAY_VIDEOS = None
|
||||
PLAY_VIDEOS = False
|
||||
|
||||
HANDLE_REVERSE_PROXY = None
|
||||
HANDLE_REVERSE_PROXY = False
|
||||
PROXY_SETTING = None
|
||||
|
||||
LOCALHOST_IP = None
|
||||
|
@ -137,16 +137,15 @@ ENABLE_HTTPS = False
|
|||
HTTPS_CERT = None
|
||||
HTTPS_KEY = None
|
||||
|
||||
LAUNCH_BROWSER = None
|
||||
LAUNCH_BROWSER = False
|
||||
CACHE_DIR = None
|
||||
ACTUAL_CACHE_DIR = None
|
||||
ROOT_DIRS = None
|
||||
UPDATE_SHOWS_ON_START = None
|
||||
SORT_ARTICLE = None
|
||||
UPDATE_SHOWS_ON_START = False
|
||||
SORT_ARTICLE = False
|
||||
DEBUG = False
|
||||
CLEAR_CACHE = None
|
||||
|
||||
USE_LISTVIEW = None
|
||||
USE_LISTVIEW = False
|
||||
METADATA_XBMC = None
|
||||
METADATA_XBMC_12PLUS = None
|
||||
METADATA_MEDIABROWSER = None
|
||||
|
@ -157,42 +156,42 @@ METADATA_MEDE8ER = None
|
|||
|
||||
QUALITY_DEFAULT = None
|
||||
STATUS_DEFAULT = None
|
||||
FLATTEN_FOLDERS_DEFAULT = None
|
||||
SUBTITLES_DEFAULT = None
|
||||
FLATTEN_FOLDERS_DEFAULT = False
|
||||
SUBTITLES_DEFAULT = False
|
||||
INDEXER_DEFAULT = None
|
||||
INDEXER_TIMEOUT = None
|
||||
SCENE_DEFAULT = None
|
||||
ANIME_DEFAULT = None
|
||||
SCENE_DEFAULT = False
|
||||
ANIME_DEFAULT = False
|
||||
PROVIDER_ORDER = []
|
||||
|
||||
NAMING_MULTI_EP = None
|
||||
NAMING_MULTI_EP = False
|
||||
NAMING_PATTERN = None
|
||||
NAMING_ABD_PATTERN = None
|
||||
NAMING_CUSTOM_ABD = None
|
||||
NAMING_CUSTOM_ABD = False
|
||||
NAMING_SPORTS_PATTERN = None
|
||||
NAMING_CUSTOM_SPORTS = None
|
||||
NAMING_CUSTOM_SPORTS = False
|
||||
NAMING_FORCE_FOLDERS = False
|
||||
NAMING_STRIP_YEAR = None
|
||||
NAMING_STRIP_YEAR = False
|
||||
NAMING_ANIME = None
|
||||
|
||||
USE_NZBS = None
|
||||
USE_TORRENTS = None
|
||||
USE_NZBS = False
|
||||
USE_TORRENTS = False
|
||||
|
||||
NZB_METHOD = None
|
||||
NZB_DIR = None
|
||||
USENET_RETENTION = None
|
||||
TORRENT_METHOD = None
|
||||
TORRENT_DIR = None
|
||||
DOWNLOAD_PROPERS = None
|
||||
DOWNLOAD_PROPERS = False
|
||||
CHECK_PROPERS_INTERVAL = None
|
||||
ALLOW_HIGH_PRIORITY = None
|
||||
ALLOW_HIGH_PRIORITY = False
|
||||
|
||||
AUTOPOSTPROCESSER_FREQUENCY = None
|
||||
DAILYSEARCH_FREQUENCY = None
|
||||
UPDATE_FREQUENCY = None
|
||||
BACKLOG_FREQUENCY = None
|
||||
DAILYSEARCH_STARTUP = None
|
||||
BACKLOG_STARTUP = None
|
||||
DAILYSEARCH_STARTUP = False
|
||||
BACKLOG_STARTUP = False
|
||||
|
||||
MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
|
||||
MIN_BACKLOG_FREQUENCY = 10
|
||||
|
@ -203,8 +202,8 @@ DEFAULT_BACKLOG_FREQUENCY = 10080
|
|||
DEFAULT_DAILYSEARCH_FREQUENCY = 60
|
||||
DEFAULT_UPDATE_FREQUENCY = 1
|
||||
|
||||
ADD_SHOWS_WO_DIR = None
|
||||
CREATE_MISSING_SHOW_DIRS = None
|
||||
ADD_SHOWS_WO_DIR = False
|
||||
CREATE_MISSING_SHOW_DIRS = False
|
||||
RENAME_EPISODES = False
|
||||
AIRDATE_EPISODES = False
|
||||
PROCESS_AUTOMATICALLY = False
|
||||
|
@ -250,7 +249,7 @@ TORRENT_SEED_TIME = None
|
|||
TORRENT_PAUSED = False
|
||||
TORRENT_HIGH_BANDWIDTH = False
|
||||
TORRENT_LABEL = ''
|
||||
TORRENT_VERIFY_CERT = True
|
||||
TORRENT_VERIFY_CERT = False
|
||||
|
||||
USE_XBMC = False
|
||||
XBMC_ALWAYS_ON = True
|
||||
|
@ -331,7 +330,7 @@ ANIMESUPPORT = False
|
|||
USE_ANIDB = False
|
||||
ANIDB_USERNAME = None
|
||||
ANIDB_PASSWORD = None
|
||||
ANIDB_USE_MYLIST = 0
|
||||
ANIDB_USE_MYLIST = False
|
||||
ADBA_CONNECTION = None
|
||||
ANIME_SPLIT_HOME = False
|
||||
|
||||
|
@ -403,9 +402,9 @@ EMAIL_LIST = None
|
|||
GUI_NAME = None
|
||||
HOME_LAYOUT = None
|
||||
HISTORY_LAYOUT = None
|
||||
DISPLAY_SHOW_SPECIALS = None
|
||||
DISPLAY_SHOW_SPECIALS = False
|
||||
COMING_EPS_LAYOUT = None
|
||||
COMING_EPS_DISPLAY_PAUSED = None
|
||||
COMING_EPS_DISPLAY_PAUSED = False
|
||||
COMING_EPS_SORT = None
|
||||
COMING_EPS_MISSED_RANGE = None
|
||||
FUZZY_DATING = False
|
||||
|
@ -438,6 +437,8 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'
|
|||
TRAKT_API_KEY = 'abd806c54516240c76e4ebc9c5ccf394'
|
||||
|
||||
__INITIALIZED__ = False
|
||||
|
||||
|
||||
def initialize(consoleLogging=True):
|
||||
with INIT_LOCK:
|
||||
|
||||
|
@ -474,7 +475,7 @@ def initialize(consoleLogging=True):
|
|||
USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \
|
||||
USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
|
||||
USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
|
||||
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, CLEAR_CACHE, dailySearchScheduler, NFO_RENAME, \
|
||||
NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, dailySearchScheduler, NFO_RENAME, \
|
||||
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
|
||||
METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
|
||||
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
|
||||
|
@ -523,6 +524,10 @@ def initialize(consoleLogging=True):
|
|||
logger.log(u"!!! Creating local cache dir failed, using system default", logger.ERROR)
|
||||
CACHE_DIR = None
|
||||
|
||||
# clean cache folders
|
||||
if CACHE_DIR:
|
||||
helpers.clearCache()
|
||||
|
||||
GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick')
|
||||
|
||||
ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
|
||||
|
@ -583,18 +588,11 @@ def initialize(consoleLogging=True):
|
|||
if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS):
|
||||
ROOT_DIRS = ''
|
||||
|
||||
proxies = getproxies()
|
||||
proxy_url = None
|
||||
if 'http' in proxies:
|
||||
proxy_url = proxies['http']
|
||||
elif 'ftp' in proxies:
|
||||
proxy_url = proxies['ftp']
|
||||
|
||||
QUALITY_DEFAULT = check_setting_int(CFG, 'General', 'quality_default', SD)
|
||||
STATUS_DEFAULT = check_setting_int(CFG, 'General', 'status_default', SKIPPED)
|
||||
VERSION_NOTIFY = check_setting_int(CFG, 'General', 'version_notify', 1)
|
||||
AUTO_UPDATE = check_setting_int(CFG, 'General', 'auto_update', 0)
|
||||
NOTIFY_ON_UPDATE = check_setting_int(CFG, 'General', 'notify_on_update', 1)
|
||||
VERSION_NOTIFY = bool(check_setting_int(CFG, 'General', 'version_notify', 1))
|
||||
AUTO_UPDATE = bool(check_setting_int(CFG, 'General', 'auto_update', 0))
|
||||
NOTIFY_ON_UPDATE = bool(check_setting_int(CFG, 'General', 'notify_on_update', 1))
|
||||
FLATTEN_FOLDERS_DEFAULT = bool(check_setting_int(CFG, 'General', 'flatten_folders_default', 0))
|
||||
INDEXER_DEFAULT = check_setting_int(CFG, 'General', 'indexer_default', 0)
|
||||
INDEXER_TIMEOUT = check_setting_int(CFG, 'General', 'indexer_timeout', 20)
|
||||
|
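Several settings that used to be loaded as raw integers (or left as None) are now coerced with bool(check_setting_int(...)), so the module-level flags hold real booleans. A small sketch of why the extra bool() matters; check_setting_int below is a hypothetical stand-in for the real helper in sickbeard.config, which returns the stored value as an int and falls back to the default:

# Hypothetical stand-in for sickbeard.config.check_setting_int
def check_setting_int(cfg, section, key, default):
    try:
        return int(cfg[section][key])
    except (KeyError, ValueError, TypeError):
        return default

cfg = {'General': {'version_notify': '0'}}

raw = check_setting_int(cfg, 'General', 'version_notify', 1)             # 0 (stored as '0' in the INI)
VERSION_NOTIFY = bool(raw)                                               # False, a real boolean
AUTO_UPDATE = bool(check_setting_int(cfg, 'General', 'auto_update', 0))  # False (missing key -> default 0)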
@ -605,11 +603,11 @@ def initialize(consoleLogging=True):
|
|||
|
||||
NAMING_PATTERN = check_setting_str(CFG, 'General', 'naming_pattern', 'Season %0S/%SN - S%0SE%0E - %EN')
|
||||
NAMING_ABD_PATTERN = check_setting_str(CFG, 'General', 'naming_abd_pattern', '%SN - %A.D - %EN')
|
||||
NAMING_CUSTOM_ABD = check_setting_int(CFG, 'General', 'naming_custom_abd', 0)
|
||||
NAMING_CUSTOM_ABD = bool(check_setting_int(CFG, 'General', 'naming_custom_abd', 0))
|
||||
NAMING_SPORTS_PATTERN = check_setting_str(CFG, 'General', 'naming_sports_pattern', '%SN - %A-D - %EN')
|
||||
NAMING_ANIME = check_setting_int(CFG, 'General', 'naming_anime', 3)
|
||||
NAMING_CUSTOM_SPORTS = check_setting_int(CFG, 'General', 'naming_custom_sports', 0)
|
||||
NAMING_MULTI_EP = check_setting_int(CFG, 'General', 'naming_multi_ep', 1)
|
||||
NAMING_CUSTOM_SPORTS = bool(check_setting_int(CFG, 'General', 'naming_custom_sports', 0))
|
||||
NAMING_MULTI_EP = bool(check_setting_int(CFG, 'General', 'naming_multi_ep', 1))
|
||||
NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
|
||||
NAMING_STRIP_YEAR = bool(check_setting_int(CFG, 'General', 'naming_strip_year', 0))
|
||||
|
||||
|
@ -659,16 +657,16 @@ def initialize(consoleLogging=True):
|
|||
TORRENT_DIR = check_setting_str(CFG, 'Blackhole', 'torrent_dir', '')
|
||||
|
||||
TV_DOWNLOAD_DIR = check_setting_str(CFG, 'General', 'tv_download_dir', '')
|
||||
PROCESS_AUTOMATICALLY = check_setting_int(CFG, 'General', 'process_automatically', 0)
|
||||
UNPACK = check_setting_int(CFG, 'General', 'unpack', 0)
|
||||
RENAME_EPISODES = check_setting_int(CFG, 'General', 'rename_episodes', 1)
|
||||
AIRDATE_EPISODES = check_setting_int(CFG, 'General', 'airdate_episodes', 0)
|
||||
KEEP_PROCESSED_DIR = check_setting_int(CFG, 'General', 'keep_processed_dir', 1)
|
||||
PROCESS_AUTOMATICALLY = bool(check_setting_int(CFG, 'General', 'process_automatically', 0))
|
||||
UNPACK = bool(check_setting_int(CFG, 'General', 'unpack', 0))
|
||||
RENAME_EPISODES = bool(check_setting_int(CFG, 'General', 'rename_episodes', 1))
|
||||
AIRDATE_EPISODES = bool(check_setting_int(CFG, 'General', 'airdate_episodes', 0))
|
||||
KEEP_PROCESSED_DIR = bool(check_setting_int(CFG, 'General', 'keep_processed_dir', 1))
|
||||
PROCESS_METHOD = check_setting_str(CFG, 'General', 'process_method', 'copy' if KEEP_PROCESSED_DIR else 'move')
|
||||
MOVE_ASSOCIATED_FILES = check_setting_int(CFG, 'General', 'move_associated_files', 0)
|
||||
NFO_RENAME = check_setting_int(CFG, 'General', 'nfo_rename', 1)
|
||||
CREATE_MISSING_SHOW_DIRS = check_setting_int(CFG, 'General', 'create_missing_show_dirs', 0)
|
||||
ADD_SHOWS_WO_DIR = check_setting_int(CFG, 'General', 'add_shows_wo_dir', 0)
|
||||
MOVE_ASSOCIATED_FILES = bool(check_setting_int(CFG, 'General', 'move_associated_files', 0))
|
||||
NFO_RENAME = bool(check_setting_int(CFG, 'General', 'nfo_rename', 1))
|
||||
CREATE_MISSING_SHOW_DIRS = bool(check_setting_int(CFG, 'General', 'create_missing_show_dirs', 0))
|
||||
ADD_SHOWS_WO_DIR = bool(check_setting_int(CFG, 'General', 'add_shows_wo_dir', 0))
|
||||
|
||||
NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0))
|
||||
NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '')
|
||||
|
@ -761,7 +759,8 @@ def initialize(consoleLogging=True):
|
|||
USE_PUSHOVER = bool(check_setting_int(CFG, 'Pushover', 'use_pushover', 0))
|
||||
PUSHOVER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsnatch', 0))
|
||||
PUSHOVER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_ondownload', 0))
|
||||
PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0))
|
||||
PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||
check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0))
|
||||
PUSHOVER_USERKEY = check_setting_str(CFG, 'Pushover', 'pushover_userkey', '')
|
||||
PUSHOVER_APIKEY = check_setting_str(CFG, 'Pushover', 'pushover_apikey', '')
|
||||
USE_LIBNOTIFY = bool(check_setting_int(CFG, 'Libnotify', 'use_libnotify', 0))
|
||||
|
@ -796,7 +795,7 @@ def initialize(consoleLogging=True):
|
|||
TRAKT_API = check_setting_str(CFG, 'Trakt', 'trakt_api', '')
|
||||
TRAKT_REMOVE_WATCHLIST = bool(check_setting_int(CFG, 'Trakt', 'trakt_remove_watchlist', 0))
|
||||
TRAKT_USE_WATCHLIST = bool(check_setting_int(CFG, 'Trakt', 'trakt_use_watchlist', 0))
|
||||
TRAKT_METHOD_ADD = check_setting_str(CFG, 'Trakt', 'trakt_method_add', "0")
|
||||
TRAKT_METHOD_ADD = check_setting_int(CFG, 'Trakt', 'trakt_method_add', 0)
|
||||
TRAKT_START_PAUSED = bool(check_setting_int(CFG, 'Trakt', 'trakt_start_paused', 0))
|
||||
TRAKT_USE_RECOMMENDED = bool(check_setting_int(CFG, 'Trakt', 'trakt_use_recommended', 0))
|
||||
TRAKT_SYNC = bool(check_setting_int(CFG, 'Trakt', 'trakt_sync', 0))
|
||||
|
@ -874,10 +873,11 @@ def initialize(consoleLogging=True):
|
|||
USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0))
|
||||
|
||||
ANIMESUPPORT = False
|
||||
USE_ANIDB = check_setting_str(CFG, 'ANIDB', 'use_anidb', '')
|
||||
USE_ANIDB = bool(check_setting_int(CFG, 'ANIDB', 'use_anidb', 0))
|
||||
ANIDB_USERNAME = check_setting_str(CFG, 'ANIDB', 'anidb_username', '')
|
||||
ANIDB_PASSWORD = check_setting_str(CFG, 'ANIDB', 'anidb_password', '')
|
||||
ANIDB_USE_MYLIST = bool(check_setting_int(CFG, 'ANIDB', 'anidb_use_mylist', 0))
|
||||
|
||||
ANIME_SPLIT_HOME = bool(check_setting_int(CFG, 'ANIME', 'anime_split_home', 0))
|
||||
|
||||
METADATA_XBMC = check_setting_str(CFG, 'General', 'metadata_xbmc', '0|0|0|0|0|0|0|0|0|0')
|
||||
|
@ -902,125 +902,15 @@ def initialize(consoleLogging=True):
|
|||
TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")
|
||||
TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'network')
|
||||
|
||||
# initialize NZB and TORRENT providers
|
||||
providerList = providers.makeProviderList()
|
||||
|
||||
NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
|
||||
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
|
||||
|
||||
TORRENTRSS_DATA = check_setting_str(CFG, 'TorrentRss', 'torrentrss_data', '')
|
||||
torrentRssProviderList = providers.getTorrentRssProviderList(TORRENTRSS_DATA)
|
||||
|
||||
if not os.path.isfile(CONFIG_FILE):
|
||||
logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG)
|
||||
save_config()
|
||||
|
||||
# start up all the threads
|
||||
logger.sb_log_instance.initLogging(consoleLogging=consoleLogging)
|
||||
|
||||
# initialize the main SB database
|
||||
myDB = db.DBConnection()
|
||||
db.upgradeDatabase(myDB, mainDB.InitialSchema)
|
||||
|
||||
# initialize the cache database
|
||||
myDB = db.DBConnection('cache.db')
|
||||
db.upgradeDatabase(myDB, cache_db.InitialSchema)
|
||||
|
||||
# initialize the failed downloads database
|
||||
myDB = db.DBConnection('failed.db')
|
||||
db.upgradeDatabase(myDB, failed_db.InitialSchema)
|
||||
|
||||
# fix up any db problems
|
||||
myDB = db.DBConnection()
|
||||
db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck)
|
||||
|
||||
# migrate the config if it needs it
|
||||
migrator = ConfigMigrator(CFG)
|
||||
migrator.migrate_config()
|
||||
|
||||
# initialize metadata_providers
|
||||
metadata_provider_dict = metadata.get_metadata_generator_dict()
|
||||
for cur_metadata_tuple in [(METADATA_XBMC, metadata.xbmc),
|
||||
(METADATA_XBMC_12PLUS, metadata.xbmc_12plus),
|
||||
(METADATA_MEDIABROWSER, metadata.mediabrowser),
|
||||
(METADATA_PS3, metadata.ps3),
|
||||
(METADATA_WDTV, metadata.wdtv),
|
||||
(METADATA_TIVO, metadata.tivo),
|
||||
(METADATA_MEDE8ER, metadata.mede8er),
|
||||
]:
|
||||
(cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
|
||||
tmp_provider = cur_metadata_class.metadata_class()
|
||||
tmp_provider.set_config(cur_metadata_config)
|
||||
metadata_provider_dict[tmp_provider.name] = tmp_provider
|
||||
|
||||
# initialize newznab providers
|
||||
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
|
||||
providerList = providers.makeProviderList()
|
||||
|
||||
# initialize schedulers
|
||||
# updaters
|
||||
update_now = datetime.timedelta(minutes=0)
|
||||
versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(),
|
||||
cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY),
|
||||
threadName="CHECKVERSION",
|
||||
silent=False)
|
||||
|
||||
showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SHOWQUEUE")
|
||||
|
||||
showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
|
||||
cycleTime=datetime.timedelta(hours=1),
|
||||
threadName="SHOWUPDATER",
|
||||
start_time=datetime.time(hour=3)) # 3 AM
|
||||
|
||||
# searchers
|
||||
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SEARCHQUEUE")
|
||||
|
||||
update_interval = datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY)
|
||||
dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
|
||||
cycleTime=update_interval,
|
||||
threadName="DAILYSEARCHER",
|
||||
run_delay=update_now if DAILYSEARCH_STARTUP
|
||||
else update_interval)
|
||||
|
||||
update_interval = datetime.timedelta(minutes=BACKLOG_FREQUENCY)
|
||||
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
|
||||
cycleTime=update_interval,
|
||||
threadName="BACKLOG",
|
||||
run_delay=update_now if BACKLOG_STARTUP
|
||||
else update_interval)
|
||||
|
||||
search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4*60, 'daily': 24*60}
|
||||
if CHECK_PROPERS_INTERVAL in search_intervals:
|
||||
update_interval = datetime.timedelta(minutes=search_intervals[CHECK_PROPERS_INTERVAL])
|
||||
run_at = None
|
||||
else:
|
||||
update_interval = datetime.timedelta(hours=1)
|
||||
run_at = datetime.time(hour=1) # 1 AM
|
||||
|
||||
properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(),
|
||||
cycleTime=update_interval,
|
||||
threadName="FINDPROPERS",
|
||||
start_time=run_at,
|
||||
run_delay=update_interval)
|
||||
|
||||
# processors
|
||||
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
|
||||
cycleTime=datetime.timedelta(
|
||||
minutes=AUTOPOSTPROCESSER_FREQUENCY),
|
||||
threadName="POSTPROCESSER",
|
||||
silent=not PROCESS_AUTOMATICALLY)
|
||||
|
||||
traktCheckerScheduler = scheduler.Scheduler(traktChecker.TraktChecker(),
|
||||
cycleTime=datetime.timedelta(hours=1),
|
||||
threadName="TRAKTCHECKER",
|
||||
silent=not USE_TRAKT)
|
||||
|
||||
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
|
||||
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
|
||||
threadName="FINDSUBTITLES",
|
||||
silent=not USE_SUBTITLES)
|
||||
|
||||
# dynamically load provider settings
|
||||
for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
|
||||
curProvider.providerType == GenericProvider.TORRENT]:
|
||||
|
@ -1104,17 +994,114 @@ def initialize(consoleLogging=True):
|
|||
curNzbProvider.getID() + '_backlog_only',
|
||||
0))
|
||||
|
||||
try:
|
||||
url = 'http://raw.github.com/echel0n/sickrage-init/master/settings.ini'
|
||||
clear_cache = ElementTree.XML(helpers.getURL(url)).find('cache/clear').text
|
||||
CLEAR_CACHE = check_setting_str(CFG, 'General', 'clear_cache', '')
|
||||
if CLEAR_CACHE != clear_cache:
|
||||
for curProvider in [x for x in providers.sortedProviderList() if x.isActive()]:
|
||||
curProvider.cache._clearCache()
|
||||
CLEAR_CACHE = clear_cache
|
||||
save_config()
|
||||
except:
|
||||
pass
|
||||
if not os.path.isfile(CONFIG_FILE):
|
||||
logger.log(u"Unable to find '" + CONFIG_FILE + "', all settings will be default!", logger.DEBUG)
|
||||
save_config()
|
||||
|
||||
# start up all the threads
|
||||
logger.sb_log_instance.initLogging(consoleLogging=consoleLogging)
|
||||
|
||||
# initialize the main SB database
|
||||
myDB = db.DBConnection()
|
||||
db.upgradeDatabase(myDB, mainDB.InitialSchema)
|
||||
|
||||
# initialize the cache database
|
||||
myDB = db.DBConnection('cache.db')
|
||||
db.upgradeDatabase(myDB, cache_db.InitialSchema)
|
||||
|
||||
# initialize the failed downloads database
|
||||
myDB = db.DBConnection('failed.db')
|
||||
db.upgradeDatabase(myDB, failed_db.InitialSchema)
|
||||
|
||||
# fix up any db problems
|
||||
myDB = db.DBConnection()
|
||||
db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck)
|
||||
|
||||
# migrate the config if it needs it
|
||||
migrator = ConfigMigrator(CFG)
|
||||
migrator.migrate_config()
|
||||
|
||||
# initialize metadata_providers
|
||||
metadata_provider_dict = metadata.get_metadata_generator_dict()
|
||||
for cur_metadata_tuple in [(METADATA_XBMC, metadata.xbmc),
|
||||
(METADATA_XBMC_12PLUS, metadata.xbmc_12plus),
|
||||
(METADATA_MEDIABROWSER, metadata.mediabrowser),
|
||||
(METADATA_PS3, metadata.ps3),
|
||||
(METADATA_WDTV, metadata.wdtv),
|
||||
(METADATA_TIVO, metadata.tivo),
|
||||
(METADATA_MEDE8ER, metadata.mede8er),
|
||||
]:
|
||||
(cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
|
||||
tmp_provider = cur_metadata_class.metadata_class()
|
||||
tmp_provider.set_config(cur_metadata_config)
|
||||
metadata_provider_dict[tmp_provider.name] = tmp_provider
|
||||
|
||||
# initialize schedulers
|
||||
# updaters
|
||||
update_now = datetime.timedelta(minutes=0)
|
||||
versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(),
|
||||
cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY),
|
||||
threadName="CHECKVERSION",
|
||||
silent=False)
|
||||
|
||||
showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SHOWQUEUE")
|
||||
|
||||
showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
|
||||
cycleTime=datetime.timedelta(hours=1),
|
||||
threadName="SHOWUPDATER",
|
||||
start_time=datetime.time(hour=3)) # 3 AM
|
||||
|
||||
# searchers
|
||||
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
|
||||
cycleTime=datetime.timedelta(seconds=3),
|
||||
threadName="SEARCHQUEUE")
|
||||
|
||||
update_interval = datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY)
|
||||
dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
|
||||
cycleTime=update_interval,
|
||||
threadName="DAILYSEARCHER",
|
||||
run_delay=update_now if DAILYSEARCH_STARTUP
|
||||
else update_interval)
|
||||
|
||||
update_interval = datetime.timedelta(minutes=BACKLOG_FREQUENCY)
|
||||
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
|
||||
cycleTime=update_interval,
|
||||
threadName="BACKLOG",
|
||||
run_delay=update_now if BACKLOG_STARTUP
|
||||
else update_interval)
|
||||
|
||||
search_intervals = {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60, 'daily': 24 * 60}
|
||||
if CHECK_PROPERS_INTERVAL in search_intervals:
|
||||
update_interval = datetime.timedelta(minutes=search_intervals[CHECK_PROPERS_INTERVAL])
|
||||
run_at = None
|
||||
else:
|
||||
update_interval = datetime.timedelta(hours=1)
|
||||
run_at = datetime.time(hour=1) # 1 AM
|
||||
|
||||
properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(),
|
||||
cycleTime=update_interval,
|
||||
threadName="FINDPROPERS",
|
||||
start_time=run_at,
|
||||
run_delay=update_interval)
|
||||
|
||||
# processors
|
||||
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
|
||||
cycleTime=datetime.timedelta(
|
||||
minutes=AUTOPOSTPROCESSER_FREQUENCY),
|
||||
threadName="POSTPROCESSER",
|
||||
silent=not PROCESS_AUTOMATICALLY)
|
||||
|
||||
traktCheckerScheduler = scheduler.Scheduler(traktChecker.TraktChecker(),
|
||||
cycleTime=datetime.timedelta(hours=1),
|
||||
threadName="TRAKTCHECKER",
|
||||
silent=not USE_TRAKT)
|
||||
|
||||
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
|
||||
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
|
||||
threadName="FINDSUBTITLES",
|
||||
silent=not USE_SUBTITLES)
|
||||
|
||||
showList = []
|
||||
loadingShowList = {}
|
||||
|
@ -1126,11 +1113,10 @@ def start():
|
|||
global __INITIALIZED__, backlogSearchScheduler, \
|
||||
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
|
||||
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
|
||||
subtitlesFinderScheduler, USE_SUBTITLES,traktCheckerScheduler, \
|
||||
subtitlesFinderScheduler, USE_SUBTITLES, traktCheckerScheduler, \
|
||||
dailySearchScheduler, events, started
|
||||
|
||||
with INIT_LOCK:
|
||||
|
||||
if __INITIALIZED__:
|
||||
# start system events queue
|
||||
events.start()
|
||||
|
@ -1191,63 +1177,63 @@ def halt():
|
|||
dailySearchScheduler.stop.set()
|
||||
logger.log(u"Waiting for the DAILYSEARCH thread to exit")
|
||||
try:
|
||||
dailySearchScheduler.join()
|
||||
dailySearchScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
backlogSearchScheduler.stop.set()
|
||||
logger.log(u"Waiting for the BACKLOG thread to exit")
|
||||
try:
|
||||
backlogSearchScheduler.join()
|
||||
backlogSearchScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
showUpdateScheduler.stop.set()
|
||||
logger.log(u"Waiting for the SHOWUPDATER thread to exit")
|
||||
try:
|
||||
showUpdateScheduler.join()
|
||||
showUpdateScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
versionCheckScheduler.stop.set()
|
||||
logger.log(u"Waiting for the VERSIONCHECKER thread to exit")
|
||||
try:
|
||||
versionCheckScheduler.join()
|
||||
versionCheckScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
showQueueScheduler.stop.set()
|
||||
logger.log(u"Waiting for the SHOWQUEUE thread to exit")
|
||||
try:
|
||||
showQueueScheduler.join()
|
||||
showQueueScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
searchQueueScheduler.stop.set()
|
||||
logger.log(u"Waiting for the SEARCHQUEUE thread to exit")
|
||||
try:
|
||||
searchQueueScheduler.join()
|
||||
searchQueueScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
autoPostProcesserScheduler.stop.set()
|
||||
logger.log(u"Waiting for the POSTPROCESSER thread to exit")
|
||||
try:
|
||||
autoPostProcesserScheduler.join()
|
||||
autoPostProcesserScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
traktCheckerScheduler.stop.set()
|
||||
logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
|
||||
try:
|
||||
traktCheckerScheduler.join()
|
||||
traktCheckerScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
properFinderScheduler.stop.set()
|
||||
logger.log(u"Waiting for the PROPERFINDER thread to exit")
|
||||
try:
|
||||
properFinderScheduler.join()
|
||||
properFinderScheduler.join(10)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
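Each scheduler thread is now joined with a 10-second timeout instead of an unbounded join(), so a stuck worker can no longer hang shutdown forever. The trade-off in a minimal standalone form; the worker function and timeout value are illustrative:

import threading
import time

def worker():
    time.sleep(60)  # simulates a thread that refuses to finish promptly

t = threading.Thread(target=worker)
t.start()

t.join(10)          # returns after at most ~10 seconds, even if the thread is still running
if t.is_alive():
    # join() with a timeout never raises; the caller must check and decide to move on
    print("thread still running, continuing shutdown anyway")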
@ -1269,11 +1255,13 @@ def halt():
|
|||
__INITIALIZED__ = False
|
||||
started = False
|
||||
|
||||
|
||||
def sig_handler(signum=None, frame=None):
|
||||
if type(signum) != type(None):
|
||||
logger.log(u"Signal %i caught, saving and exiting..." % int(signum))
|
||||
events.put(events.SystemEvent.SHUTDOWN)
|
||||
|
||||
|
||||
def saveAll():
|
||||
global showList
|
||||
|
||||
|
@ -1286,6 +1274,7 @@ def saveAll():
|
|||
logger.log(u"Saving config file to disk")
|
||||
save_config()
|
||||
|
||||
|
||||
def restart(soft=True):
|
||||
if soft:
|
||||
halt()
|
||||
|
@ -1391,8 +1380,6 @@ def save_config():
|
|||
new_config['General']['ignore_words'] = IGNORE_WORDS
|
||||
new_config['General']['calendar_unprotected'] = int(CALENDAR_UNPROTECTED)
|
||||
|
||||
new_config['General']['clear_cache'] = CLEAR_CACHE
|
||||
|
||||
new_config['Blackhole'] = {}
|
||||
new_config['Blackhole']['nzb_dir'] = NZB_DIR
|
||||
new_config['Blackhole']['torrent_dir'] = TORRENT_DIR
|
||||
|
@ -1617,7 +1604,7 @@ def save_config():
|
|||
new_config['Trakt']['trakt_api'] = TRAKT_API
|
||||
new_config['Trakt']['trakt_remove_watchlist'] = int(TRAKT_REMOVE_WATCHLIST)
|
||||
new_config['Trakt']['trakt_use_watchlist'] = int(TRAKT_USE_WATCHLIST)
|
||||
new_config['Trakt']['trakt_method_add'] = TRAKT_METHOD_ADD
|
||||
new_config['Trakt']['trakt_method_add'] = int(TRAKT_METHOD_ADD)
|
||||
new_config['Trakt']['trakt_start_paused'] = int(TRAKT_START_PAUSED)
|
||||
new_config['Trakt']['trakt_use_recommended'] = int(TRAKT_USE_RECOMMENDED)
|
||||
new_config['Trakt']['trakt_sync'] = int(TRAKT_SYNC)
|
||||
|
@ -1705,10 +1692,10 @@ def save_config():
|
|||
new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED)
|
||||
|
||||
new_config['ANIDB'] = {}
|
||||
new_config['ANIDB']['use_anidb'] = USE_ANIDB
|
||||
new_config['ANIDB']['use_anidb'] = int(USE_ANIDB)
|
||||
new_config['ANIDB']['anidb_username'] = ANIDB_USERNAME
|
||||
new_config['ANIDB']['anidb_password'] = helpers.encrypt(ANIDB_PASSWORD, ENCRYPTION_VERSION)
|
||||
new_config['ANIDB']['anidb_use_mylist'] = ANIDB_USE_MYLIST
|
||||
new_config['ANIDB']['anidb_use_mylist'] = int(ANIDB_USE_MYLIST)
|
||||
|
||||
new_config['ANIME'] = {}
|
||||
new_config['ANIME']['anime_split_home'] = int(ANIME_SPLIT_HOME)
|
||||
|
|
|
@@ -199,15 +199,15 @@ class ShowListUI:
self.log = log

def selectSeries(self, allSeries):
if sickbeard.showList:
idList = [x.indexerid for x in sickbeard.showList]

try:
# try to pick a show that's in my show list
for curShow in allSeries:
if int(curShow['id']) in idList:
if filter(lambda x: int(x.indexerid) == int(curShow['id']), sickbeard.showList):
return curShow
except:
pass

# if nothing matches then return everything
# if nothing matches then return first result
return allSeries[0]

@@ -82,5 +82,4 @@ def getClientIstance(name):
module = getClientModule(name)
className = module.api.__class__.__name__

return getattr(module, className)

return getattr(module, className)
@@ -265,8 +265,8 @@ class Quality:
return (status, Quality.NONE)

@staticmethod
def statusFromName(name, assume=True):
quality = Quality.nameQuality(name)
def statusFromName(name, assume=True, anime=False):
quality = Quality.nameQuality(name, anime)
if assume and quality == Quality.UNKNOWN:
quality = Quality.assumeQuality(name)
return Quality.compositeStatus(DOWNLOADED, quality)

@@ -27,6 +27,7 @@ from sickbeard import helpers
from sickbeard import logger
from sickbeard import naming
from sickbeard import db
from sickbeard import version

naming_ep_type = ("%(seasonnumber)dx%(episodenumber)02d",
"s%(seasonnumber)02de%(episodenumber)02d",

@@ -190,6 +191,10 @@ def change_VERSION_NOTIFY(version_notify):
if oldSetting == False and version_notify == True:
sickbeard.versionCheckScheduler.action.run() # @UndefinedVariable

def change_VERSION(version):
if sickbeard.version.SICKBEARD_VERSION != version:

sickbeard.versionCheckScheduler.action.run() # @UndefinedVariable

def CheckSection(CFG, sec):
""" Check if INI section exists, if not create it """

@@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException

MIN_DB_VERSION = 9 # oldest db version we support migrating from
MAX_DB_VERSION = 39
MAX_DB_VERSION = 40

class MainSanityCheck(db.DBSanityCheck):
def check(self):

@@ -901,3 +901,17 @@ class AddIndexerMapping(AddSceneToTvShows):
"CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))")

self.incDBVersion()

class AddVersionToTvEpisodes(AddIndexerMapping):
def test(self):
return self.checkDBVersion() >= 40

def execute(self):
backupDatabase(40)

logger.log(u"Adding column version to tv_episodes and history")
self.addColumn("tv_episodes", "version", "NUMERIC", "-1")
self.addColumn("tv_episodes", "release_group", "TEXT", "")
self.addColumn("history", "version", "NUMERIC", "-1")

self.incDBVersion()
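The new AddVersionToTvEpisodes migration bumps the schema to version 40 by adding three columns. Assuming addColumn behaves like the usual SickBeard helper (an ALTER TABLE plus a default value), a rough Python/sqlite3 equivalent would look like the sketch below; the exact SQL, database path, and db_version table name are inferences, not part of the commit:

import sqlite3

# Rough, hypothetical equivalent of what the migration's addColumn calls imply.
conn = sqlite3.connect('sickbeard.db')  # path is illustrative
cur = conn.cursor()
cur.execute("ALTER TABLE tv_episodes ADD COLUMN version NUMERIC DEFAULT -1")
cur.execute("ALTER TABLE tv_episodes ADD COLUMN release_group TEXT DEFAULT ''")
cur.execute("ALTER TABLE history ADD COLUMN version NUMERIC DEFAULT -1")
cur.execute("UPDATE db_version SET db_version = 40")  # stands in for incDBVersion(); table name assumed
conn.commit()
conn.close()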
@@ -51,14 +51,12 @@ class GitHub(object):
if params and type(params) is dict:
url += '?' + '&'.join([str(x) + '=' + str(params[x]) for x in params.keys()])

data = helpers.getURL(url)

if data:
json_data = json.loads(data)
return json_data
else:
parsedJSON = helpers.getURL(url, json=True)
if not parsedJSON:
return []

return parsedJSON

def commits(self):
"""
Uses the API to get a list of the 100 most recent commits from the specified user/repo/branch, starting from HEAD.

@@ -89,3 +87,9 @@ class GitHub(object):
['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head],
params={'per_page': per_page})
return access_API

def branches(self):
access_API = self._access_API(
['repos', self.github_repo_user, self.github_repo, 'branches'],
params={'per_page': 100})
return access_API
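The new branches() call is what ultimately feeds the Branch Version dropdown: it returns the GitHub API's list of branch objects, now as parsed JSON or [] on failure thanks to the getURL(json=True) change. A hedged usage sketch; the module name and constructor arguments are assumptions based on how the class appears to be used, not confirmed by this diff:

from sickbeard import gh_api  # module name assumed

gh = gh_api.GitHub('SickGear', 'SickGear', 'master')  # user, repo, branch: illustrative values

# Each entry is a dict from GitHub's /repos/:user/:repo/branches endpoint,
# e.g. {'name': 'master', 'commit': {...}}; an empty list means the request failed.
branch_names = [branch['name'] for branch in gh.branches()]
print(branch_names)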
@ -32,10 +32,13 @@ import urlparse
|
|||
import uuid
|
||||
import base64
|
||||
import zipfile
|
||||
import datetime
|
||||
|
||||
from lib import requests
|
||||
from lib.requests import exceptions
|
||||
from itertools import izip, cycle
|
||||
import sickbeard
|
||||
import subliminal
|
||||
import adba
|
||||
import requests
|
||||
import requests.exceptions
|
||||
|
||||
try:
|
||||
import json
|
||||
|
@ -49,19 +52,18 @@ except ImportError:
|
|||
|
||||
from xml.dom.minidom import Node
|
||||
|
||||
import sickbeard
|
||||
from sickbeard.exceptions import MultipleShowObjectsException, EpisodeNotFoundByAbsoluteNumberException, ex
|
||||
from sickbeard.exceptions import MultipleShowObjectsException, ex
|
||||
from sickbeard import logger, classes
|
||||
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP
|
||||
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions
|
||||
from sickbeard import db
|
||||
from sickbeard import encodingKludge as ek
|
||||
from sickbeard import notifiers
|
||||
from lib import subliminal
|
||||
from lib import adba
|
||||
from lib import trakt
|
||||
from sickbeard import clients
|
||||
|
||||
from cachecontrol import CacheControl, caches
|
||||
from itertools import izip, cycle
|
||||
|
||||
urllib._urlopener = classes.SickBeardURLopener()
|
||||
session = requests.Session()
|
||||
|
||||
|
||||
def indentXML(elem, level=0):
|
||||
|
@ -192,59 +194,6 @@ def sanitizeFileName(name):
|
|||
return name
|
||||
|
||||
|
||||
def getURL(url, post_data=None, headers=None, params=None, timeout=30, json=False, use_proxy=False):
|
||||
"""
|
||||
Returns a byte-string retrieved from the url provider.
|
||||
"""
|
||||
|
||||
global session
|
||||
if not session:
|
||||
session = requests.Session()
|
||||
|
||||
req_headers = ['User-Agent', USER_AGENT, 'Accept-Encoding', 'gzip,deflate']
|
||||
if headers:
|
||||
for cur_header in headers:
|
||||
req_headers.append(cur_header)
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
it = iter(req_headers)
|
||||
|
||||
if use_proxy and sickbeard.PROXY_SETTING:
|
||||
logger.log("Using proxy for url: " + url, logger.DEBUG)
|
||||
proxies = {
|
||||
"http": sickbeard.PROXY_SETTING,
|
||||
"https": sickbeard.PROXY_SETTING,
|
||||
}
|
||||
|
||||
r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), proxies=proxies,
|
||||
timeout=timeout, verify=False)
|
||||
else:
|
||||
r = session.get(url, params=params, data=post_data, headers=dict(zip(it, it)), timeout=timeout,
|
||||
verify=False)
|
||||
except requests.HTTPError, e:
|
||||
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
|
||||
return None
|
||||
|
||||
except requests.ConnectionError, e:
|
||||
logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return None
|
||||
|
||||
except requests.Timeout, e:
|
||||
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return None
|
||||
|
||||
if r.ok:
|
||||
if json:
|
||||
return r.json()
|
||||
|
||||
return r.content
|
||||
|
||||
|
||||
def _remove_file_failed(file):
|
||||
try:
|
||||
ek.ek(os.remove, file)
|
||||
|
@ -252,40 +201,6 @@ def _remove_file_failed(file):
|
|||
pass
|
||||
|
||||
|
||||
def download_file(url, filename):
|
||||
global session
|
||||
if not session:
|
||||
session = requests.Session()
|
||||
|
||||
try:
|
||||
r = session.get(url, stream=True, verify=False)
|
||||
with open(filename, 'wb') as fp:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
if chunk:
|
||||
fp.write(chunk)
|
||||
fp.flush()
|
||||
|
||||
except requests.HTTPError, e:
|
||||
_remove_file_failed(filename)
|
||||
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
|
||||
return False
|
||||
|
||||
except requests.ConnectionError, e:
|
||||
logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return False
|
||||
|
||||
except requests.Timeout, e:
|
||||
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return False
|
||||
|
||||
except Exception:
|
||||
_remove_file_failed(filename)
|
||||
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def findCertainShow(showList, indexerid):
|
||||
if not showList:
|
||||
return None
|
||||
|
@ -611,6 +526,14 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
|
|||
break
|
||||
|
||||
|
||||
def fileBitFilter(mode):
|
||||
for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
|
||||
if mode & bit:
|
||||
mode -= bit
|
||||
|
||||
return mode
|
||||
|
||||
|
||||
def chmodAsParent(childPath):
|
||||
if os.name == 'nt' or os.name == 'ce':
|
||||
return
|
||||
|
@ -650,14 +573,6 @@ def chmodAsParent(childPath):
|
|||
logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)
|
||||
|
||||
|
||||
def fileBitFilter(mode):
|
||||
for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
|
||||
if mode & bit:
|
||||
mode -= bit
|
||||
|
||||
return mode
|
||||
|
||||
|
||||
def fixSetGroupID(childPath):
|
||||
if os.name == 'nt' or os.name == 'ce':
|
||||
return
|
||||
|
@ -713,14 +628,17 @@ def get_absolute_number_from_season_and_episode(show, season, episode):
|
|||
if len(sqlResults) == 1:
|
||||
absolute_number = int(sqlResults[0]["absolute_number"])
|
||||
logger.log(
|
||||
"Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode), logger.DEBUG)
|
||||
"Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode),
|
||||
logger.DEBUG)
|
||||
else:
|
||||
logger.log(
|
||||
"No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(episode),
|
||||
"No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(
|
||||
episode),
|
||||
logger.DEBUG)
|
||||
|
||||
return absolute_number
|
||||
|
||||
|
||||
def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=None):
|
||||
episodes = []
|
||||
season = None
|
||||
|
@ -803,11 +721,13 @@ def create_https_certificates(ssl_cert, ssl_key):
|
|||
|
||||
return True
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import doctest
|
||||
|
||||
doctest.testmod()
|
||||
|
||||
|
||||
def parse_json(data):
|
||||
"""
|
||||
Parse json data into a python object
|
||||
|
@ -1107,6 +1027,7 @@ def get_show(name, tryIndexers=False):
|
|||
|
||||
return showObj
|
||||
|
||||
|
||||
def is_hidden_folder(folder):
|
||||
"""
|
||||
Returns True if folder is hidden.
|
||||
|
@ -1219,16 +1140,13 @@ def mapIndexersToShow(showObj):
|
|||
mapped = {showObj.indexer: showObj.indexerid}
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
sqlResults = myDB.select(
|
||||
"SELECT * FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?",
|
||||
[showObj.indexerid, showObj.indexer])
|
||||
|
||||
# for each mapped entry
|
||||
for curResult in sqlResults:
|
||||
logger.log(u"Found " + sickbeard.indexerApi(showObj.indexer).name + "<->" + sickbeard.indexerApi(
|
||||
int(curResult['mindexer'])).name + " mapping in cache for show: " + showObj.name, logger.DEBUG)
|
||||
|
||||
logger.log(u"Found indexer mapping in cache for show: " + showObj.name, logger.DEBUG)
|
||||
mapped[int(curResult['mindexer'])] = int(curResult['mindexer_id'])
|
||||
else:
|
||||
sql_l = []
|
||||
|
@ -1241,22 +1159,27 @@ def mapIndexersToShow(showObj):
|
|||
lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
|
||||
t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
|
||||
|
||||
mapped_show = t[showObj.name]
|
||||
try:
|
||||
mapped_show = t[showObj.name]
|
||||
except sickbeard.indexer_shownotfound:
|
||||
logger.log(u"Unable to map " + sickbeard.indexerApi(showObj.indexer).name + "->" + sickbeard.indexerApi(
|
||||
indexer).name + " for show: " + showObj.name + ", skipping it", logger.ERROR)
|
||||
mapped_show = None
|
||||
|
||||
if len(mapped_show) and not len(mapped_show) > 1:
|
||||
logger.log(u"Mapping " + sickbeard.indexerApi(showObj.indexer).name + "<->" + sickbeard.indexerApi(
|
||||
indexer).name + " for show " + showObj.name,
|
||||
logger.DEBUG)
|
||||
logger.log(u"Mapping " + sickbeard.indexerApi(showObj.indexer).name + "->" + sickbeard.indexerApi(
|
||||
indexer).name + " for show: " + showObj.name, logger.DEBUG)
|
||||
|
||||
mapped[indexer] = int(mapped_show[0]['id'])
|
||||
|
||||
logger.log(u"Adding " + sickbeard.indexerApi(showObj.indexer).name + "<->" + sickbeard.indexerApi(
|
||||
indexer).name + " mapping to DB for show: " + showObj.name, logger.DEBUG)
|
||||
logger.log(u"Adding indexer mapping to DB for show: " + showObj.name, logger.DEBUG)
|
||||
|
||||
sql_l.append([
|
||||
"INSERT OR IGNORE INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer) VALUES (?,?,?,?)",
|
||||
[showObj.indexerid, showObj.indexer, int(mapped_show[0]['id']), indexer]])
|
||||
|
||||
if len(sql_l) > 0:
|
||||
myDB = db.DBConnection()
|
||||
myDB.mass_action(sql_l)
|
||||
|
||||
return mapped
|
||||
|
@ -1272,4 +1195,165 @@ def touchFile(fname, atime=None):
|
|||
logger.log(u"File air date stamping not available on your OS", logger.DEBUG)
|
||||
pass
|
||||
|
||||
return False
|
||||
return False
|
||||
|
||||
|
||||
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
|
||||
"""
|
||||
Returns a byte-string retrieved from the url provider.
|
||||
"""
|
||||
|
||||
# request session
|
||||
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
|
||||
|
||||
# request session headers
|
||||
req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}
|
||||
if headers:
|
||||
req_headers.update(headers)
|
||||
session.headers.update(req_headers)
|
||||
|
||||
# request session ssl verify
|
||||
session.verify = False
|
||||
|
||||
# request session parameters
|
||||
session.params = params
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
# request session proxies
|
||||
if sickbeard.PROXY_SETTING:
|
||||
logger.log("Using proxy for url: " + url, logger.DEBUG)
|
||||
session.proxies = {
|
||||
"http": sickbeard.PROXY_SETTING,
|
||||
"https": sickbeard.PROXY_SETTING,
|
||||
}
|
||||
|
||||
resp = session.get(url, data=post_data, timeout=timeout)
|
||||
except requests.exceptions.HTTPError, e:
|
||||
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
|
||||
return
|
||||
except requests.exceptions.ConnectionError, e:
|
||||
logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return
|
||||
except requests.exceptions.Timeout, e:
|
||||
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return
|
||||
except Exception:
|
||||
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
|
||||
return
|
||||
|
||||
if not resp.ok:
|
||||
logger.log(u"Requested url " + url + " returned status code is " + str(
|
||||
resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
|
||||
return
|
||||
|
||||
if json:
|
||||
return resp.json()
|
||||
|
||||
return resp.content
|
||||
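The reworked helpers.getURL above threads every request through one CacheControl-wrapped session: it normalises repeated slashes in the path, applies the optional proxy setting, and returns decoded JSON or the raw body. A minimal standalone sketch of that pattern follows; it assumes only the requests package, drops the CacheControl layer and the sickbeard configuration, and uses the hypothetical name fetch:

    import re
    import urlparse  # urllib.parse on Python 3

    import requests


    def fetch(url, params=None, timeout=30, proxy=None, want_json=False, session=None):
        """Simplified sketch of a session-based fetch helper (not the project's actual API)."""
        session = session or requests.Session()
        session.headers.update({'User-Agent': 'ExampleAgent/1.0',
                                'Accept-Encoding': 'gzip,deflate'})
        session.verify = False  # mirrors the permissive SSL handling above

        # collapse runs of slashes in the path, as the helper above does
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub('/{2,}', '/', parsed[2])
        url = urlparse.urlunparse(parsed)

        if proxy:
            session.proxies = {'http': proxy, 'https': proxy}

        try:
            resp = session.get(url, params=params, timeout=timeout)
        except requests.exceptions.RequestException:
            return None

        if not resp.ok:
            return None
        return resp.json() if want_json else resp.content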
|
||||
|
||||
def download_file(url, filename, session=None):
|
||||
# create session
|
||||
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
|
||||
|
||||
# request session headers
|
||||
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
|
||||
|
||||
# request session ssl verify
|
||||
session.verify = False
|
||||
|
||||
# request session streaming
|
||||
session.stream = True
|
||||
|
||||
# request session proxies
|
||||
if sickbeard.PROXY_SETTING:
|
||||
logger.log("Using proxy for url: " + url, logger.DEBUG)
|
||||
session.proxies = {
|
||||
"http": sickbeard.PROXY_SETTING,
|
||||
"https": sickbeard.PROXY_SETTING,
|
||||
}
|
||||
|
||||
try:
|
||||
resp = session.get(url)
|
||||
if not resp.ok:
|
||||
return False
|
||||
|
||||
with open(filename, 'wb') as fp:
|
||||
for chunk in resp.iter_content(chunk_size=1024):
|
||||
if chunk:
|
||||
fp.write(chunk)
|
||||
fp.flush()
|
||||
|
||||
chmodAsParent(filename)
|
||||
except requests.exceptions.HTTPError, e:
|
||||
_remove_file_failed(filename)
|
||||
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
|
||||
return False
|
||||
except requests.exceptions.ConnectionError, e:
|
||||
_remove_file_failed(filename)
|
||||
logger.log(u"Connection error " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return False
|
||||
except requests.exceptions.Timeout, e:
|
||||
_remove_file_failed(filename)
|
||||
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
|
||||
return False
|
||||
except EnvironmentError, e:
|
||||
_remove_file_failed(filename)
|
||||
logger.log(u"Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
except Exception:
|
||||
_remove_file_failed(filename)
|
||||
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
|
||||
return False
|
||||
|
||||
if not resp:
|
||||
logger.log(u"No data returned from " + url, logger.DEBUG)
|
||||
return False
|
||||
elif not resp.ok:
|
||||
logger.log(u"Requested url " + url + " returned status code is " + str(
|
||||
resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
|
||||
return False
|
||||
|
||||
return True
|
||||
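download_file above streams the response to disk in small chunks and removes the partial file whenever a request or filesystem error occurs. A rough standalone equivalent, assuming only requests and a hypothetical stream_to_file name:

    import os

    import requests


    def stream_to_file(url, filename, chunk_size=1024):
        """Sketch: stream a URL to disk, cleaning up the partial file on failure."""
        try:
            resp = requests.get(url, stream=True, timeout=30)
            if not resp.ok:
                return False
            with open(filename, 'wb') as fp:
                for chunk in resp.iter_content(chunk_size=chunk_size):
                    if chunk:  # skip keep-alive chunks
                        fp.write(chunk)
            return True
        except (requests.exceptions.RequestException, EnvironmentError):
            if os.path.isfile(filename):
                os.remove(filename)
            return False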
|
||||
|
||||
def clearCache(force=False):
|
||||
update_datetime = datetime.datetime.now()
|
||||
|
||||
# clean out cache directory, remove everything > 12 hours old
|
||||
if sickbeard.CACHE_DIR:
|
||||
logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR)
|
||||
|
||||
# Does our cache_dir exist
|
||||
if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
|
||||
logger.log(u"Can't clean " + sickbeard.CACHE_DIR + " if it doesn't exist", logger.WARNING)
|
||||
else:
|
||||
max_age = datetime.timedelta(hours=12)
|
||||
|
||||
# Get all our cache files
|
||||
for cache_root, cache_dirs, cache_files in os.walk(sickbeard.CACHE_DIR):
|
||||
path = os.path.basename(cache_root)
|
||||
|
||||
# skip these cache folders
|
||||
if path in ['rss', 'images']:
|
||||
continue
|
||||
|
||||
for file in cache_files:
|
||||
cache_file = ek.ek(os.path.join, cache_root, file)
|
||||
|
||||
if ek.ek(os.path.isfile, cache_file):
|
||||
cache_file_modified = datetime.datetime.fromtimestamp(
|
||||
ek.ek(os.path.getmtime, cache_file))
|
||||
|
||||
if force or (update_datetime - cache_file_modified > max_age):
|
||||
try:
|
||||
ek.ek(os.remove, cache_file)
|
||||
except OSError, e:
|
||||
logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e),
|
||||
logger.WARNING)
|
||||
break
|
|
@ -25,7 +25,7 @@ from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality
|
|||
dateFormat = "%Y%m%d%H%M%S"
|
||||
|
||||
|
||||
def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
|
||||
def _logHistoryItem(action, showid, season, episode, quality, resource, provider, version=-1):
|
||||
logDate = datetime.datetime.today().strftime(dateFormat)
|
||||
|
||||
if not isinstance(resource, unicode):
|
||||
|
@ -33,8 +33,8 @@ def _logHistoryItem(action, showid, season, episode, quality, resource, provider
|
|||
|
||||
myDB = db.DBConnection()
|
||||
myDB.action(
|
||||
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
|
||||
[action, logDate, showid, season, episode, quality, resource, provider])
|
||||
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider, version) VALUES (?,?,?,?,?,?,?,?,?)",
|
||||
[action, logDate, showid, season, episode, quality, resource, provider, version])
|
||||
|
||||
|
||||
def logSnatch(searchResult):
|
||||
|
@ -44,6 +44,7 @@ def logSnatch(searchResult):
|
|||
season = int(curEpObj.season)
|
||||
episode = int(curEpObj.episode)
|
||||
quality = searchResult.quality
|
||||
version = searchResult.version
|
||||
|
||||
providerClass = searchResult.provider
|
||||
if providerClass != None:
|
||||
|
@ -55,10 +56,10 @@ def logSnatch(searchResult):
|
|||
|
||||
resource = searchResult.name
|
||||
|
||||
_logHistoryItem(action, showid, season, episode, quality, resource, provider)
|
||||
_logHistoryItem(action, showid, season, episode, quality, resource, provider, version)
|
||||
|
||||
|
||||
def logDownload(episode, filename, new_ep_quality, release_group=None):
|
||||
def logDownload(episode, filename, new_ep_quality, release_group=None, version=-1):
|
||||
showid = int(episode.show.indexerid)
|
||||
season = int(episode.season)
|
||||
epNum = int(episode.episode)
|
||||
|
@ -73,7 +74,7 @@ def logDownload(episode, filename, new_ep_quality, release_group=None):
|
|||
|
||||
action = episode.status
|
||||
|
||||
_logHistoryItem(action, showid, season, epNum, quality, filename, provider)
|
||||
_logHistoryItem(action, showid, season, epNum, quality, filename, provider, version)
|
||||
|
||||
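The history changes above add a version column: logSnatch and logDownload now pass the release's anime version through to the parameterized INSERT, defaulting to -1 when no version applies. A small sqlite3 sketch of the same write path, assuming a history table with the columns shown in the diff already exists:

    import datetime
    import sqlite3

    DATE_FORMAT = "%Y%m%d%H%M%S"


    def log_history_item(conn, action, showid, season, episode, quality,
                         resource, provider, version=-1):
        """Sketch: version defaults to -1 for non-anime entries."""
        log_date = datetime.datetime.today().strftime(DATE_FORMAT)
        conn.execute(
            "INSERT INTO history (action, date, showid, season, episode, quality,"
            " resource, provider, version) VALUES (?,?,?,?,?,?,?,?,?)",
            [action, log_date, showid, season, episode, quality,
             resource, provider, version])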
|
||||
def logSubtitle(showid, season, episode, status, subtitleResult):
|
||||
|
|
|
@ -47,7 +47,10 @@ class indexerApi(object):
|
|||
def api_params(self):
|
||||
if self.indexerID:
|
||||
if sickbeard.CACHE_DIR:
|
||||
indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, self.name)
|
||||
indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, 'indexers', self.name)
|
||||
if sickbeard.PROXY_SETTING:
|
||||
indexerConfig[self.indexerID]['api_params']['proxy'] = sickbeard.PROXY_SETTING
|
||||
|
||||
return indexerConfig[self.indexerID]['api_params']
|
||||
|
||||
@property
|
||||
|
|
|
@ -23,7 +23,7 @@ indexerConfig[INDEXER_TVDB] = {
|
|||
'module': Tvdb,
|
||||
'api_params': {'apikey': 'F9C450E78D99172E',
|
||||
'language': 'en',
|
||||
'useZip': True
|
||||
'useZip': True,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -32,7 +32,7 @@ indexerConfig[INDEXER_TVRAGE] = {
|
|||
'name': 'TVRage',
|
||||
'module': TVRage,
|
||||
'api_params': {'apikey': 'Uhewg1Rr0o62fvZvUIZt',
|
||||
'language': 'en'
|
||||
'language': 'en',
|
||||
},
|
||||
}
|
||||
|
||||
|
|
|
@ -35,9 +35,8 @@ def getShowImage(url, imgNum=None):
|
|||
logger.log(u"Fetching image from " + tempURL, logger.DEBUG)
|
||||
|
||||
image_data = helpers.getURL(tempURL)
|
||||
|
||||
if image_data is None:
|
||||
logger.log(u"There was an error trying to retrieve the image, aborting", logger.ERROR)
|
||||
return None
|
||||
return
|
||||
|
||||
return image_data
|
||||
|
|
|
@ -31,9 +31,10 @@ from dateutil import parser
|
|||
|
||||
|
||||
class NameParser(object):
|
||||
NORMAL_REGEX = 0
|
||||
SPORTS_REGEX = 1
|
||||
ANIME_REGEX = 2
|
||||
ALL_REGEX = 0
|
||||
NORMAL_REGEX = 1
|
||||
SPORTS_REGEX = 2
|
||||
ANIME_REGEX = 3
|
||||
|
||||
def __init__(self, file_name=True, showObj=None, tryIndexers=False, convert=False,
|
||||
naming_pattern=False):
|
||||
|
@ -44,13 +45,14 @@ class NameParser(object):
|
|||
self.convert = convert
|
||||
self.naming_pattern = naming_pattern
|
||||
|
||||
self.regexModes = [self.NORMAL_REGEX, self.SPORTS_REGEX, self.ANIME_REGEX]
|
||||
if self.showObj and not self.showObj.is_anime and not self.showObj.is_sports:
|
||||
self.regexModes = [self.NORMAL_REGEX]
|
||||
self._compile_regexes(self.NORMAL_REGEX)
|
||||
elif self.showObj and self.showObj.is_anime:
|
||||
self.regexModes = [self.ANIME_REGEX]
|
||||
self._compile_regexes(self.ANIME_REGEX)
|
||||
elif self.showObj and self.showObj.is_sports:
|
||||
self.regexModes = [self.SPORTS_REGEX]
|
||||
self._compile_regexes(self.SPORTS_REGEX)
|
||||
else:
|
||||
self._compile_regexes(self.ALL_REGEX)
|
||||
|
||||
def clean_series_name(self, series_name):
|
||||
"""Cleans up series name by removing any . and _
|
||||
|
@ -83,9 +85,12 @@ class NameParser(object):
|
|||
elif regexMode == self.ANIME_REGEX:
|
||||
logger.log(u"Using ANIME regexs", logger.DEBUG)
|
||||
uncompiled_regex = [regexes.anime_regexes, regexes.normal_regexes]
|
||||
else:
|
||||
logger.log(u"Using NORMAL reqgexs", logger.DEBUG)
|
||||
elif regexMode == self.NORMAL_REGEX:
|
||||
logger.log(u"Using NORMAL regexs", logger.DEBUG)
|
||||
uncompiled_regex = [regexes.normal_regexes]
|
||||
else:
|
||||
logger.log(u"Using ALL regexes", logger.DEBUG)
|
||||
uncompiled_regex = [regexes.normal_regexes, regexes.sports_regexs, regexes.anime_regexes]
|
||||
|
||||
self.compiled_regexes = []
|
||||
for regexItem in uncompiled_regex:
|
||||
|
@ -95,7 +100,7 @@ class NameParser(object):
|
|||
except re.error, errormsg:
|
||||
logger.log(u"WARNING: Invalid episode_pattern, %s. %s" % (errormsg, cur_pattern))
|
||||
else:
|
||||
self.compiled_regexes.append((regexMode, cur_pattern_num, cur_pattern_name, cur_regex))
|
||||
self.compiled_regexes.append((cur_pattern_num, cur_pattern_name, cur_regex))
|
||||
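With the new mode constants, _compile_regexes receives a single mode and compiles only the matching regex set (anime also pulls in the normal patterns, and ALL_REGEX compiles everything). A condensed sketch of that selection, using placeholder pattern lists in place of sickbeard.name_parser.regexes:

    import re

    ALL_REGEX, NORMAL_REGEX, SPORTS_REGEX, ANIME_REGEX = 0, 1, 2, 3

    # placeholder pattern lists standing in for the real regexes module
    normal_regexes = [('standard', r'(?P<series_name>.+?)[. _-]+s(?P<season_num>\d+)[. _-]*e(?P<ep_num>\d+)')]
    sports_regexs = [('sports', r'(?P<series_name>.+?)[. _-]+(?P<sports_air_date>\d{4}[. _-]\d{2}[. _-]\d{2})')]
    anime_regexes = [('anime', r'(?P<series_name>.+?)[. _-]+(?P<ep_ab_num>\d{1,3})')]


    def compile_regexes(mode):
        """Sketch: pick the regex set for one mode and compile it."""
        if mode == SPORTS_REGEX:
            uncompiled = [sports_regexs]
        elif mode == ANIME_REGEX:
            uncompiled = [anime_regexes, normal_regexes]
        elif mode == NORMAL_REGEX:
            uncompiled = [normal_regexes]
        else:
            uncompiled = [normal_regexes, sports_regexs, anime_regexes]

        compiled = []
        for regex_set in uncompiled:
            for cur_num, (cur_name, cur_pattern) in enumerate(regex_set):
                try:
                    cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
                except re.error:
                    continue
                compiled.append((cur_num, cur_name, cur_regex))
        return compiled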
|
||||
def _parse_string(self, name):
|
||||
if not name:
|
||||
|
@ -103,144 +108,136 @@ class NameParser(object):
|
|||
|
||||
matches = []
|
||||
bestResult = None
|
||||
doneSearch = False
|
||||
|
||||
for regexMode in self.regexModes:
|
||||
if doneSearch:
|
||||
break
|
||||
for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes:
|
||||
match = cur_regex.match(name)
|
||||
|
||||
self._compile_regexes(regexMode)
|
||||
for (cur_regexMode, cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes:
|
||||
match = cur_regex.match(name)
|
||||
if not match:
|
||||
continue
|
||||
|
||||
if not match:
|
||||
result = ParseResult(name)
|
||||
result.which_regex = [cur_regex_name]
|
||||
result.score = 0 - cur_regex_num
|
||||
|
||||
named_groups = match.groupdict().keys()
|
||||
|
||||
if 'series_name' in named_groups:
|
||||
result.series_name = match.group('series_name')
|
||||
if result.series_name:
|
||||
result.series_name = self.clean_series_name(result.series_name)
|
||||
result.score += 1
|
||||
|
||||
if 'season_num' in named_groups:
|
||||
tmp_season = int(match.group('season_num'))
|
||||
if cur_regex_name == 'bare' and tmp_season in (19, 20):
|
||||
continue
|
||||
result.season_number = tmp_season
|
||||
result.score += 1
|
||||
|
||||
result = ParseResult(name)
|
||||
result.which_regex = [cur_regex_name]
|
||||
result.score = 0 - cur_regex_num
|
||||
if 'ep_num' in named_groups:
|
||||
ep_num = self._convert_number(match.group('ep_num'))
|
||||
if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
|
||||
result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
|
||||
result.score += 1
|
||||
else:
|
||||
result.episode_numbers = [ep_num]
|
||||
result.score += 1
|
||||
|
||||
named_groups = match.groupdict().keys()
|
||||
if 'ep_ab_num' in named_groups:
|
||||
ep_ab_num = self._convert_number(match.group('ep_ab_num'))
|
||||
if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'):
|
||||
result.ab_episode_numbers = range(ep_ab_num,
|
||||
self._convert_number(match.group('extra_ab_ep_num')) + 1)
|
||||
result.score += 1
|
||||
else:
|
||||
result.ab_episode_numbers = [ep_ab_num]
|
||||
result.score += 1
|
||||
|
||||
if 'series_name' in named_groups:
|
||||
result.series_name = match.group('series_name')
|
||||
if result.series_name:
|
||||
result.series_name = self.clean_series_name(result.series_name)
|
||||
if 'sports_event_id' in named_groups:
|
||||
sports_event_id = match.group('sports_event_id')
|
||||
if sports_event_id:
|
||||
result.sports_event_id = int(match.group('sports_event_id'))
|
||||
result.score += 1
|
||||
|
||||
if 'sports_event_name' in named_groups:
|
||||
result.sports_event_name = match.group('sports_event_name')
|
||||
if result.sports_event_name:
|
||||
result.sports_event_name = self.clean_series_name(result.sports_event_name)
|
||||
result.score += 1
|
||||
|
||||
if 'sports_air_date' in named_groups:
|
||||
sports_air_date = match.group('sports_air_date')
|
||||
if result.show and result.show.is_sports:
|
||||
try:
|
||||
result.sports_air_date = parser.parse(sports_air_date, fuzzy=True).date()
|
||||
result.score += 1
|
||||
|
||||
# get show object
|
||||
if not result.show and not self.naming_pattern:
|
||||
result.show = helpers.get_show(result.series_name, self.tryIndexers)
|
||||
|
||||
# confirm result show object variables
|
||||
if result.show:
|
||||
# confirm passed in show object indexer id matches result show object indexer id
|
||||
if self.showObj and self.showObj.indexerid != result.show.indexerid:
|
||||
doneSearch = True
|
||||
break
|
||||
|
||||
# confirm we are using correct regex mode
|
||||
if regexMode == self.NORMAL_REGEX and not (result.show.is_anime or result.show.is_sports):
|
||||
result.score += 1
|
||||
elif regexMode == self.SPORTS_REGEX and result.show.is_sports:
|
||||
result.score += 1
|
||||
elif regexMode == self.ANIME_REGEX and result.show.is_anime:
|
||||
result.score += 1
|
||||
elif not result.show.is_anime:
|
||||
break
|
||||
|
||||
if 'season_num' in named_groups:
|
||||
tmp_season = int(match.group('season_num'))
|
||||
if cur_regex_name == 'bare' and tmp_season in (19, 20):
|
||||
except:
|
||||
continue
|
||||
result.season_number = tmp_season
|
||||
result.score += 1
|
||||
|
||||
if 'ep_num' in named_groups:
|
||||
ep_num = self._convert_number(match.group('ep_num'))
|
||||
if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
|
||||
result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
|
||||
if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
|
||||
if result.show and result.show.air_by_date:
|
||||
year = int(match.group('air_year'))
|
||||
month = int(match.group('air_month'))
|
||||
day = int(match.group('air_day'))
|
||||
|
||||
try:
|
||||
dtStr = '%s-%s-%s' % (year, month, day)
|
||||
result.air_date = datetime.datetime.strptime(dtStr, "%Y-%m-%d").date()
|
||||
result.score += 1
|
||||
else:
|
||||
result.episode_numbers = [ep_num]
|
||||
result.score += 1
|
||||
|
||||
if 'ep_ab_num' in named_groups:
|
||||
ep_ab_num = self._convert_number(match.group('ep_ab_num'))
|
||||
if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'):
|
||||
result.ab_episode_numbers = range(ep_ab_num,
|
||||
self._convert_number(match.group('extra_ab_ep_num')) + 1)
|
||||
result.score += 1
|
||||
else:
|
||||
result.ab_episode_numbers = [ep_ab_num]
|
||||
result.score += 1
|
||||
|
||||
if 'sports_event_id' in named_groups:
|
||||
sports_event_id = match.group('sports_event_id')
|
||||
if sports_event_id:
|
||||
result.sports_event_id = int(match.group('sports_event_id'))
|
||||
result.score += 1
|
||||
|
||||
if 'sports_event_name' in named_groups:
|
||||
result.sports_event_name = match.group('sports_event_name')
|
||||
if result.sports_event_name:
|
||||
result.sports_event_name = self.clean_series_name(result.sports_event_name)
|
||||
result.score += 1
|
||||
|
||||
if 'sports_air_date' in named_groups:
|
||||
sports_air_date = match.group('sports_air_date')
|
||||
if result.show and result.show.is_sports:
|
||||
try:
|
||||
result.sports_air_date = parser.parse(sports_air_date, fuzzy=True).date()
|
||||
result.score += 1
|
||||
except:
|
||||
continue
|
||||
|
||||
if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
|
||||
if result.show and result.show.air_by_date:
|
||||
year = int(match.group('air_year'))
|
||||
month = int(match.group('air_month'))
|
||||
day = int(match.group('air_day'))
|
||||
|
||||
try:
|
||||
dtStr = '%s-%s-%s' % (year, month, day)
|
||||
result.air_date = datetime.datetime.strptime(dtStr, "%Y-%m-%d").date()
|
||||
result.score += 1
|
||||
except:
|
||||
continue
|
||||
|
||||
if 'extra_info' in named_groups:
|
||||
tmp_extra_info = match.group('extra_info')
|
||||
|
||||
# Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
|
||||
if tmp_extra_info and cur_regex_name == 'season_only' and re.search(
|
||||
r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
|
||||
except:
|
||||
continue
|
||||
result.extra_info = tmp_extra_info
|
||||
result.score += 1
|
||||
|
||||
if 'release_group' in named_groups:
|
||||
result.release_group = match.group('release_group')
|
||||
result.score += 1
|
||||
if 'extra_info' in named_groups:
|
||||
tmp_extra_info = match.group('extra_info')
|
||||
|
||||
matches.append(result)
|
||||
# Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
|
||||
if tmp_extra_info and cur_regex_name == 'season_only' and re.search(
|
||||
r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
|
||||
continue
|
||||
result.extra_info = tmp_extra_info
|
||||
result.score += 1
|
||||
|
||||
if 'release_group' in named_groups:
|
||||
result.release_group = match.group('release_group')
|
||||
result.score += 1
|
||||
|
||||
if 'version' in named_groups:
|
||||
# assigns version to anime file if detected using anime regex. Non-anime regex receives -1
|
||||
version = match.group('version')
|
||||
if version:
|
||||
result.version = version
|
||||
else:
|
||||
result.version = 1
|
||||
else:
|
||||
result.version = -1
|
||||
|
||||
|
||||
matches.append(result)
|
||||
|
||||
if len(matches):
|
||||
# pick best match with highest score based on placement
|
||||
bestResult = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)
|
||||
|
||||
# if no show object was created check and see if we passed one in and use that instead
|
||||
if not bestResult.show and self.showObj:
|
||||
bestResult.show = self.showObj
|
||||
show = None
|
||||
if not self.naming_pattern:
|
||||
# try and create a show object for this result
|
||||
show = helpers.get_show(bestResult.series_name, self.tryIndexers)
|
||||
|
||||
# get quality
|
||||
bestResult.quality = common.Quality.nameQuality(name,
|
||||
bestResult.show.is_anime if bestResult.show else False)
|
||||
# confirm passed in show object indexer id matches result show object indexer id
|
||||
if show:
|
||||
if self.showObj and show.indexerid != self.showObj.indexerid:
|
||||
show = None
|
||||
bestResult.show = show
|
||||
elif not show and self.showObj:
|
||||
bestResult.show = self.showObj
|
||||
|
||||
# if this is a naming pattern test or result doesn't have a show object then return best result
|
||||
if not bestResult.show or self.naming_pattern:
|
||||
return bestResult
|
||||
|
||||
# get quality
|
||||
bestResult.quality = common.Quality.nameQuality(name, bestResult.show.is_anime)
|
||||
|
||||
new_episode_numbers = []
|
||||
new_season_numbers = []
|
||||
new_absolute_numbers = []
|
||||
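Once every pattern has been tried, _parse_string keeps all candidate results and picks the one with the highest score; the preliminary sort on which_regex only breaks ties, because max() returns the first of several equal-scoring items. A toy illustration with a hypothetical Candidate stand-in for ParseResult:

    from collections import namedtuple

    # toy stand-in for ParseResult: just the fields used for ranking
    Candidate = namedtuple('Candidate', ['name', 'which_regex', 'score'])


    def pick_best(matches):
        """Sketch: highest score wins; which_regex order only breaks ties."""
        if not matches:
            return None
        return max(sorted(matches, reverse=True, key=lambda x: x.which_regex),
                   key=lambda x: x.score)


    candidates = [Candidate('standard', ['standard'], 3),
                  Candidate('bare', ['bare'], 1),
                  Candidate('anime', ['anime'], 3)]
    best = pick_best(candidates)  # the 'standard'/'anime' score tie falls to the sort order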
|
@ -451,6 +448,7 @@ class NameParser(object):
|
|||
final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
|
||||
final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
|
||||
final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')
|
||||
final_result.version = self._combine_results(dir_name_result, file_name_result, 'version')
|
||||
|
||||
final_result.which_regex = []
|
||||
if final_result == file_name_result:
|
||||
|
@ -496,7 +494,8 @@ class ParseResult(object):
|
|||
ab_episode_numbers=None,
|
||||
show=None,
|
||||
score=None,
|
||||
quality=None
|
||||
quality=None,
|
||||
version=None
|
||||
):
|
||||
|
||||
self.original_name = original_name
|
||||
|
@ -531,6 +530,8 @@ class ParseResult(object):
|
|||
self.show = show
|
||||
self.score = score
|
||||
|
||||
self.version = version
|
||||
|
||||
def __eq__(self, other):
|
||||
if not other:
|
||||
return False
|
||||
|
@ -561,6 +562,8 @@ class ParseResult(object):
|
|||
return False
|
||||
if self.quality != other.quality:
|
||||
return False
|
||||
if self.version != other.version:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
@ -582,7 +585,10 @@ class ParseResult(object):
|
|||
to_return += str(self.sports_event_id)
|
||||
to_return += str(self.sports_air_date)
|
||||
if self.ab_episode_numbers:
|
||||
to_return += ' [Absolute Nums: ' + str(self.ab_episode_numbers) + ']'
|
||||
to_return += ' [ABS: ' + str(self.ab_episode_numbers) + ']'
|
||||
if self.version:
|
||||
to_return += ' [ANIME VER: ' + str(self.version) + ']'
|
||||
|
||||
if self.release_group:
|
||||
to_return += ' [GROUP: ' + self.release_group + ']'
|
||||
|
||||
|
|
|
@ -77,7 +77,6 @@ def _update_zoneinfo():
|
|||
url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'
|
||||
|
||||
url_data = helpers.getURL(url_zv)
|
||||
|
||||
if url_data is None:
|
||||
# When urlData is None, trouble connecting to github
|
||||
logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
|
||||
|
@ -148,7 +147,6 @@ def update_network_dict():
|
|||
url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt'
|
||||
|
||||
url_data = helpers.getURL(url)
|
||||
|
||||
if url_data is None:
|
||||
# When urlData is None, trouble connecting to github
|
||||
logger.log(u"Loading Network Timezones update failed. Unable to get URL: " + url, logger.ERROR)
|
||||
|
|
|
@ -67,7 +67,7 @@ class PushbulletNotifier:
|
|||
pushbullet_device = sickbeard.PUSHBULLET_DEVICE
|
||||
|
||||
if method == 'POST':
|
||||
uri = '/api/pushes'
|
||||
uri = '/v2/pushes'
|
||||
else:
|
||||
uri = '/api/devices'
|
||||
|
||||
|
|
|
@ -75,7 +75,7 @@ class TraktNotifier:
|
|||
Returns: True if the request succeeded, False otherwise
|
||||
"""
|
||||
|
||||
data = TraktCall("account/test/%API%", api, username, password, {})
|
||||
data = TraktCall("account/test/%API%", api, username, password)
|
||||
if data and data["status"] == "success":
|
||||
return True
|
||||
|
||||
|
|
|
@ -106,7 +106,6 @@ def stripNS(element, ns):
|
|||
|
||||
def splitResult(result):
|
||||
urlData = helpers.getURL(result.url)
|
||||
|
||||
if urlData is None:
|
||||
logger.log(u"Unable to load url " + result.url + ", can't download season NZB", logger.ERROR)
|
||||
return False
|
||||
|
|
|
@ -111,7 +111,7 @@ def sendNZB(nzb, proper=False):
|
|||
if (data == None):
|
||||
return False
|
||||
nzbcontent64 = standard_b64encode(data)
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64)
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64)
|
||||
elif nzbget_version == 12:
|
||||
if nzbcontent64 is not None:
|
||||
nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False,
|
||||
|
|
|
@ -92,6 +92,8 @@ class PostProcessor(object):
|
|||
self.is_priority = is_priority
|
||||
|
||||
self.log = ''
|
||||
|
||||
self.version = None
|
||||
|
||||
def _log(self, message, level=logger.MESSAGE):
|
||||
"""
|
||||
|
@ -382,10 +384,10 @@ class PostProcessor(object):
|
|||
"""
|
||||
Look up the NZB name in the history and see if it contains a record for self.nzb_name
|
||||
|
||||
Returns a (indexer_id, season, []) tuple. The first two may be None if none were found.
|
||||
Returns a (indexer_id, season, [], quality, version) tuple. The first two may be None if none were found.
|
||||
"""
|
||||
|
||||
to_return = (None, None, [], None)
|
||||
to_return = (None, None, [], None, None)
|
||||
|
||||
# if we don't have either of these then there's nothing to use to search the history for anyway
|
||||
if not self.nzb_name and not self.folder_name:
|
||||
|
@ -413,6 +415,7 @@ class PostProcessor(object):
|
|||
indexer_id = int(sql_results[0]["showid"])
|
||||
season = int(sql_results[0]["season"])
|
||||
quality = int(sql_results[0]["quality"])
|
||||
version = int(sql_results[0]["version"])
|
||||
|
||||
if quality == common.Quality.UNKNOWN:
|
||||
quality = None
|
||||
|
@ -420,7 +423,8 @@ class PostProcessor(object):
|
|||
show = helpers.findCertainShow(sickbeard.showList, indexer_id)
|
||||
|
||||
self.in_history = True
|
||||
to_return = (show, season, [], quality)
|
||||
self.version = version
|
||||
to_return = (show, season, [], quality, version)
|
||||
self._log("Found result in history: " + str(to_return), logger.DEBUG)
|
||||
|
||||
return to_return
|
||||
|
@ -452,6 +456,7 @@ class PostProcessor(object):
|
|||
logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
|
||||
logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
|
||||
|
||||
|
||||
def _analyze_name(self, name, file=True):
|
||||
"""
|
||||
Takes a name and tries to figure out a show, season, and episode from it.
|
||||
|
@ -464,7 +469,7 @@ class PostProcessor(object):
|
|||
|
||||
logger.log(u"Analyzing name " + repr(name))
|
||||
|
||||
to_return = (None, None, [], None)
|
||||
to_return = (None, None, [], None, None)
|
||||
|
||||
if not name:
|
||||
return to_return
|
||||
|
@ -488,7 +493,7 @@ class PostProcessor(object):
|
|||
season = parse_result.season_number
|
||||
episodes = parse_result.episode_numbers
|
||||
|
||||
to_return = (show, season, episodes, parse_result.quality)
|
||||
to_return = (show, season, episodes, parse_result.quality, None)
|
||||
|
||||
self._finalize(parse_result)
|
||||
return to_return
|
||||
|
@ -516,7 +521,7 @@ class PostProcessor(object):
|
|||
For a given file try to find the showid, season, and episode.
|
||||
"""
|
||||
|
||||
show = season = quality = None
|
||||
show = season = quality = version = None
|
||||
episodes = []
|
||||
|
||||
# try to look up the nzb in history
|
||||
|
@ -542,7 +547,7 @@ class PostProcessor(object):
|
|||
for cur_attempt in attempt_list:
|
||||
|
||||
try:
|
||||
(cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
|
||||
(cur_show, cur_season, cur_episodes, cur_quality, cur_version) = cur_attempt()
|
||||
except (InvalidNameException, InvalidShowException), e:
|
||||
logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
|
||||
continue
|
||||
|
@ -555,6 +560,10 @@ class PostProcessor(object):
|
|||
if cur_quality and not (self.in_history and quality):
|
||||
quality = cur_quality
|
||||
|
||||
# we only get the current version for anime from history, to prevent issues with old database entries
|
||||
if cur_version is not None:
|
||||
version = cur_version
|
||||
|
||||
if cur_season != None:
|
||||
season = cur_season
|
||||
if cur_episodes:
|
||||
|
@ -594,9 +603,9 @@ class PostProcessor(object):
|
|||
season = 1
|
||||
|
||||
if show and season and episodes:
|
||||
return (show, season, episodes, quality)
|
||||
return (show, season, episodes, quality, version)
|
||||
|
||||
return (show, season, episodes, quality)
|
||||
return (show, season, episodes, quality, version)
|
||||
|
||||
def _get_ep_obj(self, show, season, episodes):
|
||||
"""
|
||||
|
@ -783,7 +792,7 @@ class PostProcessor(object):
|
|||
self.anidbEpisode = None
|
||||
|
||||
# try to find the file info
|
||||
(show, season, episodes, quality) = self._find_info()
|
||||
(show, season, episodes, quality, version) = self._find_info()
|
||||
if not show:
|
||||
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
|
||||
logger.ERROR)
|
||||
|
@ -810,6 +819,14 @@ class PostProcessor(object):
|
|||
priority_download = self._is_priority(ep_obj, new_ep_quality)
|
||||
self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)
|
||||
|
||||
# get the version of the episode we're processing
|
||||
if version:
|
||||
self._log(u"Snatch history had a version in it, using that: v" + str(version),
|
||||
logger.DEBUG)
|
||||
new_ep_version = version
|
||||
else:
|
||||
new_ep_version = -1
|
||||
|
||||
# check for an existing file
|
||||
existing_file_status = self._checkForExistingFile(ep_obj.location)
|
||||
|
||||
|
@ -890,6 +907,13 @@ class PostProcessor(object):
|
|||
|
||||
cur_ep.is_proper = self.is_proper
|
||||
|
||||
cur_ep.version = new_ep_version
|
||||
|
||||
if self.release_group:
|
||||
cur_ep.release_group = self.release_group
|
||||
else:
|
||||
cur_ep.release_group = ""
|
||||
|
||||
sql_l.append(cur_ep.get_sql())
|
||||
|
||||
if len(sql_l) > 0:
|
||||
|
@ -981,7 +1005,7 @@ class PostProcessor(object):
|
|||
ep_obj.createMetaFiles()
|
||||
|
||||
# log it to history
|
||||
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)
|
||||
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version)
|
||||
|
||||
# send notifications
|
||||
notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
|
||||
|
|
|
@ -60,7 +60,8 @@ class ProperFinder():
|
|||
run_in = sickbeard.properFinderScheduler.lastRun + sickbeard.properFinderScheduler.cycleTime - datetime.datetime.now()
|
||||
hours, remainder = divmod(run_in.seconds, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
run_at = u", next check in approx. " + ("%dh, %dm" % (hours, minutes) if 0 < hours else "%dm, %ds" % (minutes, seconds))
|
||||
run_at = u", next check in approx. " + (
|
||||
"%dh, %dm" % (hours, minutes) if 0 < hours else "%dm, %ds" % (minutes, seconds))
|
||||
|
||||
logger.log(u"Completed the search for new propers%s" % run_at)
|
||||
|
||||
|
@ -136,11 +137,18 @@ class ProperFinder():
|
|||
|
||||
# populate our Proper instance
|
||||
if parse_result.is_anime:
|
||||
logger.log(u"I am sorry '"+curProper.name+"' seams to be an anime proper seach is not yet suported", logger.DEBUG)
|
||||
logger.log(u"I am sorry '" + curProper.name + "' seams to be an anime proper seach is not yet suported",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
else:
|
||||
curProper.season = parse_result.season_number if parse_result.season_number != None else 1
|
||||
curProper.episode = parse_result.episode_numbers[0]
|
||||
if parse_result.is_anime:
|
||||
if parse_result.release_group and parse_result.version:
|
||||
curProper.release_group = parse_result.release_group
|
||||
curProper.version = parse_result.version
|
||||
else:
|
||||
continue
|
||||
|
||||
curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
|
||||
|
||||
|
@ -149,22 +157,51 @@ class ProperFinder():
|
|||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name, parse_result.show.rls_ignore_words):
|
||||
logger.log(u"Ignoring " + curProper.name + " based on ignored words filter: " + parse_result.show.rls_ignore_words,
|
||||
logger.MESSAGE)
|
||||
if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name,
|
||||
parse_result.show.rls_ignore_words):
|
||||
logger.log(
|
||||
u"Ignoring " + curProper.name + " based on ignored words filter: " + parse_result.show.rls_ignore_words,
|
||||
logger.MESSAGE)
|
||||
continue
|
||||
|
||||
if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name, parse_result.show.rls_require_words):
|
||||
logger.log(u"Ignoring " + curProper.name + " based on required words filter: " + parse_result.show.rls_require_words,
|
||||
logger.MESSAGE)
|
||||
if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name,
|
||||
parse_result.show.rls_require_words):
|
||||
logger.log(
|
||||
u"Ignoring " + curProper.name + " based on required words filter: " + parse_result.show.rls_require_words,
|
||||
logger.MESSAGE)
|
||||
continue
|
||||
|
||||
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
|
||||
# check if we actually want this proper (if it's the right quality)
|
||||
myDB = db.DBConnection()
|
||||
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
|
||||
[curProper.indexerid, curProper.season, curProper.episode])
|
||||
if not sqlResults:
|
||||
continue
|
||||
|
||||
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
|
||||
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
|
||||
if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
|
||||
continue
|
||||
|
||||
# check if we actually want this proper (if it's the right release group and a higher version)
|
||||
if parse_result.is_anime:
|
||||
myDB = db.DBConnection()
|
||||
sqlResults = myDB.select(
|
||||
"SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
|
||||
[curProper.indexerid, curProper.season, curProper.episode])
|
||||
|
||||
oldVersion = int(sqlResults[0]["version"])
|
||||
oldRelease_group = (sqlResults[0]["release_group"])
|
||||
|
||||
if oldVersion > -1 and oldVersion < curProper.version:
|
||||
logger.log("Found new anime v" + str(curProper.version) + " to replace existing v" + str(oldVersion))
|
||||
else:
|
||||
continue
|
||||
|
||||
if oldRelease_group != curProper.release_group:
|
||||
logger.log("Skipping proper from release group: " + curProper.release_group + ", does not match existing release group: " + oldRelease_group)
|
||||
continue
|
||||
|
||||
# if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
|
||||
if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
|
||||
operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
|
||||
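For anime, the proper check above now compares the downloaded episode's stored version and release group against the candidate: only a strictly higher version from the same release group is snatched. A compact sketch of that gate, with hypothetical argument names:

    def want_anime_proper(old_version, old_release_group, new_version, new_release_group):
        """Sketch: accept a proper only when it bumps the version for the same
        release group; -1 means the existing episode carries no version."""
        if old_version == -1 or new_version <= old_version:
            return False
        if old_release_group != new_release_group:
            return False
        return True


    # example: existing v1 from "GroupA", candidate v2 from "GroupA" -> accepted
    assert want_anime_proper(1, 'GroupA', 2, 'GroupA')
    assert not want_anime_proper(-1, 'GroupA', 2, 'GroupA')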
|
@ -212,7 +249,7 @@ class ProperFinder():
|
|||
showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
|
||||
if showObj == None:
|
||||
logger.log(u"Unable to find the show with indexerid " + str(
|
||||
curProper .indexerid) + " so unable to download the proper", logger.ERROR)
|
||||
curProper.indexerid) + " so unable to download the proper", logger.ERROR)
|
||||
continue
|
||||
epObj = showObj.getEpisode(curProper.season, curProper.episode)
|
||||
|
||||
|
@ -221,6 +258,7 @@ class ProperFinder():
|
|||
result.url = curProper.url
|
||||
result.name = curProper.name
|
||||
result.quality = curProper.quality
|
||||
result.version = curProper.version
|
||||
|
||||
# snatch it
|
||||
search.snatchEpisode(result, SNATCHED_PROPER)
|
||||
|
|
|
@ -19,9 +19,11 @@
|
|||
import re
|
||||
import traceback
|
||||
import datetime
|
||||
import urlparse
|
||||
import sickbeard
|
||||
import generic
|
||||
import requests
|
||||
import requests.exceptions
|
||||
|
||||
from sickbeard.common import Quality
|
||||
from sickbeard import logger
|
||||
from sickbeard import tvcache
|
||||
|
@ -30,12 +32,9 @@ from sickbeard import classes
|
|||
from sickbeard import helpers
|
||||
from sickbeard import show_name_helpers
|
||||
from sickbeard.exceptions import ex
|
||||
from sickbeard import clients
|
||||
from lib import requests
|
||||
from lib.requests import exceptions
|
||||
from sickbeard.bs4_parser import BS4Parser
|
||||
from lib.unidecode import unidecode
|
||||
from sickbeard.helpers import sanitizeSceneName
|
||||
from sickbeard.bs4_parser import BS4Parser
|
||||
from unidecode import unidecode
|
||||
|
||||
|
||||
class BitSoupProvider(generic.TorrentProvider):
|
||||
|
@ -83,7 +82,8 @@ class BitSoupProvider(generic.TorrentProvider):
|
|||
'ssl': 'yes'
|
||||
}
|
||||
|
||||
self.session = requests.Session()
|
||||
if not self.session:
|
||||
self.session = requests.session()
|
||||
|
||||
try:
|
||||
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
|
||||
|
@ -227,32 +227,6 @@ class BitSoupProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -89,7 +89,6 @@ class BTNProvider(generic.TorrentProvider):
|
|||
params.update(search_params)
|
||||
|
||||
parsedJSON = self._api_call(apikey, params)
|
||||
|
||||
if not parsedJSON:
|
||||
logger.log(u"No data returned from " + self.name, logger.ERROR)
|
||||
return []
|
||||
|
|
|
@ -56,7 +56,7 @@ class EZRSSProvider(generic.TorrentProvider):
|
|||
def getQuality(self, item, anime=False):
|
||||
|
||||
filename = item.filename
|
||||
quality = Quality.nameQuality(filename)
|
||||
quality = Quality.sceneQuality(filename, anime)
|
||||
|
||||
return quality
|
||||
|
||||
|
@ -81,10 +81,8 @@ class EZRSSProvider(generic.TorrentProvider):
|
|||
|
||||
params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8')
|
||||
|
||||
if ep_obj.show.air_by_date:
|
||||
params['date'] = str(ep_obj.airdate).split('-')[0]
|
||||
elif ep_obj.show.sports:
|
||||
params['date'] = str(ep_obj.airdate).split('-')[0]
|
||||
if ep_obj.show.air_by_date or ep_obj.show.sports:
|
||||
params['season'] = str(ep_obj.airdate).split('-')[0]
|
||||
elif ep_obj.show.anime:
|
||||
params['season'] = "%d" % ep_obj.scene_absolute_number
|
||||
else:
|
||||
|
@ -101,9 +99,7 @@ class EZRSSProvider(generic.TorrentProvider):
|
|||
|
||||
params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8')
|
||||
|
||||
if self.show.air_by_date:
|
||||
params['date'] = str(ep_obj.airdate)
|
||||
elif self.show.sports:
|
||||
if self.show.air_by_date or self.show.sports:
|
||||
params['date'] = str(ep_obj.airdate)
|
||||
elif self.show.anime:
|
||||
params['episode'] = "%i" % int(ep_obj.scene_absolute_number)
|
||||
|
|
|
@ -258,32 +258,6 @@ class FreshOnTVProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -23,7 +23,6 @@ import datetime
|
|||
import os
|
||||
import re
|
||||
import itertools
|
||||
import Queue
|
||||
import sickbeard
|
||||
import requests
|
||||
|
||||
|
@ -35,15 +34,13 @@ from sickbeard.exceptions import ex
|
|||
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
|
||||
from sickbeard.common import Quality
|
||||
|
||||
from lib.hachoir_parser import createParser
|
||||
from hachoir_parser import createParser
|
||||
|
||||
class GenericProvider:
|
||||
NZB = "nzb"
|
||||
TORRENT = "torrent"
|
||||
|
||||
def __init__(self, name):
|
||||
self.queue = Queue.Queue()
|
||||
|
||||
# these need to be set in the subclass
|
||||
self.providerType = None
|
||||
self.name = name
|
||||
|
@ -62,9 +59,9 @@ class GenericProvider:
|
|||
self.cache = tvcache.TVCache(self)
|
||||
|
||||
self.session = requests.session()
|
||||
self.session.verify = False
|
||||
self.session.headers.update({
|
||||
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'})
|
||||
|
||||
self.headers = {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
|
||||
|
||||
def getID(self):
|
||||
return GenericProvider.makeID(self.name)
|
||||
|
@ -79,6 +76,9 @@ class GenericProvider:
|
|||
def _checkAuth(self):
|
||||
return
|
||||
|
||||
def _doLogin(self):
|
||||
return True
|
||||
|
||||
def isActive(self):
|
||||
if self.providerType == GenericProvider.NZB and sickbeard.USE_NZBS:
|
||||
return self.isEnabled()
|
||||
|
@ -109,60 +109,63 @@ class GenericProvider:
|
|||
|
||||
return result
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
def getURL(self, url, post_data=None, params=None, timeout=30, json=False):
|
||||
"""
|
||||
By default this is just a simple urlopen call but this method should be overridden
|
||||
for providers with special URL requirements (like cookies)
|
||||
"""
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
# check for auth
|
||||
if not self._doLogin():
|
||||
return
|
||||
|
||||
data = helpers.getURL(url, post_data, headers, json=json)
|
||||
|
||||
if not data:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR)
|
||||
return None
|
||||
|
||||
return data
|
||||
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
|
||||
session=self.session, json=json)
|
||||
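GenericProvider.getURL above stops re-implementing HTTP handling in each provider; it verifies the login and hands the request to the shared helper together with the provider's own session and headers. A sketch of that delegation, using shared_get as a stand-in for the shared helper:

    import requests


    def shared_get(url, session, headers=None, timeout=30, want_json=False):
        """Stand-in for the shared session-based fetch helper."""
        if headers:
            session.headers.update(headers)
        try:
            resp = session.get(url, timeout=timeout)
        except requests.exceptions.RequestException:
            return None
        if not resp.ok:
            return None
        return resp.json() if want_json else resp.content


    class ExampleProvider(object):
        """Sketch of a provider that owns one session and delegates fetching."""

        def __init__(self, name):
            self.name = name
            self.session = requests.Session()
            self.headers = {'User-Agent': 'ExampleAgent/1.0'}

        def _do_login(self):
            # real providers POST credentials here; assumed already logged in
            return True

        def get_url(self, url, want_json=False):
            if not self._do_login():
                return None
            return shared_get(url, self.session, headers=self.headers,
                              want_json=want_json)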
|
||||
def downloadResult(self, result):
|
||||
"""
|
||||
Save the result to disk.
|
||||
"""
|
||||
|
||||
logger.log(u"Downloading a result from " + self.name + " at " + result.url)
|
||||
|
||||
data = self.getURL(result.url)
|
||||
|
||||
if data is None:
|
||||
# check for auth
|
||||
if not self._doLogin():
|
||||
return False
|
||||
|
||||
# use the appropriate watch folder
|
||||
if self.providerType == GenericProvider.NZB:
|
||||
saveDir = sickbeard.NZB_DIR
|
||||
writeMode = 'w'
|
||||
elif self.providerType == GenericProvider.TORRENT:
|
||||
saveDir = sickbeard.TORRENT_DIR
|
||||
writeMode = 'wb'
|
||||
if self.providerType == GenericProvider.TORRENT:
|
||||
try:
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
|
||||
if not torrent_hash:
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
urls = [
|
||||
'http://torcache.net/torrent/' + torrent_hash + '.torrent',
|
||||
'http://torrage.com/torrent/' + torrent_hash + '.torrent',
|
||||
'http://zoink.it/torrent/' + torrent_hash + '.torrent',
|
||||
]
|
||||
except:
|
||||
urls = [result.url]
|
||||
|
||||
filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
|
||||
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
elif self.providerType == GenericProvider.NZB:
|
||||
urls = [result.url]
|
||||
|
||||
filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
|
||||
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
else:
|
||||
return False
|
||||
return
|
||||
|
||||
# use the result name as the filename
|
||||
file_name = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
for url in urls:
|
||||
if helpers.download_file(url, filename, session=self.session):
|
||||
logger.log(u"Downloading a result from " + self.name + " at " + url)
|
||||
|
||||
logger.log(u"Saving to " + file_name, logger.DEBUG)
|
||||
if self.providerType == GenericProvider.TORRENT:
|
||||
logger.log(u"Saved magnet link to " + filename, logger.MESSAGE)
|
||||
else:
|
||||
logger.log(u"Saved result to " + filename, logger.MESSAGE)
|
||||
|
||||
try:
|
||||
with open(file_name, writeMode) as fileOut:
|
||||
fileOut.write(data)
|
||||
helpers.chmodAsParent(file_name)
|
||||
except EnvironmentError, e:
|
||||
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
# as long as it's a valid download then consider it a successful snatch
|
||||
return self._verify_download(file_name)
|
||||
return self._verify_download(filename)
|
||||
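downloadResult above extracts the info-hash from a magnet link and then tries the torcache, torrage and zoink caches in turn, falling back to the original URL when no hash is found. A standalone sketch of building that candidate list (the helper name torrent_candidate_urls is illustrative):

    import re


    def torrent_candidate_urls(result_url):
        """Sketch: magnet link -> list of cache URLs, otherwise the URL itself."""
        match = re.search(r'urn:btih:([\w]{32,40})', result_url)
        if not match:
            return [result_url]
        torrent_hash = match.group(1).upper()
        return [
            'http://torcache.net/torrent/' + torrent_hash + '.torrent',
            'http://torrage.com/torrent/' + torrent_hash + '.torrent',
            'http://zoink.it/torrent/' + torrent_hash + '.torrent',
        ]


    # usage: try each candidate with a streaming download helper until one succeeds
    # for url in torrent_candidate_urls(result.url):
    #     if stream_to_file(url, local_path):
    #         break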
|
||||
def _verify_download(self, file_name=None):
|
||||
"""
|
||||
|
@ -300,6 +303,7 @@ class GenericProvider:
|
|||
showObj = parse_result.show
|
||||
quality = parse_result.quality
|
||||
release_group = parse_result.release_group
|
||||
version = parse_result.version
|
||||
|
||||
addCacheEntry = False
|
||||
if not (showObj.air_by_date or showObj.sports):
|
||||
|
@ -312,14 +316,16 @@ class GenericProvider:
|
|||
if not len(parse_result.episode_numbers) and (
|
||||
parse_result.season_number and parse_result.season_number != season) or (
|
||||
not parse_result.season_number and season != 1):
|
||||
logger.log(u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
|
||||
logger.DEBUG)
|
||||
logger.log(
|
||||
u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
|
||||
logger.DEBUG)
|
||||
addCacheEntry = True
|
||||
elif len(parse_result.episode_numbers) and (
|
||||
parse_result.season_number != season or not [ep for ep in episodes if
|
||||
ep.scene_episode in parse_result.episode_numbers]):
|
||||
logger.log(u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
|
||||
logger.DEBUG)
|
||||
logger.log(
|
||||
u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
|
||||
logger.DEBUG)
|
||||
addCacheEntry = True
|
||||
|
||||
if not addCacheEntry:
|
||||
|
@ -386,6 +392,7 @@ class GenericProvider:
|
|||
result.quality = quality
|
||||
result.release_group = release_group
|
||||
result.content = None
|
||||
result.version = version
|
||||
|
||||
if len(epObj) == 1:
|
||||
epNum = epObj[0].episode
|
||||
|
|
|
@ -80,7 +80,7 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||
return True
|
||||
|
||||
def _get_season_search_strings(self, ep_obj):
|
||||
season_search_string = [self._make_post_data_JSON(show=ep_obj.show, season=ep_obj.scene_season)]
|
||||
season_search_string = [self._make_post_data_JSON(show=ep_obj.show, season=ep_obj)]
|
||||
return season_search_string
|
||||
|
||||
def _get_episode_search_strings(self, ep_obj, add_string=''):
|
||||
|
@ -105,16 +105,8 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||
|
||||
logger.log(u"Search url: " + self.search_url + " search_params: " + search_params, logger.DEBUG)
|
||||
|
||||
data = self.getURL(self.search_url, post_data=search_params)
|
||||
|
||||
if not data:
|
||||
logger.log(u"No data returned from " + self.search_url, logger.ERROR)
|
||||
return []
|
||||
|
||||
parsedJSON = helpers.parse_json(data)
|
||||
|
||||
if parsedJSON is None:
|
||||
logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
|
||||
parsedJSON = self.getURL(self.search_url, post_data=search_params, json=True)
|
||||
if not parsedJSON:
|
||||
return []
|
||||
|
||||
if self._checkAuthFromData(parsedJSON):
|
||||
|
@ -195,7 +187,7 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||
else:
|
||||
post_data['tvdb'] = {
|
||||
'id': show.indexerid,
|
||||
'season': season,
|
||||
'season': episode.scene_season,
|
||||
}
|
||||
|
||||
if search_term:
|
||||
|
@ -225,20 +217,14 @@ class HDBitsCache(tvcache.TVCache):
|
|||
|
||||
if self._checkAuth(None):
|
||||
|
||||
data = self._getRSSData()
|
||||
|
||||
# As long as we got something from the provider we count it as an update
|
||||
if data:
|
||||
self.setLastUpdate()
|
||||
else:
|
||||
return []
|
||||
|
||||
parsedJSON = helpers.parse_json(data)
|
||||
|
||||
if parsedJSON is None:
|
||||
parsedJSON = self._getRSSData()
|
||||
if not parsedJSON:
|
||||
logger.log(u"Error trying to load " + self.provider.name + " JSON feed", logger.ERROR)
|
||||
return []
|
||||
|
||||
# mark updated
|
||||
self.setLastUpdate()
|
||||
|
||||
if self._checkAuth(parsedJSON):
|
||||
if parsedJSON and 'data' in parsedJSON:
|
||||
items = parsedJSON['data']
|
||||
|
@ -249,27 +235,21 @@ class HDBitsCache(tvcache.TVCache):
|
|||
|
||||
cl = []
|
||||
for item in items:
|
||||
|
||||
ci = self._parseItem(item)
|
||||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
|
||||
|
||||
if len(cl) > 0:
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
||||
else:
|
||||
raise exceptions.AuthException(
|
||||
"Your authentication info for " + self.provider.name + " is incorrect, check your config")
|
||||
|
||||
else:
|
||||
return []
|
||||
|
||||
def _getRSSData(self):
|
||||
return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON())
|
||||
return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
|
||||
|
||||
def _parseItem(self, item):
|
||||
|
||||
|
|
|
@ -288,29 +288,6 @@ class HDTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
try:
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -230,30 +230,6 @@ class IPTorrentsProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
|
||||
try:
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -112,7 +112,6 @@ class KATProvider(generic.TorrentProvider):
|
|||
fileName = None
|
||||
|
||||
data = self.getURL(torrent_link)
|
||||
|
||||
if not data:
|
||||
return None
|
||||
|
||||
|
@ -225,7 +224,6 @@ class KATProvider(generic.TorrentProvider):
|
|||
results = []
|
||||
items = {'Season': [], 'Episode': [], 'RSS': []}
|
||||
|
||||
soup = None
|
||||
for mode in search_params.keys():
|
||||
for search_string in search_params[mode]:
|
||||
|
||||
|
@ -316,83 +314,6 @@ class KATProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
if sickbeard.PROXY_SETTING:
|
||||
proxies = {
|
||||
"http": sickbeard.PROXY_SETTING,
|
||||
"https": sickbeard.PROXY_SETTING,
|
||||
}
|
||||
|
||||
r = self.session.get(url, proxies=proxies, verify=False)
|
||||
else:
|
||||
r = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if r.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return r.content
|
||||
|
||||
def downloadResult(self, result):
|
||||
"""
|
||||
Save the result to disk.
|
||||
"""
|
||||
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
|
||||
|
||||
if not torrent_hash:
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
try:
|
||||
r = self.session.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False)
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to TORCACHE: " + ex(e), logger.ERROR)
|
||||
try:
|
||||
logger.log("Trying TORRAGE cache instead")
|
||||
r = self.session.get('http://torrage.com/torrent/' + torrent_hash + '.torrent', verify=False)
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to TORRAGE: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
if not r.status_code == 200:
|
||||
return False
|
||||
|
||||
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
|
||||
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
magnetFileContent = r.content
|
||||
|
||||
try:
|
||||
with open(magnetFileName, 'wb') as fileOut:
|
||||
fileOut.write(magnetFileContent)
|
||||
|
||||
helpers.chmodAsParent(magnetFileName)
|
||||
|
||||
except EnvironmentError, e:
|
||||
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
|
||||
return True
|
||||
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
@ -457,13 +378,10 @@ class KATCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
|
||||
|
||||
if len(cl) > 0:
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
||||
def _parseItem(self, item):
|
||||
|
||||
(title, url) = item
|
||||
|
|
|
@ -227,25 +227,6 @@ class NewzbinProvider(generic.NZBProvider):
|
|||
|
||||
return True
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
myOpener = classes.AuthURLOpener(sickbeard.NEWZBIN_USERNAME, sickbeard.NEWZBIN_PASSWORD)
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
f = myOpener.openit(url)
|
||||
except (urllib.ContentTooShortError, IOError), e:
|
||||
logger.log("Error loading search results: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
data = f.read()
|
||||
f.close()
|
||||
|
||||
return data
|
||||
|
||||
def _get_season_search_strings(self, ep_obj):
|
||||
return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
|
||||
|
||||
|
|
|
@ -200,66 +200,66 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
|
||||
|
||||
data = self.getURL(searchURL)
|
||||
if not data:
|
||||
continue
|
||||
|
||||
if data:
|
||||
try:
|
||||
with BS4Parser(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) as html:
|
||||
resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'})
|
||||
|
||||
try:
|
||||
with BS4Parser(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) as html:
|
||||
resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'})
|
||||
if not resultsTable:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
if not resultsTable:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
# Collecting entries
|
||||
entries_std = html.find_all('div', attrs={'id': 'torrent-std'})
|
||||
entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'})
|
||||
|
||||
entries = entries_std + entries_sticky
|
||||
|
||||
#Xirg STANDARD TORRENTS
|
||||
#Continue only if one Release is found
|
||||
if len(entries) > 0:
|
||||
|
||||
for result in entries:
|
||||
|
||||
try:
|
||||
torrentName = \
|
||||
((result.find('div', attrs={'id': 'torrent-udgivelse2-users'})).find('a'))['title']
|
||||
torrentId = (
|
||||
((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace(
|
||||
'download.php?id=', '')
|
||||
torrent_name = str(torrentName)
|
||||
torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
|
||||
torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8')
|
||||
#torrent_seeders = int(result.find('div', attrs = {'id' : 'torrent-seeders'}).find('a')['class'][0])
|
||||
## Not used, perhaps in the future ##
|
||||
#torrent_id = int(torrent['href'].replace('/details.php?id=', ''))
|
||||
#torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
|
||||
except (AttributeError, TypeError):
|
||||
continue
|
||||
|
||||
# Filter unseeded torrent and torrents with no name/url
|
||||
#if mode != 'RSS' and torrent_seeders == 0:
|
||||
# continue
|
||||
|
||||
if not torrent_name or not torrent_download_url:
|
||||
continue
|
||||
|
||||
item = torrent_name, torrent_download_url
|
||||
logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
items[mode].append(item)
|
||||
|
||||
# Collecting entries
|
||||
entries_std = html.find_all('div', attrs={'id': 'torrent-std'})
|
||||
entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'})
|
||||
else:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
entries = entries_std + entries_sticky
|
||||
|
||||
#Xirg STANDARD TORRENTS
|
||||
#Continue only if one Release is found
|
||||
if len(entries) > 0:
|
||||
|
||||
for result in entries:
|
||||
|
||||
try:
|
||||
torrentName = \
|
||||
((result.find('div', attrs={'id': 'torrent-udgivelse2-users'})).find('a'))['title']
|
||||
torrentId = (
|
||||
((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace(
|
||||
'download.php?id=', '')
|
||||
torrent_name = str(torrentName)
|
||||
torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
|
||||
torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8')
|
||||
#torrent_seeders = int(result.find('div', attrs = {'id' : 'torrent-seeders'}).find('a')['class'][0])
|
||||
## Not used, perhaps in the future ##
|
||||
#torrent_id = int(torrent['href'].replace('/details.php?id=', ''))
|
||||
#torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)
|
||||
except (AttributeError, TypeError):
|
||||
continue
|
||||
|
||||
# Filter unseeded torrent and torrents with no name/url
|
||||
#if mode != 'RSS' and torrent_seeders == 0:
|
||||
# continue
|
||||
|
||||
if not torrent_name or not torrent_download_url:
|
||||
continue
|
||||
|
||||
item = torrent_name, torrent_download_url
|
||||
logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")",
|
||||
logger.DEBUG)
|
||||
items[mode].append(item)
|
||||
|
||||
else:
|
||||
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
|
||||
logger.WARNING)
|
||||
continue
|
||||
|
||||
except Exception, e:
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
|
||||
logger.ERROR)
|
||||
except Exception, e:
|
||||
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
|
||||
logger.ERROR)
|
||||
|
||||
results += items[mode]
|
||||
|
||||
|
@ -278,32 +278,6 @@ class NextGenProvider(generic.TorrentProvider):
|
|||
|
||||
return title, url
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@@ -114,17 +114,14 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
        search_url = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params)
        logger.log(u"Search url: " + search_url, logger.DEBUG)

        data = self.getURL(search_url, json=True)

        if not data:
            logger.log(u"No data returned from " + search_url, logger.ERROR)
        parsedJSON = self.getURL(search_url, json=True)
        if not parsedJSON:
            return []

        if self._checkAuthFromData(data, is_XML=False):

        if self._checkAuthFromData(parsedJSON, is_XML=False):
            results = []

            for item in data:
            for item in parsedJSON:
                if 'release' in item and 'getnzb' in item:
                    results.append(item)
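The filter above only keeps entries that can actually be snatched. A short sketch of the same idea as a standalone helper; the key names 'release' and 'getnzb' come from the omgwtfnzbs JSON shown in the hunk, everything else is illustrative:

def usable_results(parsed_json):
    """Keep only entries that carry both a release name and an NZB link."""
    if not isinstance(parsed_json, list):
        return []
    return [item for item in parsed_json if 'release' in item and 'getnzb' in item]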
|
||||
|
||||
|
|
|
@ -141,7 +141,6 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
logger.log(u"Search string: " + searchURL, logger.DEBUG)
|
||||
|
||||
html = self.getURL(searchURL)
|
||||
|
||||
if not html:
|
||||
continue
|
||||
|
||||
|
@ -205,74 +204,6 @@ class PublicHDProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
r = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if r.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return r.content
|
||||
|
||||
def downloadResult(self, result):
|
||||
"""
|
||||
Save the result to disk.
|
||||
"""
|
||||
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
|
||||
|
||||
if not torrent_hash:
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
try:
|
||||
r = self.session.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False)
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to TORCACHE: " + ex(e), logger.ERROR)
|
||||
try:
|
||||
logger.log("Trying TORRAGE cache instead")
|
||||
r = self.session.get('http://torrage.com/torrent/' + torrent_hash + '.torrent', verify=False)
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to TORRAGE: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
if not r.status_code == 200:
|
||||
return False
|
||||
|
||||
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
|
||||
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
magnetFileContent = r.content
|
||||
|
||||
try:
|
||||
with open(magnetFileName, 'wb') as fileOut:
|
||||
fileOut.write(magnetFileContent)
|
||||
|
||||
helpers.chmodAsParent(magnetFileName)
|
||||
|
||||
except EnvironmentError, e:
|
||||
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
|
||||
return True
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@@ -35,7 +35,7 @@ from lib.requests import exceptions
from lib.bencode import bdecode


class TorrentRssProvider(generic.TorrentProvider):
    def __init__(self, name, url, cookies, search_mode='eponly', search_fallback=False, backlog_only=False):
    def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False, backlog_only=False):
        generic.TorrentProvider.__init__(self, name)
        self.cache = TorrentRssCache(self)
        self.url = re.sub('\/$', '', url)

@@ -47,11 +47,7 @@ class TorrentRssProvider(generic.TorrentProvider):
        self.search_mode = search_mode
        self.search_fallback = search_fallback
        self.backlog_only = backlog_only

        if cookies:
            self.cookies = cookies
        else:
            self.cookies = ''
        self.cookies = cookies

    def configStr(self):
        return self.name + '|' + self.url + '|' + self.cookies + '|' + str(int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(int(self.backlog_only))

@@ -118,6 +114,9 @@ class TorrentRssProvider(generic.TorrentProvider):
        if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url):
            return (True, 'RSS feed Parsed correctly')
        else:
            if self.cookies:
                requests.utils.add_dict_to_cookiejar(self.session.cookies,
                                                     dict(x.rsplit('=', 1) for x in (self.cookies.split(';'))))
            torrent_file = self.getURL(url)
            try:
                bdecode(torrent_file)
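The cookie handling added above turns the provider's stored "name=value;name2=value2" string into session cookies. A sketch of that step in isolation, mirroring the add_dict_to_cookiejar call in the hunk; the helper name and sample values are illustrative:

import requests

def apply_cookie_string(session, cookie_string):
    """Apply a 'name=value;name2=value2' cookie string to a requests session."""
    if not cookie_string:
        return
    # split on ';' into pairs, then split each pair once from the right on '='
    pairs = dict(part.rsplit('=', 1) for part in cookie_string.split(';') if '=' in part)
    requests.utils.add_dict_to_cookiejar(session.cookies, pairs)

# usage sketch
session = requests.Session()
apply_cookie_string(session, 'uid=1234;pass=abcd')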
|
||||
|
@ -130,30 +129,6 @@ class TorrentRssProvider(generic.TorrentProvider):
|
|||
except Exception, e:
|
||||
return (False, 'Error when trying to load RSS: ' + ex(e))
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
if self.cookies:
|
||||
requests.utils.add_dict_to_cookiejar(self.session.cookies,
|
||||
dict(x.rsplit('=', 1) for x in (self.cookies.split(';'))))
|
||||
|
||||
try:
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
r = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if r.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return r.content
|
||||
|
||||
def dumpHTML(self, data):
|
||||
|
||||
dumpName = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html')
|
||||
|
@@ -179,10 +154,11 @@ class TorrentRssCache(tvcache.TVCache):

    def _getRSSData(self):
        logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)

        request_headers = None
        if self.provider.cookies:
            request_headers = { 'Cookie': self.provider.cookies }
        else:
            request_headers = None

        return self.getRSSFeed(self.provider.url, request_headers=request_headers)

    def _parseItem(self, item):
|
||||
|
|
|
@ -69,8 +69,6 @@ class SCCProvider(generic.TorrentProvider):
|
|||
|
||||
self.categories = "c27=27&c17=17&c11=11"
|
||||
|
||||
self.headers = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
|
||||
|
||||
def isEnabled(self):
|
||||
return self.enabled
|
||||
|
||||
|
@ -178,14 +176,14 @@ class SCCProvider(generic.TorrentProvider):
|
|||
foreignSearchURL = None
|
||||
if mode == 'Season':
|
||||
searchURL = self.urls['archive'] % (search_string)
|
||||
data = [self.getURL(searchURL, headers=self.headers)]
|
||||
data = [self.getURL(searchURL)]
|
||||
else:
|
||||
searchURL = self.urls['search'] % (search_string, self.categories)
|
||||
nonsceneSearchURL = self.urls['nonscene'] % (search_string)
|
||||
foreignSearchURL = self.urls['foreign'] % (search_string)
|
||||
data = [self.getURL(searchURL, headers=self.headers),
|
||||
self.getURL(nonsceneSearchURL, headers=self.headers),
|
||||
self.getURL(foreignSearchURL, headers=self.headers)]
|
||||
data = [self.getURL(searchURL),
|
||||
self.getURL(nonsceneSearchURL),
|
||||
self.getURL(foreignSearchURL)]
|
||||
logger.log(u"Search string: " + nonsceneSearchURL, logger.DEBUG)
|
||||
logger.log(u"Search string: " + foreignSearchURL, logger.DEBUG)
|
||||
|
||||
|
@ -222,9 +220,10 @@ class SCCProvider(generic.TorrentProvider):
|
|||
|
||||
title = link.string
|
||||
if re.search('\.\.\.', title):
|
||||
with BS4Parser(self.getURL(self.url + "/" + link['href'])) as details_html:
|
||||
title = re.search('(?<=").+(?<!")', details_html.title.string).group(0)
|
||||
|
||||
data = self.getURL(self.url + "/" + link['href'])
|
||||
if data:
|
||||
with BS4Parser(data) as details_html:
|
||||
title = re.search('(?<=").+(?<!")', details_html.title.string).group(0)
|
||||
download_url = self.urls['download'] % url['href']
|
||||
id = int(link['href'].replace('details?id=', ''))
|
||||
seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
|
||||
|
@ -272,32 +271,6 @@ class SCCProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = {}
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url, headers=headers, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@@ -163,15 +163,12 @@ class SpeedCDProvider(generic.TorrentProvider):
                post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string},
                                 **self.categories[mode])

                data = self.session.post(self.urls['search'], data=post_data, verify=False)
                if not data:
                parsedJSON = self.getURL(self.urls['search'], post_data=post_data, json=True)
                if not parsedJSON:
                    continue

                try:
                    # convert to json
                    data = data.json()

                    torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
                    torrents = parsedJSON.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
                except:
                    continue
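The chained lookup above digs the torrent list out of the provider's nested reply and relies on the surrounding try/except because .get('Fs', [])[0] raises IndexError on an empty result block. A sketch of the same extraction as a defensive helper; the key names 'Fs', 'Cn' and 'torrents' come from the hunk, the function name is illustrative:

def extract_torrents(parsed_json):
    """Pull the torrent list out of the nested search reply, or return []."""
    try:
        return parsed_json.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
    except (AttributeError, IndexError, KeyError):
        return []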
|
||||
|
||||
|
@ -214,36 +211,6 @@ class SpeedCDProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
if sickbeard.PROXY_SETTING:
|
||||
proxies = {
|
||||
"http": sickbeard.PROXY_SETTING,
|
||||
"https": sickbeard.PROXY_SETTING,
|
||||
}
|
||||
|
||||
r = self.session.get(url, proxies=proxies, verify=False)
|
||||
else:
|
||||
r = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if r.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return r.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -116,6 +116,9 @@ class ThePirateBayProvider(generic.TorrentProvider):
|
|||
|
||||
fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))
|
||||
|
||||
if self.proxy and self.proxy.isEnabled():
|
||||
self.headers.update({'referer': self.proxy.getProxyURL()})
|
||||
|
||||
data = self.getURL(fileURL)
|
||||
if not data:
|
||||
return None
|
||||
|
@ -222,6 +225,9 @@ class ThePirateBayProvider(generic.TorrentProvider):
|
|||
results = []
|
||||
items = {'Season': [], 'Episode': [], 'RSS': []}
|
||||
|
||||
if self.proxy and self.proxy.isEnabled():
|
||||
self.headers.update({'referer': self.proxy.getProxyURL()})
|
||||
|
||||
for mode in search_params.keys():
|
||||
for search_string in search_params[mode]:
|
||||
|
||||
|
@ -290,84 +296,6 @@ class ThePirateBayProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not headers:
|
||||
headers = {}
|
||||
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
# Glype Proxies does not support Direct Linking.
|
||||
# We have to fake a search on the proxy site to get data
|
||||
if self.proxy.isEnabled():
|
||||
headers.update({'referer': self.proxy.getProxyURL()})
|
||||
|
||||
try:
|
||||
if sickbeard.PROXY_SETTING:
|
||||
proxies = {
|
||||
"http": sickbeard.PROXY_SETTING,
|
||||
"https": sickbeard.PROXY_SETTING,
|
||||
}
|
||||
|
||||
r = self.session.get(url, headers=headers, proxies=proxies, verify=False)
|
||||
else:
|
||||
r = self.session.get(url, headers=headers, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if r.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return r.content
|
||||
|
||||
def downloadResult(self, result):
|
||||
"""
|
||||
Save the result to disk.
|
||||
"""
|
||||
if not self.session:
|
||||
self.session = requests.Session()
|
||||
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
|
||||
|
||||
if not torrent_hash:
|
||||
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
|
||||
return False
|
||||
|
||||
try:
|
||||
r = self.session.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False)
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to TORCACHE: " + ex(e), logger.ERROR)
|
||||
try:
|
||||
logger.log("Trying TORRAGE cache instead")
|
||||
r = self.session.get('http://torrage.com/torrent/' + torrent_hash + '.torrent', verify=False)
|
||||
except Exception, e:
|
||||
logger.log("Unable to connect to TORRAGE: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
if not r.status_code == 200:
|
||||
return False
|
||||
|
||||
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
|
||||
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
|
||||
magnetFileContent = r.content
|
||||
|
||||
try:
|
||||
with open(magnetFileName, 'wb') as fileOut:
|
||||
fileOut.write(magnetFileContent)
|
||||
|
||||
helpers.chmodAsParent(magnetFileName)
|
||||
|
||||
except EnvironmentError, e:
|
||||
logger.log("Unable to save the file: " + ex(e), logger.ERROR)
|
||||
return False
|
||||
|
||||
logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
|
||||
return True
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -233,32 +233,6 @@ class TorrentBytesProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -194,13 +194,12 @@ class TorrentDayProvider(generic.TorrentProvider):
|
|||
if self.freeleech:
|
||||
post_data.update({'free': 'on'})
|
||||
|
||||
data = self.session.post(self.urls['search'], data=post_data, verify=False)
|
||||
if not data:
|
||||
parsedJSON = self.getURL(self.urls['search'], post_data=post_data, json=True)
|
||||
if not parsedJSON:
|
||||
continue
|
||||
|
||||
try:
|
||||
data = data.json()
|
||||
torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
|
||||
torrents = parsedJSON.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
|
||||
except:
|
||||
continue
|
||||
|
||||
|
@ -237,29 +236,6 @@ class TorrentDayProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@ -230,32 +230,6 @@ class TorrentLeechProvider(generic.TorrentProvider):
|
|||
|
||||
return (title, url)
|
||||
|
||||
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||
|
||||
if not self.session:
|
||||
self._doLogin()
|
||||
|
||||
if not headers:
|
||||
headers = []
|
||||
|
||||
try:
|
||||
# Remove double-slashes from url
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||
url = urlparse.urlunparse(parsed)
|
||||
|
||||
response = self.session.get(url, verify=False)
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
|
||||
return None
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
|
||||
return None
|
||||
|
||||
return response.content
|
||||
|
||||
def findPropers(self, search_date=datetime.datetime.today()):
|
||||
|
||||
results = []
|
||||
|
|
|
@@ -16,21 +16,17 @@ from shove import Shove


class RSSFeeds:
    def __init__(self, db_name):
        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name + '.db')
        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name + '.db')
        if not os.path.exists(os.path.dirname(self.db_name)):
            sickbeard.helpers.makeDir(os.path.dirname(self.db_name))

    def clearCache(self, age=None):
        try:
            with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                fc = cache.Cache(fs)
                fc.purge(age)
        except:
            os.remove(self.db_name)
            try:
                with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                    fc = cache.Cache(fs)
                    fc.purge(age)
            except Exception as e:
                logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
        except Exception as e:
            logger.log(u"RSS error clearing cache: " + ex(e), logger.DEBUG)

    def getFeed(self, url, post_data=None, request_headers=None):
        parsed = list(urlparse.urlparse(url))
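The class above wraps feed fetching in an sqlite-backed cache. A sketch of that pattern, assuming the shove and feedcache modules this project bundles (the import paths and error handling here are simplified assumptions; the project imports them from its lib package):

import os
from contextlib import closing
from shove import Shove          # key/value store over sqlite, as used above
from feedcache import cache      # feedparser cache wrapper, as used above

def fetch_feed(db_path, url, request_headers=None):
    """Fetch an RSS feed through an on-disk cache keyed by URL."""
    if not os.path.isdir(os.path.dirname(db_path)):
        os.makedirs(os.path.dirname(db_path))
    with closing(Shove('sqlite:///' + db_path, compress=True)) as store:
        feed_cache = cache.Cache(store)
        # arguments mirror the fc.fetch(url, False, False, request_headers) call above
        return feed_cache.fetch(url, False, False, request_headers)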
|
||||
|
@@ -43,25 +39,18 @@ class RSSFeeds:
            with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                fc = cache.Cache(fs)
                feed = fc.fetch(url, False, False, request_headers)
        except:
            os.remove(self.db_name)
            try:
                with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                    fc = cache.Cache(fs)
                    feed = fc.fetch(url, False, False, request_headers)
            except Exception as e:
                logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
                feed = None

        if not feed:
            logger.log(u"RSS Error loading URL: " + url, logger.ERROR)
            return
        elif 'error' in feed.feed:
            logger.log(u"RSS ERROR:[%s] CODE:[%s]" % (feed.feed['error']['description'], feed.feed['error']['code']),
                       logger.DEBUG)
            return
        elif not feed.entries:
            logger.log(u"No RSS items found using URL: " + url, logger.WARNING)
            return
            if not feed or not feed.entries:
                logger.log(u"RSS error loading url: " + url, logger.DEBUG)
                return
            elif 'error' in feed.feed:
                err_code = feed.feed['error']['code']
                err_desc = feed.feed['error']['description']

        return feed
                logger.log(
                    u"RSS ERROR:[%s] CODE:[%s]" % (err_desc, err_code), logger.DEBUG)
                return
            else:
                return feed
        except Exception as e:
            logger.log(u"RSS error: " + ex(e), logger.DEBUG)
|
|
@@ -173,7 +173,6 @@ def retrieve_exceptions():
        url = sickbeard.indexerApi(indexer).config['scene_url']

        url_data = helpers.getURL(url)

        if url_data is None:
            # When urlData is None, trouble connecting to github
            logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
|
||||
|
@@ -253,7 +252,7 @@ def retrieve_exceptions():
    anidb_exception_dict.clear()
    xem_exception_dict.clear()


def update_scene_exceptions(indexer_id, scene_exceptions):
def update_scene_exceptions(indexer_id, scene_exceptions, season=-1):
    """
    Given a indexer_id, and a list of all show scene exceptions, update the db.
    """

@@ -268,7 +267,7 @@ def update_scene_exceptions(indexer_id, scene_exceptions):
        cur_exception = unicode(cur_exception, 'utf-8', 'replace')

        myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
                    [indexer_id, cur_exception, -1])
                    [indexer_id, cur_exception, season])


def _anidb_exceptions_fetcher():
    global anidb_exception_dict
|
||||
|
@@ -299,16 +298,16 @@ def _xem_exceptions_fetcher():
        url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config[
            'xem_origin']

        url_data = helpers.getURL(url, json=True)
        if url_data is None:
        parsedJSON = helpers.getURL(url, json=True)
        if not parsedJSON:
            logger.log(u"Check scene exceptions update failed for " + sickbeard.indexerApi(
                indexer).name + ", Unable to get URL: " + url, logger.ERROR)
            continue

        if url_data['result'] == 'failure':
        if parsedJSON['result'] == 'failure':
            continue

        for indexerid, names in url_data['data'].items():
        for indexerid, names in parsedJSON['data'].items():
            xem_exception_dict[int(indexerid)] = names

    setLastRefresh('xem')
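The lookup above expects the allNames endpoint to return a 'result' flag plus a 'data' dict of indexer id to alternative names. A sketch of that fetch in isolation, assuming plain requests; the function name and timeout are illustrative, the URL and response layout follow the hunk:

import requests

def fetch_xem_exceptions(xem_origin='tvdb'):
    """Return a dict of indexer id -> list of XEM scene names, or {} on failure."""
    url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % xem_origin
    try:
        parsed = requests.get(url, timeout=30).json()
    except (requests.exceptions.RequestException, ValueError):
        return {}
    if not parsed or parsed.get('result') == 'failure':
        return {}
    return dict((int(indexer_id), names) for indexer_id, names in parsed.get('data', {}).items())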
|
||||
|
|
|
@@ -21,25 +21,20 @@
# @copyright: Dermot Buckley
#

import time
import datetime
import traceback
import sickbeard

from lib.tmdb_api import TMDB

try:
    import json
except ImportError:
    from lib import simplejson as json

import sickbeard

from sickbeard import logger
from sickbeard import db
from sickbeard.exceptions import ex
from lib import requests

MAX_XEM_AGE_SECS = 86400 # 1 day


def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=True):
    """
|
||||
|
@@ -196,7 +191,8 @@ def get_indexer_absolute_numbering(indexer_id, indexer, sceneAbsoluteNumber, fal
    return sceneAbsoluteNumber


def set_scene_numbering(indexer_id, indexer, season=None, episode=None, absolute_number=None, sceneSeason=None, sceneEpisode=None, sceneAbsolute=None):
def set_scene_numbering(indexer_id, indexer, season=None, episode=None, absolute_number=None, sceneSeason=None,
                        sceneEpisode=None, sceneAbsolute=None):
    """
    Set scene numbering for a season/episode.
    To clear the scene numbering, leave both sceneSeason and sceneEpisode as None.

@@ -332,7 +328,7 @@ def get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNum
    else:
        rows = myDB.select(
            "SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ? and scene_season = ?",
                [indexer, indexer_id, sceneAbsoluteNumber, scene_season])
            [indexer, indexer_id, sceneAbsoluteNumber, scene_season])

    if rows:
        return int(rows[0]["absolute_number"])

@@ -455,6 +451,7 @@ def get_xem_absolute_numbering_for_show(indexer_id, indexer):

    return result


def xem_refresh(indexer_id, indexer, force=False):
    """
    Refresh data from xem for a tv show
|
||||
|
@ -467,77 +464,73 @@ def xem_refresh(indexer_id, indexer, force=False):
|
|||
indexer_id = int(indexer_id)
|
||||
indexer = int(indexer)
|
||||
|
||||
# XEM API URL
|
||||
url = "http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (
|
||||
indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'])
|
||||
|
||||
MAX_REFRESH_AGE_SECS = 86400 # 1 day
|
||||
|
||||
myDB = db.DBConnection()
|
||||
rows = myDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?",
|
||||
[indexer, indexer_id])
|
||||
|
||||
if rows:
|
||||
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
|
||||
lastRefresh = int(rows[0]['last_refreshed'])
|
||||
refresh = int(time.mktime(datetime.datetime.today().timetuple())) > lastRefresh + MAX_REFRESH_AGE_SECS
|
||||
else:
|
||||
refresh = True
|
||||
|
||||
if refresh or force:
|
||||
logger.log(
|
||||
u'Looking up XEM scene mapping using for show %s on %s' % (indexer_id, sickbeard.indexerApi(indexer).name,),
|
||||
logger.DEBUG)
|
||||
|
||||
# mark refreshed
|
||||
myDB.upsert("xem_refresh",
|
||||
{'indexer': indexer,
|
||||
'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple()))},
|
||||
{'indexer_id': indexer_id})
|
||||
|
||||
try:
|
||||
logger.log(
|
||||
u'Looking up XEM scene mapping for show %s on %s' % (indexer_id, sickbeard.indexerApi(indexer).name,),
|
||||
logger.DEBUG)
|
||||
data = requests.get("http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (
|
||||
indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'],), verify=False).json()
|
||||
parsedJSON = sickbeard.helpers.getURL(url, json=True)
|
||||
if not parsedJSON or parsedJSON == '':
|
||||
logger.log(u'No XEN data for show "%s on %s"' % (indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE)
|
||||
return
|
||||
|
||||
if data is None or data == '':
|
||||
logger.log(u'No XEN data for show "%s on %s", trying TVTumbler' % (
|
||||
indexer_id, sickbeard.indexerApi(indexer).name,), logger.MESSAGE)
|
||||
data = requests.get("http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=%s&destination=scene" % (
|
||||
indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'],), verify=False).json()
|
||||
if data is None or data == '':
|
||||
logger.log(u'TVTumbler also failed for show "%s on %s". giving up.' % (indexer_id, indexer,),
|
||||
logger.MESSAGE)
|
||||
return None
|
||||
if 'success' in parsedJSON['result']:
|
||||
cl = []
|
||||
for entry in parsedJSON['data']:
|
||||
if 'scene' in entry:
|
||||
cl.append([
|
||||
"UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
|
||||
[entry['scene']['season'],
|
||||
entry['scene']['episode'],
|
||||
entry['scene']['absolute'],
|
||||
indexer_id,
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
|
||||
]])
|
||||
if 'scene_2' in entry: # for doubles
|
||||
cl.append([
|
||||
"UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
|
||||
[entry['scene_2']['season'],
|
||||
entry['scene_2']['episode'],
|
||||
entry['scene_2']['absolute'],
|
||||
indexer_id,
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
|
||||
]])
|
||||
|
||||
result = data
|
||||
|
||||
cl = []
|
||||
if result:
|
||||
cl.append(["INSERT OR REPLACE INTO xem_refresh (indexer, indexer_id, last_refreshed) VALUES (?,?,?)",
|
||||
[indexer, indexer_id, time.time()]])
|
||||
if 'success' in result['result']:
|
||||
for entry in result['data']:
|
||||
if 'scene' in entry:
|
||||
cl.append([
|
||||
"UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
|
||||
[entry['scene']['season'],
|
||||
entry['scene']['episode'],
|
||||
entry['scene']['absolute'],
|
||||
indexer_id,
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
|
||||
]])
|
||||
if 'scene_2' in entry: # for doubles
|
||||
cl.append([
|
||||
"UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
|
||||
[entry['scene_2']['season'],
|
||||
entry['scene_2']['episode'],
|
||||
entry['scene_2']['absolute'],
|
||||
indexer_id,
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
|
||||
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode']
|
||||
]])
|
||||
else:
|
||||
logger.log(u'Failed to get XEM scene data for show %s from %s because "%s"' % (
|
||||
indexer_id, sickbeard.indexerApi(indexer).name, result['message']), logger.DEBUG)
|
||||
if len(cl) > 0:
|
||||
myDB = db.DBConnection()
|
||||
myDB.mass_action(cl)
|
||||
else:
|
||||
logger.log(u"Empty lookup result - no XEM data for show %s on %s" % (
|
||||
indexer_id, sickbeard.indexerApi(indexer).name,), logger.DEBUG)
|
||||
except Exception, e:
|
||||
logger.log(u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi(
|
||||
indexer).name + ": " + ex(e), logger.WARNING)
|
||||
logger.log(
|
||||
u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + sickbeard.indexerApi(
|
||||
indexer).name + ": " + ex(e), logger.WARNING)
|
||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||
return None
|
||||
|
||||
if len(cl) > 0:
|
||||
myDB = db.DBConnection()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
||||
def fix_xem_numbering(indexer_id, indexer):
|
||||
"""
|
||||
|
@ -553,12 +546,12 @@ def fix_xem_numbering(indexer_id, indexer):
|
|||
|
||||
# query = [{
|
||||
# "name": self.show.name,
|
||||
# "seasons": [{
|
||||
# "episodes": [{
|
||||
# "episode_number": None,
|
||||
# "name": None
|
||||
# }],
|
||||
# "season_number": None,
|
||||
# "seasons": [{
|
||||
# "episodes": [{
|
||||
# "episode_number": None,
|
||||
# "name": None
|
||||
# }],
|
||||
# "season_number": None,
|
||||
# }],
|
||||
# "/tv/tv_program/number_of_seasons": [],
|
||||
# "/tv/tv_program/number_of_episodes": [],
|
||||
|
|
|
@@ -52,7 +52,7 @@ class Scheduler(threading.Thread):

    def run(self):

        while(not self.stop.is_set()):
        while not self.stop.is_set():

            current_time = datetime.datetime.now()
            should_run = False
|
||||
|
|
|
@@ -59,7 +59,6 @@ def _downloadResult(result):
    # nzbs with an URL can just be downloaded from the provider
    if result.resultType == "nzb":
        newResult = resProvider.downloadResult(result)

    # if it's an nzb data result
    elif result.resultType == "nzbdata":

@@ -80,21 +79,14 @@ def _downloadResult(result):
        except EnvironmentError, e:
            logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
            newResult = False

    elif resProvider.providerType == "torrent":
        newResult = resProvider.downloadResult(result)

    else:
        logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
        return False

    if newResult and sickbeard.USE_FAILED_DOWNLOADS:
        ui.notifications.message('Episode snatched',
                                 '<b>%s</b> snatched from <b>%s</b>' % (result.name, resProvider.name))
        newResult = False

    return newResult


def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that

@@ -139,7 +131,11 @@ def snatchEpisode(result, endStatus=SNATCHED):
        else:
            # Sets per provider seed ratio
            result.ratio = result.provider.seedRatio()

            # Gets torrent file contents if not magnet link
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None

            # Snatches torrent with client
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
        else:
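The torrent branch added above only fetches a payload for plain URLs; magnet links go to the download client untouched. A sketch of that decision under stated assumptions: fetch_url and send_to_client are illustrative stand-ins for the provider's getURL and the client's sendTORRENT, not the project's exact code:

def snatch_torrent(result, fetch_url, send_to_client):
    """Fetch the .torrent body for plain URLs, then hand the result to the client."""
    if result.url.startswith('magnet'):
        result.content = None
    else:
        result.content = fetch_url(result.url)
        if not result.content:
            return False
    return send_to_client(result)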
|
||||
|
|
|
@@ -35,7 +35,7 @@ search_queue_lock = threading.Lock()
BACKLOG_SEARCH = 10
DAILY_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 30
MANUAL_SEARCH = 40


class SearchQueue(generic_queue.GenericQueue):
|
||||
|
|
|
@@ -47,33 +47,7 @@ class ShowUpdater():
        logger.log(u"Doing full update on all shows")

        # clean out cache directory, remove everything > 12 hours old
        if sickbeard.CACHE_DIR:
            for indexer in sickbeard.indexerApi().indexers:
                cache_dir = sickbeard.indexerApi(indexer).cache
                logger.log(u"Trying to clean cache folder " + cache_dir)

                # Does our cache_dir exists
                if not ek.ek(os.path.isdir, cache_dir):
                    logger.log(u"Can't clean " + cache_dir + " if it doesn't exist", logger.WARNING)
                else:
                    max_age = datetime.timedelta(hours=12)
                    # Get all our cache files
                    cache_files = ek.ek(os.listdir, cache_dir)

                    for cache_file in cache_files:
                        cache_file_path = ek.ek(os.path.join, cache_dir, cache_file)

                        if ek.ek(os.path.isfile, cache_file_path):
                            cache_file_modified = datetime.datetime.fromtimestamp(
                                ek.ek(os.path.getmtime, cache_file_path))

                            if update_datetime - cache_file_modified > max_age:
                                try:
                                    ek.ek(os.remove, cache_file_path)
                                except OSError, e:
                                    logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e),
                                               logger.WARNING)
                                    break
        sickbeard.helpers.clearCache()

        # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update
        stale_should_update = []
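The inline clean-up removed above is replaced by a single helpers.clearCache() call, whose implementation is not part of this diff. A sketch of the age-based clean-up it presumably performs; the helper name, threshold and paths here are assumptions drawn from the removed block:

import os
import datetime

def clear_cache(cache_dir, max_age_hours=12):
    """Remove regular files older than max_age_hours from cache_dir."""
    if not os.path.isdir(cache_dir):
        return
    cutoff = datetime.datetime.now() - datetime.timedelta(hours=max_age_hours)
    for name in os.listdir(cache_dir):
        path = os.path.join(cache_dir, name)
        if os.path.isfile(path) and datetime.datetime.fromtimestamp(os.path.getmtime(path)) < cutoff:
            try:
                os.remove(path)
            except OSError:
                pass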
|
||||
|
|
|
@@ -428,11 +428,10 @@ class QueueItemRefresh(ShowQueueItem):
        self.show.populateCache()

        # Load XEM data to DB for show
        sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer, force=self.force)
        sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer)

        self.inProgress = False


class QueueItemRename(ShowQueueItem):
    def __init__(self, show=None):
        ShowQueueItem.__init__(self, ShowQueueActions.RENAME, show)
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
|
||||
import time
|
||||
|
||||
import os
|
||||
import traceback
|
||||
|
||||
|
@ -24,8 +24,7 @@ from sickbeard import encodingKludge as ek
|
|||
from sickbeard import logger
|
||||
from sickbeard import helpers
|
||||
from sickbeard import search_queue
|
||||
from sickbeard import db
|
||||
from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN
|
||||
from sickbeard.common import SKIPPED, WANTED
|
||||
from lib.trakt import *
|
||||
|
||||
|
||||
|
@ -55,18 +54,13 @@ class TraktChecker():
|
|||
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||
|
||||
def findShow(self, indexer, indexerid):
|
||||
library = TraktCall("user/library/shows/all.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API,
|
||||
sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
|
||||
library = TraktCall("user/library/shows/all.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
|
||||
|
||||
if not library:
|
||||
logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR)
|
||||
return
|
||||
|
||||
for show in library:
|
||||
if int(indexer) == 1 and int(show['tvdb_id']) == int(indexerid):
|
||||
return show
|
||||
elif int(indexer) == 2 and int(show['tvrage_id']) == int(indexerid):
|
||||
return show
|
||||
return filter(lambda x: int(indexerid) in [int(x.tvdb_id), int(x.tvrage_id)], library)
|
||||
|
||||
def syncLibrary(self):
|
||||
logger.log(u"Syncing library to trakt.tv show library", logger.DEBUG)
|
||||
|
@ -113,15 +107,15 @@ class TraktChecker():
|
|||
data['title'] = show_obj.name
|
||||
data['year'] = show_obj.startyear
|
||||
|
||||
if data is not None:
|
||||
if data:
|
||||
logger.log(u"Adding " + show_obj.name + " to trakt.tv library", logger.DEBUG)
|
||||
TraktCall("show/library/%API%", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD,
|
||||
data)
|
||||
|
||||
def updateShows(self):
|
||||
logger.log(u"Starting trakt show watchlist check", logger.DEBUG)
|
||||
watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API,
|
||||
sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
|
||||
watchlist = TraktCall("user/watchlist/shows.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
|
||||
|
||||
if not watchlist:
|
||||
logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR)
|
||||
return
|
||||
|
@ -152,8 +146,8 @@ class TraktChecker():
|
|||
Sets episodes to wanted that are in trakt watchlist
|
||||
"""
|
||||
logger.log(u"Starting trakt episode watchlist check", logger.DEBUG)
|
||||
watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API,
|
||||
sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
|
||||
watchlist = TraktCall("user/watchlist/episodes.json/%API%/" + sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
|
||||
|
||||
if not watchlist:
|
||||
logger.log(u"Could not connect to trakt service, aborting watchlist update", logger.ERROR)
|
||||
return
|
||||
|
|
|
@ -721,7 +721,7 @@ class TVShow(object):
|
|||
if newStatus != None:
|
||||
with curEp.lock:
|
||||
logger.log(u"STATUS: we have an associated file, so setting the status from " + str(
|
||||
curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG)
|
||||
curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file, anime=self.is_anime)), logger.DEBUG)
|
||||
curEp.status = Quality.compositeStatus(newStatus, newQuality)
|
||||
|
||||
with curEp.lock:
|
||||
|
@ -1274,6 +1274,8 @@ class TVEpisode(object):
|
|||
self._file_size = 0
|
||||
self._release_name = ''
|
||||
self._is_proper = False
|
||||
self._version = 0
|
||||
self._release_group = ''
|
||||
|
||||
# setting any of the above sets the dirty flag
|
||||
self.dirty = True
|
||||
|
@ -1317,6 +1319,8 @@ class TVEpisode(object):
|
|||
file_size = property(lambda self: self._file_size, dirty_setter("_file_size"))
|
||||
release_name = property(lambda self: self._release_name, dirty_setter("_release_name"))
|
||||
is_proper = property(lambda self: self._is_proper, dirty_setter("_is_proper"))
|
||||
version = property(lambda self: self._version, dirty_setter("_version"))
|
||||
release_group = property(lambda self: self._release_group, dirty_setter("_release_group"))
|
||||
|
||||
def _set_location(self, new_location):
|
||||
logger.log(u"Setter sets location to " + new_location, logger.DEBUG)
|
||||
|
@ -1523,6 +1527,12 @@ class TVEpisode(object):
|
|||
if sqlResults[0]["is_proper"]:
|
||||
self.is_proper = int(sqlResults[0]["is_proper"])
|
||||
|
||||
if sqlResults[0]["version"]:
|
||||
self.version = int(sqlResults[0]["version"])
|
||||
|
||||
if sqlResults[0]["release_group"] is not None:
|
||||
self.release_group = sqlResults[0]["release_group"]
|
||||
|
||||
self.dirty = False
|
||||
return True
|
||||
|
||||
|
@ -1676,7 +1686,7 @@ class TVEpisode(object):
|
|||
logger.log(
|
||||
u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)),
|
||||
logger.DEBUG)
|
||||
self.status = Quality.statusFromName(self.location)
|
||||
self.status = Quality.statusFromName(self.location, anime=self.show.is_anime)
|
||||
|
||||
# shouldn't get here probably
|
||||
else:
|
||||
|
@ -1701,8 +1711,8 @@ class TVEpisode(object):
|
|||
if self.status == UNKNOWN:
|
||||
if sickbeard.helpers.isMediaFile(self.location):
|
||||
logger.log(u"7 Status changes from " + str(self.status) + " to " + str(
|
||||
Quality.statusFromName(self.location)), logger.DEBUG)
|
||||
self.status = Quality.statusFromName(self.location)
|
||||
Quality.statusFromName(self.location, anime=self.show.is_anime)), logger.DEBUG)
|
||||
self.status = Quality.statusFromName(self.location, anime=self.show.is_anime)
|
||||
|
||||
nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo")
|
||||
logger.log(str(self.show.indexerid) + u": Using NFO name " + nfoFile, logger.DEBUG)
|
||||
|
@ -1849,23 +1859,26 @@ class TVEpisode(object):
|
|||
"UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, "
|
||||
"subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, "
|
||||
"location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
|
||||
"absolute_number = ? WHERE episode_id = ?",
|
||||
"absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?",
|
||||
[self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
|
||||
self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
|
||||
self.hastbn,
|
||||
self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
|
||||
self.season, self.episode, self.absolute_number, epID]]
|
||||
self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]]
|
||||
else:
|
||||
# use a custom insert method to get the data into the DB.
|
||||
return [
|
||||
"INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES "
|
||||
"((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
|
||||
"INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, "
|
||||
"subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, "
|
||||
"release_name, is_proper, showid, season, episode, absolute_number, version, release_group) VALUES "
|
||||
"((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?)"
|
||||
",?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
|
||||
[self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
|
||||
self.description,
|
||||
",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
|
||||
self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
|
||||
self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
|
||||
self.absolute_number]]
|
||||
self.absolute_number, self.version, self.release_group]]
|
||||
|
||||
def saveToDB(self, forceSave=False):
|
||||
"""
|
||||
|
@@ -1898,7 +1911,9 @@ class TVEpisode(object):
"file_size": self.file_size,
"release_name": self.release_name,
"is_proper": self.is_proper,
"absolute_number": self.absolute_number
"absolute_number": self.absolute_number,
"version": self.version,
"release_group": self.release_group
}
controlValueDict = {"showid": self.show.indexerid,
"season": self.season,
@@ -55,6 +55,10 @@ class CacheDBConnection(db.DBConnection):
if not self.hasColumn(providerName, 'release_group'):
self.addColumn(providerName, 'release_group', "TEXT", "")

# add version column to table if missing
if not self.hasColumn(providerName, 'version'):
self.addColumn(providerName, 'version', "NUMERIC", "-1")

except Exception, e:
if str(e) != "table [" + providerName + "] already exists":
raise

@@ -106,16 +110,18 @@ class TVCache():
def updateCache(self):

if self.shouldUpdate() and self._checkAuth(None):
self._clearCache()

data = self._getRSSData()

# as long as the http request worked we count this as an update
if data:
self.setLastUpdate()
else:
data = self._getRSSData()
if not data:
return []

# clear cache
self._clearCache()

# set updated
self.setLastUpdate()

# parse data
if self._checkAuth(data):
cl = []
for item in data.entries:

@@ -270,11 +276,14 @@ class TVCache():
# get release group
release_group = parse_result.release_group

# get version
version = parse_result.version

logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

return [
"INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group) VALUES (?,?,?,?,?,?,?,?)",
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group]]
"INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]


def searchCache(self, episodes, manualSearch=False):

@@ -326,6 +335,7 @@ class TVCache():
curEp = int(curEp)
curQuality = int(curResult["quality"])
curReleaseGroup = curResult["release_group"]
curVersion = curResult["version"]

# if the show says we want that episode then add it to the list
if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):

@@ -345,6 +355,7 @@ class TVCache():
result.name = title
result.quality = curQuality
result.release_group = curReleaseGroup
result.version = curVersion
result.content = self.provider.getURL(url) \
if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
and not url.startswith('magnet') else None
@@ -1,47 +0,0 @@
'''
Created on Aug 26, 2013

Wrappers around tvtumbler access.

@author: dermot@buckley.ie
'''
import time

from sickbeard import helpers
from sickbeard import logger

try:
import json
except ImportError:
from lib import simplejson as json

UPDATE_INTERVAL = 432000 # 5 days
SHOW_LOOKUP_URL = 'http://show-api.tvtumbler.com/api/show'
_tvtumber_cache = {}


def show_info(indexer_id):
try:
cachedResult = _tvtumber_cache[str(indexer_id)]
if time.time() < (cachedResult['mtime'] + UPDATE_INTERVAL):
# cached result is still considered current, use it
return cachedResult['response']
# otherwise we just fall through to lookup
except KeyError:
pass # no cached value, just fall through to lookup

url = SHOW_LOOKUP_URL + '?indexer_id=' + str(indexer_id)
data = helpers.getURL(url, timeout=60) # give this a longer timeout b/c it may take a while
result = json.loads(data)
if not result:
logger.log(u"Empty lookup result -> failed to find show id", logger.DEBUG)
return None
if result['error']:
logger.log(u"Lookup failed: " + result['errorMessage'], logger.DEBUG)
return None

# result is good, store it for later
_tvtumber_cache[str(indexer_id)] = {'mtime': time.time(),
'response': result['show']}

return result['show']
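The removed module above used a simple time-based cache: each lookup result is stored in a module-level dict together with its fetch time and reused for UPDATE_INTERVAL seconds. A generic sketch of that pattern, not tied to the tvtumbler API, looks roughly like this:

# Hedged sketch of the TTL-cache pattern the removed tvtumbler wrapper relied on.
import time

UPDATE_INTERVAL = 432000  # 5 days, matching the constant in the removed module
_cache = {}

def cached_lookup(key, fetch):
    """Return a cached response for `key` if it is younger than UPDATE_INTERVAL,
    otherwise call `fetch(key)` and remember the result."""
    entry = _cache.get(str(key))
    if entry and time.time() < entry['mtime'] + UPDATE_INTERVAL:
        return entry['response']
    response = fetch(key)
    if response is not None:
        _cache[str(key)] = {'mtime': time.time(), 'response': response}
    return response

# Example: cached_lookup(12345, lambda k: {'name': 'Some Show'})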
@@ -27,7 +27,6 @@ import tarfile
import stat
import traceback
import gh_api as github
import threading

import sickbeard
from sickbeard import helpers, notifiers

@@ -53,11 +52,8 @@ class CheckVersion():
else:
self.updater = None

def __del__(self):
pass

def run(self, force=False):
if self.check_for_new_version():
if self.check_for_new_version(force):
if sickbeard.AUTO_UPDATE:
logger.log(u"New update found for SickRage, starting auto-updater ...")
ui.notifications.message('New update found for SickRage, starting auto-updater')

@@ -113,10 +109,15 @@ class CheckVersion():
self.updater.set_newest_text()
return True

def update(self):
if self.updater.need_update():
def update(self, branch=None):
if branch and branch != self.updater.branch:
return self.updater.update(branch)
elif self.updater.need_update():
return self.updater.update()

def list_remote_branches(self):
return self.updater.list_remote_branches()

class UpdateManager():
def get_github_repo_user(self):
return 'echel0n'

@@ -127,7 +128,6 @@ class UpdateManager():
def get_update_url(self):
return sickbeard.WEB_ROOT + "/home/update/?pid=" + str(sickbeard.PID)


class WindowsUpdateManager(UpdateManager):
def __init__(self):
self.github_repo_user = self.get_github_repo_user()

@@ -163,21 +163,18 @@ class WindowsUpdateManager(UpdateManager):
regex = ".*SickRage\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"

version_url_data = helpers.getURL(self.version_url)
if not version_url_data:
return

if version_url_data is None:
return None
else:
for curLine in version_url_data.splitlines():
logger.log(u"checking line " + curLine, logger.DEBUG)
match = re.match(regex, curLine)
if match:
logger.log(u"found a match", logger.DEBUG)
if whole_link:
return curLine.strip()
else:
return int(match.group(1))

return None
for curLine in version_url_data.splitlines():
logger.log(u"checking line " + curLine, logger.DEBUG)
match = re.match(regex, curLine)
if match:
logger.log(u"found a match", logger.DEBUG)
if whole_link:
return curLine.strip()
else:
return int(match.group(1))

def need_update(self):
self._cur_version = self._find_installed_version()

@@ -203,7 +200,10 @@ class WindowsUpdateManager(UpdateManager):

sickbeard.NEWEST_VERSION_STRING = newest_text

def update(self):
def update(self, branch='windows_binaries'):

# set branch version
self.branch = branch

zip_download_url = self._find_newest_version(True)
logger.log(u"new_link: " + repr(zip_download_url), logger.DEBUG)

@@ -270,6 +270,8 @@ class WindowsUpdateManager(UpdateManager):

return True

def list_remote_branches(self):
return ['windows_binaries']

class GitUpdateManager(UpdateManager):
def __init__(self):

@@ -503,13 +505,19 @@ class GitUpdateManager(UpdateManager):

return False

def update(self):
def update(self, branch=sickbeard.version.SICKBEARD_VERSION):
"""
Calls git pull origin <branch> in order to update SickRage. Returns a bool depending
on the call's success.
"""

output, err, exit_status = self._run_git(self._git_path, 'pull origin ' + self.branch) # @UnusedVariable
# set branch version
self.branch = branch

if self.branch == sickbeard.version.SICKBEARD_VERSION:
output, err, exit_status = self._run_git(self._git_path, 'pull -f origin ' + self.branch) # @UnusedVariable
else:
output, err, exit_status = self._run_git(self._git_path, 'checkout -f ' + self.branch) # @UnusedVariable

if exit_status == 0:
# Notify update successful

@@ -519,6 +527,11 @@ class GitUpdateManager(UpdateManager):

return False

def list_remote_branches(self):
branches, err, exit_status = self._run_git(self._git_path, 'ls-remote --heads origin') # @UnusedVariable
if exit_status == 0 and branches:
return re.findall('\S+\Wrefs/heads/(.*)', branches)
return []

class SourceUpdateManager(UpdateManager):
def __init__(self):
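For context, the new list_remote_branches() above works by scraping `git ls-remote --heads origin`, whose output pairs a commit hash with a refs/heads/<branch> ref on each line. A minimal standalone sketch of that parsing, using the same regex but plain subprocess instead of GitUpdateManager's _run_git helper (so treat it as an approximation, assuming `git` is on PATH and the working directory is a clone with an 'origin' remote):

# Hedged sketch: list remote branches the same way the hunk above does.
import re
import subprocess

def list_remote_branches(git_path='git'):
    # `git ls-remote --heads origin` prints lines such as:
    # <sha>\trefs/heads/master
    output = subprocess.check_output([git_path, 'ls-remote', '--heads', 'origin'])
    if isinstance(output, bytes):
        output = output.decode('utf-8', 'replace')
    return re.findall(r'\S+\Wrefs/heads/(.*)', output)

if __name__ == '__main__':
    for branch in list_remote_branches():
        print(branch)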
@@ -632,10 +645,14 @@ class SourceUpdateManager(UpdateManager):

sickbeard.NEWEST_VERSION_STRING = newest_text

def update(self):
def update(self, branch=sickbeard.version.SICKBEARD_VERSION):
"""
Downloads the latest source tarball from github and installs it over the existing version.
"""

# set branch version
self.branch = branch

base_url = 'http://github.com/' + self.github_repo_user + '/' + self.github_repo
tar_download_url = base_url + '/tarball/' + self.branch
version_path = ek.ek(os.path.join, sickbeard.PROG_DIR, u'version.txt')

@@ -724,3 +741,7 @@ class SourceUpdateManager(UpdateManager):
notifiers.notify_git_update(sickbeard.NEWEST_VERSION_STRING)

return True

def list_remote_branches(self):
gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
return gh.branches()
@@ -23,27 +23,25 @@ import os
import time
import urllib
import datetime
import threading
import re
import traceback
import sickbeard
import webserve

from sickbeard import db, logger, exceptions, history, ui, helpers
from sickbeard.exceptions import ex
from sickbeard import encodingKludge as ek
from sickbeard import search_queue
from sickbeard import image_cache
from sickbeard import classes
from sickbeard.exceptions import ex
from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN
from common import Quality, qualityPresetStrings, statusStrings
from sickbeard import image_cache

try:
import json
except ImportError:
from lib import simplejson as json

import xml.etree.cElementTree as etree

from lib import subliminal

dateFormat = "%Y-%m-%d"

@@ -1530,7 +1528,7 @@ class CMD_SickBeardRestart(ApiCall):
class CMD_SickBeardSearchIndexers(ApiCall):
_help = {"desc": "search for show on the indexers with a given string and language",
"optionalParameters": {"name": {"desc": "name of the show you want to search for"},
"indexerid": {"desc": "thetvdb.com unique id of a show"},
"indexerid": {"desc": "thetvdb.com or tvrage.com unique id of a show"},
"lang": {"desc": "the 2 letter abbreviation lang id"}
}
}

@@ -1555,31 +1553,30 @@ class CMD_SickBeardSearchIndexers(ApiCall):
def run(self):
""" search for show at tvdb with a given string and language """
if self.name and not self.indexerid: # only name was given
baseURL = "http://thetvdb.com/api/GetSeries.php?"
params = {"seriesname": str(self.name).encode('utf-8'), 'language': self.lang}
finalURL = baseURL + urllib.urlencode(params)
urlData = sickbeard.helpers.getURL(finalURL)
lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
lINDEXER_API_PARMS['language'] = self.lang
lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI
t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

if urlData is None:
apiData = None

try:
apiData = t[str(self.name).encode()]
except Exception, e:
pass

if not apiData:
return _responds(RESULT_FAILURE, msg="Did not get result from tvdb")
else:
try:
seriesXML = etree.ElementTree(etree.XML(urlData))
except Exception, e:
logger.log(u"API :: Unable to parse XML for some reason: " + ex(e) + " from XML: " + urlData,
logger.ERROR)
return _responds(RESULT_FAILURE, msg="Unable to read result from tvdb")

series = seriesXML.getiterator('Series')
results = []
for curSeries in series:
results.append({"indexerid": int(curSeries.findtext('seriesid')),
"tvdbid": int(curSeries.findtext('seriesid')),
"name": curSeries.findtext('SeriesName'),
"first_aired": curSeries.findtext('FirstAired')})
results = []
for curSeries in apiData:
results.append({"indexerid": int(curSeries.findtext('seriesid')),
"tvdbid": int(curSeries.findtext('seriesid')),
"name": curSeries.findtext('SeriesName'),
"first_aired": curSeries.findtext('FirstAired')})

lang_id = self.valid_languages[self.lang]
return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id})
lang_id = self.valid_languages[self.lang]
return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id})

elif self.indexerid:
lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
@@ -17,6 +17,7 @@
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import base64
import inspect
import traceback

@@ -46,6 +47,7 @@ from sickbeard import naming
from sickbeard import scene_exceptions
from sickbeard import subtitles
from sickbeard import network_timezones
from sickbeard import version

from sickbeard.providers import newznab, rsstorrent
from sickbeard.common import Quality, Overview, statusStrings, qualityPresetStrings, cpu_presets, SKIPPED

@@ -80,8 +82,7 @@ except ImportError:
from lib import adba

from Cheetah.Template import Template

from tornado.web import RequestHandler, HTTPError
from tornado.web import RequestHandler, HTTPError, asynchronous


def authenticated(handler_class):

@@ -146,6 +147,7 @@ def redirect(url, permanent=False, status=None):
assert url[0] == '/'
raise HTTPRedirect(sickbeard.WEB_ROOT + url, permanent, status)


@authenticated
class MainHandler(RequestHandler):
def http_error_401_handler(self):

@@ -189,7 +191,6 @@ class MainHandler(RequestHandler):
trace_info, request_info))

def _dispatch(self):

path = self.request.uri.replace(sickbeard.WEB_ROOT, '').split('?')[0]

method = path.strip('/').split('/')[-1]

@@ -235,12 +236,14 @@ class MainHandler(RequestHandler):

raise HTTPError(404)

@asynchronous
def get(self, *args, **kwargs):
try:
self.finish(self._dispatch())
except HTTPRedirect, e:
self.redirect(e.url, e.permanent, e.status)

@asynchronous
def post(self, *args, **kwargs):
try:
self.finish(self._dispatch())
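The @asynchronous decorator added to get/post above tells Tornado not to finish the request automatically when the method returns, so the handler must call self.finish() itself. A minimal standalone handler illustrating that contract, written against the Tornado 3.x API this codebase ships and unrelated to the SickRage dispatch logic, might look like this (the route and port are illustrative only):

# Hedged sketch of the @asynchronous + self.finish() contract used above.
import tornado.ioloop
import tornado.web
from tornado.web import RequestHandler, asynchronous


class EchoHandler(RequestHandler):
    @asynchronous
    def get(self, *args, **kwargs):
        # With @asynchronous, Tornado keeps the connection open until finish()
        # is called explicitly, instead of closing it when get() returns.
        self.finish('echo: ' + self.get_argument('msg', 'nothing'))


if __name__ == '__main__':
    app = tornado.web.Application([(r'/echo', EchoHandler)])
    app.listen(8888)
    tornado.ioloop.IOLoop.instance().start()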
@@ -416,10 +419,6 @@ class MainHandler(RequestHandler):

logger.log(u"Receiving iCal request from %s" % self.request.remote_ip)

poster_url = self.request.url().replace('ical', '')

time_re = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})')

# Create a iCal string
ical = 'BEGIN:VCALENDAR\r\n'
ical += 'VERSION:2.0\r\n'

@@ -606,6 +605,13 @@ class ManageSearches(MainHandler):
return _munge(t)


def forceVersionCheck(self, *args, **kwargs):
# force a check to see if there is a new version
if sickbeard.versionCheckScheduler.action.check_for_new_version(force=True):
logger.log(u"Forcing version check")

redirect("/home/")

def forceBacklog(self, *args, **kwargs):
# force it to run the next time it looks
result = sickbeard.backlogSearchScheduler.forceRun()

@@ -615,7 +621,6 @@ class ManageSearches(MainHandler):

redirect("/manage/manageSearches/")


def forceSearch(self, *args, **kwargs):

# force it to run the next time it looks

@@ -1429,8 +1434,7 @@ class ConfigGeneral(MainHandler):
use_api=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None,
web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
handle_reverse_proxy=None, sort_article=None, auto_update=None, notify_on_update=None,
proxy_setting=None,
anon_redirect=None, git_path=None, calendar_unprotected=None,
proxy_setting=None, anon_redirect=None, git_path=None, calendar_unprotected=None,
fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
indexer_timeout=None, play_videos=None):


@@ -1511,6 +1515,7 @@ class ConfigGeneral(MainHandler):

redirect("/config/general/")


class ConfigBackupRestore(MainHandler):
def index(self, *args, **kwargs):
t = PageTemplate(headers=self.request.headers, file="config_backuprestore.tmpl")

@@ -1539,7 +1544,6 @@ class ConfigBackupRestore(MainHandler):

def restore(self, backupFile=None):


finalResult = ''

if backupFile:

@@ -1639,6 +1643,7 @@ class ConfigSearch(MainHandler):

redirect("/config/search/")


class ConfigPostProcessing(MainHandler):
def index(self, *args, **kwargs):


@@ -2184,6 +2189,7 @@ class ConfigProviders(MainHandler):

redirect("/config/providers/")


class ConfigNotifications(MainHandler):
def index(self, *args, **kwargs):
t = PageTemplate(headers=self.request.headers, file="config_notifications.tmpl")

@@ -2217,7 +2223,8 @@ class ConfigNotifications(MainHandler):
use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None,
use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None,
trakt_remove_watchlist=None, trakt_use_watchlist=None, trakt_method_add=None,
trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None, trakt_default_indexer=None,
trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None,
trakt_default_indexer=None,
use_synologynotifier=None, synologynotifier_notify_onsnatch=None,
synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None,
use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None,

@@ -2390,6 +2397,7 @@ class ConfigNotifications(MainHandler):

redirect("/config/notifications/")


class ConfigSubtitles(MainHandler):
def index(self, *args, **kwargs):
t = PageTemplate(headers=self.request.headers, file="config_subtitles.tmpl")

@@ -2447,6 +2455,7 @@ class ConfigSubtitles(MainHandler):

redirect("/config/subtitles/")


class ConfigAnime(MainHandler):
def index(self, *args, **kwargs):

@@ -2460,26 +2469,11 @@

results = []

if use_anidb == "on":
use_anidb = 1
else:
use_anidb = 0

if anidb_use_mylist == "on":
anidb_use_mylist = 1
else:
anidb_use_mylist = 0

if split_home == "on":
split_home = 1
else:
split_home = 0

sickbeard.USE_ANIDB = use_anidb
sickbeard.USE_ANIDB = config.checkbox_to_value(use_anidb)
sickbeard.ANIDB_USERNAME = anidb_username
sickbeard.ANIDB_PASSWORD = anidb_password
sickbeard.ANIDB_USE_MYLIST = anidb_use_mylist
sickbeard.ANIME_SPLIT_HOME = split_home
sickbeard.ANIDB_USE_MYLIST = config.checkbox_to_value(anidb_use_mylist)
sickbeard.ANIME_SPLIT_HOME = config.checkbox_to_value(split_home)

sickbeard.save_config()
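The refactor above replaces the repeated == "on" branches with config.checkbox_to_value(). Its implementation is not part of this diff, but a helper with the behaviour the calls above rely on (HTML checkboxes post "on" when ticked and nothing otherwise) could be sketched roughly as follows; the real sickbeard.config version may differ in details:

# Rough, hedged sketch of a checkbox_to_value() helper.
def checkbox_to_value(option, value_on=1, value_off=0):
    """Map an HTML checkbox POST value ("on" when ticked, missing/None otherwise)
    to the integer flags stored in the config."""
    if option in ('on', 'true', True, 1, '1'):
        return value_on
    return value_off


assert checkbox_to_value('on') == 1
assert checkbox_to_value(None) == 0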
@@ -2493,6 +2487,7 @@ class Config(MainHandler):

redirect("/config/anime/")


class Config(MainHandler):
def index(self, *args, **kwargs):
t = PageTemplate(headers=self.request.headers, file="config.tmpl")

@@ -2547,16 +2542,8 @@ class HomePostProcess(MainHandler):
t.submenu = HomeMenu()
return _munge(t)


def forceVersionCheck(self, *args, **kwargs):

# force a check to see if there is a new version
if sickbeard.versionCheckScheduler.action.check_for_new_version(force=True):
logger.log(u"Forcing version check")

redirect("/home/")

def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None, is_priority=None, failed="0", type="auto", *args, **kwargs):
def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None,
is_priority=None, failed="0", type="auto", *args, **kwargs):

if failed == "0":
failed = False

@@ -2779,9 +2766,7 @@ class NewHomeAddShows(MainHandler):
final_results = []

logger.log(u"Getting recommended shows from Trakt.tv", logger.DEBUG)
recommendedlist = TraktCall("recommendations/shows.json/%API%/" + sickbeard.TRAKT_USERNAME,
sickbeard.TRAKT_API,
sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
recommendedlist = TraktCall("recommendations/shows.json/%API%", sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
if recommendedlist is None:
logger.log(u"Could not connect to trakt service, aborting recommended list update", logger.ERROR)
return

@@ -2818,7 +2803,7 @@ class NewHomeAddShows(MainHandler):
t = PageTemplate(headers=self.request.headers, file="home_trendingShows.tmpl")
t.submenu = HomeMenu()

t.trending_shows = TraktCall("shows/trending.json/%API%/", sickbeard.TRAKT_API_KEY)
t.trending_shows = TraktCall("shows/trending.json/%API%", sickbeard.TRAKT_API_KEY)

return _munge(t)


@@ -3467,12 +3452,12 @@ class Home(MainHandler):

return _munge(t)

def update(self, pid=None):
def update(self, pid=None, branch=None):

if str(pid) != str(sickbeard.PID):
redirect("/home/")

updated = sickbeard.versionCheckScheduler.action.update() # @UndefinedVariable
updated = sickbeard.versionCheckScheduler.action.update(branch) # @UndefinedVariable
if updated:
# do a hard restart
sickbeard.events.put(sickbeard.events.SystemEvent.RESTART)

@@ -3483,6 +3468,8 @@ class Home(MainHandler):
return self._genericMessage("Update Failed",
"Update wasn't successful, not restarting. Check your log for more information.")

def branchCheckout(self, branch):
return self.update(sickbeard.PID, branch)

def displayShow(self, show=None):


@@ -3652,14 +3639,15 @@ class Home(MainHandler):
return self._genericMessage("Error", errString)

showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))

if showObj is None:
if not showObj:
errString = "Unable to find the specified show: " + str(show)
if directCall:
return [errString]
else:
return self._genericMessage("Error", errString)

showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid)

if not location and not anyQualities and not bestQualities and not flatten_folders:
t = PageTemplate(headers=self.request.headers, file="editShow.tmpl")
t.submenu = HomeMenu()
@@ -3876,8 +3864,7 @@ class Home(MainHandler):

if do_update_exceptions:
try:
scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVariable
showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid)
scene_exceptions.update_scene_exceptions(showObj.indexerid, exceptions_list) # @UndefinedVariable
time.sleep(cpu_presets[sickbeard.CPU_PRESET])
except exceptions.CantUpdateException, e:
errors.append("Unable to force an update on scene exceptions of the show.")