mirror of https://github.com/SickGear/SickGear.git
synced 2025-01-20 16:43:43 +00:00

Merge remote-tracking branch 'origin/dev'

Commit 83e1da31a7, 57 changed files with 408 additions and 218 deletions.

SickBeard.py (51)
@@ -54,25 +54,20 @@ import threading
import signal
import traceback
import getopt
import time

import sickbeard

import tornado.ioloop
import tornado.autoreload

from sickbeard import db
from sickbeard.tv import TVShow
from sickbeard import logger
from sickbeard import webserveInit
from sickbeard import autoreload_shutdown
from sickbeard.version import SICKBEARD_VERSION
from sickbeard.databases.mainDB import MIN_DB_VERSION
from sickbeard.databases.mainDB import MAX_DB_VERSION

from lib.configobj import ConfigObj

from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.ioloop import IOLoop

signal.signal(signal.SIGINT, sickbeard.sig_handler)
signal.signal(signal.SIGTERM, sickbeard.sig_handler)

@@ -84,9 +79,12 @@ def loadShowsFromDB():
    Populates the showList with shows from the database
    """

    logger.log(u"Loading initial show list")

    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")

    sickbeard.showList = []
    for sqlShow in sqlResults:
        try:
            curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"]))

@@ -329,18 +327,12 @@ def main():
    # Initialize the config and our threads
    sickbeard.initialize(consoleLogging=consoleLogging)

    sickbeard.showList = []

    if sickbeard.DAEMON:
        daemonize()

    # Use this PID for everything
    sickbeard.PID = os.getpid()

    # Build from the DB to start with
    logger.log(u"Loading initial show list")
    loadShowsFromDB()

    if forcedPort:
        logger.log(u"Forcing web server to port " + str(forcedPort))
        startPort = forcedPort

@@ -386,28 +378,35 @@ def main():
        sickbeard.launchBrowser(startPort)
        sys.exit()

    def startup():
        # Fire up all our threads
        sickbeard.start()
        # Build from the DB to start with
        loadShowsFromDB()

        # Launch browser if we're supposed to
        if sickbeard.LAUNCH_BROWSER and not noLaunch and not sickbeard.DAEMON and not sickbeard.restarted:
            sickbeard.launchBrowser(startPort)
        # Fire up all our threads
        sickbeard.start()

        # Start an update if we're supposed to
        if forceUpdate or sickbeard.UPDATE_SHOWS_ON_START:
            sickbeard.showUpdateScheduler.action.run(force=True)  # @UndefinedVariable
        # Launch browser if we're supposed to
        if sickbeard.LAUNCH_BROWSER and not noLaunch:
            sickbeard.launchBrowser(startPort)

        # If we restarted then unset the restarted flag
        if sickbeard.restarted:
            sickbeard.restarted = False
        # Start an update if we're supposed to
        if forceUpdate or sickbeard.UPDATE_SHOWS_ON_START:
            sickbeard.showUpdateScheduler.action.run(force=True)  # @UndefinedVariable

    # create ioloop
        # If we restarted then unset the restarted flag
        if sickbeard.restarted:
            sickbeard.restarted = False

    # IOLoop
    io_loop = IOLoop.current()

    io_loop.add_timeout(datetime.timedelta(seconds=5), startup)
    # Open browser window
    if sickbeard.LAUNCH_BROWSER and not (noLaunch or sickbeard.DAEMON or sickbeard.restarted):
        io_loop.add_timeout(datetime.timedelta(seconds=5), functools.partial(sickbeard.launchBrowser, startPort))

    # Start web server
    io_loop.start()

    # Save and restart/shutdown
    sickbeard.saveAndShutdown()

if __name__ == "__main__":

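The rewritten main() above hands control to Tornado: deferred work is queued on the IOLoop with add_timeout and only runs once io_loop.start() is spinning. A minimal standalone sketch of that pattern, with placeholder callbacks and port (none of these names come from SickBeard itself):

import datetime
import functools

from tornado.ioloop import IOLoop


def startup():
    # Deferred initialization runs once the loop is alive.
    print("loading shows, starting threads ...")


def launch_browser(port):
    print("opening browser on port %d" % port)


io_loop = IOLoop.current()

# Schedule the callbacks a few seconds after the loop starts;
# functools.partial binds the port argument, as SickBeard.py does.
io_loop.add_timeout(datetime.timedelta(seconds=5), startup)
io_loop.add_timeout(datetime.timedelta(seconds=5), functools.partial(launch_browser, 8081))

# Stop shortly afterwards so this sketch terminates; the real app
# blocks in start() until it is time to save and shut down.
io_loop.add_timeout(datetime.timedelta(seconds=6), io_loop.stop)
io_loop.start()
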
@@ -9,8 +9,15 @@
<div class="meta" style="float:left;font-size: 12px;">
#set $myDB = $db.DBConnection()
#set $today = str($datetime.date.today().toordinal())

#if $sickbeard.showList:
#set $numShows = len($sickbeard.showList)
#set $numGoodShows = len([x for x in $sickbeard.showList if x.paused == 0 and "Ended" not in x.status])
#else
#set $numShows = 0
#set $numGoodShows = 0
#end if

#set $numDLEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN ("+",".join([str(x) for x in $Quality.DOWNLOADED + [$ARCHIVED]])+") AND season != 0 and episode != 0 AND airdate <= "+$today+"")[0][0]
#set $numEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 and episode != 0 AND (airdate != 1 OR status IN ("+",".join([str(x) for x in ($Quality.DOWNLOADED + $Quality.SNATCHED + $Quality.SNATCHED_PROPER) + [$ARCHIVED]])+")) AND airdate <= "+$today+" AND status != "+str($IGNORED)+"")[0][0]
<b>$numShows shows</b> ($numGoodShows active) | <b>$numDLEpisodes/$numEpisodes</b> episodes downloaded |

@@ -224,9 +224,6 @@ class Anime(aniDBabstractObject):
class Episode(aniDBabstractObject):

    def __init__(self, aniDB, number=None, epid=None, filePath=None, fid=None, epno=None, paramsA=None, paramsF=None, load=False, calculate=False):
        if not aniDB and not number and not epid and not file and not fid:
            return None

        self.maper = AniDBMaper()
        self.epid = epid
        self.filePath = filePath

@@ -22,7 +22,8 @@ import webbrowser
import time
import datetime
import socket
import os, sys, subprocess, re
import os
import re

from urllib2 import getproxies
from threading import Lock

@@ -102,7 +103,6 @@ AUTO_UPDATE = None
CUR_COMMIT_HASH = None

INIT_LOCK = Lock()
__INITIALIZED__ = False
started = False
restarted = False

@@ -433,8 +433,8 @@ CALENDAR_UNPROTECTED = False

TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'

__INITIALIZED__ = False

__INITIALIZED__ = False
def initialize(consoleLogging=True):
    with INIT_LOCK:

@@ -478,7 +478,7 @@ def initialize(consoleLogging=True):
        USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, \
        AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
        ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
        ANIME_SPLIT_HOME, maintenanceScheduler, SCENE_DEFAULT, RES
        ANIME_SPLIT_HOME, maintenanceScheduler, SCENE_DEFAULT

        if __INITIALIZED__:
            return False

@@ -1300,13 +1300,6 @@ def saveAll():
    logger.log(u"Saving config file to disk")
    save_config()

def cleanup_tornado_sockets(io_loop):
    for fd in io_loop._handlers.keys():
        try:
            os.close(fd)
        except Exception:
            pass

def saveAndShutdown():
    halt()
    saveAll()

@@ -1797,18 +1790,4 @@ def getEpList(epIDs, showid=None):
        curEpObj = curShowObj.getEpisode(int(curEp["season"]), int(curEp["episode"]))
        epList.append(curEpObj)

    return epList


def autoreload_shutdown():
    logger.log('SickRage is now auto-reloading, please stand by ...')

    # halt all tasks
    halt()

    # save settings
    saveAll()

    if CREATEPID:
        logger.log(u"Removing pidfile " + str(PIDFILE))
        remove_pid_file(PIDFILE)
    return epList

@@ -42,3 +42,6 @@ class PostProcesser():
            return

        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)

    def __del__(self):
        pass

@@ -35,6 +35,9 @@ class BlackAndWhiteList(object):
        self.show_id = show_id
        self.refresh()

    def __del__(self):
        pass

    def refresh(self):
        logger.log(u"Building black and white list for " + str(self.show_id), logger.DEBUG)

@@ -208,6 +211,5 @@ class BlackWhiteKeyword(object):
        self.range = range  # "global" or a parser group
        self.value = values  # a list of values may contain only one item (still a list)


class BlackWhitelistNoShowIDException(Exception):
    "No show_id was given"

@@ -36,6 +36,9 @@ class DailySearcher():

        self.amActive = False

    def __del__(self):
        pass

    def run(self, force=False):

        self.amActive = True

@@ -85,10 +88,10 @@ class DailySearcher():
            with ep.lock:
                if ep.show.paused:
                    ep.status = common.SKIPPED
                else:
                    if ep.status == common.UNAIRED:
                        logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
                        ep.status = common.WANTED

                if ep.status == common.UNAIRED:
                    logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
                    ep.status = common.WANTED

                if ep.status == common.WANTED:
                    if show not in todaysEps:

@@ -102,6 +105,7 @@ class DailySearcher():
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        if len(todaysEps):
            for show in todaysEps:
                segment = todaysEps[show]

@@ -63,7 +63,7 @@ class DBConnection(object):
    def reconnect(self):
        """Closes the existing database connection and re-opens it."""
        self.close()
        self.connection = sqlite3.connect(dbFilename(self.filename, self.suffix), 20)
        self.connection = sqlite3.connect(dbFilename(self.filename, self.suffix), 20, check_same_thread=False)
        self.connection.isolation_level = None

        if self.row_type == "dict":

@@ -111,7 +111,7 @@ class DBConnection(object):
            if self.hasTable('db_version'):
                result = self.select("SELECT db_version FROM db_version")
        except:
            pass
            return 0

        if result:
            return int(result[0]["db_version"])

@@ -143,7 +143,9 @@ class DBConnection(object):
                    sqlResult.append(self.execute(qu[0], qu[1]))

                logger.log(u"Transaction with " + str(len(querylist)) + u" queries executed", logger.DEBUG)
                return sqlResult

                # finished
                break
            except sqlite3.OperationalError, e:
                sqlResult = []
                if self.connection:

@@ -151,7 +153,7 @@ class DBConnection(object):
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.log(u"DB error: " + ex(e), logger.WARNING)
                    attempt += 1
                    time.sleep(0.02)
                    time.sleep(1)
                else:
                    logger.log(u"DB error: " + ex(e), logger.ERROR)
                    raise

@@ -189,7 +191,7 @@ class DBConnection(object):
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.log(u"DB error: " + ex(e), logger.WARNING)
                    attempt += 1
                    time.sleep(0.02)
                    time.sleep(1)
                else:
                    logger.log(u"DB error: " + ex(e), logger.ERROR)
                    raise

@@ -340,11 +342,7 @@ class SchemaUpgrade(object):
        self.connection.action("UPDATE %s SET %s = ?" % (table, column), (default,))

    def checkDBVersion(self):
        result = self.connection.select("SELECT db_version FROM db_version")
        if result:
            return int(result[0]["db_version"])
        else:
            return 0
        return self.connection.checkDBVersion()

    def incDBVersion(self):
        new_version = self.checkDBVersion() + 1

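Two database changes recur in this commit: connections are now opened with check_same_thread=False so they can be shared across Tornado's threads, and the retry delay on a locked database grows from 0.02s to a full second. A standalone sketch of both patterns, assuming a plain sqlite3 connection (the file name and query are illustrative):

import sqlite3
import time

# check_same_thread=False lets a connection created in one thread be used
# from another; the caller must then serialize access itself (SickBeard
# does this with its own locks). The 20 is the busy timeout in seconds.
connection = sqlite3.connect("cache.db", 20, check_same_thread=False)
connection.isolation_level = None

def execute_with_retry(query, args=(), attempts=5):
    # Retry transient "database is locked" errors, sleeping 1s between
    # tries, mirroring the loop in db.py.
    for attempt in range(attempts):
        try:
            return connection.execute(query, args).fetchall()
        except sqlite3.OperationalError as e:
            if "database is locked" in e.args[0] or "unable to open database file" in e.args[0]:
                time.sleep(1)
            else:
                raise
    raise sqlite3.OperationalError("gave up after %d attempts" % attempts)

execute_with_retry("CREATE TABLE IF NOT EXISTS db_version (db_version INTEGER)")
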
@@ -44,6 +44,9 @@ class FailedProcessor(object):

        self.log = ""

    def __del__(self):
        pass

    def process(self):
        self._log(u"Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

@@ -43,6 +43,9 @@ class GenericQueue(object):

        self.lock = threading.Lock()

    def __del__(self):
        pass

    def pause(self):
        logger.log(u"Pausing queue")
        self.min_priority = 999999999999

@@ -116,6 +119,9 @@ class QueueItem:

        self.added = None

    def __del__(self):
        pass

    def get_thread_name(self):
        if self.thread_name:
            return self.thread_name

@@ -1078,9 +1078,9 @@ def get_show_by_name(name, useIndexer=False):
    if showObj:
        return showObj
    if not showObj and sickbeard.showList:
        if name in sickbeard.scene_exceptions.exceptionIndexerCache:
            showObj = findCertainShow(sickbeard.showList,
                                      int(sickbeard.scene_exceptions.exceptionIndexerCache[name]))
        scene_indexerid, scene_season = sickbeard.scene_exceptions.get_scene_exception_by_name(name)
        if scene_indexerid:
            showObj = findCertainShow(sickbeard.showList, scene_indexerid)

    if useIndexer and not showObj:
        (sn, idx, id) = searchIndexerForShowID(name, ui=classes.ShowListUI)

@@ -33,6 +33,9 @@ class ImageCache:
    def __init__(self):
        pass

    def __del__(self):
        pass

    def _cache_dir(self):
        """
        Builds up the full path to the image cache directory

@@ -25,6 +25,9 @@ class indexerApi(object):
    def __init__(self, indexerID=None):
        self.indexerID = int(indexerID) if indexerID else None

    def __del__(self):
        pass

    def indexer(self, *args, **kwargs):
        if self.indexerID:
            return indexerConfig[self.indexerID]['module'](*args, **kwargs)

@@ -67,6 +67,9 @@ class SBRotatingLogHandler(object):
        self.console_logging = False
        self.log_lock = threading.Lock()

    def __del__(self):
        pass

    def close_log(self, handler=None):
        if not handler:
            handler = self.cur_handler

@@ -302,6 +305,9 @@ class DispatchingFormatter:
        self._formatters = formatters
        self._default_formatter = default_formatter

    def __del__(self):
        pass

    def format(self, record):
        formatter = self._formatters.get(record.name, self._default_formatter)
        return formatter.format(record)

@@ -31,6 +31,9 @@ class Maintenance():

        self.amActive = False

    def __del__(self):
        pass

    def run(self, force=False):
        self.amActive = True

@@ -23,12 +23,11 @@ import threading
import regexes
import sickbeard

from sickbeard import logger, helpers, scene_numbering, common
from sickbeard import logger, helpers, scene_numbering, common, exceptions
from dateutil import parser

nameparser_lock = threading.Lock()


class NameParser(object):
    ALL_REGEX = 0
    NORMAL_REGEX = 1

@@ -46,6 +45,9 @@ class NameParser(object):
        self.convert = convert
        self.naming_pattern = naming_pattern

    def __del__(self):
        pass

    def clean_series_name(self, series_name):
        """Cleans up series name by removing any . and _
        characters, along with any trailing hyphens.

@@ -444,6 +446,9 @@ class ParseResult(object):
        self.show = show
        self.score = score

    def __del__(self):
        pass

    def __eq__(self, other):
        if not other:
            return False

@@ -522,13 +527,18 @@ class ParseResult(object):

        if self.show.is_anime and len(self.ab_episode_numbers):
            for epAbsNo in self.ab_episode_numbers:
                a = scene_numbering.get_indexer_absolute_numbering(self.show.indexerid, self.show.indexer, epAbsNo)
                if a:
                    (s, e) = helpers.get_all_episodes_from_absolute_number(self.show, None, [a])

                    new_absolute_numbers.append(a)
                    new_episode_numbers.extend(e)
                    new_season_numbers.append(s)
                ab = scene_numbering.get_indexer_absolute_numbering(self.show.indexerid, self.show.indexer, epAbsNo)
                if ab:
                    try:
                        (s, e) = helpers.get_all_episodes_from_absolute_number(self.show, None, [ab])
                    except exceptions.EpisodeNotFoundByAbsoluteNumberException:
                        logger.log(str(self.show.indexerid) + ": Indexer object absolute number " + str(
                            ab) + " is incomplete, skipping this episode")
                        return self
                    else:
                        new_absolute_numbers.append(ab)
                        new_episode_numbers.extend(e)
                        new_season_numbers.append(s)

        elif self.season_number and len(self.episode_numbers):
            for epNo in self.episode_numbers:

@@ -614,6 +624,8 @@ class NameParserCache(object):
            logger.log("Using cached parse result for: " + name, logger.DEBUG)
            return self._previous_parsed[name]

    def __del__(self):
        pass

name_parser_cache = NameParserCache()

@@ -165,6 +165,7 @@ def update_network_dict():
        pass

    myDB = db.DBConnection('cache.db')

    # load current network timezones
    old_d = dict(myDB.select("SELECT * FROM network_timezones"))

@@ -181,10 +182,12 @@ def update_network_dict():
            ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
        if h_k:
            del old_d[cur_d]

    # remove deleted records
    if len(old_d) > 0:
        L = list(va for va in old_d)
        ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])

    # change all network timezone infos at once (much faster)
    if ql:
        myDB.mass_action(ql)

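The DELETE above builds its IN clause from `?` placeholders, one per value, so the network names stay parameterized no matter how many are removed at once. The idiom in isolation (the table and values are made up for the example):

import sqlite3

connection = sqlite3.connect(":memory:")
connection.execute("CREATE TABLE network_timezones (network_name TEXT, timezone TEXT)")
connection.executemany("INSERT INTO network_timezones VALUES (?,?)",
                       [("ABC", "US/Eastern"), ("BBC One", "Europe/London"), ("Seven", "Australia/Sydney")])

# One '?' per value keeps the query parameterized however many names we delete.
stale = ["ABC", "Seven"]
query = "DELETE FROM network_timezones WHERE network_name IN (" + ",".join(["?"] * len(stale)) + ")"
connection.execute(query, stale)

print(connection.execute("SELECT network_name FROM network_timezones").fetchall())
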
@@ -96,6 +96,8 @@ class PostProcessor(object):

        self.log = ''

    def __del__(self):
        pass

    def _log(self, message, level=logger.MESSAGE):
        """

@@ -966,6 +968,7 @@ class PostProcessor(object):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        # find the destination folder
        try:
            proper_path = ep_obj.proper_path()

@@ -1043,6 +1046,7 @@ class PostProcessor(object):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        # log it to history
        history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

@@ -40,6 +40,9 @@ class ProperFinder():
    def __init__(self):
        self.amActive = False

    def __del__(self):
        pass

    def run(self, force=False):

        if not sickbeard.DOWNLOAD_PROPERS:

@@ -49,6 +49,9 @@ class BTNProvider(generic.TorrentProvider):

        self.url = "http://api.btnapps.net"

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -315,6 +318,9 @@ class BTNCache(tvcache.TVCache):
        # At least 15 minutes between queries
        self.minTime = 15

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -47,6 +47,9 @@ class EZRSSProvider(generic.TorrentProvider):

        self.url = 'https://www.ezrss.it/'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -178,6 +181,9 @@ class EZRSSCache(tvcache.TVCache):
        # only poll EZRSS every 15 minutes max
        self.minTime = 15

    def __del__(self):
        pass

    def _getRSSData(self):

        rss_url = self.provider.url + 'feed/'

@@ -45,6 +45,9 @@ class Fanzub(generic.NZBProvider):

        self.url = 'http://fanzub.com/'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -131,6 +134,8 @@ class FanzubCache(tvcache.TVCache):
        # we get 100 post each call !
        self.minTime = 20

    def __del__(self):
        pass

    def _getRSSData(self):

@@ -68,6 +68,9 @@ class GenericProvider:
        self.session.headers.update({
            'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'})

    def __del__(self):
        pass

    def getID(self):
        return GenericProvider.makeID(self.name)

@@ -406,9 +409,14 @@ class NZBProvider(GenericProvider):

        self.providerType = GenericProvider.NZB

    def __del__(self):
        pass

class TorrentProvider(GenericProvider):
    def __init__(self, name):
        GenericProvider.__init__(self, name)

        self.providerType = GenericProvider.TORRENT
        self.providerType = GenericProvider.TORRENT

    def __del__(self):
        pass

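The provider base class pins a browser user-agent on its shared HTTP session, so every later request carries the same header. Assuming the session is a requests.Session (the diff shows only the attribute, not the import), the pattern looks like this on its own:

import requests

session = requests.Session()
# Every request made through this session now sends the same user-agent,
# so individual calls don't need to repeat the header.
session.headers.update({
    'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36'
                  ' (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'})

response = session.get('https://example.com/feed')
print(response.status_code)
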
@@ -55,6 +55,9 @@ class HDBitsProvider(generic.TorrentProvider):
        self.rss_url = 'http://hdbits.org/api/torrents'
        self.download_url = 'http://hdbits.org/download.php?'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -214,6 +217,9 @@ class HDBitsCache(tvcache.TVCache):
        # only poll HDBits every 15 minutes max
        self.minTime = 15

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -258,6 +264,7 @@ class HDBitsCache(tvcache.TVCache):
                myDB = self._getDB()
                myDB.mass_action(ql)


        else:
            raise exceptions.AuthException(
                "Your authentication info for " + self.provider.name + " is incorrect, check your config")

@@ -73,6 +73,9 @@ class HDTorrentsProvider(generic.TorrentProvider):

        self.cookies = None

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -351,6 +354,9 @@ class HDTorrentsCache(tvcache.TVCache):
        # only poll HDTorrents every 10 minutes max
        self.minTime = 20

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -380,6 +386,7 @@ class HDTorrentsCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -65,6 +65,9 @@ class IPTorrentsProvider(generic.TorrentProvider):

        self.categorie = 'l73=1&l78=1&l66=1&l65=1&l79=1&l5=1&l4=1'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -292,6 +295,9 @@ class IPTorrentsCache(tvcache.TVCache):
        # Only poll IPTorrents every 10 minutes max
        self.minTime = 10

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -321,6 +327,7 @@ class IPTorrentsCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -68,6 +68,9 @@ class KATProvider(generic.TorrentProvider):

        self.searchurl = self.url + 'usearch/%s/?field=seeders&sorder=desc'  # order by seed

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -430,6 +433,9 @@ class KATCache(tvcache.TVCache):
        # only poll ThePirateBay every 10 minutes max
        self.minTime = 20

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -458,6 +464,7 @@ class KATCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -40,6 +40,9 @@ class NewzbinDownloader(urllib.FancyURLopener):
    def __init__(self):
        urllib.FancyURLopener.__init__(self)

    def __del__(self):
        pass

    def http_error_default(self, url, fp, errcode, errmsg, headers):

        # if newzbin is throttling us, wait seconds and try again

@@ -73,6 +76,9 @@ class NewzbinProvider(generic.NZBProvider):

        self.NEWZBIN_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S %Z'

    def __del__(self):
        pass

    def isEnabled(self):
        return sickbeard.NEWZBIN

@@ -334,6 +340,9 @@ class NewzbinCache(tvcache.TVCache):
        # only poll Newzbin every 10 mins max
        self.minTime = 1

    def __del__(self):
        pass

    def _getRSSData(self):

        return self.provider._getRSSData()

@@ -69,6 +69,9 @@ class NewznabProvider(generic.NZBProvider):

        self.default = False

    def __del__(self):
        pass

    def configStr(self):
        return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str(int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback))

@@ -292,6 +295,9 @@ class NewznabCache(tvcache.TVCache):
        # only poll newznab providers every 15 minutes max
        self.minTime = 15

    def __del__(self):
        pass

    def _getRSSData(self):

        params = {"t": "tvsearch",

@@ -345,6 +351,7 @@ class NewznabCache(tvcache.TVCache):
                myDB = self._getDB()
                myDB.mass_action(ql)


        else:
            raise AuthException(
                u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")

@@ -71,6 +71,9 @@ class NextGenProvider(generic.TorrentProvider):

        self.login_opener = None

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -341,6 +344,9 @@ class NextGenCache(tvcache.TVCache):
        # Only poll NextGen every 10 minutes max
        self.minTime = 10

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -370,6 +376,7 @@ class NextGenCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -45,6 +45,9 @@ class NyaaProvider(generic.TorrentProvider):

        self.url = 'http://www.nyaa.se/'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -127,6 +130,8 @@ class NyaaCache(tvcache.TVCache):
        # only poll NyaaTorrents every 15 minutes max
        self.minTime = 15

    def __del__(self):
        pass

    def _getRSSData(self):
        params = {

@@ -49,6 +49,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
        self.url = 'https://omgwtfnzbs.org/'
        self.supportsBacklog = True

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -157,6 +160,9 @@ class OmgwtfnzbsCache(tvcache.TVCache):
        tvcache.TVCache.__init__(self, provider)
        self.minTime = 20

    def __del__(self):
        pass

    def _getRSSData(self):
        params = {'user': provider.username,
                  'api': provider.api_key,

@@ -67,6 +67,9 @@ class PublicHDProvider(generic.TorrentProvider):

        self.categories = {'Season': ['23'], 'Episode': ['7', '14', '24'], 'RSS': ['7', '14', '23', '24']}

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -314,6 +317,9 @@ class PublicHDCache(tvcache.TVCache):
        # only poll ThePirateBay every 10 minutes max
        self.minTime = 20

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -343,6 +349,7 @@ class PublicHDCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(ql)


    def _parseItem(self, item):

        (title, url) = item

@@ -52,6 +52,9 @@ class TorrentRssProvider(generic.TorrentProvider):
        else:
            self.cookies = ''

    def __del__(self):
        pass

    def configStr(self):
        return self.name + '|' + self.url + '|' + self.cookies + '|' + str(int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(int(self.backlog_only))

@@ -170,6 +173,9 @@ class TorrentRssCache(tvcache.TVCache):
        tvcache.TVCache.__init__(self, provider)
        self.minTime = 15

    def __del__(self):
        pass

    def _getRSSData(self):
        logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
        if self.provider.cookies:

@@ -73,6 +73,9 @@ class SCCProvider(generic.TorrentProvider):

        self.headers = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -336,6 +339,9 @@ class SCCCache(tvcache.TVCache):
        # only poll SCC every 10 minutes max
        self.minTime = 20

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -365,6 +371,7 @@ class SCCCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -66,6 +66,9 @@ class SpeedCDProvider(generic.TorrentProvider):

        self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c49': 1}, 'RSS': {'c14': 1, 'c2': 1, 'c49': 1}}

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -276,6 +279,9 @@ class SpeedCDCache(tvcache.TVCache):
        # only poll Speedcd every 20 minutes max
        self.minTime = 20

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -305,6 +311,7 @@ class SpeedCDCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(ql)


    def _parseItem(self, item):

        (title, url) = item

@@ -67,6 +67,9 @@ class ThePirateBayProvider(generic.TorrentProvider):

        self.re_title_url = '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -409,6 +412,9 @@ class ThePirateBayCache(tvcache.TVCache):
        # only poll ThePirateBay every 10 minutes max
        self.minTime = 20

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -438,6 +444,7 @@ class ThePirateBayCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -470,6 +477,9 @@ class ThePirateBayWebproxy:
        'Hiload.org (NL)': 'http://hiload.org/',
    }

    def __del__(self):
        pass

    def isEnabled(self):
        """ Return True if we Choose to call TPB via Proxy """
        return self.enabled

@@ -72,6 +72,9 @@ class TorrentDayProvider(generic.TorrentProvider):
        self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1},
                           'RSS': {'c2': 1, 'c26': 1, 'c7': 1, 'c24': 1, 'c14': 1}}

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -300,6 +303,9 @@ class TorrentDayCache(tvcache.TVCache):
        # Only poll IPTorrents every 10 minutes max
        self.minTime = 10

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -329,6 +335,7 @@ class TorrentDayCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -67,6 +67,9 @@ class TorrentLeechProvider(generic.TorrentProvider):

        self.categories = "2,26,27,32"

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -295,6 +298,9 @@ class TorrentLeechCache(tvcache.TVCache):
        # only poll TorrentLeech every 20 minutes max
        self.minTime = 20

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -324,6 +330,7 @@ class TorrentLeechCache(tvcache.TVCache):
            myDB = self._getDB()
            myDB.mass_action(cl)


    def _parseItem(self, item):

        (title, url) = item

@@ -47,6 +47,9 @@ class TvTorrentsProvider(generic.TorrentProvider):

        self.url = 'http://www.tvtorrents.com/'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -86,6 +89,9 @@ class TvTorrentsCache(tvcache.TVCache):
        # only poll TvTorrents every 15 minutes max
        self.minTime = 15

    def __del__(self):
        pass

    def _getRSSData(self):
        # These will be ignored on the serverside.
        ignore_regex = "all.month|month.of|season[\s\d]*complete"

@@ -31,6 +31,9 @@ class WombleProvider(generic.NZBProvider):
        self.cache = WombleCache(self)
        self.url = 'http://newshost.co.za/'

    def __del__(self):
        pass

    def isEnabled(self):
        return self.enabled

@@ -41,6 +44,9 @@ class WombleCache(tvcache.TVCache):
        # only poll Womble's Index every 15 minutes max
        self.minTime = 15

    def __del__(self):
        pass

    def updateCache(self):

        # delete anything older then 7 days

@@ -48,30 +54,32 @@ class WombleCache(tvcache.TVCache):
        self._clearCache()

        data = None

        if not self.shouldUpdate():
        for url in [self.provider.url + 'rss/?sec=tv-sd&fr=false', self.provider.url + 'rss/?sec=tv-hd&fr=false']:
            logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
            data = self.getRSSFeed(url)
            return

        # As long as we got something from the provider we count it as an update
        if not data:
            return []
        cl = []
        for url in [self.provider.url + 'rss/?sec=tv-sd&fr=false', self.provider.url + 'rss/?sec=tv-hd&fr=false']:
            logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
            data = self.getRSSFeed(url)

        # By now we know we've got data and no auth errors, all we need to do is put it in the database
        cl = []
        for item in data.entries:
            # As long as we got something from the provider we count it as an update
            if not data:
                return []

            ci = self._parseItem(item)
            if ci is not None:
                cl.append(ci)
            # By now we know we've got data and no auth errors, all we need to do is put it in the database
            for item in data.entries:
                ci = self._parseItem(item)
                if ci is not None:
                    cl.append(ci)

        if cl:
            myDB = self._getDB()
            myDB.mass_action(cl)
        if cl:
            myDB = self._getDB()
            myDB.mass_action(cl)

        # set last updated
        if data:
            self.setLastUpdate()
        # set last updated
        if data:
            self.setLastUpdate()

    def _checkAuth(self, data):
        return data != 'Invalid Link'

@@ -23,23 +23,14 @@ class RSSFeeds:
        logger.log(u"RSS error: " + ex(e), logger.ERROR)
        raise

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.fc = None
    def __del__(self):
        self.fs.close()

    def clearCache(self, age=None):
        if not self.fc:
            return

        self.fc.purge(age)

    def getRSSFeed(self, url, post_data=None, request_headers=None):
        if not self.fc:
            return
        with feed_lock:
            self.fc.purge(age)

    def getFeed(self, url, post_data=None, request_headers=None):
        with feed_lock:
            parsed = list(urlparse.urlparse(url))
            parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one

@@ -59,4 +50,4 @@ class RSSFeeds:
            logger.log(u"No RSS items found using URL: " + url, logger.WARNING)
            return

        return feed
            return feed

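The new getFeed() normalizes each URL before fetching: urlparse splits it into components and the slash-collapsing regex is applied to the path only, so the `//` after the scheme survives. A standalone sketch of that normalization (the example URL is illustrative):

import re
import urlparse  # Python 2; on Python 3 this lives in urllib.parse

def normalize_url(url):
    # Split the URL into its six components, collapse runs of slashes in
    # the path (index 2) only, then reassemble. "http://" is untouched.
    parsed = list(urlparse.urlparse(url))
    parsed[2] = re.sub("/{2,}", "/", parsed[2])
    return urlparse.urlunparse(parsed)

print(normalize_url("http://newshost.co.za//rss///?sec=tv-hd"))
# -> http://newshost.co.za/rss/?sec=tv-hd
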
@@ -18,6 +18,7 @@

import re
import time
import threading
import sickbeard

from lib import adba

@@ -26,20 +27,16 @@ from sickbeard import name_cache
from sickbeard import logger
from sickbeard import db

MAX_XEM_AGE_SECS = 86400  # 1 day
MAX_ANIDB_AGE_SECS = 86400  # 1 day

exceptionCache = {}
exceptionSeasonCache = {}
exceptionIndexerCache = {}

scene_lock = threading.Lock()

def shouldRefresh(list):
    MAX_REFRESH_AGE_SECS = 86400  # 1 day

    myDB = db.DBConnection('cache.db')
    rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
                       [list])
    if rows:
        return time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
        return time.time() > (int(rows[0]['last_refreshed']) + MAX_REFRESH_AGE_SECS)
    else:
        return True

@@ -55,59 +52,50 @@ def get_scene_exceptions(indexer_id, season=-1):
    Given a indexer_id, return a list of all the scene exceptions.
    """

    global exceptionCache
    exceptionsList = []

    if indexer_id not in exceptionCache or season not in exceptionCache[indexer_id]:
        myDB = db.DBConnection('cache.db')
        exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?",
                                 [indexer_id, season])
    myDB = db.DBConnection('cache.db')
    exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?",
                             [indexer_id, season])
    if exceptions:
        exceptionsList = list(set([cur_exception["show_name"] for cur_exception in exceptions]))

        if len(exceptionsList):
            try:
                exceptionCache[indexer_id][season] = exceptionsList
            except:
                exceptionCache[indexer_id] = {season: exceptionsList}
    else:
        exceptionsList = list(set(exceptionCache[indexer_id][season]))

    if season == 1:  # if we where looking for season 1 we can add generic names
        exceptionsList += get_scene_exceptions(indexer_id, season=-1)

    return exceptionsList


def get_all_scene_exceptions(indexer_id):
    exceptionsDict = {}

    myDB = db.DBConnection('cache.db')
    exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
    exceptionsList = {}
    [cur_exception["show_name"] for cur_exception in exceptions]
    for cur_exception in exceptions:
        if not cur_exception["season"] in exceptionsList:
            exceptionsList[cur_exception["season"]] = []
        exceptionsList[cur_exception["season"]].append(cur_exception["show_name"])

    return exceptionsList
    if exceptions:
        for cur_exception in exceptions:
            if not cur_exception["season"] in exceptionsDict:
                exceptionsDict[cur_exception["season"]] = []
            exceptionsDict[cur_exception["season"]].append(cur_exception["show_name"])

    return exceptionsDict


def get_scene_seasons(indexer_id):
    """
    return a list of season numbers that have scene exceptions
    """
    global exceptionSeasonCache
    if indexer_id not in exceptionSeasonCache:
        myDB = db.DBConnection('cache.db')
        sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?",
                                 [indexer_id])
        exceptionSeasonCache[indexer_id] = [int(x["season"]) for x in sqlResults]

    return exceptionSeasonCache[indexer_id]
    myDB = db.DBConnection('cache.db')
    sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?",
                             [indexer_id])

    if sqlResults:
        return [int(x["season"]) for x in sqlResults]


def get_scene_exception_by_name(show_name):
    return get_scene_exception_by_name_multiple(show_name)[0]


def get_scene_exception_by_name_multiple(show_name):
    """
    Given a show name, return the indexerid of the exception, None if no exception

@@ -136,6 +124,7 @@ def get_scene_exception_by_name_multiple(show_name):
            sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
        logger.log(u"Scene exception lookup got indexer id " + str(cur_indexer_id) + u", using that", logger.DEBUG)
        out.append((cur_indexer_id, cur_season))

    if out:
        return out
    else:

@@ -148,11 +137,7 @@ def retrieve_exceptions():
    scene_exceptions table in cache.db. Also clears the scene name cache.
    """

    global exceptionCache, exceptionSeasonCache

    exception_dict = {}
    exceptionCache = {}
    exceptionSeasonCache = {}

    # exceptions are stored on github pages
    if setLastRefresh('normal'):

@@ -184,11 +169,10 @@ def retrieve_exceptions():
            # regex out the list of shows, taking \' into account
            # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
            alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

            exception_dict[indexer_id] = alias_list

    # XEM scene exceptions
    xem_exceptions = _xem_excpetions_fetcher()
    xem_exceptions = _xem_exceptions_fetcher()
    for xem_ex in xem_exceptions:
        if xem_ex in exception_dict:
            exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]

@@ -229,39 +213,33 @@ def retrieve_exceptions():
    else:
        logger.log(u"No scene exceptions update needed")

    # build indexer scene name cache
    buildIndexerCache()

    # cleanup
    del exception_dict

def update_scene_exceptions(indexer_id, scene_exceptions):
    """
    Given a indexer_id, and a list of all show scene exceptions, update the db.
    """

    global exceptionIndexerCache

    myDB = db.DBConnection('cache.db')
    myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])

    logger.log(u"Updating internal scene name cache", logger.MESSAGE)
    for cur_season in [-1] + sickbeard.scene_exceptions.get_scene_seasons(indexer_id):
    logger.log(u"Updating scene exceptions", logger.MESSAGE)
    for cur_season in [-1] + get_scene_seasons(indexer_id):
        for cur_exception in scene_exceptions:
            exceptionIndexerCache[helpers.full_sanitizeSceneName(cur_exception)] = indexer_id
            myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
                        [indexer_id, cur_exception, cur_season, 1])

    name_cache.clearCache()

def _retrieve_anidb_mainnames():
    global MAX_ANIDB_AGE_SECS

    success = False

    anidb_mainNames = {}

    if shouldRefresh('anidb'):
        logger.log(u"Checking for scene exception updates for AniDB")
    success = False

    logger.log(u"Checking for scene exception updates for AniDB")
    for show in sickbeard.showList:
        if show.is_anime and show.indexer == 1:
            try:

@@ -280,8 +258,7 @@ def _retrieve_anidb_mainnames():
    return anidb_mainNames


def _xem_excpetions_fetcher():
    global MAX_XEM_AGE_SECS
def _xem_exceptions_fetcher():

    exception_dict = {}

@@ -318,20 +295,4 @@ def getSceneSeasons(indexer_id):
    """
    myDB = db.DBConnection('cache.db')
    seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
    return [cur_exception["season"] for cur_exception in seasons]


def buildIndexerCache():
    logger.log(u"Updating internal scene name cache", logger.MESSAGE)
    global exceptionIndexerCache
    exceptionIndexerCache = {}

    for show in sickbeard.showList:
        for curSeason in [-1] + sickbeard.scene_exceptions.get_scene_seasons(show.indexerid):
            exceptionIndexerCache[helpers.full_sanitizeSceneName(show.name)] = show.indexerid
            for name in get_scene_exceptions(show.indexerid, season=curSeason):
                exceptionIndexerCache[name] = show.indexerid
                exceptionIndexerCache[helpers.full_sanitizeSceneName(name)] = show.indexerid

    logger.log(u"Updated internal scene name cache", logger.MESSAGE)
    logger.log(u"Internal scene name cache set to: " + str(exceptionIndexerCache), logger.DEBUG)
    return [cur_exception["season"] for cur_exception in seasons]

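shouldRefresh() above swaps the old module-level caches for a per-list timestamp kept in cache.db: a list is fetched again only when its last refresh is more than MAX_REFRESH_AGE_SECS old. A reduced sketch of that gate; setLastRefresh is only referenced in the diff, so its body here is an assumption:

import sqlite3
import time

MAX_REFRESH_AGE_SECS = 86400  # 1 day

connection = sqlite3.connect(":memory:")
connection.execute("CREATE TABLE scene_exceptions_refresh (list TEXT, last_refreshed INTEGER)")

def should_refresh(list_name):
    # Refresh when there is no record yet, or the record is older than a day.
    rows = connection.execute("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
                              [list_name]).fetchall()
    if rows:
        return time.time() > int(rows[0][0]) + MAX_REFRESH_AGE_SECS
    return True

def set_last_refresh(list_name):
    # Hypothetical counterpart to the project's setLastRefresh().
    connection.execute("DELETE FROM scene_exceptions_refresh WHERE list = ?", [list_name])
    connection.execute("INSERT INTO scene_exceptions_refresh VALUES (?, ?)",
                       [list_name, int(time.time())])

print(should_refresh("xem"))   # True: never refreshed
set_last_refresh("xem")
print(should_refresh("xem"))   # False: refreshed just now
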
@@ -529,9 +529,6 @@ def xem_refresh(indexer_id, indexer, force=False):
        myDB = db.DBConnection()
        myDB.mass_action(ql)

    # fix xem scene numbering issues
    # fix_xem_numbering(indexer_id, indexer)


def fix_xem_numbering(indexer_id, indexer):
    """

@@ -694,4 +691,4 @@ def fix_xem_numbering(indexer_id, indexer):

    if ql:
        myDB = db.DBConnection()
        myDB.mass_action(ql)
        myDB.mass_action(ql)

@@ -44,6 +44,9 @@ class Scheduler:
        self.abort = False
        self.force = False

    def __del__(self):
        pass

    def initThread(self):
        if self.thread == None or not self.thread.isAlive():
            self.thread = threading.Thread(None, self.runAction, self.threadName)

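initThread() rebuilds its worker on demand because a Python thread object cannot be restarted once it has finished. A minimal sketch of the pattern (isAlive() is the Python 2 spelling the project uses; Python 3 renamed it is_alive()):

import threading

class Scheduler:
    def __init__(self, run_action, thread_name="SCHEDULER"):
        self.runAction = run_action
        self.threadName = thread_name
        self.thread = None

    def initThread(self):
        # A finished Thread cannot be restarted, so build a new one on demand;
        # Thread's positional arguments are (group, target, name).
        if self.thread is None or not self.thread.isAlive():
            self.thread = threading.Thread(None, self.runAction, self.threadName)
            self.thread.start()
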
@@ -176,6 +176,7 @@ def snatchEpisode(result, endStatus=SNATCHED):
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)


    return True

@@ -40,6 +40,8 @@ class BacklogSearchScheduler(scheduler.Scheduler):
        else:
            return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)

    def __del__(self):
        pass

class BacklogSearcher:
    def __init__(self):

@@ -53,6 +55,9 @@ class BacklogSearcher:

        self._resetPI()

    def __del__(self):
        pass

    def _resetPI(self):
        self.percentDone = 0
        self.currentSearchInfo = {'title': 'Initializing'}

@@ -43,6 +43,9 @@ class SearchQueue(generic_queue.GenericQueue):
        generic_queue.GenericQueue.__init__(self)
        self.queue_name = "SEARCHQUEUE"

    def __del__(self):
        pass

    def is_in_queue(self, show, segment):
        for cur_item in self.queue:
            if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:

@@ -93,6 +96,9 @@ class DailySearchQueueItem(generic_queue.QueueItem):
        self.show = show
        self.segment = segment

    def __del__(self):
        pass

    def execute(self):
        generic_queue.QueueItem.execute(self)

@@ -125,6 +131,9 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
        self.show = show
        self.segment = segment

    def __del__(self):
        pass

    def execute(self):
        generic_queue.QueueItem.execute(self)

@@ -168,6 +177,9 @@ class BacklogQueueItem(generic_queue.QueueItem):
        self.show = show
        self.segment = segment

    def __del__(self):
        pass

    def execute(self):
        generic_queue.QueueItem.execute(self)

@@ -211,6 +223,9 @@ class FailedQueueItem(generic_queue.QueueItem):
        self.segment = segment
        self.success = None

    def __del__(self):
        pass

    def execute(self):
        generic_queue.QueueItem.execute(self)

@@ -103,3 +103,6 @@ class ShowUpdater():
        ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator("Daily Update", piList))

        logger.log(u"Completed full update on all shows")

    def __del__(self):
        pass

@@ -33,6 +33,9 @@ class TraktChecker():
        self.todoWanted = []
        self.todoBacklog = []

    def __del__(self):
        pass

    def run(self, force=False):
        # add shows from trakt.tv watchlist
        if sickbeard.TRAKT_USE_WATCHLIST:

@@ -103,6 +103,9 @@ class TVShow(object):

        self.loadFromDB()

    def __del__(self):
        pass

    name = property(lambda self: self._name, dirty_setter("_name"))
    indexerid = property(lambda self: self._indexerid, dirty_setter("_indexerid"))
    indexer = property(lambda self: self._indexer, dirty_setter("_indexer"))

@@ -457,6 +460,7 @@ class TVShow(object):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


    def loadEpisodesFromDB(self):

        logger.log(u"Loading all episodes from the DB")

@@ -578,6 +582,7 @@ class TVShow(object):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        # Done updating save last update date
        self.last_update_indexer = datetime.date.today().toordinal()
        self.saveToDB()

@@ -748,6 +753,7 @@ class TVShow(object):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        # creating metafiles on the root should be good enough
        if sickbeard.USE_FAILED_DOWNLOADS and rootEp is not None:
            with rootEp.lock:

@@ -1003,6 +1009,7 @@ class TVShow(object):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        # remove self from show list
        sickbeard.showList = [x for x in sickbeard.showList if int(x.indexerid) != self.indexerid]

@@ -1077,6 +1084,7 @@ class TVShow(object):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


    def airdateModifyStamp(self, ep_obj):
        """
        Make the modify date and time of a file reflect the show air date and time.

@@ -1343,6 +1351,9 @@ class TVEpisode(object):

        self.checkForMetaFiles()

    def __del__(self):
        pass

    name = property(lambda self: self._name, dirty_setter("_name"))
    season = property(lambda self: self._season, dirty_setter("_season"))
    episode = property(lambda self: self._episode, dirty_setter("_episode"))

@@ -2415,3 +2426,4 @@ class TVEpisode(object):
        if sql_l:
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

@@ -37,6 +37,7 @@ from sickbeard.rssfeeds import RSSFeeds

cache_lock = threading.Lock()


class CacheDBConnection(db.DBConnection):
    def __init__(self, providerName):
        db.DBConnection.__init__(self, "cache.db")

@@ -67,6 +68,9 @@ class CacheDBConnection(db.DBConnection):
            if str(e) != "table lastUpdate already exists":
                raise

    def __del__(self):
        pass

class TVCache():
    def __init__(self, provider):

@@ -74,20 +78,23 @@ class TVCache():
        self.providerID = self.provider.getID()
        self.minTime = 10

    def _getDB(self):
    def __del__(self):
        pass

    def _getDB(self):
        return CacheDBConnection(self.providerID)

    def _clearCache(self):
        if self.shouldClearCache():
            logger.log(u"Clearing " + self.provider.name + " cache")

            curDate = datetime.date.today() - datetime.timedelta(weeks=1)

            myDB = self._getDB()
            myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])

            # clear RSS Feed cache
            with RSSFeeds(self.providerID) as feed:
                feed.clearCache(int(time.mktime(curDate.timetuple())))
            RSSFeeds(self.providerID).clearCache()

    def _getRSSData(self):

@@ -103,14 +110,9 @@ class TVCache():

    def updateCache(self):

        # delete anything older then 7 days
        logger.log(u"Clearing " + self.provider.name + " cache")
        self._clearCache()
        if self.shouldUpdate() and self._checkAuth(None):
        self._clearCache()

        if not self.shouldUpdate():
            return

        if self._checkAuth(None):
            data = self._getRSSData()

            # as long as the http request worked we count this as an update

@@ -129,7 +131,6 @@ class TVCache():
            if cl:
                myDB = self._getDB()
                myDB.mass_action(cl)

        else:
            raise AuthException(
                u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")

@@ -137,9 +138,7 @@ class TVCache():
        return []

    def getRSSFeed(self, url, post_data=None, request_headers=None):
        with RSSFeeds(self.providerID) as feed:
            data = feed.getRSSFeed(url, post_data, request_headers)
        return data
        return RSSFeeds(self.providerID).getFeed(url, post_data, request_headers)

    def _translateTitle(self, title):
        return title.replace(' ', '.')

@@ -219,10 +218,10 @@ class TVCache():

    def shouldUpdate(self):
        # if we've updated recently then skip the update
        # if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
        #     logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
        #         datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
        #     return False
        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
            logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
                datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
            return False

        return True

@@ -230,7 +229,7 @@ class TVCache():
        # if daily search hasn't used our previous results yet then don't clear the cache
        if self.lastUpdate > self.lastSearch:
            logger.log(
                u"Daily search has not yet searched our last cache results, skipping clearig cache ...", logger.DEBUG)
                u"Daily search has not yet used our last cache results, not clearing cache ...", logger.DEBUG)
            return False

        return True

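The previously commented-out throttle in shouldUpdate() is live again: each cache records when it last updated and skips any update attempted within minTime minutes. The mechanism in isolation (the real lastUpdate is persisted in cache.db; here it is just an attribute):

import datetime

class CacheThrottle:
    def __init__(self, min_time_minutes=15):
        self.minTime = min_time_minutes
        self.lastUpdate = datetime.datetime.fromordinal(1)  # "never updated"

    def shouldUpdate(self):
        # Skip the update if the last one was less than minTime minutes ago.
        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
            return False
        return True

    def setLastUpdate(self):
        self.lastUpdate = datetime.datetime.today()

throttle = CacheThrottle(15)
print(throttle.shouldUpdate())  # True: never updated
throttle.setLastUpdate()
print(throttle.shouldUpdate())  # False: updated moments ago
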
@@ -30,6 +30,9 @@ class Notifications(object):
        self._messages = []
        self._errors = []

    def __del__(self):
        pass

    def message(self, title, message=''):
        """
        Add a regular notification to the queue

@@ -89,6 +92,9 @@ class Notification(object):
        else:
            self._timeout = datetime.timedelta(minutes=1)

    def __del__(self):
        pass

    def is_new(self, remote_ip='127.0.0.1'):
        """
        Returns True if the notification hasn't been displayed to the current client (aka IP address).

@@ -53,6 +53,9 @@ class CheckVersion():
        else:
            self.updater = None

    def __del__(self):
        pass

    def run(self, force=False):
        if self.check_for_new_version():
            if sickbeard.AUTO_UPDATE:

@@ -120,7 +120,7 @@ class Api(webserve.MainHandler):

    def builder(self):
        """ expose the api-builder template """
        t = webserve.PageTemplate(file="apiBuilder.tmpl")
        t = webserve.PageTemplate(headers=self.request.headers, file="apiBuilder.tmpl")

        def titler(x):
            if not x or sickbeard.SORT_ARTICLE:

@@ -1008,6 +1008,7 @@ class CMD_EpisodeSetStatus(ApiCall):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        extra_msg = ""
        if start_backlog:
            cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, ep_segment)

@@ -129,17 +129,16 @@ class HTTPRedirect(Exception):
    """Exception raised when the request should be redirected."""

    def __init__(self, url, permanent=False, status=None):
        self.url = url
        self.url = urlparse.urljoin(sickbeard.WEB_ROOT, url)
        self.permanent = permanent
        self.status = status
        Exception.__init__(self, url, permanent, status)
        Exception.__init__(self, self.url, self.permanent, self.status)

    def __call__(self):
        """Use this exception as a request.handler (raise self)."""
        raise self

def redirect(url, permanent=False, status=None):
    url = urlparse.urljoin(sickbeard.WEB_ROOT, url)
    raise HTTPRedirect(url, permanent, status)

@authenticated

@@ -172,6 +171,10 @@ class MainHandler(RequestHandler):
        path = self.request.uri.replace(sickbeard.WEB_ROOT, '').split('?')[0]

        method = path.strip('/').split('/')[-1]

        if method == 'robots.txt':
            method = 'robots_txt'

        if path.startswith('/api') and method != 'builder':
            apikey = path.strip('/').split('/')[-1]
            method = path.strip('/').split('/')[0]

@@ -225,7 +228,7 @@ class MainHandler(RequestHandler):
    def robots_txt(self, *args, **kwargs):
        """ Keep web crawlers out """
        self.set_header('Content-Type', 'text/plain')
        return 'User-agent: *\nDisallow: /\n'
        return "User-agent: *\nDisallow: /"

    def showPoster(self, show=None, which=None):
        # Redirect initial poster/banner thumb to default images

@@ -3996,6 +3999,7 @@ class Home(MainHandler):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)


        if int(status) == WANTED:
            msg = "Backlog was automatically started for the following seasons of <b>" + showObj.name + "</b>:<br />"
            for season in segment:

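HTTPRedirect now joins the target against sickbeard.WEB_ROOT once, in its constructor, instead of at every raise site, so redirects keep working when the app is served under a URL prefix. urlparse.urljoin handles the base/relative combination; a standalone sketch (the WEB_ROOT value is illustrative):

import urlparse  # Python 2; urllib.parse on Python 3

WEB_ROOT = "/sickbeard/"

class HTTPRedirect(Exception):
    """Exception raised when the request should be redirected."""

    def __init__(self, url, permanent=False, status=None):
        # Resolve the target against the configured web root here, once,
        # instead of at every raise site.
        self.url = urlparse.urljoin(WEB_ROOT, url)
        self.permanent = permanent
        self.status = status
        Exception.__init__(self, self.url, self.permanent, self.status)

try:
    raise HTTPRedirect("home/")
except HTTPRedirect as e:
    print(e.url)  # /sickbeard/home/
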
@@ -1,6 +1,5 @@
import os
import traceback
import time
import sickbeard
import webserve
import webapi