Modified DB code to close its connection right after it's finished with it; this helps performance-wise and should resolve locked DB issues as well.

This commit is contained in:
echel0n 2014-06-07 14:32:38 -07:00
parent 2c0f3a3dc3
commit d00d55fdfc
37 changed files with 855 additions and 881 deletions
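The change boils down to making DBConnection a context manager and switching every call site from holding a bare connection to a `with` block, so the underlying SQLite handle is closed as soon as the work is done. Below is a minimal sketch of that pattern, not the project's actual class: the real DBConnection shown in the diff also carries locking, retry-on-"database is locked" logic, and logging, and the `:memory:` database plus the CREATE TABLE are only there to make the example self-contained and runnable.

```python
import sqlite3


class DBConnection(object):
    """Simplified stand-in for the project's DBConnection (details omitted)."""

    def __init__(self, filename="sickbeard.db"):
        self.filename = filename
        self.connection = sqlite3.connect(filename)

    def select(self, query, args=None):
        cursor = self.connection.cursor()
        cursor.execute(query, args or [])
        return cursor.fetchall()

    def close(self):
        # sqlite3 tolerates repeated close() calls, so __del__ below is safe
        # even after the context manager has already closed the connection.
        self.connection.close()

    # Context-manager protocol added by this commit: entering the "with"
    # block hands back the connection, leaving the block closes it.
    def __enter__(self):
        return self

    def __del__(self):
        self.close()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


# Old style: the connection was created and left open until garbage collection.
#   myDB = DBConnection()
#   rows = myDB.select("SELECT * FROM tv_shows")
#
# New style: the connection is released as soon as the block finishes.
with DBConnection(":memory:") as myDB:  # ":memory:" only for this demo
    myDB.connection.execute("CREATE TABLE tv_shows (indexer_id INTEGER, show_name TEXT)")
    rows = myDB.select("SELECT * FROM tv_shows")
    print(rows)
```

The diffs that follow apply exactly this substitution throughout the codebase, wrapping each query or mass_action in a `with db.DBConnection(...) as myDB:` block.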

View file

@ -76,8 +76,8 @@ def loadShowsFromDB():
Populates the showList with shows from the database
"""
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_shows")
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_shows")
for sqlShow in sqlResults:
try:
@ -187,16 +187,6 @@ def main():
# Need console logging for SickBeard.py and SickBeard-console.exe
consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)
# Attempt to rename the process for easier debugging
try:
from setproctitle import setproctitle
except ImportError:
if consoleLogging:
sys.stderr.write(u"setproctitle module is not available.\n")
setproctitle = lambda t: None
setproctitle(sickbeard.MY_NAME)
# Rename the main thread
threading.currentThread().name = "MAIN"
@ -306,16 +296,18 @@ def main():
sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)
CUR_DB_VERSION = db.DBConnection().checkDBVersion()
with db.DBConnection() as myDB:
CUR_DB_VERSION = myDB.checkDBVersion()
if CUR_DB_VERSION > 0:
if CUR_DB_VERSION < MIN_DB_VERSION:
raise SystemExit("Your database version (" + str(
db.DBConnection().checkDBVersion()) + ") is too old to migrate from with this version of SickRage (" + str(
CUR_DB_VERSION) + ") is too old to migrate from with this version of SickRage (" + str(
MIN_DB_VERSION) + ").\n" + \
"Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
if CUR_DB_VERSION > MAX_DB_VERSION:
raise SystemExit("Your database version (" + str(
db.DBConnection().checkDBVersion()) + ") has been incremented past what this version of SickRage supports (" + str(
CUR_DB_VERSION) + ") has been incremented past what this version of SickRage supports (" + str(
MAX_DB_VERSION) + ").\n" + \
"If you have used other forks of SB, your database may be unusable due to their modifications.")

View file

@ -433,7 +433,6 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'
__INITIALIZED__ = False
def initialize(consoleLogging=True):
with INIT_LOCK:
@ -906,16 +905,20 @@ def initialize(consoleLogging=True):
logger.sb_log_instance.initLogging(consoleLogging=consoleLogging)
# initialize the main SB database
db.upgradeDatabase(db.DBConnection(), mainDB.InitialSchema)
with db.DBConnection() as myDB:
db.upgradeDatabase(myDB, mainDB.InitialSchema)
# initialize the cache database
db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)
with db.DBConnection('cache.db') as myDB:
db.upgradeDatabase(myDB, cache_db.InitialSchema)
# initialize the failed downloads database
db.upgradeDatabase(db.DBConnection("failed.db"), failed_db.InitialSchema)
with db.DBConnection('failed.db') as myDB:
db.upgradeDatabase(myDB, failed_db.InitialSchema)
# fix up any db problems
db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck)
with db.DBConnection() as myDB:
db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck)
# migrate the config if it needs it
migrator = ConfigMigrator(CFG)
@ -1803,8 +1806,8 @@ def getEpList(epIDs, showid=None):
query += " AND showid = ?"
params.append(showid)
myDB = db.DBConnection()
sqlResults = myDB.select(query, params)
with db.DBConnection() as myDB:
sqlResults = myDB.select(query, params)
epList = []

View file

@ -33,8 +33,6 @@ class BlackAndWhiteList(object):
if not show_id:
raise BlackWhitelistNoShowIDException()
self.show_id = show_id
self.myDB = db.DBConnection()
self.refresh()
def refresh(self):
@ -98,8 +96,9 @@ class BlackAndWhiteList(object):
return "Blacklist: " + blackResult + ", Whitelist: " + whiteResult
def _add_keywords(self, table, range, values):
for value in values:
self.myDB.action("INSERT INTO " + table + " (show_id, range , keyword) VALUES (?,?,?)", [self.show_id, range, value])
with db.DBConnection() as myDB:
for value in values:
myDB.action("INSERT INTO " + table + " (show_id, range , keyword) VALUES (?,?,?)", [self.show_id, range, value])
self.refresh()
def _del_all_black_keywords(self):
@ -116,16 +115,19 @@ class BlackAndWhiteList(object):
def _del_all_keywords(self, table):
logger.log(u"Deleting all " + table + " keywords for " + str(self.show_id), logger.DEBUG)
self.myDB.action("DELETE FROM " + table + " WHERE show_id = ?", [self.show_id])
with db.DBConnection() as myDB:
myDB.action("DELETE FROM " + table + " WHERE show_id = ?", [self.show_id])
self.refresh()
def _del_all_keywords_for(self, table, range):
logger.log(u"Deleting all " + range + " " + table + " keywords for " + str(self.show_id), logger.DEBUG)
self.myDB.action("DELETE FROM " + table + " WHERE show_id = ? and range = ?", [self.show_id, range])
with db.DBConnection() as myDB:
myDB.action("DELETE FROM " + table + " WHERE show_id = ? and range = ?", [self.show_id, range])
self.refresh()
def _load_list(self, table):
sqlResults = self.myDB.select("SELECT range,keyword FROM " + table + " WHERE show_id = ? ", [self.show_id])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT range,keyword FROM " + table + " WHERE show_id = ? ", [self.show_id])
if not sqlResults or not len(sqlResults):
return ([], {})

View file

@ -467,8 +467,8 @@ class ConfigMigrator():
sickbeard.NAMING_MULTI_EP = int(check_setting_int(self.config_obj, 'General', 'naming_multi_ep_type', 1))
# see if any of their shows used season folders
myDB = db.DBConnection()
season_folder_shows = myDB.select("SELECT * FROM tv_shows WHERE flatten_folders = 0")
with db.DBConnection() as myDB:
season_folder_shows = myDB.select("SELECT * FROM tv_shows WHERE flatten_folders = 0")
# if any shows had season folders on then prepend season folder to the pattern
if season_folder_shows:

View file

@ -43,9 +43,9 @@ class DailySearcher():
fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
curDate = datetime.date.today()
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?",
[common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?",
[common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])
sql_l = []
todaysEps = {}
@ -80,8 +80,8 @@ class DailySearcher():
sql_l.append(ep.get_sql())
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
if len(todaysEps):
for show in todaysEps:

View file

@ -46,7 +46,7 @@ def dbFilename(filename="sickbeard.db", suffix=None):
return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)
class DBConnection:
class DBConnection(object):
def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):
self.filename = filename
@ -70,45 +70,6 @@ class DBConnection:
else:
return 0
def fetch(self, query, args=None):
with db_lock:
if query == None:
return
sqlResult = None
attempt = 0
while attempt < 5:
try:
if args == None:
logger.log(self.filename + ": " + query, logger.DB)
cursor = self.connection.cursor()
cursor.execute(query)
sqlResult = cursor.fetchone()[0]
else:
logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB)
cursor = self.connection.cursor()
cursor.execute(query, args)
sqlResult = cursor.fetchone()[0]
# get out of the connection attempt loop since we were successful
break
except sqlite3.OperationalError, e:
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
logger.log(u"DB error: " + ex(e), logger.WARNING)
attempt += 1
time.sleep(0.02)
else:
logger.log(u"DB error: " + ex(e), logger.ERROR)
raise
except sqlite3.DatabaseError, e:
logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
raise
return sqlResult
def mass_action(self, querylist, logTransaction=False):
with db_lock:
@ -244,6 +205,15 @@ class DBConnection:
def close(self):
self.connection.close()
def __enter__(self):
return self
def __del__(self):
self.close()
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def sanityCheckDatabase(connection, sanity_check):
sanity_check(connection).check()

View file

@ -50,8 +50,8 @@ def logFailed(release):
release = prepareFailedName(release)
myDB = db.DBConnection("failed.db")
sql_results = myDB.select("SELECT * FROM history WHERE release=?", [release])
with db.DBConnection('failed.db') as myDB:
sql_results = myDB.select("SELECT * FROM history WHERE release=?", [release])
if len(sql_results) == 0:
logger.log(
@ -86,11 +86,10 @@ def logFailed(release):
def logSuccess(release):
myDB = db.DBConnection("failed.db")
release = prepareFailedName(release)
myDB.action("DELETE FROM history WHERE release=?", [release])
with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE release=?", [release])
def hasFailed(release, size, provider="%"):
@ -104,19 +103,20 @@ def hasFailed(release, size, provider="%"):
release = prepareFailedName(release)
myDB = db.DBConnection("failed.db")
sql_results = myDB.select(
"SELECT * FROM failed WHERE release=? AND size=? AND provider LIKE ?",
[release, size, provider])
with db.DBConnection('failed.db') as myDB:
sql_results = myDB.select(
"SELECT * FROM failed WHERE release=? AND size=? AND provider LIKE ?",
[release, size, provider])
return (len(sql_results) > 0)
def revertEpisode(epObj):
"""Restore the episodes of a failed download to their original state"""
myDB = db.DBConnection("failed.db")
with db.DBConnection('failed.db') as myDB:
sql_results = myDB.select("SELECT * FROM history WHERE showid=? AND season=?",
[epObj.show.indexerid, epObj.season])
sql_results = myDB.select("SELECT * FROM history WHERE showid=? AND season=?", [epObj.show.indexerid, epObj.season])
history_eps = dict([(res["episode"], res) for res in sql_results])
try:
@ -127,13 +127,14 @@ def revertEpisode(epObj):
epObj.status = history_eps[epObj.episode]['old_status']
else:
logger.log(u"WARNING: Episode not found in history. Setting it back to WANTED",
logger.WARNING)
logger.WARNING)
epObj.status = WANTED
epObj.saveToDB()
except EpisodeNotFoundException, e:
logger.log(u"Unable to create episode, please set its status manually: " + ex(e),
logger.WARNING)
logger.WARNING)
def markFailed(epObj):
log_str = u""
@ -151,8 +152,6 @@ def markFailed(epObj):
def logSnatch(searchResult):
myDB = db.DBConnection("failed.db")
logDate = datetime.datetime.today().strftime(dateFormat)
release = prepareFailedName(searchResult.name)
@ -164,27 +163,28 @@ def logSnatch(searchResult):
show_obj = searchResult.episodes[0].show
for episode in searchResult.episodes:
myDB.action(
"INSERT INTO history (date, size, release, provider, showid, season, episode, old_status)"
"VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
[logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode,
episode.status])
with db.DBConnection('failed.db') as myDB:
for episode in searchResult.episodes:
myDB.action(
"INSERT INTO history (date, size, release, provider, showid, season, episode, old_status)"
"VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
[logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode,
episode.status])
def deleteLoggedSnatch(release, size, provider):
myDB = db.DBConnection("failed.db")
release = prepareFailedName(release)
myDB.action("DELETE FROM history WHERE release=? AND size=? AND provider=?",
[release, size, provider])
with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE release=? AND size=? AND provider=?",
[release, size, provider])
def trimHistory():
myDB = db.DBConnection("failed.db")
myDB.action("DELETE FROM history WHERE date < " + str(
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))
with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE date < " + str(
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))
def findRelease(epObj):
"""
@ -195,28 +195,29 @@ def findRelease(epObj):
release = None
provider = None
myDB = db.DBConnection("failed.db")
# Clear old snatches for this release if any exist
myDB.action("DELETE FROM history WHERE showid=" + str(epObj.show.indexerid) + " AND season=" + str(
epObj.season) + " AND episode=" + str(epObj.episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
epObj.show.indexerid) + " AND season=" + str(epObj.season) + " AND episode=" + str(epObj.episode) + ")")
with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE showid=" + str(epObj.show.indexerid) + " AND season=" + str(
epObj.season) + " AND episode=" + str(
epObj.episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
epObj.show.indexerid) + " AND season=" + str(epObj.season) + " AND episode=" + str(epObj.episode) + ")")
# Search for release in snatch history
results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",
[epObj.show.indexerid, epObj.season, epObj.episode])
# Search for release in snatch history
results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",
[epObj.show.indexerid, epObj.season, epObj.episode])
for result in results:
release = str(result["release"])
provider = str(result["provider"])
date = result["date"]
for result in results:
release = str(result["release"])
provider = str(result["provider"])
date = result["date"]
# Clear any incomplete snatch records for this release if any exist
myDB.action("DELETE FROM history WHERE release=? AND date!=?", [release, date])
# Clear any incomplete snatch records for this release if any exist
myDB.action("DELETE FROM history WHERE release=? AND date!=?", [release, date])
# Found a previously failed release
logger.log(u"Failed release found for season (%s): (%s)" % (epObj.season, result["release"]), logger.DEBUG)
return (release, provider)
# Found a previously failed release
logger.log(u"Failed release found for season (%s): (%s)" % (epObj.season, result["release"]), logger.DEBUG)
return (release, provider)
# Release was not found
logger.log(u"No releases found for season (%s) of (%s)" % (epObj.season, epObj.show.indexerid), logger.DEBUG)

View file

@ -288,41 +288,40 @@ def makeDir(path):
def searchDBForShow(regShowName, log=False):
showNames = [re.sub('[. -]', ' ', regShowName)]
myDB = db.DBConnection()
yearRegex = "([^()]+?)\s*(\()?(\d{4})(?(2)\))$"
for showName in showNames:
with db.DBConnection() as myDB:
for showName in showNames:
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ?",
[showName])
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ?",
[showName])
if len(sqlResults) == 1:
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
else:
# if we didn't get exactly one result then try again with the year stripped off if possible
match = re.match(yearRegex, showName)
if match and match.group(1):
if log:
logger.log(u"Unable to match original name but trying to manually strip and specify show year",
logger.DEBUG)
sqlResults = myDB.select(
"SELECT * FROM tv_shows WHERE (show_name LIKE ?) AND startyear = ?",
[match.group(1) + '%', match.group(3)])
if len(sqlResults) == 0:
if log:
logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
continue
elif len(sqlResults) > 1:
if log:
logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name",
logger.DEBUG)
continue
else:
if len(sqlResults) == 1:
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
else:
# if we didn't get exactly one result then try again with the year stripped off if possible
match = re.match(yearRegex, showName)
if match and match.group(1):
if log:
logger.log(u"Unable to match original name but trying to manually strip and specify show year",
logger.DEBUG)
sqlResults = myDB.select(
"SELECT * FROM tv_shows WHERE (show_name LIKE ?) AND startyear = ?",
[match.group(1) + '%', match.group(3)])
if len(sqlResults) == 0:
if log:
logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
continue
elif len(sqlResults) > 1:
if log:
logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name",
logger.DEBUG)
continue
else:
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
return
@ -681,9 +680,9 @@ def update_anime_support():
sickbeard.ANIMESUPPORT = is_anime_in_show_list()
def get_absolute_number_from_season_and_episode(show, season, episode):
myDB = db.DBConnection()
sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
sqlResults = myDB.select(sql, [show.indexerid, season, episode])
with db.DBConnection() as myDB:
sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
sqlResults = myDB.select(sql, [show.indexerid, season, episode])
if len(sqlResults) == 1:
absolute_number = int(sqlResults[0]["absolute_number"])

View file

@ -31,10 +31,10 @@ def _logHistoryItem(action, showid, season, episode, quality, resource, provider
if not isinstance(resource, unicode):
resource = unicode(resource, 'utf-8')
myDB = db.DBConnection()
myDB.action(
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
[action, logDate, showid, season, episode, quality, resource, provider])
with db.DBConnection() as myDB:
myDB.action(
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
[action, logDate, showid, season, episode, quality, resource, provider])
def logSnatch(searchResult):

View file

@ -32,8 +32,8 @@ def addNameToCache(name, indexer_id=0):
# standardize the name we're using to account for small differences in providers
name = sanitizeSceneName(name)
cacheDB = db.DBConnection('cache.db')
cacheDB.action("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])
with db.DBConnection('cache.db') as myDB:
myDB.action("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])
def retrieveNameFromCache(name):
@ -50,9 +50,9 @@ def retrieveNameFromCache(name):
# standardize the name we're using to account for small differences in providers
name = sanitizeSceneName(name)
cacheDB = db.DBConnection('cache.db')
if cacheDB.hasTable('scene_names'):
cache_results = cacheDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
with db.DBConnection('cache.db') as myDB:
if myDB.hasTable('scene_names'):
cache_results = myDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
if cache_results:
return int(cache_results[0]["indexer_id"])
@ -66,10 +66,11 @@ def clearCache(show=None, season=-1, indexer_id=0):
"""
Deletes all "unknown" entries from the cache (names with indexer_id of 0).
"""
cacheDB = db.DBConnection('cache.db')
if show:
showNames = sickbeard.show_name_helpers.allPossibleShowNames(show, season=season)
for showName in showNames:
cacheDB.action("DELETE FROM scene_names WHERE name = ? and indexer_id = ?", [showName, indexer_id])
else:
cacheDB.action("DELETE FROM scene_names WHERE indexer_id = ?", [indexer_id])
with db.DBConnection('cache.db') as myDB:
if show:
showNames = sickbeard.show_name_helpers.allPossibleShowNames(show, season=season)
for showName in showNames:
myDB.action("DELETE FROM scene_names WHERE name = ? and indexer_id = ?", [showName, indexer_id])
else:
myDB.action("DELETE FROM scene_names WHERE indexer_id = ?", [indexer_id])

View file

@ -164,42 +164,42 @@ def update_network_dict():
except (IOError, OSError):
pass
myDB = db.DBConnection("cache.db")
# load current network timezones
old_d = dict(myDB.select("SELECT * FROM network_timezones"))
with db.DBConnection('cache.db') as myDB:
# load current network timezones
old_d = dict(myDB.select("SELECT * FROM network_timezones"))
# list of sql commands to update the network_timezones table
ql = []
for cur_d, cur_t in d.iteritems():
h_k = old_d.has_key(cur_d)
if h_k and cur_t != old_d[cur_d]:
# update old record
ql.append(
["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
elif not h_k:
# add new record
ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
if h_k:
del old_d[cur_d]
# remove deleted records
if len(old_d) > 0:
L = list(va for va in old_d)
ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])
# change all network timezone infos at once (much faster)
if ql:
myDB.mass_action(ql)
load_network_dict()
# list of sql commands to update the network_timezones table
ql = []
for cur_d, cur_t in d.iteritems():
h_k = old_d.has_key(cur_d)
if h_k and cur_t != old_d[cur_d]:
# update old record
ql.append(
["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
elif not h_k:
# add new record
ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
if h_k:
del old_d[cur_d]
# remove deleted records
if len(old_d) > 0:
L = list(va for va in old_d)
ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])
# change all network timezone infos at once (much faster)
if ql:
myDB.mass_action(ql)
load_network_dict()
# load network timezones from db into dict
def load_network_dict():
d = {}
try:
myDB = db.DBConnection("cache.db")
cur_network_list = myDB.select("SELECT * FROM network_timezones")
if cur_network_list is None or len(cur_network_list) < 1:
update_network_dict()
with db.DBConnection('cache.db') as myDB:
cur_network_list = myDB.select("SELECT * FROM network_timezones")
if cur_network_list is None or len(cur_network_list) < 1:
update_network_dict()
cur_network_list = myDB.select("SELECT * FROM network_timezones")
d = dict(cur_network_list)
except:
d = {}

View file

@ -151,13 +151,13 @@ class EmailNotifier:
addrs.append(addr)
# Grab the recipients for the show
mydb = db.DBConnection()
for s in show:
for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (s,)):
if subs['notify_list']:
for addr in subs['notify_list'].split(','):
if (len(addr.strip()) > 0):
addrs.append(addr)
with db.DBConnection() as myDB:
for s in show:
for subs in myDB.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (s,)):
if subs['notify_list']:
for addr in subs['notify_list'].split(','):
if (len(addr.strip()) > 0):
addrs.append(addr)
addrs = set(addrs)
logger.log('Notification recepients: %s' % addrs, logger.DEBUG)

View file

@ -405,31 +405,30 @@ class PostProcessor(object):
if self.folder_name:
names.append(self.folder_name)
myDB = db.DBConnection()
# search the database for a possible match and return immediately if we find one
for curName in names:
search_name = re.sub("[\.\-\ ]", "_", curName)
sql_results = myDB.select("SELECT * FROM history WHERE resource LIKE ?", [search_name])
with db.DBConnection() as myDB:
for curName in names:
search_name = re.sub("[\.\-\ ]", "_", curName)
sql_results = myDB.select("SELECT * FROM history WHERE resource LIKE ?", [search_name])
if len(sql_results) == 0:
continue
if len(sql_results) == 0:
continue
show = helpers.findCertainShow(sickbeard.showList, int(sql_results[0]["showid"]))
if not show:
continue
show = helpers.findCertainShow(sickbeard.showList, int(sql_results[0]["showid"]))
if not show:
continue
season = int(sql_results[0]["season"])
quality = int(sql_results[0]["quality"])
season = int(sql_results[0]["season"])
quality = int(sql_results[0]["quality"])
if quality == common.Quality.UNKNOWN:
quality = None
if quality == common.Quality.UNKNOWN:
quality = None
self.in_history = True
to_return = (show, season, [], quality)
self._log("Found result in history: " + str(to_return), logger.DEBUG)
self.in_history = True
to_return = (show, season, [], quality)
self._log("Found result in history: " + str(to_return), logger.DEBUG)
return to_return
return to_return
self.in_history = False
return to_return
@ -624,9 +623,9 @@ class PostProcessor(object):
self._log(u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = episodes[0].toordinal()
myDB = db.DBConnection()
sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[show.indexerid, show.indexer, airdate])
with db.DBConnection() as myDB:
sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[show.indexerid, show.indexer, airdate])
if sql_result:
season = int(sql_result[0][0])
@ -640,10 +639,10 @@ class PostProcessor(object):
# if there's no season then we can hopefully just use 1 automatically
elif season == None and show:
myDB = db.DBConnection()
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
[show.indexerid, show.indexer])
with db.DBConnection() as myDB:
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
[show.indexerid, show.indexer])
if int(numseasonsSQlResult[0][0]) == 1 and season == None:
self._log(
u"Don't have a season number, but this show appears to only have 1 season, setting season number to 1...",
@ -965,8 +964,8 @@ class PostProcessor(object):
self._log(u"Couldn't find release in snatch history", logger.WARNING)
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
# find the destination folder
try:
@ -1042,8 +1041,8 @@ class PostProcessor(object):
sql_l.append(ep_obj.get_sql())
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
# log it to history
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

View file

@ -242,8 +242,9 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
return False
# make sure the dir isn't inside a show dir
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_shows")
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_shows")
for sqlShow in sqlResults:
if dirName.lower().startswith(
ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(
@ -344,33 +345,33 @@ def already_postprocessed(dirName, videofile, force):
dirName = unicode(dirName, 'utf_8')
# Avoid processing the same dir again if we use a process method <> move
myDB = db.DBConnection()
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
if sqlResult:
returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
logger.DEBUG)
return True
with db.DBConnection() as myDB:
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
if sqlResult:
returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
logger.DEBUG)
return True
# This is needed for video whose name differ from dirName
if not isinstance(videofile, unicode):
videofile = unicode(videofile, 'utf_8')
# This is needed for video whose name differ from dirName
if not isinstance(videofile, unicode):
videofile = unicode(videofile, 'utf_8')
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
if sqlResult:
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
logger.DEBUG)
return True
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
if sqlResult:
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
logger.DEBUG)
return True
#Needed if we have downloaded the same episode @ different quality
search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
search_sql += " and history.resource LIKE ?"
sqlResult = myDB.select(search_sql, [u'%' + videofile])
if sqlResult:
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
logger.DEBUG)
return True
#Needed if we have downloaded the same episode @ different quality
search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
search_sql += " and history.resource LIKE ?"
sqlResult = myDB.select(search_sql, [u'%' + videofile])
if sqlResult:
returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
logger.DEBUG)
return True
return False

View file

@ -182,10 +182,10 @@ class ProperFinder():
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = curProper.episode.toordinal()
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[curProper.indexerid, curProper.indexer, airdate])
with db.DBConnection() as myDB:
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[curProper.indexerid, curProper.indexer, airdate])
if sql_result:
curProper.season = int(sql_result[0][0])
@ -196,11 +196,14 @@ class ProperFinder():
continue
# check if we actually want this proper (if it's the right quality)
sqlResults = db.DBConnection().select(
"SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[curProper.indexerid, curProper.season, curProper.episode])
with db.DBConnection() as myDB:
sqlResults = myDB.select(
"SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[curProper.indexerid, curProper.season, curProper.episode])
if not sqlResults:
continue
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
@ -222,13 +225,13 @@ class ProperFinder():
historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)
# make sure the episode has been downloaded before
myDB = db.DBConnection()
historyResults = myDB.select(
"SELECT resource FROM history "
"WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "
"AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")",
[curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
historyLimit.strftime(history.dateFormat)])
with db.DBConnection() as myDB:
historyResults = myDB.select(
"SELECT resource FROM history "
"WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "
"AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")",
[curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
historyLimit.strftime(history.dateFormat)])
# if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
if len(historyResults) == 0:
@ -273,19 +276,19 @@ class ProperFinder():
logger.log(u"Setting the last Proper search in the DB to " + str(when), logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:
myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)",
[0, 0, str(when)])
else:
myDB.action("UPDATE info SET last_proper_search=" + str(when))
if len(sqlResults) == 0:
myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)",
[0, 0, str(when)])
else:
myDB.action("UPDATE info SET last_proper_search=" + str(when))
def _get_lastProperSearch(self):
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
try:
last_proper_search = datetime.date.fromordinal(int(sqlResults[0]["last_proper_search"]))

View file

@ -321,11 +321,11 @@ class GenericProvider:
logger.DEBUG)
continue
myDB = db.DBConnection()
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[show.indexerid,
parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])
with db.DBConnection() as myDB:
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[show.indexerid,
parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])
if len(sql_results) != 1:
logger.log(

View file

@ -314,13 +314,15 @@ class HDTorrentsProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -257,13 +257,15 @@ class IPTorrentsProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -393,13 +393,15 @@ class KATProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -305,13 +305,15 @@ class NextGenProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -277,13 +277,15 @@ class PublicHDProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -299,13 +299,15 @@ class SCCProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -239,13 +239,15 @@ class SpeedCDProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -371,13 +371,15 @@ class ThePirateBayProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -263,13 +263,15 @@ class TorrentDayProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -258,13 +258,15 @@ class TorrentLeechProvider(generic.TorrentProvider):
results = []
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
with db.DBConnection() as myDB:
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

View file

@ -41,9 +41,9 @@ def get_scene_exceptions(indexer_id, season=-1):
global exceptionCache
if indexer_id not in exceptionCache or season not in exceptionCache[indexer_id]:
myDB = db.DBConnection("cache.db")
exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?",
[indexer_id, season])
with db.DBConnection('cache.db') as myDB:
exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?",
[indexer_id, season])
exceptionsList = list(set([cur_exception["show_name"] for cur_exception in exceptions]))
if len(exceptionsList):
@ -60,8 +60,8 @@ def get_scene_exceptions(indexer_id, season=-1):
return exceptionsList
def get_all_scene_exceptions(indexer_id):
myDB = db.DBConnection("cache.db")
exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
with db.DBConnection('cache.db') as myDB:
exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
exceptionsList = {}
[cur_exception["show_name"] for cur_exception in exceptions]
for cur_exception in exceptions:
@ -77,9 +77,9 @@ def get_scene_seasons(indexer_id):
"""
global exceptionSeasonCache
if indexer_id not in exceptionSeasonCache:
myDB = db.DBConnection("cache.db")
sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?",
[indexer_id])
with db.DBConnection('cache.db') as myDB:
sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?",
[indexer_id])
exceptionSeasonCache[indexer_id] = [int(x["season"]) for x in sqlResults]
return exceptionSeasonCache[indexer_id]
@ -95,17 +95,17 @@ def get_scene_exception_by_name_multiple(show_name):
is present.
"""
myDB = db.DBConnection("cache.db")
# try the obvious case first
exception_result = myDB.select(
"SELECT indexer_id, season FROM scene_exceptions WHERE LOWER(show_name) = ? ORDER BY season ASC",
[show_name.lower()])
if exception_result:
return [(int(x["indexer_id"]), int(x["season"])) for x in exception_result]
with db.DBConnection('cache.db') as myDB:
exception_result = myDB.select(
"SELECT indexer_id, season FROM scene_exceptions WHERE LOWER(show_name) = ? ORDER BY season ASC",
[show_name.lower()])
if exception_result:
return [(int(x["indexer_id"]), int(x["season"])) for x in exception_result]
out = []
all_exception_results = myDB.select("SELECT show_name, indexer_id, season FROM scene_exceptions")
out = []
all_exception_results = myDB.select("SELECT show_name, indexer_id, season FROM scene_exceptions")
for cur_exception in all_exception_results:
cur_exception_name = cur_exception["show_name"]
@ -180,25 +180,24 @@ def retrieve_exceptions():
else:
exception_dict[local_ex] = local_exceptions[local_ex]
myDB = db.DBConnection("cache.db")
changed_exceptions = False
# write all the exceptions we got off the net into the database
for cur_indexer_id in exception_dict:
with db.DBConnection('cache.db') as myDB:
for cur_indexer_id in exception_dict:
# get a list of the existing exceptions for this ID
existing_exceptions = [x["show_name"] for x in
myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [cur_indexer_id])]
# get a list of the existing exceptions for this ID
existing_exceptions = [x["show_name"] for x in
myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [cur_indexer_id])]
for cur_exception_dict in exception_dict[cur_indexer_id]:
cur_exception, curSeason = cur_exception_dict.items()[0]
for cur_exception_dict in exception_dict[cur_indexer_id]:
cur_exception, curSeason = cur_exception_dict.items()[0]
# if this exception isn't already in the DB then add it
if cur_exception not in existing_exceptions:
myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
[cur_indexer_id, cur_exception, curSeason])
changed_exceptions = True
# if this exception isn't already in the DB then add it
if cur_exception not in existing_exceptions:
myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
[cur_indexer_id, cur_exception, curSeason])
changed_exceptions = True
# since this could invalidate the results of the cache we clear it out after updating
if changed_exceptions:
@ -217,16 +216,15 @@ def update_scene_exceptions(indexer_id, scene_exceptions):
global exceptionIndexerCache
myDB = db.DBConnection("cache.db")
with db.DBConnection('cache.db') as myDB:
myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])
myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])
logger.log(u"Updating internal scene name cache", logger.MESSAGE)
for cur_season in [-1] + sickbeard.scene_exceptions.get_scene_seasons(indexer_id):
for cur_exception in scene_exceptions:
exceptionIndexerCache[helpers.full_sanitizeSceneName(cur_exception)] = indexer_id
myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
[indexer_id, cur_exception, cur_season, 1])
logger.log(u"Updating internal scene name cache", logger.MESSAGE)
for cur_season in [-1] + sickbeard.scene_exceptions.get_scene_seasons(indexer_id):
for cur_exception in scene_exceptions:
exceptionIndexerCache[helpers.full_sanitizeSceneName(cur_exception)] = indexer_id
myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
[indexer_id, cur_exception, cur_season, 1])
name_cache.clearCache()
@ -237,31 +235,30 @@ def _retrieve_anidb_mainnames():
anidb_mainNames = {}
cacheDB = db.DBConnection('cache.db')
with db.DBConnection('cache.db') as myDB:
rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['anidb'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_ANIDB_AGE_SECS)
else:
refresh = True
rows = cacheDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['anidb'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_ANIDB_AGE_SECS)
else:
refresh = True
if refresh:
for show in sickbeard.showList:
if show.is_anime and show.indexer == 1:
try:
anime = adba.Anime(None, name=show.name, tvdbid=show.indexerid, autoCorrectName=True)
except:
continue
else:
success = True
if refresh:
for show in sickbeard.showList:
if show.is_anime and show.indexer == 1:
try:
anime = adba.Anime(None, name=show.name, tvdbid=show.indexerid, autoCorrectName=True)
except:
continue
else:
success = True
if anime.name and anime.name != show.name:
anidb_mainNames[show.indexerid] = [{anime.name: -1}]
if anime.name and anime.name != show.name:
anidb_mainNames[show.indexerid] = [{anime.name: -1}]
if success:
cacheDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['anidb', time.time()])
if success:
myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['anidb', time.time()])
return anidb_mainNames
@ -271,31 +268,30 @@ def _xem_excpetions_fetcher(indexer):
exception_dict = {}
cacheDB = db.DBConnection('cache.db')
with db.DBConnection('cache.db') as myDB:
rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['xem'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
else:
refresh = True
rows = cacheDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['xem'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
else:
refresh = True
if refresh:
url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config['xem_origin']
if refresh:
url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config['xem_origin']
url_data = helpers.getURL(url, json=True)
if url_data is None:
logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
return exception_dict
url_data = helpers.getURL(url, json=True)
if url_data is None:
logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
return exception_dict
if url_data['result'] == 'failure':
return exception_dict
if url_data['result'] == 'failure':
return exception_dict
myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['xem', time.time()])
cacheDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['xem', time.time()])
for indexerid, names in url_data['data'].items():
exception_dict[int(indexerid)] = names
for indexerid, names in url_data['data'].items():
exception_dict[int(indexerid)] = names
return exception_dict
@ -303,8 +299,8 @@ def _xem_excpetions_fetcher(indexer):
def getSceneSeasons(indexer_id):
"""get a list of season numbers that have scene excpetions
"""
myDB = db.DBConnection("cache.db")
seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
with db.DBConnection('cache.db') as myDB:
seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
return [cur_exception["season"] for cur_exception in seasons]
def buildIndexerCache():

View file

@ -82,11 +82,11 @@ def find_scene_numbering(indexer_id, indexer, season, episode):
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0",
[indexer, indexer_id, season, episode])
rows = myDB.select(
"SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0",
[indexer, indexer_id, season, episode])
if rows:
return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"]))
@ -134,11 +134,11 @@ def find_scene_absolute_numbering(indexer_id, indexer, absolute_number):
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and absolute_number = ? and scene_absolute_number != 0",
[indexer, indexer_id, absolute_number])
rows = myDB.select(
"SELECT scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and absolute_number = ? and scene_absolute_number != 0",
[indexer, indexer_id, absolute_number])
if rows:
return int(rows[0]["scene_absolute_number"])
@ -154,11 +154,11 @@ def get_indexer_numbering(indexer_id, indexer, sceneSeason, sceneEpisode, fallba
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT season, episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?",
[indexer, indexer_id, sceneSeason, sceneEpisode])
rows = myDB.select(
"SELECT season, episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?",
[indexer, indexer_id, sceneSeason, sceneEpisode])
if rows:
return (int(rows[0]["season"]), int(rows[0]["episode"]))
else:
@ -178,11 +178,11 @@ def get_indexer_absolute_numbering(indexer_id, indexer, sceneAbsoluteNumber, fal
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number = ?",
[indexer, indexer_id, sceneAbsoluteNumber])
rows = myDB.select(
"SELECT absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number = ?",
[indexer, indexer_id, sceneAbsoluteNumber])
if rows:
return int(rows[0]["absolute_number"])
else:
@ -203,24 +203,23 @@ def set_scene_numbering(indexer_id, indexer, season=None, episode=None, absolute
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
if season and episode:
myDB.action(
"INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, season, episode) VALUES (?,?,?,?)",
[indexer, indexer_id, season, episode])
if season and episode:
myDB.action(
"INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, season, episode) VALUES (?,?,?,?)",
[indexer, indexer_id, season, episode])
myDB.action(
"UPDATE scene_numbering SET scene_season = ?, scene_episode = ? WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?",
[sceneSeason, sceneEpisode, indexer, indexer_id, season, episode])
elif absolute_number:
myDB.action(
"INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, absolute_number) VALUES (?,?,?)",
[indexer, indexer_id, absolute_number])
myDB.action(
"UPDATE scene_numbering SET scene_season = ?, scene_episode = ? WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?",
[sceneSeason, sceneEpisode, indexer, indexer_id, season, episode])
elif absolute_number:
myDB.action(
"INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, absolute_number) VALUES (?,?,?)",
[indexer, indexer_id, absolute_number])
myDB.action(
"UPDATE scene_numbering SET scene_absolute_number = ? WHERE indexer = ? and indexer_id = ? and absolute_number = ?",
[sceneAbsolute, indexer, indexer_id, absolute_number])
myDB.action(
"UPDATE scene_numbering SET scene_absolute_number = ? WHERE indexer = ? and indexer_id = ? and absolute_number = ?",
[sceneAbsolute, indexer, indexer_id, absolute_number])
def find_xem_numbering(indexer_id, indexer, season, episode):
@ -241,11 +240,10 @@ def find_xem_numbering(indexer_id, indexer, season, episode):
xem_refresh(indexer_id, indexer)
myDB = db.DBConnection()
rows = myDB.select(
"SELECT scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0",
[indexer, indexer_id, season, episode])
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0",
[indexer, indexer_id, season, episode])
if rows:
return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"]))
@ -268,11 +266,10 @@ def find_xem_absolute_numbering(indexer_id, indexer, absolute_number):
xem_refresh(indexer_id, indexer)
myDB = db.DBConnection()
rows = myDB.select(
"SELECT scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and absolute_number = ? and scene_absolute_number != 0",
[indexer, indexer_id, absolute_number])
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and absolute_number = ? and scene_absolute_number != 0",
[indexer, indexer_id, absolute_number])
if rows:
return int(rows[0]["scene_absolute_number"])
@ -295,11 +292,11 @@ def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode
xem_refresh(indexer_id, indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE indexer = ? and showid = ? and scene_season = ? and scene_episode = ?",
[indexer, indexer_id, sceneSeason, sceneEpisode])
rows = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE indexer = ? and showid = ? and scene_season = ? and scene_episode = ?",
[indexer, indexer_id, sceneSeason, sceneEpisode])
if rows:
return (int(rows[0]["season"]), int(rows[0]["episode"]))
@ -322,11 +319,11 @@ def get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNum
xem_refresh(indexer_id, indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ?",
[indexer, indexer_id, sceneAbsoluteNumber])
rows = myDB.select(
"SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ?",
[indexer, indexer_id, sceneAbsoluteNumber])
if rows:
return int(rows[0]["absolute_number"])
@ -345,11 +342,10 @@ def get_scene_numbering_for_show(indexer_id, indexer):
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
rows = myDB.select(
'SELECT season, episode, scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode',
[indexer, indexer_id])
with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT season, episode, scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode',
[indexer, indexer_id])
result = {}
for row in rows:
@ -377,11 +373,10 @@ def get_xem_numbering_for_show(indexer_id, indexer):
xem_refresh(indexer_id, indexer)
myDB = db.DBConnection()
rows = myDB.select(
'SELECT season, episode, scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode',
[indexer, indexer_id])
with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT season, episode, scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode',
[indexer, indexer_id])
result = {}
for row in rows:
@ -407,11 +402,10 @@ def get_scene_absolute_numbering_for_show(indexer_id, indexer):
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
rows = myDB.select(
'SELECT absolute_number, scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number != 0 ORDER BY absolute_number',
[indexer, indexer_id])
with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT absolute_number, scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number != 0 ORDER BY absolute_number',
[indexer, indexer_id])
result = {}
for row in rows:
@ -437,12 +431,12 @@ def get_xem_absolute_numbering_for_show(indexer_id, indexer):
xem_refresh(indexer_id, indexer)
myDB = db.DBConnection()
result = {}
rows = myDB.select(
'SELECT absolute_number, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ORDER BY absolute_number',
[indexer, indexer_id])
with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT absolute_number, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ORDER BY absolute_number',
[indexer, indexer_id])
for row in rows:
absolute_number = int(row['absolute_number'])
@ -464,10 +458,10 @@ def xem_refresh(indexer_id, indexer, force=False):
indexer_id = int(indexer_id)
indexer = int(indexer)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?",
[indexer, indexer_id])
rows = myDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?",
[indexer, indexer_id])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
else:
@ -532,7 +526,8 @@ def xem_refresh(indexer_id, indexer, force=False):
return None
if ql:
myDB.mass_action(ql)
with db.DBConnection() as myDB:
myDB.mass_action(ql)
# fix xem scene numbering issues
# fix_xem_numbering(indexer_id, indexer)
@ -590,11 +585,10 @@ def fix_xem_numbering(indexer_id, indexer):
# # Get query results
# tmp = get_from_api(url, params=params)['result']
myDB = db.DBConnection()
rows = myDB.select(
'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ?',
[indexer, indexer_id])
with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ?',
[indexer, indexer_id])
last_absolute_number = None
last_scene_season = None
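Every hunk in this file, and in the files that follow, swaps a bare myDB = db.DBConnection() for a with db.DBConnection() as myDB: block so the connection is released as soon as the queries inside the block have run. That only works if DBConnection implements Python's context-manager protocol; the db.py side of the change is not shown in this section, so the sketch below is only an assumption of roughly what __enter__/__exit__ would need to do (the connection attribute, the 20-second timeout and the close() call are illustrative, not lines from this commit):

import sqlite3

class DBConnection(object):
    def __init__(self, filename="sickbeard.db"):
        # open the underlying sqlite handle up front
        self.filename = filename
        self.connection = sqlite3.connect(filename, 20)

    def __enter__(self):
        # hand the wrapper itself to the "as myDB" target so
        # myDB.select()/myDB.action()/myDB.mass_action() keep working inside the block
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # close on the way out, even if the block raised; writes are assumed to have
        # been committed by action()/mass_action() already, and returning None lets
        # any exception propagate to the caller
        if self.connection:
            self.connection.close()
            self.connection = None

The rows returned by myDB.select() are already fetched, which is why the callers above can keep using them after the block exits; functions that also need to write later (xem_refresh, for example) simply open a second short-lived connection around mass_action.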

View file

@ -175,8 +175,8 @@ def snatchEpisode(result, endStatus=SNATCHED):
notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
return True
@ -511,10 +511,10 @@ def searchProviders(show, season, episodes, manualSearch=False):
u"The quality of the season " + bestSeasonNZB.provider.providerType + " is " + Quality.qualityStrings[
seasonQual], logger.DEBUG)
myDB = db.DBConnection()
allEps = [int(x["episode"]) for x in
myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?",
[show.indexerid, season])]
with db.DBConnection() as myDB:
allEps = [int(x["episode"]) for x in
myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?",
[show.indexerid, season])]
logger.log(u"Episode list: " + str(allEps), logger.DEBUG)
allWanted = True

View file

@ -117,8 +117,8 @@ class BacklogSearcher:
logger.log(u"Retrieving the last check time from the DB", logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:
lastBacklog = 1
@ -137,15 +137,15 @@ class BacklogSearcher:
logger.log(u"Seeing if we need anything from " + show.name)
myDB = db.DBConnection()
if show.air_by_date:
sqlResults = myDB.select(
"SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
[fromDate.toordinal(), show.indexerid])
else:
sqlResults = myDB.select(
"SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
[show.indexerid, fromDate.toordinal()])
with db.DBConnection() as myDB:
if show.air_by_date:
sqlResults = myDB.select(
"SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
[fromDate.toordinal(), show.indexerid])
else:
sqlResults = myDB.select(
"SELECT status, season, episode FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
[show.indexerid, fromDate.toordinal()])
# check through the list of statuses to see if we want any
wanted = {}
@ -175,13 +175,13 @@ class BacklogSearcher:
logger.log(u"Setting the last backlog in the DB to " + str(when), logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:
myDB.action("INSERT INTO info (last_backlog, last_indexer) VALUES (?,?)", [str(when), 0])
else:
myDB.action("UPDATE info SET last_backlog=" + str(when))
if len(sqlResults) == 0:
myDB.action("INSERT INTO info (last_backlog, last_indexer) VALUES (?,?)", [str(when), 0])
else:
myDB.action("UPDATE info SET last_backlog=" + str(when))
def run(self, force=False):

View file

@ -85,11 +85,11 @@ class ShowUpdater():
stale_should_update = []
stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()
myDB = db.DBConnection()
# last_update_date <= 90 days, sorted ASC because dates are ordinal
sql_result = myDB.select(
"SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? ORDER BY last_update_indexer ASC LIMIT 10;",
[stale_update_date])
with db.DBConnection() as myDB:
sql_result = myDB.select(
"SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? ORDER BY last_update_indexer ASC LIMIT 10;",
[stale_update_date])
for cur_result in sql_result:
stale_should_update.append(int(cur_result['indexer_id']))

View file

@ -108,7 +108,6 @@ def makeSceneShowSearchStrings(show, season=-1):
def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
myDB = db.DBConnection()
if show.air_by_date or show.sports:
numseasons = 0
@ -144,9 +143,10 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
seasonStrings.append("%d" % ab_number)
else:
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
[show.indexerid])
with db.DBConnection() as myDB:
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
[show.indexerid])
numseasons = int(numseasonsSQlResult[0][0])
seasonStrings = ["S%02d" % int(ep_obj.scene_season)]
@ -177,10 +177,10 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
def makeSceneSearchString(show, ep_obj):
myDB = db.DBConnection()
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
[show.indexerid])
with db.DBConnection() as myDB:
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
[show.indexerid])
numseasons = int(numseasonsSQlResult[0][0])
# see if we should use dates instead of episodes

View file

@ -363,9 +363,9 @@ class QueueItemAdd(ShowQueueItem):
# if they gave a custom status then change all the eps to it
if self.default_status != SKIPPED:
logger.log(u"Setting all episodes to the specified default status: " + str(self.default_status))
myDB = db.DBConnection()
myDB.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ? AND season != 0",
[self.default_status, SKIPPED, self.show.indexerid])
with db.DBConnection() as myDB:
myDB.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ? AND season != 0",
[self.default_status, SKIPPED, self.show.indexerid])
# if they started with WANTED eps then run the backlog
if self.default_status == WANTED:

View file

@ -103,11 +103,12 @@ class SubtitlesFinder():
# - episode subtitles != config wanted languages or SINGLE (depends on config multi)
# - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
# - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
myDB = db.DBConnection()
today = datetime.date.today().toordinal()
# you have 5 minutes to understand that one. Good luck
sqlResults = myDB.select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) AS airdate_daydiff FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) AND (e.status IN ('+','.join([str(x) for x in Quality.DOWNLOADED])+') OR (e.status IN ('+','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER])+') AND e.location != ""))', [today, wantedLanguages(True), today, today])
with db.DBConnection() as myDB:
sqlResults = myDB.select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) AS airdate_daydiff FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) AND (e.status IN ('+','.join([str(x) for x in Quality.DOWNLOADED])+') OR (e.status IN ('+','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER])+') AND e.location != ""))', [today, wantedLanguages(True), today, today])
if len(sqlResults) == 0:
logger.log('No subtitles to download', logger.MESSAGE)
return
@ -141,7 +142,6 @@ class SubtitlesFinder():
try:
subtitles = epObj.downloadSubtitles()
except:
logger.log(u'Unable to find subtitles', logger.DEBUG)
return

View file

@ -153,8 +153,6 @@ class TVShow(object):
def getAllEpisodes(self, season=None, has_location=False):
myDB = db.DBConnection()
sql_selection = "SELECT season, episode, "
# subselection to detect multi-episodes early, share_location > 0
@ -171,24 +169,25 @@ class TVShow(object):
# need ORDER episode ASC to rename multi-episodes in order S01E01-02
sql_selection = sql_selection + " ORDER BY season ASC, episode ASC"
results = myDB.select(sql_selection)
with db.DBConnection() as myDB:
results = myDB.select(sql_selection)
ep_list = []
for cur_result in results:
cur_ep = self.getEpisode(int(cur_result["season"]), int(cur_result["episode"]))
if cur_ep:
cur_ep.relatedEps = []
if cur_ep.location:
# if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps
if cur_result["share_location"] > 0:
related_eps_result = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC",
[self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode])
for cur_related_ep in related_eps_result:
related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
if related_ep not in cur_ep.relatedEps:
cur_ep.relatedEps.append(related_ep)
ep_list.append(cur_ep)
ep_list = []
for cur_result in results:
cur_ep = self.getEpisode(int(cur_result["season"]), int(cur_result["episode"]))
if cur_ep:
cur_ep.relatedEps = []
if cur_ep.location:
# if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps
if cur_result["share_location"] > 0:
related_eps_result = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC",
[self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode])
for cur_related_ep in related_eps_result:
related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
if related_ep not in cur_ep.relatedEps:
cur_ep.relatedEps.append(related_ep)
ep_list.append(cur_ep)
return ep_list
@ -202,8 +201,8 @@ class TVShow(object):
# if we get an anime get the real season and episode
if self.is_anime and absolute_number and not season and not episode:
myDB = db.DBConnection()
sql = "SELECT * FROM tv_episodes WHERE showid = ? and absolute_number = ? and season != 0"
with db.DBConnection() as myDB:
sql = "SELECT * FROM tv_episodes WHERE showid = ? and absolute_number = ? and season != 0"
sqlResults = myDB.select(sql, [self.indexerid, absolute_number])
if len(sqlResults) == 1:
@ -264,28 +263,28 @@ class TVShow(object):
graceperiod = datetime.timedelta(days=30)
myDB = db.DBConnection()
last_airdate = datetime.date.fromordinal(1)
# get latest aired episode to compare against today - graceperiod and today + graceperiod
sql_result = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status > '1' ORDER BY airdate DESC LIMIT 1",
[cur_indexerid])
with db.DBConnection() as myDB:
sql_result = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status > '1' ORDER BY airdate DESC LIMIT 1",
[cur_indexerid])
if sql_result:
last_airdate = datetime.date.fromordinal(sql_result[0]['airdate'])
if last_airdate >= (update_date - graceperiod) and last_airdate <= (update_date + graceperiod):
return True
if sql_result:
last_airdate = datetime.date.fromordinal(sql_result[0]['airdate'])
if last_airdate >= (update_date - graceperiod) and last_airdate <= (update_date + graceperiod):
return True
# get next upcoming UNAIRED episode to compare against today + graceperiod
sql_result = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status = '1' ORDER BY airdate ASC LIMIT 1",
[cur_indexerid])
# get next upcoming UNAIRED episode to compare against today + graceperiod
sql_result = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status = '1' ORDER BY airdate ASC LIMIT 1",
[cur_indexerid])
if sql_result:
next_airdate = datetime.date.fromordinal(sql_result[0]['airdate'])
if next_airdate <= (update_date + graceperiod):
return True
if sql_result:
next_airdate = datetime.date.fromordinal(sql_result[0]['airdate'])
if next_airdate <= (update_date + graceperiod):
return True
last_update_indexer = datetime.date.fromordinal(self.last_update_indexer)
@ -331,8 +330,8 @@ class TVShow(object):
logger.log(str(self.indexerid) + u": Writing NFOs for all episodes")
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
for epResult in sqlResults:
logger.log(str(self.indexerid) + u": Retrieving/creating episode " + str(epResult["season"]) + "x" + str(
@ -422,16 +421,16 @@ class TVShow(object):
sql_l.append(curEpisode.get_sql())
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
def loadEpisodesFromDB(self):
logger.log(u"Loading all episodes from the DB")
myDB = db.DBConnection()
sql = "SELECT * FROM tv_episodes WHERE showid = ?"
sqlResults = myDB.select(sql, [self.indexerid])
with db.DBConnection() as myDB:
sql = "SELECT * FROM tv_episodes WHERE showid = ?"
sqlResults = myDB.select(sql, [self.indexerid])
scannedEps = {}
@ -543,8 +542,8 @@ class TVShow(object):
scannedEps[season][episode] = True
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
# Done updating save last update date
self.last_update_indexer = datetime.date.today().toordinal()
@ -603,10 +602,10 @@ class TVShow(object):
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[self.indexerid, self.indexer, airdate])
with db.DBConnection() as myDB:
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[self.indexerid, self.indexer, airdate])
if sql_result:
season = int(sql_result[0][0])
@ -713,8 +712,8 @@ class TVShow(object):
sql_l.append(curEp.get_sql())
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
# creating metafiles on the root should be good enough
if sickbeard.USE_FAILED_DOWNLOADS and rootEp is not None:
@ -727,9 +726,8 @@ class TVShow(object):
logger.log(str(self.indexerid) + u": Loading show info from database")
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE indexer_id = ?", [self.indexerid])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE indexer_id = ?", [self.indexerid])
if len(sqlResults) > 1:
raise exceptions.MultipleDBShowsException()
@ -808,7 +806,8 @@ class TVShow(object):
self.imdbid = sqlResults[0]["imdb_id"]
# Get IMDb_info from database
sqlResults = myDB.select("SELECT * FROM imdb_info WHERE indexer_id = ?", [self.indexerid])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM imdb_info WHERE indexer_id = ?", [self.indexerid])
if len(sqlResults) == 0:
logger.log(str(self.indexerid) + ": Unable to find IMDb show info in the database")
@ -937,12 +936,12 @@ class TVShow(object):
def nextEpisode(self):
logger.log(str(self.indexerid) + ": Finding the episode which airs next", logger.DEBUG)
myDB = db.DBConnection()
innerQuery = "SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status in (?,?) ORDER BY airdate ASC LIMIT 1"
innerParams = [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED]
query = "SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= (" + innerQuery + ") and status in (?,?)"
params = [self.indexerid, datetime.date.today().toordinal()] + innerParams + [UNAIRED, WANTED]
sqlResults = myDB.select(query, params)
with db.DBConnection() as myDB:
innerQuery = "SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status in (?,?) ORDER BY airdate ASC LIMIT 1"
innerParams = [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED]
query = "SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= (" + innerQuery + ") and status in (?,?)"
params = [self.indexerid, datetime.date.today().toordinal()] + innerParams + [UNAIRED, WANTED]
sqlResults = myDB.select(query, params)
if sqlResults == None or len(sqlResults) == 0:
logger.log(str(self.indexerid) + u": No episode found... need to implement a show status",
@ -959,15 +958,14 @@ class TVShow(object):
def deleteShow(self):
myDB = db.DBConnection()
sql_l = [["DELETE FROM tv_episodes WHERE showid = ?", [self.indexerid]],
["DELETE FROM tv_shows WHERE indexer_id = ?", [self.indexerid]],
["DELETE FROM imdb_info WHERE indexer_id = ?", [self.indexerid]],
["DELETE FROM xem_refresh WHERE indexer_id = ?", [self.indexerid]],
["DELETE FROM scene_numbering WHERE indexer_id = ?", [self.indexerid]]]
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
# remove self from show list
sickbeard.showList = [x for x in sickbeard.showList if int(x.indexerid) != self.indexerid]
@ -996,8 +994,8 @@ class TVShow(object):
# run through all locations from DB, check that they exist
logger.log(str(self.indexerid) + u": Loading all episodes with a location from the database")
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
sql_l = []
for ep in sqlResults:
@ -1040,8 +1038,8 @@ class TVShow(object):
self.airdateModifyStamp(curEp)
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
def airdateModifyStamp(self, ep_obj):
"""
@ -1090,9 +1088,11 @@ class TVShow(object):
logger.log(str(self.indexerid) + ": Downloading subtitles", logger.DEBUG)
try:
episodes = db.DBConnection().select(
"SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE '' ORDER BY season DESC, episode DESC",
[self.indexerid])
with db.DBConnection() as myDB:
episodes = myDB.select(
"SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE '' ORDER BY season DESC, episode DESC",
[self.indexerid])
for episodeLoc in episodes:
episode = self.makeEpFromFile(episodeLoc['location'])
subtitles = episode.downloadSubtitles(force=force)
@ -1104,8 +1104,6 @@ class TVShow(object):
def saveToDB(self):
logger.log(str(self.indexerid) + u": Saving show info to database", logger.DEBUG)
myDB = db.DBConnection()
controlValueDict = {"indexer_id": self.indexerid}
newValueDict = {"indexer": self.indexer,
"show_name": self.name,
@ -1133,14 +1131,18 @@ class TVShow(object):
"rls_ignore_words": self.rls_ignore_words,
"rls_require_words": self.rls_require_words
}
myDB.upsert("tv_shows", newValueDict, controlValueDict)
with db.DBConnection() as myDB:
myDB.upsert("tv_shows", newValueDict, controlValueDict)
helpers.update_anime_support()
if self.imdbid:
controlValueDict = {"indexer_id": self.indexerid}
newValueDict = self.imdb_info
myDB.upsert("imdb_info", newValueDict, controlValueDict)
with db.DBConnection() as myDB:
myDB.upsert("imdb_info", newValueDict, controlValueDict)
def __str__(self):
toReturn = ""
@ -1180,9 +1182,9 @@ class TVShow(object):
logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG)
return False
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[self.indexerid, season, episode])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[self.indexerid, season, episode])
if not sqlResults or not len(sqlResults):
logger.log(u"Unable to find a matching episode in database, ignoring found episode", logger.DEBUG)
@ -1472,9 +1474,9 @@ class TVEpisode(object):
str(self.show.indexerid) + u": Loading episode details from DB for episode " + str(season) + "x" + str(
episode), logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[self.show.indexerid, season, episode])
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[self.show.indexerid, season, episode])
if len(sqlResults) > 1:
raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.")
@ -1825,10 +1827,10 @@ class TVEpisode(object):
# delete myself from the DB
logger.log(u"Deleting myself from the database", logger.DEBUG)
myDB = db.DBConnection()
sql = "DELETE FROM tv_episodes WHERE showid=" + str(self.show.indexerid) + " AND season=" + str(
self.season) + " AND episode=" + str(self.episode)
myDB.action(sql)
with db.DBConnection() as myDB:
sql = "DELETE FROM tv_episodes WHERE showid=" + str(self.show.indexerid) + " AND season=" + str(
self.season) + " AND episode=" + str(self.episode)
myDB.action(sql)
raise exceptions.EpisodeDeletedException()
@ -1844,11 +1846,10 @@ class TVEpisode(object):
logger.log(str(self.show.indexerid) + u": Not creating SQL queue - record is not dirty", logger.DEBUG)
return
myDB = db.DBConnection()
rows = myDB.select(
'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
[self.show.indexerid, self.season, self.episode])
with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
[self.show.indexerid, self.season, self.episode])
epID = None
if rows:
@ -1894,8 +1895,6 @@ class TVEpisode(object):
logger.log(u"STATUS IS " + str(self.status), logger.DEBUG)
myDB = db.DBConnection()
newValueDict = {"indexerid": self.indexerid,
"indexer": self.indexer,
"name": self.name,
@ -1918,7 +1917,8 @@ class TVEpisode(object):
"episode": self.episode}
# use a custom update/insert method to get the data into the DB
myDB.upsert("tv_episodes", newValueDict, controlValueDict)
with db.DBConnection() as myDB:
myDB.upsert("tv_episodes", newValueDict, controlValueDict)
def fullPath(self):
if self.location == None or self.location == "":
@ -2378,11 +2378,9 @@ class TVEpisode(object):
# save any changes to the database
sql_l = []
with self.lock:
sql_l.append(self.get_sql())
for relEp in self.relatedEps:
for relEp in [self] + self.relatedEps:
sql_l.append(relEp.get_sql())
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
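The hunk above shows the shape that recurs through tv.py (and in snatchEpisode earlier): episode changes are queued as SQL statements via get_sql() and flushed in one go, so a single short-lived connection handles the whole batch instead of one connection per episode. A consolidated sketch of that pattern, where epObjs is only an illustrative stand-in for whatever collection the caller iterates:

from sickbeard import db

sql_l = []
for curEp in epObjs:
    with curEp.lock:
        # queue this episode's INSERT/UPDATE rather than writing it immediately
        sql_l.append(curEp.get_sql())

if sql_l:
    # one connection writes the whole batch and is closed when the block exits
    with db.DBConnection() as myDB:
        myDB.mass_action(sql_l)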

View file

@ -49,21 +49,22 @@ class CacheDBConnection(db.DBConnection):
def __init__(self, providerName):
db.DBConnection.__init__(self, "cache.db")
# Create the table if it's not already there
try:
if not self.hasTable(providerName):
self.action("CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT)")
except Exception, e:
if str(e) != "table [" + providerName + "] already exists":
raise
with self as myDB:
# Create the table if it's not already there
try:
if not myDB.hasTable(providerName):
myDB.action("CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT)")
except Exception, e:
if str(e) != "table [" + providerName + "] already exists":
raise
# Create the table if it's not already there
try:
if not self.hasTable('lastUpdate'):
self.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")
except Exception, e:
if str(e) != "table lastUpdate already exists":
raise
# Create the table if it's not already there
try:
if not myDB.hasTable('lastUpdate'):
myDB.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")
except Exception, e:
if str(e) != "table lastUpdate already exists":
raise
class TVCache():
@ -276,12 +277,12 @@ class TVCache():
season = episodes = None
if parse_result.air_by_date or parse_result.sports:
myDB = db.DBConnection()
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
[parse_result.show.indexerid, parse_result.show.indexer, airdate])
with db.DBConnection() as myDB:
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
[parse_result.show.indexerid, parse_result.show.indexer, airdate])
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]

View file

@ -283,15 +283,14 @@ class Manage:
@cherrypy.expose
def showEpisodeStatuses(self, indexer_id, whichStatus):
myDB = db.DBConnection()
status_list = [int(whichStatus)]
if status_list[0] == SNATCHED:
status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER
cur_show_results = myDB.select(
"SELECT season, episode, name FROM tv_episodes WHERE showid = ? AND season != 0 AND status IN (" + ','.join(
['?'] * len(status_list)) + ")", [int(indexer_id)] + status_list)
with db.DBConnection() as myDB:
cur_show_results = myDB.select(
"SELECT season, episode, name FROM tv_episodes WHERE showid = ? AND season != 0 AND status IN (" + ','.join(
['?'] * len(status_list)) + ")", [int(indexer_id)] + status_list)
result = {}
for cur_result in cur_show_results:
@ -324,12 +323,12 @@ class Manage:
if not status_list:
return _munge(t)
myDB = db.DBConnection()
status_results = myDB.select(
"SELECT show_name, tv_shows.indexer_id as indexer_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN (" + ','.join(
['?'] * len(
status_list)) + ") AND season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name",
status_list)
with db.DBConnection() as myDB:
status_results = myDB.select(
"SELECT show_name, tv_shows.indexer_id as indexer_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN (" + ','.join(
['?'] * len(
status_list)) + ") AND season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name",
status_list)
ep_counts = {}
show_names = {}
@ -372,28 +371,26 @@ class Manage:
to_change[indexer_id].append(what)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
for cur_indexer_id in to_change:
for cur_indexer_id in to_change:
# get a list of all the eps we want to change if they just said "all"
if 'all' in to_change[cur_indexer_id]:
all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status IN (" + ','.join(
['?'] * len(status_list)) + ") AND season != 0 AND showid = ?", status_list + [cur_indexer_id])
all_eps = [str(x["season"]) + 'x' + str(x["episode"]) for x in all_eps_results]
to_change[cur_indexer_id] = all_eps
# get a list of all the eps we want to change if they just said "all"
if 'all' in to_change[cur_indexer_id]:
all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status IN (" + ','.join(
['?'] * len(status_list)) + ") AND season != 0 AND showid = ?", status_list + [cur_indexer_id])
all_eps = [str(x["season"]) + 'x' + str(x["episode"]) for x in all_eps_results]
to_change[cur_indexer_id] = all_eps
Home().setStatus(cur_indexer_id, '|'.join(to_change[cur_indexer_id]), newStatus, direct=True)
Home().setStatus(cur_indexer_id, '|'.join(to_change[cur_indexer_id]), newStatus, direct=True)
redirect('/manage/episodeStatuses')
@cherrypy.expose
def showSubtitleMissed(self, indexer_id, whichSubs):
myDB = db.DBConnection()
cur_show_results = myDB.select(
"SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? AND season != 0 AND status LIKE '%4'",
[int(indexer_id)])
with db.DBConnection() as myDB:
cur_show_results = myDB.select(
"SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? AND season != 0 AND status LIKE '%4'",
[int(indexer_id)])
result = {}
for cur_result in cur_show_results:
@ -431,9 +428,9 @@ class Manage:
if not whichSubs:
return _munge(t)
myDB = db.DBConnection()
status_results = myDB.select(
"SELECT show_name, tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles FROM tv_episodes, tv_shows WHERE tv_shows.subtitles = 1 AND tv_episodes.status LIKE '%4' AND tv_episodes.season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name")
with db.DBConnection() as myDB:
status_results = myDB.select(
"SELECT show_name, tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles FROM tv_episodes, tv_shows WHERE tv_shows.subtitles = 1 AND tv_episodes.status LIKE '%4' AND tv_episodes.season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name")
ep_counts = {}
show_names = {}
@ -482,10 +479,10 @@ class Manage:
for cur_indexer_id in to_download:
# get a list of all the eps we want to download subtitles if they just said "all"
if 'all' in to_download[cur_indexer_id]:
myDB = db.DBConnection()
all_eps_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE status LIKE '%4' AND season != 0 AND showid = ?",
[cur_indexer_id])
with db.DBConnection() as myDB:
all_eps_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE status LIKE '%4' AND season != 0 AND showid = ?",
[cur_indexer_id])
to_download[cur_indexer_id] = [str(x["season"]) + 'x' + str(x["episode"]) for x in all_eps_results]
for epResult in to_download[cur_indexer_id]:
@ -512,34 +509,33 @@ class Manage:
t = PageTemplate(file="manage_backlogOverview.tmpl")
t.submenu = ManageMenu()
myDB = db.DBConnection()
showCounts = {}
showCats = {}
showSQLResults = {}
for curShow in sickbeard.showList:
with db.DBConnection() as myDB:
for curShow in sickbeard.showList:
epCounts = {}
epCats = {}
epCounts[Overview.SKIPPED] = 0
epCounts[Overview.WANTED] = 0
epCounts[Overview.QUAL] = 0
epCounts[Overview.GOOD] = 0
epCounts[Overview.UNAIRED] = 0
epCounts[Overview.SNATCHED] = 0
epCounts = {}
epCats = {}
epCounts[Overview.SKIPPED] = 0
epCounts[Overview.WANTED] = 0
epCounts[Overview.QUAL] = 0
epCounts[Overview.GOOD] = 0
epCounts[Overview.UNAIRED] = 0
epCounts[Overview.SNATCHED] = 0
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC",
[curShow.indexerid])
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC",
[curShow.indexerid])
for curResult in sqlResults:
curEpCat = curShow.getOverview(int(curResult["status"]))
epCats[str(curResult["season"]) + "x" + str(curResult["episode"])] = curEpCat
epCounts[curEpCat] += 1
for curResult in sqlResults:
curEpCat = curShow.getOverview(int(curResult["status"]))
epCats[str(curResult["season"]) + "x" + str(curResult["episode"])] = curEpCat
epCounts[curEpCat] += 1
showCounts[curShow.indexerid] = epCounts
showCats[curShow.indexerid] = epCats
showSQLResults[curShow.indexerid] = sqlResults
showCounts[curShow.indexerid] = epCounts
showCats[curShow.indexerid] = epCats
showSQLResults[curShow.indexerid] = sqlResults
t.showCounts = showCounts
t.showCats = showCats
@ -859,17 +855,17 @@ class Manage:
@cherrypy.expose
def failedDownloads(self, limit=100, toRemove=None):
myDB = db.DBConnection("failed.db")
with db.DBConnection('failed.db') as myDB:
if limit == "0":
sqlResults = myDB.select("SELECT * FROM failed")
else:
sqlResults = myDB.select("SELECT * FROM failed LIMIT ?", [limit])
if limit == "0":
sqlResults = myDB.select("SELECT * FROM failed")
else:
sqlResults = myDB.select("SELECT * FROM failed LIMIT ?", [limit])
toRemove = toRemove.split("|") if toRemove is not None else []
toRemove = toRemove.split("|") if toRemove is not None else []
for release in toRemove:
myDB.action('DELETE FROM failed WHERE release = ?', [release])
for release in toRemove:
myDB.action('DELETE FROM failed WHERE release = ?', [release])
if toRemove:
raise cherrypy.HTTPRedirect('/manage/failedDownloads/')
@ -886,16 +882,15 @@ class History:
@cherrypy.expose
def index(self, limit=100):
myDB = db.DBConnection()
# sqlResults = myDB.select("SELECT h.*, show_name, name FROM history h, tv_shows s, tv_episodes e WHERE h.showid=s.indexer_id AND h.showid=e.showid AND h.season=e.season AND h.episode=e.episode ORDER BY date DESC LIMIT "+str(numPerPage*(p-1))+", "+str(numPerPage))
if limit == "0":
sqlResults = myDB.select(
"SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC")
else:
sqlResults = myDB.select(
"SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC LIMIT ?",
[limit])
with db.DBConnection() as myDB:
if limit == "0":
sqlResults = myDB.select(
"SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC")
else:
sqlResults = myDB.select(
"SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC LIMIT ?",
[limit])
history = {'show_id': 0, 'season': 0, 'episode': 0, 'quality': 0,
'actions': [{'time': '', 'action': '', 'provider': ''}]}
@ -959,8 +954,9 @@ class History:
@cherrypy.expose
def clearHistory(self):
myDB = db.DBConnection()
myDB.action("DELETE FROM history WHERE 1=1")
with db.DBConnection() as myDB:
myDB.action("DELETE FROM history WHERE 1=1")
ui.notifications.message('History cleared')
redirect("/history/")
@ -968,9 +964,10 @@ class History:
@cherrypy.expose
def trimHistory(self):
myDB = db.DBConnection()
myDB.action("DELETE FROM history WHERE date < " + str(
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(history.dateFormat)))
with db.DBConnection() as myDB:
myDB.action("DELETE FROM history WHERE date < " + str(
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(history.dateFormat)))
ui.notifications.message('Removed history entries greater than 30 days old')
redirect("/history/")
@ -1440,7 +1437,6 @@ class ConfigProviders:
return providerDict[name].getID() + '|' + providerDict[name].configStr()
else:
newProvider = newznab.NewznabProvider(name, url, key=key)
sickbeard.newznabProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.configStr()
@ -2231,8 +2227,6 @@ class NewHomeAddShows:
t = PageTemplate(file="home_massAddTable.tmpl")
t.submenu = HomeMenu()
myDB = db.DBConnection()
if not rootDir:
return "No folders selected."
elif type(rootDir) != list:
@ -2255,54 +2249,55 @@ class NewHomeAddShows:
dir_list = []
for root_dir in root_dirs:
try:
file_list = ek.ek(os.listdir, root_dir)
except:
continue
for cur_file in file_list:
cur_path = ek.ek(os.path.normpath, ek.ek(os.path.join, root_dir, cur_file))
if not ek.ek(os.path.isdir, cur_path):
with db.DBConnection() as myDB:
for root_dir in root_dirs:
try:
file_list = ek.ek(os.listdir, root_dir)
except:
continue
cur_dir = {
'dir': cur_path,
'display_dir': '<b>' + ek.ek(os.path.dirname, cur_path) + os.sep + '</b>' + ek.ek(os.path.basename,
cur_path),
}
for cur_file in file_list:
# see if the folder is in XBMC already
dirResults = myDB.select("SELECT * FROM tv_shows WHERE location = ?", [cur_path])
cur_path = ek.ek(os.path.normpath, ek.ek(os.path.join, root_dir, cur_file))
if not ek.ek(os.path.isdir, cur_path):
continue
if dirResults:
cur_dir['added_already'] = True
else:
cur_dir['added_already'] = False
cur_dir = {
'dir': cur_path,
'display_dir': '<b>' + ek.ek(os.path.dirname, cur_path) + os.sep + '</b>' + ek.ek(os.path.basename,
cur_path),
}
dir_list.append(cur_dir)
# see if the folder is in XBMC already
dirResults = myDB.select("SELECT * FROM tv_shows WHERE location = ?", [cur_path])
indexer_id = show_name = indexer = None
for cur_provider in sickbeard.metadata_provider_dict.values():
(indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
if show_name: break
if dirResults:
cur_dir['added_already'] = True
else:
cur_dir['added_already'] = False
# default to TVDB if indexer was not detected
if show_name and not (indexer and indexer_id):
(sn, idx, id) = helpers.searchIndexerForShowID(show_name, indexer, indexer_id)
dir_list.append(cur_dir)
# set indexer and indexer_id from found info
if indexer is None and idx:
indexer = idx
indexer_id = show_name = indexer = None
for cur_provider in sickbeard.metadata_provider_dict.values():
(indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
if show_name: break
if indexer_id is None and id:
indexer_id = id
# default to TVDB if indexer was not detected
if show_name and not (indexer and indexer_id):
(sn, idx, id) = helpers.searchIndexerForShowID(show_name, indexer, indexer_id)
cur_dir['existing_info'] = (indexer_id, show_name, indexer)
# set indexer and indexer_id from found info
if indexer is None and idx:
indexer = idx
if indexer_id and helpers.findCertainShow(sickbeard.showList, indexer_id):
cur_dir['added_already'] = True
if indexer_id is None and id:
indexer_id = id
cur_dir['existing_info'] = (indexer_id, show_name, indexer)
if indexer_id and helpers.findCertainShow(sickbeard.showList, indexer_id):
cur_dir['added_already'] = True
t.dirList = dir_list
@ -2632,7 +2627,7 @@ class Home:
if 'callback' in kwargs and '_' in kwargs:
callback, _ = kwargs['callback'], kwargs['_']
else:
return "Error: Unsupported Request. Send jsonp request with 'callback' variable in the query stiring."
return "Error: Unsupported Request. Send jsonp request with 'callback' variable in the query string."
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
cherrypy.response.headers['Content-Type'] = 'text/javascript'
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
@ -2877,8 +2872,9 @@ class Home:
def loadShowNotifyLists(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
mydb = db.DBConnection()
rows = mydb.select("SELECT show_id, show_name, notify_list FROM tv_shows ORDER BY show_name ASC")
with db.DBConnection() as myDB:
rows = myDB.select("SELECT show_id, show_name, notify_list FROM tv_shows ORDER BY show_name ASC")
data = {}
size = 0
for r in rows:
@ -2995,17 +2991,16 @@ class Home:
showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
seasonResults = myDB.select(
"SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season desc",
[showObj.indexerid]
)
seasonResults = myDB.select(
"SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season desc",
[showObj.indexerid]
)
sqlResults = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC",
[showObj.indexerid]
)
sqlResults = myDB.select(
"SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC",
[showObj.indexerid]
)
t = PageTemplate(file="displayShow.tmpl")
t.submenu = [{'title': 'Edit', 'path': 'home/editShow?show=%d' % showObj.indexerid}]
@ -3115,9 +3110,10 @@ class Home:
@cherrypy.expose
def plotDetails(self, show, season, episode):
result = db.DBConnection().action(
"SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
(int(show), int(season), int(episode))).fetchone()
with db.DBConnection() as myDB:
result = myDB.action(
"SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
(int(show), int(season), int(episode))).fetchone()
return result['description'] if result else 'Episode not found.'
@cherrypy.expose
@ -3578,8 +3574,8 @@ class Home:
sql_l.append(epObj.get_sql())
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
if int(status) == WANTED:
msg = "Backlog was automatically started for the following seasons of <b>" + showObj.name + "</b>:<br />"
@ -3678,33 +3674,32 @@ class Home:
except exceptions.ShowDirNotFoundException:
return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
myDB = db.DBConnection()
if eps is None:
redirect("/home/displayShow?show=" + show)
for curEp in eps.split('|'):
with db.DBConnection() as myDB:
for curEp in eps.split('|'):
epInfo = curEp.split('x')
epInfo = curEp.split('x')
# this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database
ep_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5",
[show, epInfo[0], epInfo[1]])
if not ep_result:
logger.log(u"Unable to find an episode for " + curEp + ", skipping", logger.WARNING)
continue
related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE location = ? AND episode != ?",
[ep_result[0]["location"], epInfo[1]])
# this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database
ep_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5",
[show, epInfo[0], epInfo[1]])
if not ep_result:
logger.log(u"Unable to find an episode for " + curEp + ", skipping", logger.WARNING)
continue
related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE location = ? AND episode != ?",
[ep_result[0]["location"], epInfo[1]])
root_ep_obj = show_obj.getEpisode(int(epInfo[0]), int(epInfo[1]))
root_ep_obj.relatedEps = []
root_ep_obj = show_obj.getEpisode(int(epInfo[0]), int(epInfo[1]))
root_ep_obj.relatedEps = []
for cur_related_ep in related_eps_result:
related_ep_obj = show_obj.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
if related_ep_obj not in root_ep_obj.relatedEps:
root_ep_obj.relatedEps.append(related_ep_obj)
for cur_related_ep in related_eps_result:
related_ep_obj = show_obj.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
if related_ep_obj not in root_ep_obj.relatedEps:
root_ep_obj.relatedEps.append(related_ep_obj)
root_ep_obj.rename()
root_ep_obj.rename()
redirect("/home/displayShow?show=" + show)
@ -4000,8 +3995,6 @@ class WebInterface:
@cherrypy.expose
def comingEpisodes(self, layout="None"):
myDB = db.DBConnection()
today1 = datetime.date.today()
today = today1.toordinal()
next_week1 = (datetime.date.today() + datetime.timedelta(days=7))
@ -4010,24 +4003,27 @@ class WebInterface:
done_show_list = []
qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED]
sql_results = myDB.select(
"SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(
['?'] * len(qualList)) + ")", [today, next_week] + qualList)
for cur_result in sql_results:
done_show_list.append(int(cur_result["showid"]))
more_sql_results = myDB.select(
"SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(
['?'] * len(
done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(
['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")",
done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED)
sql_results += more_sql_results
with db.DBConnection() as myDB:
sql_results = myDB.select(
"SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(
['?'] * len(qualList)) + ")", [today, next_week] + qualList)
more_sql_results = myDB.select(
"SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(
['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList)
sql_results += more_sql_results
for cur_result in sql_results:
done_show_list.append(int(cur_result["showid"]))
more_sql_results = myDB.select(
"SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN (" + ','.join(
['?'] * len(
done_show_list)) + ") AND tv_shows.indexer_id = outer_eps.showid AND airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND outer_eps.status NOT IN (" + ','.join(
['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")",
done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED)
sql_results += more_sql_results
more_sql_results = myDB.select(
"SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(
['?'] * len(qualList)) + ")", [today, recently, WANTED] + qualList)
sql_results += more_sql_results
# sort by localtime
sorts = {
@ -4099,48 +4095,46 @@ class WebInterface:
ical += 'X-WR-CALDESC:SickRage\r\n'
ical += 'PRODID://Sick-Beard Upcoming Episodes//\r\n'
# Get shows info
myDB = db.DBConnection()
# Limit dates
past_date = (datetime.date.today() + datetime.timedelta(weeks=-52)).toordinal()
future_date = (datetime.date.today() + datetime.timedelta(weeks=52)).toordinal()
# Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
calendar_shows = myDB.select(
"SELECT show_name, indexer_id, network, airs, runtime FROM tv_shows WHERE ( status = 'Continuing' OR status = 'Returning Series' ) AND paused != '1'")
for show in calendar_shows:
# Get all episodes of this show airing between today and next month
episode_list = myDB.select(
"SELECT indexerid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?",
(past_date, future_date, int(show["indexer_id"])))
with db.DBConnection() as myDB:
calendar_shows = myDB.select(
"SELECT show_name, indexer_id, network, airs, runtime FROM tv_shows WHERE ( status = 'Continuing' OR status = 'Returning Series' ) AND paused != '1'")
for show in calendar_shows:
# Get all episodes of this show airing between today and next month
episode_list = myDB.select(
"SELECT indexerid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?",
(past_date, future_date, int(show["indexer_id"])))
utc = tz.gettz('GMT')
utc = tz.gettz('GMT')
for episode in episode_list:
for episode in episode_list:
air_date_time = network_timezones.parse_date_time(episode['airdate'], show["airs"],
show['network']).astimezone(utc)
air_date_time_end = air_date_time + datetime.timedelta(minutes=helpers.tryInt(show["runtime"], 60))
air_date_time = network_timezones.parse_date_time(episode['airdate'], show["airs"],
show['network']).astimezone(utc)
air_date_time_end = air_date_time + datetime.timedelta(minutes=helpers.tryInt(show["runtime"], 60))
# Create event for episode
ical = ical + 'BEGIN:VEVENT\r\n'
ical = ical + 'DTSTART:' + air_date_time.strftime("%Y%m%d") + 'T' + air_date_time.strftime(
"%H%M%S") + 'Z\r\n'
ical = ical + 'DTEND:' + air_date_time_end.strftime("%Y%m%d") + 'T' + air_date_time_end.strftime(
"%H%M%S") + 'Z\r\n'
ical = ical + 'SUMMARY:' + show['show_name'] + ': ' + episode['name'] + '\r\n'
ical = ical + 'UID:Sick-Beard-' + str(datetime.date.today().isoformat()) + '-' + show[
'show_name'].replace(" ", "-") + '-E' + str(episode['episode']) + 'S' + str(
episode['season']) + '\r\n'
if (episode['description'] is not None and episode['description'] != ''):
ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\\n\\n' + \
episode['description'].splitlines()[0] + '\r\n'
else:
ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\r\n'
ical = ical + 'LOCATION:' + 'Episode ' + str(episode['episode']) + ' - Season ' + str(
episode['season']) + '\r\n'
ical = ical + 'END:VEVENT\r\n'
# Create event for episode
ical = ical + 'BEGIN:VEVENT\r\n'
ical = ical + 'DTSTART:' + air_date_time.strftime("%Y%m%d") + 'T' + air_date_time.strftime(
"%H%M%S") + 'Z\r\n'
ical = ical + 'DTEND:' + air_date_time_end.strftime("%Y%m%d") + 'T' + air_date_time_end.strftime(
"%H%M%S") + 'Z\r\n'
ical = ical + 'SUMMARY:' + show['show_name'] + ': ' + episode['name'] + '\r\n'
ical = ical + 'UID:Sick-Beard-' + str(datetime.date.today().isoformat()) + '-' + show[
'show_name'].replace(" ", "-") + '-E' + str(episode['episode']) + 'S' + str(
episode['season']) + '\r\n'
if (episode['description'] is not None and episode['description'] != ''):
ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\\n\\n' + \
episode['description'].splitlines()[0] + '\r\n'
else:
ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\r\n'
ical = ical + 'LOCATION:' + 'Episode ' + str(episode['episode']) + ' - Season ' + str(
episode['season']) + '\r\n'
ical = ical + 'END:VEVENT\r\n'
# Ending the iCal
ical += 'END:VCALENDAR'