# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os.path
import re
import sqlite3
import time
import threading

import sickbeard
from sickbeard import encodingKludge as ek
from sickbeard import logger
from sickbeard.exceptions import ex
import helpers

db_lock = threading.Lock()


def dbFilename(filename='sickbeard.db', suffix=None):
    """
    @param filename: The sqlite database filename to use. If not specified,
                     sickbeard.db will be used.
    @param suffix: The suffix to append to the filename. A '.' will be added
                   automatically, e.g. suffix='v0' will make dbfile.db.v0.
    @return: the correct location of the database file.
    """
    if suffix:
        filename = '%s.%s' % (filename, suffix)
    return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)
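
# Illustrative: dbFilename('sickbeard.db', 'v1') resolves to
# '<DATA_DIR>/sickbeard.db.v1'; versioned names like this are produced for
# the backup/restore helpers further below.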


def mass_upsert_sql(tableName, valueDict, keyDict):
    """
    use with cl.extend(mass_upsert_sql(tableName, valueDict, keyDict))

    :param tableName: table name
    :param valueDict: dict of values to be set {'table_fieldname': value}
    :param keyDict: dict of update constraints {'table_fieldname': value}
    :return: list of 2 sql commands
    """
    cl = []

    genParams = lambda myDict: [x + ' = ?' for x in myDict.keys()]

    cl.append(['UPDATE [%s] SET %s WHERE %s' % (
        tableName, ', '.join(genParams(valueDict)), ' AND '.join(genParams(keyDict))),
        valueDict.values() + keyDict.values()])

    cl.append(['INSERT INTO [' + tableName + '] (' +
               ', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in valueDict.keys() + keyDict.keys()]) +
               ') SELECT ' +
               ', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in valueDict.values() + keyDict.values()]) +
               ' WHERE changes() = 0'])

    return cl
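
# Illustrative use with DBConnection.mass_action (table/column names are
# examples only):
#   cl = []
#   cl.extend(mass_upsert_sql('history', {'quality': 1}, {'showid': 123}))
#   my_db.mass_action(cl)
# The UPDATE runs first; the INSERT only fires when changes() reports that
# the UPDATE matched no row, giving upsert semantics in two statements.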


class DBConnection(object):
    def __init__(self, filename='sickbeard.db', suffix=None, row_type=None):

        db_src = dbFilename(filename)
        if not os.path.isfile(db_src):
            db_alt = dbFilename('sickrage.db')
            if os.path.isfile(db_alt):
                helpers.copyFile(db_alt, db_src)

        self.filename = filename
        self.connection = sqlite3.connect(db_src, 20)

        if row_type == 'dict':
            self.connection.row_factory = self._dict_factory
        else:
            self.connection.row_factory = sqlite3.Row

    def checkDBVersion(self):

        try:
            if self.hasTable('db_version'):
                result = self.select('SELECT db_version FROM db_version')
            else:
                version = self.select('PRAGMA user_version')[0]['user_version']
                if version:
                    self.action('PRAGMA user_version = 0')
                    self.action('CREATE TABLE db_version (db_version INTEGER);')
                    self.action('INSERT INTO db_version (db_version) VALUES (%s);' % version)
                return version
        except (StandardError, Exception):
            return 0

        if result:
            version = int(result[0]['db_version'])
            # pre-10000 schemas that carry a minor version are reported as a
            # combined number, major * 100 + minor (e.g. 43 and 1 -> 4301)
            if 10000 > version and self.hasColumn('db_version', 'db_minor_version'):
                minor = self.select('SELECT db_minor_version FROM db_version')
                return version * 100 + int(minor[0]['db_minor_version'])
            return version
        else:
            return 0

    def mass_action(self, querylist, logTransaction=False):

        with db_lock:

            if querylist is None:
                return

            sqlResult = []
            attempt = 0

            while attempt < 5:
                try:
                    affected = 0
                    for qu in querylist:
                        cursor = self.connection.cursor()
                        if len(qu) == 1:
                            if logTransaction:
                                logger.log(qu[0], logger.DB)
                            sqlResult.append(cursor.execute(qu[0]).fetchall())
                        elif len(qu) > 1:
                            if logTransaction:
                                logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DB)
                            sqlResult.append(cursor.execute(qu[0], qu[1]).fetchall())
                        affected += cursor.rowcount
                    self.connection.commit()
                    if affected > 0:
                        logger.log(u'Transaction with %s queries executed affected %i row%s' % (
                            len(querylist), affected, helpers.maybe_plural(affected)), logger.DEBUG)
                    return sqlResult
                except sqlite3.OperationalError as e:
                    sqlResult = []
                    if self.connection:
                        self.connection.rollback()
                    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    if self.connection:
                        self.connection.rollback()
                    logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                    raise

            return sqlResult

    def action(self, query, args=None):

        with db_lock:

            if query is None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                try:
                    if args is None:
                        logger.log(self.filename + ': ' + query, logger.DB)
                        sqlResult = self.connection.execute(query)
                    else:
                        logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB)
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError as e:
                    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                    raise

            return sqlResult

    def select(self, query, args=None):

        sqlResults = self.action(query, args).fetchall()

        if sqlResults is None:
            return []

        return sqlResults
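
    # Illustrative (table/column names are examples only):
    #   my_db = DBConnection()
    #   rows = my_db.select('SELECT * FROM tv_shows WHERE indexer_id = ?', [12345])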

    def upsert(self, tableName, valueDict, keyDict):

        changesBefore = self.connection.total_changes

        genParams = lambda myDict: [x + ' = ?' for x in myDict.keys()]

        query = 'UPDATE [%s] SET %s WHERE %s' % (
            tableName, ', '.join(genParams(valueDict)), ' AND '.join(genParams(keyDict)))

        self.action(query, valueDict.values() + keyDict.values())

        if self.connection.total_changes == changesBefore:
            query = 'INSERT INTO [' + tableName + '] (' + ', '.join(valueDict.keys() + keyDict.keys()) + ')' + \
                    ' VALUES (' + ', '.join(['?'] * len(valueDict.keys() + keyDict.keys())) + ')'
            self.action(query, valueDict.values() + keyDict.values())
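
    # Illustrative (hypothetical table/field names):
    #   my_db.upsert('lastUpdate', {'time': 1400000000}, {'provider': 'name'})
    # tries the UPDATE first and only INSERTs when total_changes shows that
    # no existing row matched keyDict.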

    def tableInfo(self, tableName):

        # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
        sqlResult = self.select('PRAGMA table_info([%s])' % tableName)
        columns = {}
        for column in sqlResult:
            columns[column['name']] = {'type': column['type']}
        return columns

    # http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
    @staticmethod
    def _dict_factory(cursor, row):
        d = {}
        for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
        return d
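
    # Illustrative: constructing DBConnection(row_type='dict') makes queries
    # return plain dicts keyed by column name instead of sqlite3.Row objects.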

    def hasTable(self, tableName):
        return len(self.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0

    def hasColumn(self, tableName, column):
        return column in self.tableInfo(tableName)

    def hasIndex(self, tableName, index):
        sqlResults = self.select('PRAGMA index_list([%s])' % tableName)
        for result in sqlResults:
            if result['name'] == index:
                return True
        return False

    def addColumn(self, table, column, type='NUMERIC', default=0):
        self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, type))
        self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))

    def has_flag(self, flag_name):
        sql_result = self.select('SELECT flag FROM flags WHERE flag = ?', [flag_name])
        if 0 < len(sql_result):
            return True
        return False

    def add_flag(self, flag_name):
        if not self.has_flag(flag_name):
            self.action('INSERT INTO flags (flag) VALUES (?)', [flag_name])

    def remove_flag(self, flag_name):
        if self.has_flag(flag_name):
            self.action('DELETE FROM flags WHERE flag = ?', [flag_name])

    def close(self):
        """Close database connection"""
        if getattr(self, 'connection', None) is not None:
            self.connection.close()
        self.connection = None


def sanityCheckDatabase(connection, sanity_check):
    sanity_check(connection).check()


class DBSanityCheck(object):
    def __init__(self, connection):
        self.connection = connection

    def check(self):
        pass


def upgradeDatabase(connection, schema):
    logger.log(u'Checking database structure...', logger.MESSAGE)
    _processUpgrade(connection, schema)


def prettyName(class_name):
    return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)])
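
# Illustrative: prettyName('AddSizeAndSceneNameFields') returns
# 'Add Size And Scene Name Fields'.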


def restoreDatabase(filename, version):
    logger.log(u'Restoring database before trying upgrade again')
    if not sickbeard.helpers.restoreVersionedFile(dbFilename(filename=filename, suffix='v%s' % version), version):
        logger.log_error_and_exit(u'Database restore failed, abort upgrading database')
        return False
    else:
        return True


def _processUpgrade(connection, upgradeClass):
    instance = upgradeClass(connection)
    logger.log(u'Checking %s database upgrade' % prettyName(upgradeClass.__name__), logger.DEBUG)
    if not instance.test():
        logger.log(u'Database upgrade required: %s' % prettyName(upgradeClass.__name__), logger.MESSAGE)
        try:
            instance.execute()
        except sqlite3.DatabaseError as e:
            # attempting to restore previous DB backup and perform upgrade
            try:
                instance.execute()
            except (StandardError, Exception):
                result = connection.select('SELECT db_version FROM db_version')
                if result:
                    version = int(result[0]['db_version'])

                    # close db before attempting restore
                    connection.close()

                    if restoreDatabase(connection.filename, version):
                        logger.log_error_and_exit(u'Successfully restored database version: %s' % version)
                    else:
                        logger.log_error_and_exit(u'Failed to restore database version: %s' % version)

        logger.log('%s upgrade completed' % upgradeClass.__name__, logger.DEBUG)
    else:
        logger.log('%s upgrade not required' % upgradeClass.__name__, logger.DEBUG)

    for upgradeSubClass in upgradeClass.__subclasses__():
        _processUpgrade(connection, upgradeSubClass)


# Base migration class. All future DB changes should be subclassed from this class
class SchemaUpgrade(object):
    def __init__(self, connection):
        self.connection = connection

    def hasTable(self, tableName):
        return len(self.connection.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0

    def hasColumn(self, tableName, column):
        return column in self.connection.tableInfo(tableName)

    def addColumn(self, table, column, type='NUMERIC', default=0):
        self.connection.action('ALTER TABLE [%s] ADD %s %s' % (table, column, type))
        self.connection.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))

    def dropColumn(self, table, column):
        # get old table columns and store the ones we want to keep
        result = self.connection.select('pragma table_info([%s])' % table)
        keptColumns = [c for c in result if c['name'] != column]

        keptColumnsNames = []
        final = []
        pk = []

        # copy the old table schema, column by column
        for column in keptColumns:

            keptColumnsNames.append(column['name'])

            cl = [column['name'], column['type']]

            '''
            To be implemented if ever required
            if column['dflt_value']:
                cl.append(str(column['dflt_value']))

            if column['notnull']:
                cl.append(column['notnull'])
            '''

            if int(column['pk']) != 0:
                pk.append(column['name'])

            b = ' '.join(cl)
            final.append(b)

        # join all the table column creation fields
        final = ', '.join(final)
        keptColumnsNames = ', '.join(keptColumnsNames)

        # generate sql for the new table creation
        if len(pk) == 0:
            sql = 'CREATE TABLE [%s_new] (%s)' % (table, final)
        else:
            pk = ', '.join(pk)
            sql = 'CREATE TABLE [%s_new] (%s, PRIMARY KEY(%s))' % (table, final, pk)

        # create new temporary table and copy the old table data across, barring the removed column
        self.connection.action(sql)
        self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, keptColumnsNames, table))

        # copy the old indexes from the old table
        result = self.connection.select("SELECT sql FROM sqlite_master WHERE tbl_name=? and type='index'", [table])

        # remove the old table and rename the new table to take its place
        self.connection.action('DROP TABLE [%s]' % table)
        self.connection.action('ALTER TABLE [%s_new] RENAME TO [%s]' % (table, table))

        # write any indexes to the new table
        if len(result) > 0:
            for index in result:
                self.connection.action(index['sql'])

        # vacuum the db as we will have a lot of space to reclaim after dropping tables
        self.connection.action('VACUUM')
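
        # Illustrative (hypothetical column name): a SchemaUpgrade subclass can
        # call self.dropColumn('tv_shows', 'legacy_field') to emulate DROP
        # COLUMN, which SQLite of this vintage does not support natively.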

    def checkDBVersion(self):
        return self.connection.checkDBVersion()

    def incDBVersion(self):
        new_version = self.checkDBVersion() + 1
        self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
        return new_version

    def setDBVersion(self, new_version):
        self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
        return new_version

    def listTables(self):
        tables = []
        sql_result = self.connection.select('SELECT name FROM sqlite_master where type = "table"')
        for table in sql_result:
            tables.append(table[0])
        return tables


def MigrationCode(myDB):
    schema = {
        0: sickbeard.mainDB.InitialSchema,
        9: sickbeard.mainDB.AddSizeAndSceneNameFields,
        10: sickbeard.mainDB.RenameSeasonFolders,
        11: sickbeard.mainDB.Add1080pAndRawHDQualities,
        12: sickbeard.mainDB.AddShowidTvdbidIndex,
        13: sickbeard.mainDB.AddLastUpdateTVDB,
        14: sickbeard.mainDB.AddDBIncreaseTo15,
        15: sickbeard.mainDB.AddIMDbInfo,
        16: sickbeard.mainDB.AddProperNamingSupport,
        17: sickbeard.mainDB.AddEmailSubscriptionTable,
        18: sickbeard.mainDB.AddProperSearch,
        19: sickbeard.mainDB.AddDvdOrderOption,
        20: sickbeard.mainDB.AddSubtitlesSupport,
        21: sickbeard.mainDB.ConvertTVShowsToIndexerScheme,
        22: sickbeard.mainDB.ConvertTVEpisodesToIndexerScheme,
        23: sickbeard.mainDB.ConvertIMDBInfoToIndexerScheme,
        24: sickbeard.mainDB.ConvertInfoToIndexerScheme,
        25: sickbeard.mainDB.AddArchiveFirstMatchOption,
        26: sickbeard.mainDB.AddSceneNumbering,
        27: sickbeard.mainDB.ConvertIndexerToInteger,
        28: sickbeard.mainDB.AddRequireAndIgnoreWords,
        29: sickbeard.mainDB.AddSportsOption,
        30: sickbeard.mainDB.AddSceneNumberingToTvEpisodes,
        31: sickbeard.mainDB.AddAnimeTVShow,
        32: sickbeard.mainDB.AddAbsoluteNumbering,
        33: sickbeard.mainDB.AddSceneAbsoluteNumbering,
        34: sickbeard.mainDB.AddAnimeBlacklistWhitelist,
        35: sickbeard.mainDB.AddSceneAbsoluteNumbering2,
        36: sickbeard.mainDB.AddXemRefresh,
        37: sickbeard.mainDB.AddSceneToTvShows,
        38: sickbeard.mainDB.AddIndexerMapping,
        39: sickbeard.mainDB.AddVersionToTvEpisodes,

        40: sickbeard.mainDB.BumpDatabaseVersion,
        41: sickbeard.mainDB.Migrate41,
        42: sickbeard.mainDB.Migrate41,
        43: sickbeard.mainDB.Migrate43,
        44: sickbeard.mainDB.Migrate43,

        4301: sickbeard.mainDB.Migrate4301,
        4302: sickbeard.mainDB.Migrate4302,
        4400: sickbeard.mainDB.Migrate4302,

        5816: sickbeard.mainDB.MigrateUpstream,
        5817: sickbeard.mainDB.MigrateUpstream,
        5818: sickbeard.mainDB.MigrateUpstream,

        10000: sickbeard.mainDB.SickGearDatabaseVersion,
        10001: sickbeard.mainDB.RemoveDefaultEpStatusFromTvShows,
        10002: sickbeard.mainDB.RemoveMinorDBVersion,
        10003: sickbeard.mainDB.RemoveMetadataSub,

        20000: sickbeard.mainDB.DBIncreaseTo20001,
        20001: sickbeard.mainDB.AddTvShowOverview,
        20002: sickbeard.mainDB.AddTvShowTags,
        20003: sickbeard.mainDB.ChangeMapIndexer,
        20004: sickbeard.mainDB.AddShowNotFoundCounter,
        20005: sickbeard.mainDB.AddFlagTable
        # 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
    }

    db_version = myDB.checkDBVersion()
    logger.log(u'Detected database version: v%s' % db_version, logger.DEBUG)

    if db_version not in schema:
        if db_version == sickbeard.mainDB.MAX_DB_VERSION:
            logger.log(u'Database schema is up-to-date, no upgrade required')
        elif db_version < 10000:
            logger.log_error_and_exit(u'SickGear does not currently support upgrading from this database version')
        else:
            logger.log_error_and_exit(u'Invalid database version')

    else:

        while db_version < sickbeard.mainDB.MAX_DB_VERSION:
            try:
                update = schema[db_version](myDB)
                db_version = update.execute()
            except Exception as e:
                myDB.close()
                logger.log(u'Failed to update database with error: %s, attempting recovery...' % ex(e), logger.ERROR)

                if restoreDatabase(myDB.filename, db_version):
                    # initialize the main SB database
                    logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version)
                else:
                    logger.log_error_and_exit(u'Failed to restore database version: %s' % db_version)


def backup_database(filename, version):
    logger.log(u'Backing up database before upgrade')
    if not sickbeard.helpers.backupVersionedFile(dbFilename(filename), version):
        logger.log_error_and_exit(u'Database backup failed, abort upgrading database')
    else:
        logger.log(u'Proceeding with upgrade')


def get_rollback_module():
    import imp

    module_urls = [
        'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/Rollback/rollback.py']

    try:
        hdr = '# SickGear Rollback Module'
        module = ''
        fetched = False

        for t in range(1, 4):
            for url in module_urls:
                try:
                    module = helpers.getURL(url)
                    if module and module.startswith(hdr):
                        fetched = True
                        break
                except (StandardError, Exception):
                    continue
            if fetched:
                break
            time.sleep(30)

        if fetched:
            loaded = imp.new_module('DbRollback')
            exec(module, loaded.__dict__)
            return loaded

    except (StandardError, Exception):
        pass

    return None