# SickGear/sickbeard/db.py
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os.path
import re
import sqlite3
import time
import threading

import sickbeard
from sickbeard import encodingKludge as ek
from sickbeard import logger
from sickbeard.exceptions import ex
import helpers
db_lock = threading.Lock()
def dbFilename(filename='sickbeard.db', suffix=None):
    """
    Build the full path of a database file inside the data directory.

    @param filename: The sqlite database filename to use. If not specified,
                     will be made to be sickbeard.db
    @param suffix: The suffix to append to the filename. A '.' will be added
                   automatically, i.e. suffix='v0' will make dbfile.db.v0
    @return: the correct location of the database file.
    """
    name = filename if not suffix else '%s.%s' % (filename, suffix)
    return ek.ek(os.path.join, sickbeard.DATA_DIR, name)
class DBConnection(object):
    """Wrapper around a sqlite3 connection to one of the SickGear databases.

    Provides retrying query execution (action/mass_action), upsert and
    schema introspection helpers.
    """

    def __init__(self, filename='sickbeard.db', suffix=None, row_type=None):
        db_src = dbFilename(filename)
        if not os.path.isfile(db_src):
            # seed a fresh install from a legacy sickrage.db if one exists
            db_alt = dbFilename('sickrage.db')
            if os.path.isfile(db_alt):
                helpers.copyFile(db_alt, db_src)

        self.filename = filename
        # 20 second timeout while waiting on a locked database file
        self.connection = sqlite3.connect(db_src, 20)

        if row_type == 'dict':
            self.connection.row_factory = self._dict_factory
        else:
            self.connection.row_factory = sqlite3.Row

    def checkDBVersion(self):
        """Return the database schema version, or 0 if it cannot be read."""
        result = None
        try:
            if self.hasTable('db_version'):
                result = self.select('SELECT db_version FROM db_version')
        except Exception:
            # was a bare except; any failure here means an uninitialised db
            return 0

        if result:
            version = int(result[0]['db_version'])
            if 10000 > version and self.hasColumn('db_version', 'db_minor_version'):
                # legacy schemas keep a separate minor version column
                minor = self.select('SELECT db_minor_version FROM db_version')
                return version * 100 + int(minor[0]['db_minor_version'])
            return version
        else:
            return 0

    def mass_action(self, querylist, logTransaction=False):
        """Execute a list of queries in one transaction, retrying on lock.

        @param querylist: list of 1-tuples (sql,) or 2-tuples (sql, args)
        @param logTransaction: when True, log each query at DB log level
        @return: list of fetchall() results, one per query
        """
        with db_lock:
            if querylist is None:
                return

            sqlResult = []
            attempt = 0

            while attempt < 5:
                try:
                    affected = 0
                    for qu in querylist:
                        cursor = self.connection.cursor()
                        if len(qu) == 1:
                            if logTransaction:
                                logger.log(qu[0], logger.DB)
                            sqlResult.append(cursor.execute(qu[0]).fetchall())
                        elif len(qu) > 1:
                            if logTransaction:
                                logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DB)
                            sqlResult.append(cursor.execute(qu[0], qu[1]).fetchall())
                        affected += cursor.rowcount
                    self.connection.commit()
                    if affected > 0:
                        logger.log(u'Transaction with %s queries executed affected %i row%s' % (
                            len(querylist), affected, helpers.maybe_plural(affected)), logger.DEBUG)
                    return sqlResult
                except sqlite3.OperationalError as e:
                    sqlResult = []
                    if self.connection:
                        self.connection.rollback()
                    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                        # transient condition; retry after a short pause
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    if self.connection:
                        self.connection.rollback()
                    logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                    raise

            return sqlResult

    def action(self, query, args=None):
        """Execute a single query, committing on success.

        Retries up to 5 times when the database is locked.
        @return: the sqlite3 cursor, or None if query is None
        """
        with db_lock:
            if query is None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                try:
                    if args is None:
                        logger.log(self.filename + ': ' + query, logger.DB)
                        sqlResult = self.connection.execute(query)
                    else:
                        logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB)
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError as e:
                    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                    raise

            return sqlResult

    def select(self, query, args=None):
        """Execute a query and return all rows (empty list when none)."""
        sqlResults = self.action(query, args).fetchall()

        if sqlResults is None:
            return []

        return sqlResults

    def upsert(self, tableName, valueDict, keyDict):
        """Update the row matching keyDict; insert it if nothing changed.

        @param tableName: table to write to
        @param valueDict: columns/values to set
        @param keyDict: columns/values identifying the row
        """
        changesBefore = self.connection.total_changes

        genParams = lambda myDict: [x + ' = ?' for x in myDict.keys()]

        query = 'UPDATE [%s] SET %s WHERE %s' % (
            tableName, ', '.join(genParams(valueDict)), ' AND '.join(genParams(keyDict)))

        # list() so this also works where dict.values() returns a view (py3)
        self.action(query, list(valueDict.values()) + list(keyDict.values()))

        if self.connection.total_changes == changesBefore:
            # the UPDATE hit no rows, so INSERT a new one
            columns = list(valueDict.keys()) + list(keyDict.keys())
            query = 'INSERT INTO [%s] (%s) VALUES (%s)' % (
                tableName, ', '.join(columns), ', '.join(['?'] * len(columns)))
            self.action(query, list(valueDict.values()) + list(keyDict.values()))

    def tableInfo(self, tableName):
        """Return {column_name: {'type': column_type}} for the given table."""
        # FIXME ? binding is not supported here, but I cannot find a way to escape a string manually
        sqlResult = self.select('PRAGMA table_info([%s])' % tableName)
        columns = {}
        for column in sqlResult:
            columns[column['name']] = {'type': column['type']}
        return columns

    # http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
    @staticmethod
    def _dict_factory(cursor, row):
        """Row factory producing a plain dict keyed by column name."""
        d = {}
        for idx, col in enumerate(cursor.description):
            d[col[0]] = row[idx]
        return d

    def hasTable(self, tableName):
        """Return True if the named table exists."""
        return len(self.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0

    def hasColumn(self, tableName, column):
        """Return True if the named column exists on the table."""
        return column in self.tableInfo(tableName)

    def hasIndex(self, tableName, index):
        """Return True if the named index exists on the table."""
        sqlResults = self.select('PRAGMA index_list([%s])' % tableName)
        for result in sqlResults:
            if result['name'] == index:
                return True
        return False

    def addColumn(self, table, column, type='NUMERIC', default=0):
        # note: 'type' shadows the builtin but the name is kept for caller compatibility
        self.action('ALTER TABLE [%s] ADD %s %s' % (table, column, type))
        self.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))

    def close(self):
        """Close database connection"""
        if getattr(self, 'connection', None) is not None:
            self.connection.close()
        self.connection = None
def sanityCheckDatabase(connection, sanity_check):
    """Instantiate the given sanity checker against a connection and run it."""
    checker = sanity_check(connection)
    checker.check()
class DBSanityCheck(object):
    """Base class for database sanity checkers; subclasses override check()."""

    def __init__(self, connection):
        # keep a handle on the db connection for subclasses to use
        self.connection = connection

    def check(self):
        """Default check performs no validation."""
        pass
def upgradeDatabase(connection, schema):
    """Walk the schema upgrade class tree and apply any pending upgrades."""
    logger.log(u'Checking database structure...', logger.MESSAGE)
    _processUpgrade(connection, schema)
def prettyName(class_name):
    """Convert a CamelCase class name into space-separated words."""
    words = re.finditer('([A-Z])([a-z0-9]+)', class_name)
    return ' '.join(match.group() for match in words)
def restoreDatabase(filename, version):
    """Roll the named database back to the given versioned backup.

    @return: True on success; exits the application on failure.
    """
    logger.log(u'Restoring database before trying upgrade again')
    backup = dbFilename(filename=filename, suffix='v%s' % version)
    if sickbeard.helpers.restoreVersionedFile(backup, version):
        return True
    logger.log_error_and_exit(u'Database restore failed, abort upgrading database')
    return False
def _processUpgrade(connection, upgradeClass):
    """Apply one upgrade class if its test() fails, then recurse into subclasses.

    On a second consecutive failure of the upgrade, the previous versioned DB
    backup is restored and the application exits.
    """
    instance = upgradeClass(connection)
    logger.log(u'Checking %s database upgrade' % prettyName(upgradeClass.__name__), logger.DEBUG)
    if not instance.test():
        logger.log(u'Database upgrade required: %s' % prettyName(upgradeClass.__name__), logger.MESSAGE)
        try:
            instance.execute()
        except sqlite3.DatabaseError:
            # attempting the upgrade once more; on repeated failure restore the
            # previous DB backup and exit
            try:
                instance.execute()
            except Exception:
                # was a bare except; narrowed to Exception
                result = connection.select('SELECT db_version FROM db_version')
                if result:
                    version = int(result[0]['db_version'])

                    # close db before attempting restore
                    connection.close()

                    if restoreDatabase(connection.filename, version):
                        logger.log_error_and_exit(u'Successfully restored database version: %s' % version)
                    else:
                        logger.log_error_and_exit(u'Failed to restore database version: %s' % version)

        logger.log('%s upgrade completed' % upgradeClass.__name__, logger.DEBUG)
    else:
        logger.log('%s upgrade not required' % upgradeClass.__name__, logger.DEBUG)

    for upgradeSubClass in upgradeClass.__subclasses__():
        _processUpgrade(connection, upgradeSubClass)
# Base migration class. All future DB changes should be subclassed from this class
class SchemaUpgrade(object):
    """Base migration class; DB schema changes subclass this and implement
    test() / execute()."""

    def __init__(self, connection):
        self.connection = connection

    def hasTable(self, tableName):
        """Return True if the named table exists."""
        return len(self.connection.select('SELECT 1 FROM sqlite_master WHERE name = ?;', (tableName, ))) > 0

    def hasColumn(self, tableName, column):
        """Return True if the named column exists on the table."""
        return column in self.connection.tableInfo(tableName)

    def addColumn(self, table, column, type='NUMERIC', default=0):
        # note: 'type' shadows the builtin but the name is kept for caller compatibility
        self.connection.action('ALTER TABLE [%s] ADD %s %s' % (table, column, type))
        self.connection.action('UPDATE [%s] SET %s = ?' % (table, column), (default,))

    def dropColumn(self, table, column):
        """Remove a column by rebuilding the table without it (this sqlite
        vintage has no ALTER TABLE ... DROP COLUMN)."""
        # get old table columns and store the ones we want to keep
        result = self.connection.select('pragma table_info([%s])' % table)
        keptColumns = [c for c in result if c['name'] != column]

        keptColumnsNames = []
        final = []
        pk = []

        # copy the old table schema, column by column
        # (loop var renamed from 'column' to avoid shadowing the parameter)
        for col in keptColumns:
            keptColumnsNames.append(col['name'])
            cl = [col['name'], col['type']]
            '''
            To be implemented if ever required
            if col['dflt_value']:
                cl.append(str(col['dflt_value']))
            if col['notnull']:
                cl.append(col['notnull'])
            '''
            if int(col['pk']) != 0:
                pk.append(col['name'])
            final.append(' '.join(cl))

        # join all the table column creation fields
        final = ', '.join(final)
        keptColumnsNames = ', '.join(keptColumnsNames)

        # generate sql for the new table creation
        if len(pk) == 0:
            sql = 'CREATE TABLE [%s_new] (%s)' % (table, final)
        else:
            pk = ', '.join(pk)
            sql = 'CREATE TABLE [%s_new] (%s, PRIMARY KEY(%s))' % (table, final, pk)

        # create new temporary table and copy the old table data across, barring the removed column
        self.connection.action(sql)
        self.connection.action('INSERT INTO [%s_new] SELECT %s FROM [%s]' % (table, keptColumnsNames, table))

        # copy the old indexes from the old table
        result = self.connection.select("SELECT sql FROM sqlite_master WHERE tbl_name=? and type='index'", [table])

        # remove the old table and rename the new table to take its place
        self.connection.action('DROP TABLE [%s]' % table)
        self.connection.action('ALTER TABLE [%s_new] RENAME TO [%s]' % (table, table))

        # write any indexes to the new table
        for index in result:
            self.connection.action(index['sql'])

        # vacuum the db as we will have a lot of space to reclaim after dropping tables
        self.connection.action('VACUUM')

    def checkDBVersion(self):
        """Return the current schema version from the connection."""
        return self.connection.checkDBVersion()

    def incDBVersion(self):
        """Bump db_version by one and return the new value."""
        new_version = self.checkDBVersion() + 1
        self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
        return new_version

    def setDBVersion(self, new_version):
        """Set db_version to an explicit value and return it."""
        self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
        return new_version

    def listTables(self):
        """Return the names of all tables in the database."""
        tables = []
        sql_result = self.connection.select('SELECT name FROM sqlite_master where type = "table"')
        for table in sql_result:
            tables.append(table[0])
        return tables
def MigrationCode(myDB):
    """Upgrade the given main database step by step up to MAX_DB_VERSION.

    Exits the application (after attempting a backup restore) if any
    upgrade step fails or the detected version is unsupported.
    """
    schema = {
        0: sickbeard.mainDB.InitialSchema,
        9: sickbeard.mainDB.AddSizeAndSceneNameFields,
        10: sickbeard.mainDB.RenameSeasonFolders,
        11: sickbeard.mainDB.Add1080pAndRawHDQualities,
        12: sickbeard.mainDB.AddShowidTvdbidIndex,
        13: sickbeard.mainDB.AddLastUpdateTVDB,
        14: sickbeard.mainDB.AddDBIncreaseTo15,
        15: sickbeard.mainDB.AddIMDbInfo,
        16: sickbeard.mainDB.AddProperNamingSupport,
        17: sickbeard.mainDB.AddEmailSubscriptionTable,
        18: sickbeard.mainDB.AddProperSearch,
        19: sickbeard.mainDB.AddDvdOrderOption,
        20: sickbeard.mainDB.AddSubtitlesSupport,
        21: sickbeard.mainDB.ConvertTVShowsToIndexerScheme,
        22: sickbeard.mainDB.ConvertTVEpisodesToIndexerScheme,
        23: sickbeard.mainDB.ConvertIMDBInfoToIndexerScheme,
        24: sickbeard.mainDB.ConvertInfoToIndexerScheme,
        25: sickbeard.mainDB.AddArchiveFirstMatchOption,
        26: sickbeard.mainDB.AddSceneNumbering,
        27: sickbeard.mainDB.ConvertIndexerToInteger,
        28: sickbeard.mainDB.AddRequireAndIgnoreWords,
        29: sickbeard.mainDB.AddSportsOption,
        30: sickbeard.mainDB.AddSceneNumberingToTvEpisodes,
        31: sickbeard.mainDB.AddAnimeTVShow,
        32: sickbeard.mainDB.AddAbsoluteNumbering,
        33: sickbeard.mainDB.AddSceneAbsoluteNumbering,
        34: sickbeard.mainDB.AddAnimeBlacklistWhitelist,
        35: sickbeard.mainDB.AddSceneAbsoluteNumbering2,
        36: sickbeard.mainDB.AddXemRefresh,
        37: sickbeard.mainDB.AddSceneToTvShows,
        38: sickbeard.mainDB.AddIndexerMapping,
        39: sickbeard.mainDB.AddVersionToTvEpisodes,

        40: sickbeard.mainDB.BumpDatabaseVersion,
        41: sickbeard.mainDB.Migrate41,
        42: sickbeard.mainDB.Migrate41,
        43: sickbeard.mainDB.Migrate41,
        44: sickbeard.mainDB.Migrate41,

        4301: sickbeard.mainDB.Migrate4301,
        4302: sickbeard.mainDB.Migrate4302,
        4400: sickbeard.mainDB.Migrate4302,

        5816: sickbeard.mainDB.MigrateUpstream,
        5817: sickbeard.mainDB.MigrateUpstream,
        5818: sickbeard.mainDB.MigrateUpstream,

        10000: sickbeard.mainDB.SickGearDatabaseVersion,
        10001: sickbeard.mainDB.RemoveDefaultEpStatusFromTvShows,
        10002: sickbeard.mainDB.RemoveMinorDBVersion,
        10003: sickbeard.mainDB.RemoveMetadataSub,

        20000: sickbeard.mainDB.DBIncreaseTo20001,
        20001: sickbeard.mainDB.AddTvShowOverview,
        20002: sickbeard.mainDB.AddTvShowTags,
        20003: sickbeard.mainDB.ChangeMapIndexer,
        # 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
    }

    db_version = myDB.checkDBVersion()
    logger.log(u'Detected database version: v%s' % db_version, logger.DEBUG)

    if db_version not in schema:
        if db_version == sickbeard.mainDB.MAX_DB_VERSION:
            logger.log(u'Database schema is up-to-date, no upgrade required')
        elif db_version < 10000:
            logger.log_error_and_exit(u'SickGear does not currently support upgrading from this database version')
        else:
            logger.log_error_and_exit(u'Invalid database version')
    else:
        # apply each migration class in sequence until the schema is current
        while db_version < sickbeard.mainDB.MAX_DB_VERSION:
            try:
                update = schema[db_version](myDB)
                db_version = update.execute()
            except Exception as e:
                myDB.close()
                logger.log(u'Failed to update database with error: %s attempting recovery...' % ex(e), logger.ERROR)

                if restoreDatabase(myDB.filename, db_version):
                    # initialize the main SB database
                    logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version)
                else:
                    logger.log_error_and_exit(u'Failed to restore database version: %s' % db_version)
def backup_database(filename, version):
    """Create a versioned backup of the database prior to upgrading.

    Exits the application if the backup cannot be created.
    """
    logger.log(u'Backing up database before upgrade')
    if sickbeard.helpers.backupVersionedFile(dbFilename(filename), version):
        logger.log(u'Proceeding with upgrade')
    else:
        logger.log_error_and_exit(u'Database backup failed, abort upgrading database')
def get_rollback_module():
    """Fetch and load the SickGear database rollback module from github.

    Retries the download up to three times with a pause between attempts.
    @return: the loaded module object, or None if it could not be fetched
    """
    import types  # replaces deprecated imp.new_module

    module_urls = [
        'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/Rollback/rollback.py']

    try:
        hdr = '# SickGear Rollback Module'
        module = ''
        fetched = False
        for t in range(1, 4):
            for url in module_urls:
                try:
                    module = helpers.getURL(url)
                    if module and module.startswith(hdr):
                        fetched = True
                        break
                except Exception:
                    continue
            if fetched:
                break
            if t < 3:
                # pause before retrying, but not after the final attempt
                time.sleep(30)

        if fetched:
            # NOTE: executes remotely fetched code; source is restricted to
            # the trusted header-checked URL above
            loaded = types.ModuleType('DbRollback')
            exec(module, loaded.__dict__)
            return loaded

    except Exception:
        pass

    return None