Mirror of https://github.com/SickGear/SickGear.git
Fix slow database operations (port from midgetspy/sickbeard)
commit 7f5651bb41 (parent 9ace903490)
6 changed files with 47 additions and 96 deletions
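
In outline, the port replaces a DBConnection that reconnected on demand and ran with isolation_level = None (sqlite3 autocommit, so every statement paid its own commit) with a single persistent connection in the default transactional mode, committing once per statement or batch. A minimal standalone sketch of why that is faster, assuming a throwaway example.db; the table mirrors scene_exceptions from the diff, the row values are made up:

import sqlite3

# One long-lived connection, as the patched DBConnection keeps;
# 20 is the busy timeout (seconds) used in the diff below.
connection = sqlite3.connect('example.db', 20)

connection.execute('CREATE TABLE IF NOT EXISTS scene_exceptions '
                   '(indexer_id INTEGER, show_name TEXT, season NUMERIC)')
connection.commit()

rows = [(1, u'Example Show %d' % i, -1) for i in range(1000)]

# With isolation_level = None (the removed setting) each INSERT would be
# its own fsync'd transaction; in default mode the inserts share one
# transaction and a single commit covers the whole batch.
for row in rows:
    connection.execute('INSERT INTO scene_exceptions (indexer_id, show_name, season) '
                       'VALUES (?,?,?)', row)
connection.commit()  # one commit for all 1000 rows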
CHANGES.md
@@ -1,5 +1,7 @@
 ### 0.x.x (2015-xx-xx xx:xx:xx UTC)
 
+* Fix slow database operations (port from midgetspy/sickbeard)
+
 [develop changelog]
sickbeard/databases/mainDB.py
@@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 
 MIN_DB_VERSION = 9  # oldest db version we support migrating from
-MAX_DB_VERSION = 20000
+MAX_DB_VERSION = 20001
 
 
 class MainSanityCheck(db.DBSanityCheck):

@@ -949,6 +949,16 @@ class SickGearDatabaseVersion(db.SchemaUpgrade):
         self.setDBVersion(20000)
         return self.checkDBVersion()
 
+
+# 20000 -> 20001
+class DBIncreaseTo20001(db.SchemaUpgrade):
+    def execute(self):
+        backup_database(self.checkDBVersion())
+
+        logger.log('Bumping database version to force a backup before new database code')
+
+        self.setDBVersion(20001)
+        return self.checkDBVersion()
 
 
 # 10001 -> 10000
 class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade):
sickbeard/db.py
@@ -48,61 +48,14 @@ def dbFilename(filename="sickbeard.db", suffix=None):
 
 class DBConnection(object):
     def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):
 
         self.filename = filename
         self.suffix = suffix
         self.row_type = row_type
-        self.connection = None
-
-        try:
-            self.reconnect()
-        except Exception as e:
-            logger.log(u"DB error: " + ex(e), logger.ERROR)
-            raise
-
-    def reconnect(self):
-        """Closes the existing database connection and re-opens it."""
-        self.close()
-        self.connection = sqlite3.connect(dbFilename(self.filename, self.suffix), 20, check_same_thread=False)
-        self.connection.isolation_level = None
+        self.connection = sqlite3.connect(dbFilename(filename), 20)
 
-        if self.row_type == "dict":
+        if row_type == "dict":
             self.connection.row_factory = self._dict_factory
         else:
             self.connection.row_factory = sqlite3.Row
 
-    def __del__(self):
-        self.close()
-
-    def _cursor(self):
-        """Returns the cursor; reconnects if disconnected."""
-        if self.connection is None: self.reconnect()
-        return self.connection.cursor()
-
-    def execute(self, query, args=None, fetchall=False, fetchone=False):
-        """Executes the given query, returning the lastrowid from the query."""
-        cursor = self._cursor()
-
-        try:
-            if fetchall:
-                return self._execute(cursor, query, args).fetchall()
-            elif fetchone:
-                return self._execute(cursor, query, args).fetchone()
-            else:
-                return self._execute(cursor, query, args)
-        finally:
-            cursor.close()
-
-    def _execute(self, cursor, query, args):
-        try:
-            if args == None:
-                return cursor.execute(query)
-            return cursor.execute(query, args)
-        except sqlite3.OperationalError as e:
-            logger.log(u"DB error: " + ex(e), logger.ERROR)
-            self.close()
-            raise
-
     def checkDBVersion(self):
 
         result = None
@@ -118,13 +71,11 @@ class DBConnection(object):
         else:
             return 0
 
-    def mass_action(self, querylist, logTransaction=False, fetchall=False):
+    def mass_action(self, querylist, logTransaction=False):
 
         with db_lock:
-            # remove None types
-            querylist = [i for i in querylist if i != None]
 
-            if querylist == None:
+            if querylist is None:
                 return
 
             sqlResult = []
@@ -135,17 +86,15 @@ class DBConnection(object):
                     for qu in querylist:
                         if len(qu) == 1:
                             if logTransaction:
-                                logger.log(qu[0], logger.DEBUG)
-                            sqlResult.append(self.execute(qu[0], fetchall=fetchall))
+                                logger.log(qu[0], logger.DB)
+                            sqlResult.append(self.connection.execute(qu[0]).fetchall())
                         elif len(qu) > 1:
                             if logTransaction:
-                                logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
-                            sqlResult.append(self.execute(qu[0], qu[1], fetchall=fetchall))
-
-                    logger.log(u"Transaction with " + str(len(querylist)) + u" queries executed", logger.DEBUG)
-
-                    # finished
-                    break
+                                logger.log(qu[0] + " with args " + str(qu[1]), logger.DB)
+                            sqlResult.append(self.connection.execute(qu[0], qu[1]).fetchall())
+                    self.connection.commit()
+                    logger.log(u"Transaction with " + str(len(querylist)) + u" queries executed", logger.DEBUG)
+                    return sqlResult
                 except sqlite3.OperationalError, e:
                     sqlResult = []
                     if self.connection:
@@ -164,15 +113,13 @@ class DBConnection(object):
                     logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
                     raise
 
-                #time.sleep(0.02)
-
             return sqlResult
 
-    def action(self, query, args=None, fetchall=False, fetchone=False):
+    def action(self, query, args=None):
 
         with db_lock:
 
-            if query == None:
+            if query is None:
                 return
 
             sqlResult = None
@@ -180,13 +127,13 @@ class DBConnection(object):
 
             while attempt < 5:
                 try:
-                    if args == None:
+                    if args is None:
                         logger.log(self.filename + ": " + query, logger.DB)
+                        sqlResult = self.connection.execute(query)
                     else:
                         logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB)
-
-                    sqlResult = self.execute(query, args, fetchall=fetchall, fetchone=fetchone)
-
+                        sqlResult = self.connection.execute(query, args)
+                    self.connection.commit()
                     # get out of the connection attempt loop since we were successful
                     break
                 except sqlite3.OperationalError, e:
@@ -201,27 +148,17 @@ class DBConnection(object):
                     logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
                     raise
 
-            #time.sleep(0.02)
 
             return sqlResult
 
     def select(self, query, args=None):
 
-        sqlResults = self.action(query, args, fetchall=True)
+        sqlResults = self.action(query, args).fetchall()
 
-        if sqlResults == None:
+        if sqlResults is None:
             return []
 
         return sqlResults
 
-    def selectOne(self, query, args=None):
-
-        sqlResults = self.action(query, args, fetchone=True)
-
-        if sqlResults == None:
-            return []
-
-        return sqlResults
-
     def upsert(self, tableName, valueDict, keyDict):
@@ -480,9 +417,9 @@ def MigrationCode(myDB):
         41: sickbeard.mainDB.Migrate41,
 
         10000: sickbeard.mainDB.SickGearDatabaseVersion,
-        10001: sickbeard.mainDB.RemoveDefaultEpStatusFromTvShows
+        10001: sickbeard.mainDB.RemoveDefaultEpStatusFromTvShows,
 
-        #20000: sickbeard.mainDB.AddCoolSickGearFeature1,
+        20000: sickbeard.mainDB.DBIncreaseTo20001,
         #20001: sickbeard.mainDB.AddCoolSickGearFeature2,
         #20002: sickbeard.mainDB.AddCoolSickGearFeature3,
     }
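
Taken together, the db.py hunks leave DBConnection with a small surface: action() executes one statement and commits, select() is action(...).fetchall() with a None guard, and mass_action() runs a list of [query] or [query, args] items under a single commit, returning each statement's fetchall(). A hedged sketch of the resulting call pattern; the statements and column values are illustrative, not from this diff:

from sickbeard import db

myDB = db.DBConnection()

# One statement, one commit.
myDB.action('UPDATE tv_shows SET paused = ? WHERE indexer_id = ?', [0, 1])

# A list of rows (sqlite3.Row objects with the default row_type).
rows = myDB.select('SELECT show_name FROM tv_shows WHERE paused = ?', [0])

# Many statements, one commit; the result is one fetchall() list per entry.
cl = [['UPDATE tv_shows SET paused = 1 WHERE indexer_id = ?', [2]],
      ['UPDATE tv_shows SET paused = 1 WHERE indexer_id = ?', [3]]]
results = myDB.mass_action(cl)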
sickbeard/scene_exceptions.py
@@ -218,6 +218,7 @@ def retrieve_exceptions():
 
     # write all the exceptions we got off the net into the database
     myDB = db.DBConnection('cache.db')
+    cl = []
    for cur_indexer_id in exception_dict:
 
         # get a list of the existing exceptions for this ID

@@ -236,10 +237,12 @@ def retrieve_exceptions():
                 if not isinstance(cur_exception, unicode):
                     cur_exception = unicode(cur_exception, 'utf-8', 'replace')
 
-                myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
-                            [cur_indexer_id, cur_exception, curSeason])
+                cl.append(["INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
+                           [cur_indexer_id, cur_exception, curSeason]])
                 changed_exceptions = True
 
+    myDB.mass_action(cl)
+
     # since this could invalidate the results of the cache we clear it out after updating
     if changed_exceptions:
         logger.log(u"Updated scene exceptions")
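
The pattern above, queueing every INSERT in cl and flushing once with mass_action(), is what turns N commits into one. For reference, mass_action() dispatches on entry length (the len(qu) branches in db.py), so both shapes below are valid; the DELETE is an illustrative one-element entry, the INSERT matches the hunk:

from sickbeard import db

cl = []

# One-element entry: bare SQL with no bound args.
cl.append(['DELETE FROM scene_exceptions WHERE indexer_id = 1'])

# Two-element entry: parameterized SQL plus its args list.
cl.append(['INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)',
           [1, u'Example Show', -1]])

myDB = db.DBConnection('cache.db')
myDB.mass_action(cl)  # both statements run inside a single transaction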
sickbeard/tvcache.py
@@ -85,11 +85,7 @@ class TVCache():
         self.minTime = 10
 
     def _getDB(self):
-        # init provider database if not done already
-        if not self.providerDB:
-            self.providerDB = CacheDBConnection(self.providerID)
-
-        return self.providerDB
+        return CacheDBConnection(self.providerID)
 
     def _clearCache(self):
         if self.shouldClearCache():

@@ -313,9 +309,12 @@ class TVCache():
                 'SELECT * FROM [' + self.providerID + '] WHERE indexerid = ? AND season = ? AND episodes LIKE ? '
                 'AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
                 [epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
-        sqlResults = myDB.mass_action(cl, fetchall=True)
-        sqlResults = list(itertools.chain(*sqlResults))
+        sqlResults = myDB.mass_action(cl)
+        if sqlResults:
+            sqlResults = list(itertools.chain(*sqlResults))
+
         if not sqlResults:
             return neededEps
 
         # for each cache entry
         for curResult in sqlResults:
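
Since mass_action() returns one fetchall() list per queued query, the caller flattens the lists before iterating, which is what the itertools.chain call above does. A tiny self-contained illustration with made-up row tuples:

import itertools

# Shape of a mass_action() result: one list of rows per queued query.
per_query_rows = [[(1, 'ep1'), (1, 'ep2')],  # rows from the first query
                  [(2, 'ep3')]]              # rows from the second query

rows = list(itertools.chain(*per_query_rows))
assert rows == [(1, 'ep1'), (1, 'ep2'), (2, 'ep3')]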
sickbeard/webserve.py
@@ -3772,10 +3772,10 @@ class Home(MainHandler):
 
     def plotDetails(self, show, season, episode):
         myDB = db.DBConnection()
-        result = myDB.selectOne(
+        result = myDB.select(
             "SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
             (int(show), int(season), int(episode)))
-        return result['description'] if result else 'Episode not found.'
+        return result[0]['description'] if result else 'Episode not found.'
 
 
     def sceneExceptions(self, show):
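
With selectOne() gone, select() hands back a list even for a single-row lookup, hence the new [0] index. A minimal illustration of the guard-then-index shape; the row here is a made-up dict standing in for a fetched row:

result = [{'description': 'An example plot summary.'}]
print(result[0]['description'] if result else 'Episode not found.')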