Change provider cache storage structure

Add handling for failed cache database upgrades
This commit is contained in:
Adam 2015-03-29 23:46:46 +08:00
parent adcfd720e4
commit 24236eba6f
6 changed files with 102 additions and 108 deletions

View file

@ -63,10 +63,9 @@
</tfoot>
<tbody>
#for $provider in $cacheResults:
#for $hItem in $provider[1]:
#for $hItem in $cacheResults:
<tr>
<td class="col-cache">$provider[0]</td>
<td class="col-cache">$hItem['provider']</td>
<td class="col-name-cache">$hItem['name']</td>
<td class="col-cache">$hItem['season']</td>
<td class="col-episodes">$hItem['episodes']</td>
@ -77,7 +76,6 @@
<td class="col-cache">$hItem['release_group']</td>
<td class="col-cache">$hItem['version']</td>
</tr>
#end for
#end for
</tbody>
</table>

View file

@ -100,3 +100,27 @@ class AddNetworkConversions(AddSceneExceptionsRefresh):
' tvrage_country TEXT)')
self.connection.action('CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)')
class ConsolidateProviders(AddNetworkConversions):
    """Cache-db schema upgrade: consolidate the per-provider cache tables
    into a single shared provider_cache table keyed by a provider column.
    """

    def test(self):
        # NOTE(review): presumably signals whether this migration still
        # needs to run, based on the cache db version — confirm against
        # the upgrade driver's handling of test().
        return self.checkDBVersion() > 1

    def execute(self):
        # Back up cache.db before making destructive schema changes.
        db.backup_database('cache.db', self.checkDBVersion())
        # Recreate provider_cache from scratch; url is UNIQUE so duplicate
        # cache entries are rejected at insert time.
        if self.hasTable('provider_cache'):
            self.connection.action('DROP TABLE provider_cache')
        self.connection.action('CREATE TABLE provider_cache (provider TEXT ,name TEXT, season NUMERIC, episodes TEXT,'
                               ' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
                               'version NUMERIC)')
        # Drop every table not on the keep-list: the legacy per-provider
        # cache tables have arbitrary provider-derived names, so they are
        # identified by exclusion rather than enumeration.
        keep_tables = set(['lastUpdate', 'lastSearch', 'db_version', 'scene_exceptions', 'scene_names',
                           'network_timezones', 'scene_exceptions_refresh', 'network_conversions', 'provider_cache'])
        current_tables = set(self.listTables())
        remove_tables = list(current_tables - keep_tables)
        for table in remove_tables:
            self.connection.action('DROP TABLE %s' % table)
        self.incDBVersion()

View file

@ -145,12 +145,6 @@ class MainSanityCheck(db.DBSanityCheck):
logger.log(u'No UNAIRED episodes, check passed')
def backup_database(version):
    """Back up the main database file before a schema upgrade.

    Exits the application (via log_error_and_exit) if the backup fails,
    so an upgrade never proceeds without a restorable copy.

    :param version: current schema version, used to tag the backup file
    """
    logger.log(u'Backing up database before upgrade')
    if not helpers.backupVersionedFile(db.dbFilename(), version):
        logger.log_error_and_exit(u'Database backup failed, abort upgrading database')
    else:
        logger.log(u'Proceeding with upgrade')
# ======================
# = Main DB Migrations =
@ -161,7 +155,7 @@ def backup_database(version):
# 0 -> 31
class InitialSchema(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasTable('tv_shows') and not self.hasTable('db_version'):
queries = [
@ -209,7 +203,7 @@ class InitialSchema(db.SchemaUpgrade):
class AddSizeAndSceneNameFields(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_episodes', 'file_size'):
self.addColumn('tv_episodes', 'file_size')
@ -320,7 +314,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
# 10 -> 11
class RenameSeasonFolders(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
# rename the column
self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
@ -404,7 +398,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
return result
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
# update the default quality so we dont grab the wrong qualities after migration
sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT)
@ -481,7 +475,7 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):
# Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Checking for duplicate shows before adding unique index.')
MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id')
@ -500,7 +494,7 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):
class AddLastUpdateTVDB(db.SchemaUpgrade):
# Adding column last_update_tvdb to tv_shows for controlling nightly updates
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'last_update_tvdb'):
logger.log(u'Adding column last_update_tvdb to tv_shows')
@ -513,7 +507,7 @@ class AddLastUpdateTVDB(db.SchemaUpgrade):
# 14 -> 15
class AddDBIncreaseTo15(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Bumping database version to v%s' % self.checkDBVersion())
self.incDBVersion()
@ -523,7 +517,7 @@ class AddDBIncreaseTo15(db.SchemaUpgrade):
# 15 -> 16
class AddIMDbInfo(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Creating IMDb table imdb_info')
self.connection.action(
@ -540,7 +534,7 @@ class AddIMDbInfo(db.SchemaUpgrade):
# 16 -> 17
class AddProperNamingSupport(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'imdb_id')\
and self.hasColumn('tv_shows', 'rls_require_words')\
@ -559,7 +553,7 @@ class AddProperNamingSupport(db.SchemaUpgrade):
# 17 -> 18
class AddEmailSubscriptionTable(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_episodes', 'is_proper')\
and self.hasColumn('tv_shows', 'rls_require_words')\
@ -579,7 +573,7 @@ class AddEmailSubscriptionTable(db.SchemaUpgrade):
# 18 -> 19
class AddProperSearch(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'notify_list')\
and self.hasColumn('tv_shows', 'rls_require_words')\
@ -600,7 +594,7 @@ class AddProperSearch(db.SchemaUpgrade):
# 19 -> 20
class AddDvdOrderOption(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'dvdorder'):
logger.log(u'Adding column dvdorder to tv_shows')
@ -613,7 +607,7 @@ class AddDvdOrderOption(db.SchemaUpgrade):
# 20 -> 21
class AddSubtitlesSupport(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'subtitles'):
logger.log(u'Adding subtitles to tv_shows and tv_episodes')
@ -629,7 +623,7 @@ class AddSubtitlesSupport(db.SchemaUpgrade):
# 21 -> 22
class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Converting TV Shows table to Indexer Scheme...')
@ -656,7 +650,7 @@ class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade):
# 22 -> 23
class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Converting TV Episodes table to Indexer Scheme...')
@ -686,7 +680,7 @@ class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
# 23 -> 24
class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Converting IMDB Info table to Indexer Scheme...')
@ -708,7 +702,7 @@ class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
# 24 -> 25
class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Converting Info table to Indexer Scheme...')
@ -730,7 +724,7 @@ class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
# 25 -> 26
class AddArchiveFirstMatchOption(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'archive_firstmatch'):
logger.log(u'Adding column archive_firstmatch to tv_shows')
@ -744,7 +738,7 @@ class AddArchiveFirstMatchOption(db.SchemaUpgrade):
class AddSceneNumbering(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if self.hasTable('scene_numbering'):
self.connection.action('DROP TABLE scene_numbering')
@ -760,7 +754,7 @@ class AddSceneNumbering(db.SchemaUpgrade):
# 27 -> 28
class ConvertIndexerToInteger(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
cl = []
logger.log(u'Converting Indexer to Integer ...', logger.MESSAGE)
@ -786,7 +780,7 @@ class AddRequireAndIgnoreWords(db.SchemaUpgrade):
self.incDBVersion()
return self.checkDBVersion()
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'rls_require_words'):
logger.log(u'Adding column rls_require_words to tv_shows')
@ -803,7 +797,7 @@ class AddRequireAndIgnoreWords(db.SchemaUpgrade):
# 29 -> 30
class AddSportsOption(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'sports'):
logger.log(u'Adding column sports to tv_shows')
@ -828,7 +822,7 @@ class AddSportsOption(db.SchemaUpgrade):
# 30 -> 31
class AddSceneNumberingToTvEpisodes(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding columns scene_season and scene_episode to tvepisodes')
self.addColumn('tv_episodes', 'scene_season', 'NUMERIC', 'NULL')
@ -841,7 +835,7 @@ class AddSceneNumberingToTvEpisodes(db.SchemaUpgrade):
# 31 -> 32
class AddAnimeTVShow(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding column anime to tv_episodes')
self.addColumn('tv_shows', 'anime', 'NUMERIC', '0')
@ -853,7 +847,7 @@ class AddAnimeTVShow(db.SchemaUpgrade):
# 32 -> 33
class AddAbsoluteNumbering(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding column absolute_number to tv_episodes')
self.addColumn('tv_episodes', 'absolute_number', 'NUMERIC', '0')
@ -865,7 +859,7 @@ class AddAbsoluteNumbering(db.SchemaUpgrade):
# 33 -> 34
class AddSceneAbsoluteNumbering(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding columns absolute_number and scene_absolute_number to scene_numbering')
self.addColumn('scene_numbering', 'absolute_number', 'NUMERIC', '0')
@ -878,7 +872,7 @@ class AddSceneAbsoluteNumbering(db.SchemaUpgrade):
# 34 -> 35
class AddAnimeBlacklistWhitelist(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
cl = []
cl.append(['CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)'])
@ -893,7 +887,7 @@ class AddAnimeBlacklistWhitelist(db.SchemaUpgrade):
# 35 -> 36
class AddSceneAbsoluteNumbering2(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding column scene_absolute_number to tv_episodes')
self.addColumn('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0')
@ -905,7 +899,7 @@ class AddSceneAbsoluteNumbering2(db.SchemaUpgrade):
# 36 -> 37
class AddXemRefresh(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Creating table xem_refresh')
self.connection.action(
@ -918,7 +912,7 @@ class AddXemRefresh(db.SchemaUpgrade):
# 37 -> 38
class AddSceneToTvShows(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding column scene to tv_shows')
self.addColumn('tv_shows', 'scene', 'NUMERIC', '0')
@ -930,7 +924,7 @@ class AddSceneToTvShows(db.SchemaUpgrade):
# 38 -> 39
class AddIndexerMapping(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
if self.hasTable('indexer_mapping'):
self.connection.action('DROP TABLE indexer_mapping')
@ -946,7 +940,7 @@ class AddIndexerMapping(db.SchemaUpgrade):
# 39 -> 40
class AddVersionToTvEpisodes(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding columns release_group and version to tv_episodes')
self.addColumn('tv_episodes', 'release_group', 'TEXT', '')
@ -962,7 +956,7 @@ class AddVersionToTvEpisodes(db.SchemaUpgrade):
# 40 -> 10000
class BumpDatabaseVersion(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Bumping database version')
@ -973,7 +967,7 @@ class BumpDatabaseVersion(db.SchemaUpgrade):
# 41 -> 10001
class Migrate41(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Bumping database version')
@ -984,7 +978,7 @@ class Migrate41(db.SchemaUpgrade):
# 5816 - 5818 -> 15
class MigrateUpstream(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Migrate SickBeard DB v%s into v15' % str(self.checkDBVersion()).replace('58', ''))
@ -995,7 +989,7 @@ class MigrateUpstream(db.SchemaUpgrade):
# 10000 -> 20000
class SickGearDatabaseVersion(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Bumping database version to new SickGear standards')
@ -1006,7 +1000,7 @@ class SickGearDatabaseVersion(db.SchemaUpgrade):
# 10001 -> 10000
class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Dropping column default_ep_status from tv_shows')
self.dropColumn('tv_shows', 'default_ep_status')
@ -1018,7 +1012,7 @@ class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade):
# 20000 -> 20001
class DBIncreaseTo20001(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Bumping database version to force a backup before new database code')
@ -1032,7 +1026,7 @@ class DBIncreaseTo20001(db.SchemaUpgrade):
# 20001 -> 20002
class AddTvShowOverview(db.SchemaUpgrade):
def execute(self):
backup_database(self.checkDBVersion())
db.backup_database('sickbeard.db', self.checkDBVersion())
logger.log(u'Adding column overview to tv_shows')
self.addColumn('tv_shows', 'overview', 'TEXT', '')

View file

@ -236,9 +236,9 @@ def prettyName(class_name):
return ' '.join([x.group() for x in re.finditer('([A-Z])([a-z0-9]+)', class_name)])
def restoreDatabase(version):
def restoreDatabase(filename, version):
logger.log(u'Restoring database before trying upgrade again')
if not sickbeard.helpers.restoreVersionedFile(dbFilename(suffix='v%s' % version), version):
if not sickbeard.helpers.restoreVersionedFile(dbFilename(filename=filename, suffix='v%s' % version), version):
logger.log_error_and_exit(u'Database restore failed, abort upgrading database')
return False
else:
@ -257,7 +257,6 @@ def _processUpgrade(connection, upgradeClass):
try:
instance.execute()
except:
restored = False
result = connection.select('SELECT db_version FROM db_version')
if result:
version = int(result[0]['db_version'])
@ -265,14 +264,11 @@ def _processUpgrade(connection, upgradeClass):
# close db before attempting restore
connection.close()
if restoreDatabase(version):
# initialize the main SB database
upgradeDatabase(DBConnection(), sickbeard.mainDB.InitialSchema)
restored = True
if restoreDatabase(connection.filename, version):
logger.log_error_and_exit(u'Successfully restored database version: %s' % version)
else:
logger.log_error_and_exit(u'Failed to restore database version: %s' % version)
if not restored:
print 'Error in %s: %s ' % (upgradeClass.__name__, ex(e))
raise
logger.log('%s upgrade completed' % upgradeClass.__name__, logger.DEBUG)
else:
logger.log('%s upgrade not required' % upgradeClass.__name__, logger.DEBUG)
@ -369,6 +365,13 @@ class SchemaUpgrade(object):
self.connection.action('UPDATE db_version SET db_version = ?', [new_version])
return new_version
def listTables(self):
    """Return the names of all tables in the connected SQLite database.

    Queries sqlite_master for rows of type "table" and collects the first
    column (the table name) of each result row.

    :return: list of table-name strings (order as returned by SQLite)
    """
    # Comprehension replaces the manual append loop (same rows, same order).
    return [table[0] for table in
            self.connection.select('SELECT name FROM sqlite_master where type = "table"')]
def MigrationCode(myDB):
schema = {
@ -442,8 +445,15 @@ def MigrationCode(myDB):
myDB.close()
logger.log(u'Failed to update database with error: %s attempting recovery...' % ex(e), logger.ERROR)
if restoreDatabase(db_version):
if restoreDatabase(myDB.filename, db_version):
# initialize the main SB database
logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version)
else:
logger.log_error_and_exit(u'Failed to restore database version: %s' % db_version)
logger.log_error_and_exit(u'Failed to restore database version: %s' % db_version)
def backup_database(filename, version):
    """Back up the named database file before a schema upgrade.

    Exits the application (via log_error_and_exit) if the backup fails,
    so an upgrade never proceeds without a restorable copy.

    :param filename: database file name (e.g. 'sickbeard.db' or 'cache.db')
    :param version: current schema version, used to tag the backup file
    """
    logger.log(u'Backing up database before upgrade')
    if not sickbeard.helpers.backupVersionedFile(dbFilename(filename), version):
        logger.log_error_and_exit(u'Database backup failed, abort upgrading database')
    else:
        logger.log(u'Proceeding with upgrade')

View file

@ -38,36 +38,6 @@ class CacheDBConnection(db.DBConnection):
def __init__(self, providerName):
db.DBConnection.__init__(self, 'cache.db')
# Create the table if it's not already there
try:
if not self.hasTable(providerName):
self.action(
'CREATE TABLE [' + providerName + '] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT, release_group TEXT)')
self.action(
'CREATE UNIQUE INDEX IF NOT EXISTS [idx_' + providerName + '_url] ON [' + providerName + '] (url)')
elif not self.hasIndex(providerName, 'idx_%s_url' % providerName):
sqlResults = self.select(
'SELECT url, COUNT(url) as count FROM [' + providerName + '] GROUP BY url HAVING count > 1')
for cur_dupe in sqlResults:
self.action('DELETE FROM [' + providerName + '] WHERE url = ?', [cur_dupe['url']])
self.action(
'CREATE UNIQUE INDEX IF NOT EXISTS [idx_' + providerName + '_url] ON [' + providerName + '] (url)')
# add release_group column to table if missing
if not self.hasColumn(providerName, 'release_group'):
self.addColumn(providerName, 'release_group', 'TEXT', '')
# add version column to table if missing
if not self.hasColumn(providerName, 'version'):
self.addColumn(providerName, 'version', 'NUMERIC', '-1')
except Exception, e:
if str(e) != 'table [' + providerName + '] already exists':
raise
# Create the table if it's not already there
try:
if not self.hasTable('lastUpdate'):
@ -90,7 +60,7 @@ class TVCache():
def _clearCache(self):
if self.shouldClearCache():
myDB = self._getDB()
myDB.action('DELETE FROM [' + self.providerID + '] WHERE 1')
myDB.action('DELETE FROM provider_cache WHERE provider = ?', [self.providerID])
def _get_title_and_url(self, item):
# override this in the provider if recent search has a different data layout to backlog searches
@ -273,8 +243,8 @@ class TVCache():
logger.log(u'Added RSS item: [' + name + '] to cache: [' + self.providerID + ']', logger.DEBUG)
return [
'INSERT OR IGNORE INTO [' + self.providerID + '] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)',
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
'INSERT OR IGNORE INTO provider_cache (provider, name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?,?)',
[self.providerID, name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def searchCache(self, episode, manualSearch=False):
@ -286,12 +256,12 @@ class TVCache():
def listPropers(self, date=None, delimiter='.'):
myDB = self._getDB()
sql = "SELECT * FROM [" + self.providerID + "] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'"
sql = "SELECT * FROM provider_cache WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%' AND provider = ?"
if date != None:
sql += ' AND time >= ' + str(int(time.mktime(date.timetuple())))
return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))
return filter(lambda x: x['indexerid'] != 0, myDB.select(sql, [self.providerID]))
def findNeededEpisodes(self, episode, manualSearch=False):
@ -301,14 +271,14 @@ class TVCache():
myDB = self._getDB()
if type(episode) != list:
sqlResults = myDB.select(
'SELECT * FROM [' + self.providerID + '] WHERE indexerid = ? AND season = ? AND episodes LIKE ?',
[episode.show.indexerid, episode.season, '%|' + str(episode.episode) + '|%'])
'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?',
[self.providerID, episode.show.indexerid, episode.season, '%|' + str(episode.episode) + '|%'])
else:
for epObj in episode:
cl.append([
'SELECT * FROM [' + self.providerID + '] WHERE indexerid = ? AND season = ? AND episodes LIKE ? '
'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ? '
'AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
[epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
[self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
sqlResults = myDB.mass_action(cl)
if sqlResults:
sqlResults = list(itertools.chain(*sqlResults))

View file

@ -4745,15 +4745,13 @@ class ApiBuilder(MainHandler):
class Cache(MainHandler):
def index(self):
myDB = db.DBConnection('cache.db')
results = []
for provider in sickbeard.providers.sortedProviderList():
try:
sqlResults = myDB.select('SELECT * FROM %s' % provider.cache.providerID)
except:
continue
results.append((provider.name, sqlResults))
sql_results = myDB.select('SELECT * FROM provider_cache')
if not sql_results:
sql_results = []
t = PageTemplate(headers=self.request.headers, file='cache.tmpl')
t.cacheResults = results
t.cacheResults = sql_results
return t.respond()