From 2af732c731c7d9a0879793236d69125b31319d46 Mon Sep 17 00:00:00 2001
From: adam
Date: Sat, 13 Dec 2014 13:04:21 +0800
Subject: [PATCH] Fix duplicate entries in cache database

---
 CHANGES.md                    |   1 +
 sickbeard/databases/mainDB.py | 668 +++++++++++++++++-----------------
 sickbeard/db.py               |   8 +
 sickbeard/tvcache.py          |  95 ++---
 4 files changed, 396 insertions(+), 376 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index bff1fc40..512b3ce7 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -32,6 +32,7 @@
 * Change the default state for all check boxes on the Episode Overview page to not checked
 * Add validation to Go button to ensure at least one item is checked on Episode Overview page
 * Add highlight to current status text in header on Episode Overview page
+* Fix duplicate entries in cache database

 [develop changelog]
 * Add TVRage network name standardization

diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py
index d69c3367..12a1c8ae 100644
--- a/sickbeard/databases/mainDB.py
+++ b/sickbeard/databases/mainDB.py
@@ -40,116 +40,117 @@ class MainSanityCheck(db.DBSanityCheck):

     def fix_duplicate_shows(self, column='indexer_id'):

-        sqlResults = self.connection.select(
-            "SELECT show_id, " + column + ", COUNT(" + column + ") as count FROM tv_shows GROUP BY " + column + " HAVING count > 1")
+        sql_results = self.connection.select(
+            'SELECT show_id, ' + column + ', COUNT(' + column + ') as count FROM tv_shows GROUP BY ' + column + ' HAVING count > 1')

-        for cur_duplicate in sqlResults:
+        for cur_duplicate in sql_results:

-            logger.log(u"Duplicate show detected! " + column + ": " + str(cur_duplicate[column]) + u" count: " + str(
-                cur_duplicate["count"]), logger.DEBUG)
+            logger.log(u'Duplicate show detected! ' + column + ': ' + str(cur_duplicate[column]) + u' count: ' + str(
+                cur_duplicate['count']), logger.DEBUG)

             cur_dupe_results = self.connection.select(
-                "SELECT show_id, " + column + " FROM tv_shows WHERE " + column + " = ? LIMIT ?",
-                [cur_duplicate[column], int(cur_duplicate["count"]) - 1]
+                'SELECT show_id, ' + column + ' FROM tv_shows WHERE ' + column + ' = ? LIMIT ?',
+                [cur_duplicate[column], int(cur_duplicate['count']) - 1]
             )

             for cur_dupe_id in cur_dupe_results:
                 logger.log(
-                    u"Deleting duplicate show with " + column + ": " + str(cur_dupe_id[column]) + u" show_id: " + str(
-                        cur_dupe_id["show_id"]))
-                self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
+                    u'Deleting duplicate show with ' + column + ': ' + str(cur_dupe_id[column]) + u' show_id: ' + str(
+                        cur_dupe_id['show_id']))
+                self.connection.action('DELETE FROM tv_shows WHERE show_id = ?', [cur_dupe_id['show_id']])

         else:
-            logger.log(u"No duplicate show, check passed")
+            logger.log(u'No duplicate show, check passed')

     def fix_duplicate_episodes(self):

-        sqlResults = self.connection.select(
-            "SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")
+        sql_results = self.connection.select(
+            'SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1')

-        for cur_duplicate in sqlResults:
+        for cur_duplicate in sql_results:

-            logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: " + str(
-                cur_duplicate["season"]) + u" episode: " + str(cur_duplicate["episode"]) + u" count: " + str(
-                cur_duplicate["count"]), logger.DEBUG)
+            logger.log(u'Duplicate episode detected! showid: ' + str(cur_duplicate['showid']) + u' season: '
+                       + str(cur_duplicate['season']) + u' episode: ' + str(cur_duplicate['episode']) + u' count: '
+                       + str(cur_duplicate['count']),
+                       logger.DEBUG)

             cur_dupe_results = self.connection.select(
-                "SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
-                [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"],
-                 int(cur_duplicate["count"]) - 1]
+                'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?',
+                [cur_duplicate['showid'], cur_duplicate['season'], cur_duplicate['episode'],
+                 int(cur_duplicate['count']) - 1]
             )

             for cur_dupe_id in cur_dupe_results:
-                logger.log(u"Deleting duplicate episode with episode_id: " + str(cur_dupe_id["episode_id"]))
-                self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_dupe_id["episode_id"]])
+                logger.log(u'Deleting duplicate episode with episode_id: ' + str(cur_dupe_id['episode_id']))
+                self.connection.action('DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id['episode_id']])

         else:
-            logger.log(u"No duplicate episode, check passed")
+            logger.log(u'No duplicate episode, check passed')

     def fix_orphan_episodes(self):

-        sqlResults = self.connection.select(
-            "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL")
+        sql_results = self.connection.select(
+            'SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL')

-        for cur_orphan in sqlResults:
-            logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(
-                cur_orphan["showid"]), logger.DEBUG)
-            logger.log(u"Deleting orphan episode with episode_id: " + str(cur_orphan["episode_id"]))
-            self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
+        for cur_orphan in sql_results:
+            logger.log(u'Orphan episode detected! episode_id: ' + str(cur_orphan['episode_id']) + ' showid: ' + str(
+                cur_orphan['showid']), logger.DEBUG)
+            logger.log(u'Deleting orphan episode with episode_id: ' + str(cur_orphan['episode_id']))
+            self.connection.action('DELETE FROM tv_episodes WHERE episode_id = ?', [cur_orphan['episode_id']])

         else:
-            logger.log(u"No orphan episodes, check passed")
+            logger.log(u'No orphan episodes, check passed')

     def fix_missing_table_indexes(self):
-        if not self.connection.select("PRAGMA index_info('idx_indexer_id')"):
-            logger.log(u"Missing idx_indexer_id for TV Shows table detected!, fixing...")
-            self.connection.action("CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);")
+        if not self.connection.select('PRAGMA index_info("idx_indexer_id")'):
+            logger.log(u'Missing idx_indexer_id for TV Shows table detected!, fixing...')
+            self.connection.action('CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);')

-        if not self.connection.select("PRAGMA index_info('idx_tv_episodes_showid_airdate')"):
-            logger.log(u"Missing idx_tv_episodes_showid_airdate for TV Episodes table detected!, fixing...")
-            self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);")
+        if not self.connection.select('PRAGMA index_info("idx_tv_episodes_showid_airdate")'):
+            logger.log(u'Missing idx_tv_episodes_showid_airdate for TV Episodes table detected!, fixing...')
+            self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);')

-        if not self.connection.select("PRAGMA index_info('idx_showid')"):
-            logger.log(u"Missing idx_showid for TV Episodes table detected!, fixing...")
-            self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")
+        if not self.connection.select('PRAGMA index_info("idx_showid")'):
+            logger.log(u'Missing idx_showid for TV Episodes table detected!, fixing...')
+            self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);')

-        if not self.connection.select("PRAGMA index_info('idx_status')"):
-            logger.log(u"Missing idx_status for TV Episodes table detected!, fixing...")
-            self.connection.action("CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)")
+        if not self.connection.select('PRAGMA index_info("idx_status")'):
+            logger.log(u'Missing idx_status for TV Episodes table detected!, fixing...')
+            self.connection.action('CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)')

-        if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"):
-            logger.log(u"Missing idx_sta_epi_air for TV Episodes table detected!, fixing...")
-            self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)")
+        if not self.connection.select('PRAGMA index_info("idx_sta_epi_air")'):
+            logger.log(u'Missing idx_sta_epi_air for TV Episodes table detected!, fixing...')
+            self.connection.action('CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)')

-        if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"):
-            logger.log(u"Missing idx_sta_epi_sta_air for TV Episodes table detected!, fixing...")
-            self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)")
+        if not self.connection.select('PRAGMA index_info("idx_sta_epi_sta_air")'):
+            logger.log(u'Missing idx_sta_epi_sta_air for TV Episodes table detected!, fixing...')
+            self.connection.action('CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)')

     def fix_unaired_episodes(self):

-        curDate = datetime.date.today()
+        cur_date = datetime.date.today()

-        sqlResults = self.connection.select(
-            "SELECT episode_id, showid FROM tv_episodes WHERE airdate > ? AND status in (?,?)",
-            [curDate.toordinal(), common.SKIPPED, common.WANTED])
+        sql_results = self.connection.select(
+            'SELECT episode_id, showid FROM tv_episodes WHERE status = ? or airdate > ? AND status in (?,?)', ['',
+            cur_date.toordinal(), common.SKIPPED, common.WANTED])

-        for cur_unaired in sqlResults:
-            logger.log(u"UNAIRED episode detected! episode_id: " + str(cur_unaired["episode_id"]) + " showid: " + str(
-                cur_unaired["showid"]), logger.DEBUG)
-            logger.log(u"Fixing unaired episode status with episode_id: " + str(cur_unaired["episode_id"]))
-            self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
-                                   [common.UNAIRED, cur_unaired["episode_id"]])
+        for cur_unaired in sql_results:
+            logger.log(u'UNAIRED episode detected! episode_id: ' + str(cur_unaired['episode_id']) + ' showid: ' + str(
+                cur_unaired['showid']), logger.DEBUG)
+            logger.log(u'Fixing unaired episode status with episode_id: ' + str(cur_unaired['episode_id']))
+            self.connection.action('UPDATE tv_episodes SET status = ? WHERE episode_id = ?',
+                                   [common.UNAIRED, cur_unaired['episode_id']])

         else:
-            logger.log(u"No UNAIRED episodes, check passed")
+            logger.log(u'No UNAIRED episodes, check passed')


-def backupDatabase(version):
-    logger.log(u"Backing up database before upgrade")
+def backup_database(version):
+    logger.log(u'Backing up database before upgrade')
     if not helpers.backupVersionedFile(db.dbFilename(), version):
-        logger.log_error_and_exit(u"Database backup failed, abort upgrading database")
+        logger.log_error_and_exit(u'Database backup failed, abort upgrading database')
     else:
-        logger.log(u"Proceeding with upgrade")
+        logger.log(u'Proceeding with upgrade')

 # ======================
 # = Main DB Migrations =
 # ======================
@@ -160,24 +161,24 @@ def backupDatabase(version):

 # 0 -> 31
 class InitialSchema(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        if not self.hasTable("tv_shows") and not self.hasTable("db_version"):
+        if not self.hasTable('tv_shows') and not self.hasTable('db_version'):
             queries = [
-                "CREATE TABLE db_version (db_version INTEGER);",
-                "CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT)",
-                "CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)",
-                "CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)",
-                "CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY(indexer_id, season, episode))",
-                "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer TEXT, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC);",
-                "CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer TEXT, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC);",
-                "CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id)",
-                "CREATE INDEX idx_showid ON tv_episodes (showid);",
-                "CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate);",
-                "CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate);",
-                "CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate);",
-                "CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate)",
-                "INSERT INTO db_version (db_version) VALUES (31);"
+                'CREATE TABLE db_version (db_version INTEGER);',
+                'CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT)',
+                'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)',
+                'CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)',
+                'CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY(indexer_id, season, episode))',
+                'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer TEXT, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC);',
+                'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer TEXT, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC);',
+                'CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id)',
+                'CREATE INDEX idx_showid ON tv_episodes (showid);',
+                'CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate);',
+                'CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate);',
+                'CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate);',
+                'CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate)',
+                'INSERT INTO db_version (db_version) VALUES (31);'
             ]
             for query in queries:
                 self.connection.action(query)

@@ -186,18 +187,20 @@ class InitialSchema(db.SchemaUpgrade):
             cur_db_version = self.checkDBVersion()

             if cur_db_version < MIN_DB_VERSION:
-                logger.log_error_and_exit(u"Your database version (" + str(
-                    cur_db_version) + ") is too old to migrate from what this version of SickGear supports (" + \
-                                           str(MIN_DB_VERSION) + ").\n" + \
-                                           "Upgrade using a previous version (tag) build 496 to build 501 of SickGear first or remove database file to begin fresh."
-                )
+                logger.log_error_and_exit(u'Your database version (' +
+                                          str(cur_db_version) +
+                                          ') is too old to migrate from what this version of SickGear supports (' +
+                                          str(MIN_DB_VERSION) + ').' + "\n" +
+                                          'Upgrade using a previous version (tag) build 496 to build 501 of SickGear first or remove database file to begin fresh.'
+                                          )

             if cur_db_version > MAX_DB_VERSION:
-                logger.log_error_and_exit(u"Your database version (" + str(
-                    cur_db_version) + ") has been incremented past what this version of SickGear supports (" + \
-                                           str(MAX_DB_VERSION) + ").\n" + \
-                                           "If you have used other forks of SickGear, your database may be unusable due to their modifications."
-                )
+                logger.log_error_and_exit(u'Your database version (' +
+                                          str(cur_db_version) +
+                                          ') has been incremented past what this version of SickGear supports (' +
+                                          str(MAX_DB_VERSION) + ').' + "\n" +
+                                          'If you have used other forks of SickGear, your database may be unusable due to their modifications.'
+                                          )

         return self.checkDBVersion()

@@ -206,43 +209,43 @@ class InitialSchema(db.SchemaUpgrade):

 class AddSizeAndSceneNameFields(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        if not self.hasColumn("tv_episodes", "file_size"):
-            self.addColumn("tv_episodes", "file_size")
+        if not self.hasColumn('tv_episodes', 'file_size'):
+            self.addColumn('tv_episodes', 'file_size')

-        if not self.hasColumn("tv_episodes", "release_name"):
-            self.addColumn("tv_episodes", "release_name", "TEXT", "")
+        if not self.hasColumn('tv_episodes', 'release_name'):
+            self.addColumn('tv_episodes', 'release_name', 'TEXT', '')

-        ep_results = self.connection.select("SELECT episode_id, location, file_size FROM tv_episodes")
+        ep_results = self.connection.select('SELECT episode_id, location, file_size FROM tv_episodes')

-        logger.log(u"Adding file size to all episodes in DB, please be patient")
+        logger.log(u'Adding file size to all episodes in DB, please be patient')
         for cur_ep in ep_results:
-            if not cur_ep["location"]:
+            if not cur_ep['location']:
                 continue

             # if there is no size yet then populate it for us
-            if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]):
-                cur_size = ek.ek(os.path.getsize, cur_ep["location"])
-                self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?",
-                                       [cur_size, int(cur_ep["episode_id"])])
+            if (not cur_ep['file_size'] or not int(cur_ep['file_size'])) and ek.ek(os.path.isfile, cur_ep['location']):
+                cur_size = ek.ek(os.path.getsize, cur_ep['location'])
+                self.connection.action('UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?',
+                                       [cur_size, int(cur_ep['episode_id'])])

         # check each snatch to see if we can use it to get a release name from
-        history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")
+        history_results = self.connection.select('SELECT * FROM history WHERE provider != -1 ORDER BY date ASC')

-        logger.log(u"Adding release name to all episodes still in history")
+        logger.log(u'Adding release name to all episodes still in history')
         for cur_result in history_results:
             # find the associated download, if there isn't one then ignore it
             download_results = self.connection.select(
-                "SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
-                [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]])
+                'SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?',
+                [cur_result['showid'], cur_result['season'], cur_result['episode'], cur_result['date']])

             if not download_results:
-                logger.log(u"Found a snatch in the history for " + cur_result[
-                    "resource"] + " but couldn't find the associated download, skipping it", logger.DEBUG)
+                logger.log(u'Found a snatch in the history for ' + cur_result[
+                    'resource'] + ' but couldn\'t find the associated download, skipping it', logger.DEBUG)
                 continue

-            nzb_name = cur_result["resource"]
-            file_name = ek.ek(os.path.basename, download_results[0]["resource"])
+            nzb_name = cur_result['resource']
+            file_name = ek.ek(os.path.basename, download_results[0]['resource'])

             # take the extension off the filename, it's not needed
             if '.' in file_name:
@@ -250,44 +253,45 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):

             # find the associated episode on disk
             ep_results = self.connection.select(
-                "SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
-                [cur_result["showid"], cur_result["season"], cur_result["episode"]])
+                'SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ""',
+                [cur_result['showid'], cur_result['season'], cur_result['episode']])
             if not ep_results:
                 logger.log(
-                    u"The episode " + nzb_name + " was found in history but doesn't exist on disk anymore, skipping",
+                    u'The episode ' + nzb_name + ' was found in history but doesn\'t exist on disk anymore, skipping',
                     logger.DEBUG)
                 continue

             # get the status/quality of the existing ep and make sure it's what we expect
-            ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"]))
+            ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]['status']))
             if ep_status != common.DOWNLOADED:
                 continue

-            if ep_quality != int(cur_result["quality"]):
+            if ep_quality != int(cur_result['quality']):
                 continue

             # make sure this is actually a real release name and not a season pack or something
             for cur_name in (nzb_name, file_name):
-                logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
+                logger.log(u'Checking if ' + cur_name + ' is actually a good release name', logger.DEBUG)
                 try:
                     np = NameParser(False)
                     parse_result = np.parse(cur_name)
                 except (InvalidNameException, InvalidShowException):
                     continue

-                if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
+                if parse_result.series_name and parse_result.season_number is not None\
+                        and parse_result.episode_numbers and parse_result.release_group:
                     # if all is well by this point we'll just put the release name into the database
-                    self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
-                                           [cur_name, ep_results[0]["episode_id"]])
+                    self.connection.action('UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?',
+                                           [cur_name, ep_results[0]['episode_id']])
                     break

         # check each snatch to see if we can use it to get a release name from
-        empty_results = self.connection.select("SELECT episode_id, location FROM tv_episodes WHERE release_name = ''")
+        empty_results = self.connection.select('SELECT episode_id, location FROM tv_episodes WHERE release_name = ""')

-        logger.log(u"Adding release name to all episodes with obvious scene filenames")
+        logger.log(u'Adding release name to all episodes with obvious scene filenames')
         for cur_result in empty_results:

-            ep_file_name = ek.ek(os.path.basename, cur_result["location"])
+            ep_file_name = ek.ek(os.path.basename, cur_result['location'])
             ep_file_name = os.path.splitext(ep_file_name)[0]

             # only want to find real scene names here so anything with a space in it is out
@@ -304,10 +308,10 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
                 continue

             logger.log(
-                u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid",
+                u'Name ' + ep_file_name + ' gave release group of ' + parse_result.release_group + ', seems valid',
                 logger.DEBUG)
-            self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
-                                   [ep_file_name, cur_result["episode_id"]])
+            self.connection.action('UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?',
+                                   [ep_file_name, cur_result['episode_id']])

         self.incDBVersion()
         return self.checkDBVersion()

@@ -316,20 +320,20 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
 # 10 -> 11
 class RenameSeasonFolders(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

         # rename the column
-        self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
+        self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
         self.connection.action(
-            "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)")
-        sql = "INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows"
+            'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)')
+        sql = 'INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows'
         self.connection.action(sql)

         # flip the values to be opposite of what they were before
-        self.connection.action("UPDATE tv_shows SET flatten_folders = 2 WHERE flatten_folders = 1")
-        self.connection.action("UPDATE tv_shows SET flatten_folders = 1 WHERE flatten_folders = 0")
-        self.connection.action("UPDATE tv_shows SET flatten_folders = 0 WHERE flatten_folders = 2")
-        self.connection.action("DROP TABLE tmp_tv_shows")
+        self.connection.action('UPDATE tv_shows SET flatten_folders = 2 WHERE flatten_folders = 1')
+        self.connection.action('UPDATE tv_shows SET flatten_folders = 1 WHERE flatten_folders = 0')
+        self.connection.action('UPDATE tv_shows SET flatten_folders = 0 WHERE flatten_folders = 2')
+        self.connection.action('DROP TABLE tmp_tv_shows')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -337,7 +341,8 @@ class RenameSeasonFolders(db.SchemaUpgrade):

 # 11 -> 12
 class Add1080pAndRawHDQualities(db.SchemaUpgrade):
-    """Add support for 1080p related qualities along with RawHD
+    """
+    Add support for 1080p related qualities along with RawHD

     Quick overview of what the upgrade needs to do:

@@ -357,7 +362,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
         return common.Quality.compositeStatus(status, self._update_quality(quality))

     def _update_quality(self, old_quality):
-        """Update bitwise flags to reflect new quality values
+        """
+        Update bitwise flags to reflect new quality values

         Check flag bits (clear old then set their new locations) starting
         with the highest bits so we dont overwrite data we need later on
@@ -365,30 +371,31 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
         result = old_quality

         # move fullhdbluray from 1<<5 to 1<<8 if set
-        if (result & (1 << 5)):
-            result = result & ~(1 << 5)
-            result = result | (1 << 8)
+        if result & (1 << 5):
+            result &= ~(1 << 5)
+            result |= 1 << 8

         # move hdbluray from 1<<4 to 1<<7 if set
-        if (result & (1 << 4)):
-            result = result & ~(1 << 4)
-            result = result | (1 << 7)
+        if result & (1 << 4):
+            result &= ~(1 << 4)
+            result |= 1 << 7

         # move hdwebdl from 1<<3 to 1<<5 if set
-        if (result & (1 << 3)):
-            result = result & ~(1 << 3)
-            result = result | (1 << 5)
+        if result & (1 << 3):
+            result &= ~(1 << 3)
+            result |= 1 << 5

         return result

     def _update_composite_qualities(self, status):
-        """Unpack, Update, Return new quality values
+        '''
+        Unpack, Update, Return new quality values

         Unpack the composite archive/initial values.
         Update either qualities if needed.
         Then return the new compsite quality value.
- """ + ''' best = (status & (0xffff << 16)) >> 16 - initial = status & (0xffff) + initial = status & 0xffff best = self._update_quality(best) initial = self._update_quality(initial) @@ -397,7 +404,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): return result def execute(self): - backupDatabase(self.checkDBVersion()) + backup_database(self.checkDBVersion()) # update the default quality so we dont grab the wrong qualities after migration sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT) @@ -406,8 +413,8 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): # upgrade previous HD to HD720p -- shift previous qualities to new placevalues old_hd = common.Quality.combineQualities( [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], []) - new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], - []) + new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, + common.Quality.HDBLURAY], []) # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template old_any = common.Quality.combineQualities( @@ -419,71 +426,71 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade): common.Quality.UNKNOWN], []) # update qualities (including templates) - logger.log(u"[1/4] Updating pre-defined templates and the quality for each show...", logger.MESSAGE) + logger.log(u'[1/4] Updating pre-defined templates and the quality for each show...', logger.MESSAGE) cl = [] - shows = self.connection.select("SELECT * FROM tv_shows") + shows = self.connection.select('SELECT * FROM tv_shows') for cur_show in shows: - if cur_show["quality"] == old_hd: + if old_hd == cur_show['quality']: new_quality = new_hd - elif cur_show["quality"] == old_any: + elif old_any == cur_show['quality']: new_quality = new_any else: - new_quality = self._update_composite_qualities(cur_show["quality"]) - cl.append(["UPDATE tv_shows SET quality = ? WHERE show_id = ?", [new_quality, cur_show["show_id"]]]) + new_quality = self._update_composite_qualities(cur_show['quality']) + cl.append(['UPDATE tv_shows SET quality = ? WHERE show_id = ?', [new_quality, cur_show['show_id']]]) self.connection.mass_action(cl) # update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768) - logger.log(u"[2/4] Updating the status for the episodes within each show...", logger.MESSAGE) + logger.log(u'[2/4] Updating the status for the episodes within each show...', logger.MESSAGE) cl = [] - episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800") + episodes = self.connection.select('SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800') for cur_episode in episodes: - cl.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?", - [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]]) + cl.append(['UPDATE tv_episodes SET status = ? 
+                       [self._update_status(cur_episode['status']), cur_episode['episode_id']]])
         self.connection.mass_action(cl)

         # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together
         # update previous history so it shows the correct action
-        logger.log(u"[3/4] Updating history to reflect the correct action...", logger.MESSAGE)
+        logger.log(u'[3/4] Updating history to reflect the correct action...', logger.MESSAGE)
         cl = []
-        historyAction = self.connection.select("SELECT * FROM history WHERE action < 3276800 AND action >= 800")
-        for cur_entry in historyAction:
-            cl.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?",
-                       [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
+        history_action = self.connection.select('SELECT * FROM history WHERE action < 3276800 AND action >= 800')
+        for cur_entry in history_action:
+            cl.append(['UPDATE history SET action = ? WHERE showid = ? AND date = ?',
+                       [self._update_status(cur_entry['action']), cur_entry['showid'], cur_entry['date']]])
         self.connection.mass_action(cl)

         # update previous history so it shows the correct quality
-        logger.log(u"[4/4] Updating history to reflect the correct quality...", logger.MESSAGE)
+        logger.log(u'[4/4] Updating history to reflect the correct quality...', logger.MESSAGE)
         cl = []
-        historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
-        for cur_entry in historyQuality:
-            cl.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?",
-                       [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
+        history_quality = self.connection.select('SELECT * FROM history WHERE quality < 32768 AND quality >= 8')
+        for cur_entry in history_quality:
+            cl.append(['UPDATE history SET quality = ? WHERE showid = ? AND date = ?',
+                       [self._update_quality(cur_entry['quality']), cur_entry['showid'], cur_entry['date']]])
         self.connection.mass_action(cl)

         self.incDBVersion()

         # cleanup and reduce db if any previous data was removed
-        logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
-        self.connection.action("VACUUM")
+        logger.log(u'Performing a vacuum on the database.', logger.DEBUG)
+        self.connection.action('VACUUM')
         return self.checkDBVersion()

 # 12 -> 13
 class AddShowidTvdbidIndex(db.SchemaUpgrade):
-    """ Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries """
+    # Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Check for duplicate shows before adding unique index.")
+        logger.log(u'Check for duplicate shows before adding unique index.')
         MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id')

-        logger.log(u"Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.")
-        if not self.hasTable("idx_showid"):
-            self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")
-        if not self.hasTable("idx_tvdb_id"):
-            self.connection.action("CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);")
+        logger.log(u'Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.')
+        if not self.hasTable('idx_showid'):
+            self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);')
+        if not self.hasTable('idx_tvdb_id'):
+            self.connection.action('CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -491,13 +498,13 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):

 # 13 -> 14
 class AddLastUpdateTVDB(db.SchemaUpgrade):
-    """ Adding column last_update_tvdb to tv_shows for controlling nightly updates """
+    # Adding column last_update_tvdb to tv_shows for controlling nightly updates
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column last_update_tvdb to tvshows")
-        if not self.hasColumn("tv_shows", "last_update_tvdb"):
-            self.addColumn("tv_shows", "last_update_tvdb", default=1)
+        logger.log(u'Adding column last_update_tvdb to tvshows')
+        if not self.hasColumn('tv_shows', 'last_update_tvdb'):
+            self.addColumn('tv_shows', 'last_update_tvdb', default=1)

         self.incDBVersion()

@@ -505,7 +512,7 @@
 # 14 -> 15
 class AddDBIncreaseTo15(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

         self.incDBVersion()
         return self.checkDBVersion()

@@ -514,14 +521,13 @@
 # 15 -> 16
 class AddIMDbInfo(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
-
+        backup_database(self.checkDBVersion())
         self.connection.action(
-            "CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
+            'CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)')

-        if not self.hasColumn("tv_shows", "imdb_id"):
-            self.addColumn("tv_shows", "imdb_id")
self.addColumn("tv_shows", "imdb_id") + if not self.hasColumn('tv_shows', 'imdb_id'): + self.addColumn('tv_shows', 'imdb_id') self.incDBVersion() return self.checkDBVersion() @@ -530,9 +536,9 @@ class AddIMDbInfo(db.SchemaUpgrade): # 16 -> 17 class AddProperNamingSupport(db.SchemaUpgrade): def execute(self): - backupDatabase(self.checkDBVersion()) + backup_database(self.checkDBVersion()) - self.addColumn("tv_episodes", "is_proper") + self.addColumn('tv_episodes', 'is_proper') self.incDBVersion() return self.checkDBVersion() @@ -540,7 +546,7 @@ class AddProperNamingSupport(db.SchemaUpgrade): # 17 -> 18 class AddEmailSubscriptionTable(db.SchemaUpgrade): def execute(self): - backupDatabase(self.checkDBVersion()) + backup_database(self.checkDBVersion()) self.addColumn('tv_shows', 'notify_list', 'TEXT', None) self.incDBVersion() @@ -550,11 +556,11 @@ class AddEmailSubscriptionTable(db.SchemaUpgrade): # 18 -> 19 class AddProperSearch(db.SchemaUpgrade): def execute(self): - backupDatabase(self.checkDBVersion()) + backup_database(self.checkDBVersion()) - logger.log(u"Adding column last_proper_search to info") - if not self.hasColumn("info", "last_proper_search"): - self.addColumn("info", "last_proper_search", default=1) + logger.log(u'Adding column last_proper_search to info') + if not self.hasColumn('info', 'last_proper_search'): + self.addColumn('info', 'last_proper_search', default=1) self.incDBVersion() return self.checkDBVersion() @@ -563,11 +569,11 @@ class AddProperSearch(db.SchemaUpgrade): # 19 -> 20 class AddDvdOrderOption(db.SchemaUpgrade): def execute(self): - backupDatabase(self.checkDBVersion()) + backup_database(self.checkDBVersion()) - logger.log(u"Adding column dvdorder to tvshows") - if not self.hasColumn("tv_shows", "dvdorder"): - self.addColumn("tv_shows", "dvdorder", "NUMERIC", "0") + logger.log(u'Adding column dvdorder to tvshows') + if not self.hasColumn('tv_shows', 'dvdorder'): + self.addColumn('tv_shows', 'dvdorder', 'NUMERIC', '0') self.incDBVersion() return self.checkDBVersion() @@ -576,13 +582,13 @@ class AddDvdOrderOption(db.SchemaUpgrade): # 20 -> 21 class AddSubtitlesSupport(db.SchemaUpgrade): def execute(self): - backupDatabase(self.checkDBVersion()) + backup_database(self.checkDBVersion()) - if not self.hasColumn("tv_shows", "subtitles"): - self.addColumn("tv_shows", "subtitles") - self.addColumn("tv_episodes", "subtitles", "TEXT", "") - self.addColumn("tv_episodes", "subtitles_searchcount") - self.addColumn("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min)) + if not self.hasColumn('tv_shows', 'subtitles'): + self.addColumn('tv_shows', 'subtitles') + self.addColumn('tv_episodes', 'subtitles', 'TEXT', '') + self.addColumn('tv_episodes', 'subtitles_searchcount') + self.addColumn('tv_episodes', 'subtitles_lastsearch', 'TIMESTAMP', str(datetime.datetime.min)) self.incDBVersion() return self.checkDBVersion() @@ -590,25 +596,25 @@ class AddSubtitlesSupport(db.SchemaUpgrade): # 21 -> 22 class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade): def execute(self): - backupDatabase(self.checkDBVersion()) + backup_database(self.checkDBVersion()) - logger.log(u"Converting TV Shows table to Indexer Scheme...") + logger.log(u'Converting TV Shows table to Indexer Scheme...') - if self.hasTable("tmp_tv_shows"): - logger.log(u"Removing temp tv show tables left behind from previous updates...") - self.connection.action("DROP TABLE tmp_tv_shows") + if self.hasTable('tmp_tv_shows'): + logger.log(u'Removing temp tv show tables left behind from previous 
+            self.connection.action('DROP TABLE tmp_tv_shows')

-        self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
+        self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
         self.connection.action(
-            "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
+            'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)')
         self.connection.action(
-            "INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
-        self.connection.action("DROP TABLE tmp_tv_shows")
+            'INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows')
+        self.connection.action('DROP TABLE tmp_tv_shows')

-        self.connection.action("CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);")
+        self.connection.action('CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);')

-        self.connection.action("UPDATE tv_shows SET classification = 'Scripted'")
-        self.connection.action("UPDATE tv_shows SET indexer = 1")
+        self.connection.action('UPDATE tv_shows SET classification = "Scripted"')
+        self.connection.action('UPDATE tv_shows SET indexer = 1')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -617,28 +623,28 @@
 # 22 -> 23
 class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Converting TV Episodes table to Indexer Scheme...")
+        logger.log(u'Converting TV Episodes table to Indexer Scheme...')

-        if self.hasTable("tmp_tv_episodes"):
-            logger.log(u"Removing temp tv episode tables left behind from previous updates...")
-            self.connection.action("DROP TABLE tmp_tv_episodes")
+        if self.hasTable('tmp_tv_episodes'):
+            logger.log(u'Removing temp tv episode tables left behind from previous updates...')
+            self.connection.action('DROP TABLE tmp_tv_episodes')

-        self.connection.action("ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes")
+        self.connection.action('ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes')
         self.connection.action(
-            "CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
+            'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)')
         self.connection.action(
-            "INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
-        self.connection.action("DROP TABLE tmp_tv_episodes")
+            'INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes')
+        self.connection.action('DROP TABLE tmp_tv_episodes')

-        self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);")
-        self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")
-        self.connection.action("CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)")
-        self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)")
-        self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)")
+        self.connection.action('CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);')
+        self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);')
+        self.connection.action('CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)')
+        self.connection.action('CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)')
+        self.connection.action('CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)')

-        self.connection.action("UPDATE tv_episodes SET indexer = 1")
+        self.connection.action('UPDATE tv_episodes SET indexer = 1')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -647,20 +653,20 @@
 # 23 -> 24
 class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Converting IMDB Info table to Indexer Scheme...")
+        logger.log(u'Converting IMDB Info table to Indexer Scheme...')

-        if self.hasTable("tmp_imdb_info"):
-            logger.log(u"Removing temp imdb info tables left behind from previous updates...")
-            self.connection.action("DROP TABLE tmp_imdb_info")
+        if self.hasTable('tmp_imdb_info'):
+            logger.log(u'Removing temp imdb info tables left behind from previous updates...')
+            self.connection.action('DROP TABLE tmp_imdb_info')

-        self.connection.action("ALTER TABLE imdb_info RENAME TO tmp_imdb_info")
+        self.connection.action('ALTER TABLE imdb_info RENAME TO tmp_imdb_info')
         self.connection.action(
-            "CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
+            'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)')
         self.connection.action(
-            "INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
-        self.connection.action("DROP TABLE tmp_imdb_info")
+            'INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info')
+        self.connection.action('DROP TABLE tmp_imdb_info')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -669,20 +675,20 @@
 # 24 -> 25
 class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Converting Info table to Indexer Scheme...")
+        logger.log(u'Converting Info table to Indexer Scheme...')

-        if self.hasTable("tmp_info"):
-            logger.log(u"Removing temp info tables left behind from previous updates...")
-            self.connection.action("DROP TABLE tmp_info")
+        if self.hasTable('tmp_info'):
+            logger.log(u'Removing temp info tables left behind from previous updates...')
+            self.connection.action('DROP TABLE tmp_info')

-        self.connection.action("ALTER TABLE info RENAME TO tmp_info")
+        self.connection.action('ALTER TABLE info RENAME TO tmp_info')
         self.connection.action(
-            "CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
+            'CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)')
         self.connection.action(
-            "INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
-        self.connection.action("DROP TABLE tmp_info")
+            'INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info')
+        self.connection.action('DROP TABLE tmp_info')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -691,11 +697,11 @@
 # 25 -> 26
 class AddArchiveFirstMatchOption(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column archive_firstmatch to tvshows")
-        if not self.hasColumn("tv_shows", "archive_firstmatch"):
-            self.addColumn("tv_shows", "archive_firstmatch", "NUMERIC", "0")
+        logger.log(u'Adding column archive_firstmatch to tvshows')
+        if not self.hasColumn('tv_shows', 'archive_firstmatch'):
+            self.addColumn('tv_shows', 'archive_firstmatch', 'NUMERIC', '0')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -705,13 +711,13 @@ class AddSceneNumbering(db.SchemaUpgrade):

     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        if self.hasTable("scene_numbering"):
-            self.connection.action("DROP TABLE scene_numbering")
+        if self.hasTable('scene_numbering'):
+            self.connection.action('DROP TABLE scene_numbering')

         self.connection.action(
-            "CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode, scene_season, scene_episode))")
+            'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode, scene_season, scene_episode))')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -720,16 +726,16 @@
 # 27 -> 28
 class ConvertIndexerToInteger(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

         cl = []
-        logger.log(u"Converting Indexer to Integer ...", logger.MESSAGE)
-        cl.append(["UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?", ["1", "tvdb"]])
-        cl.append(["UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?", ["2", "tvrage"]])
-        cl.append(["UPDATE tv_episodes SET indexer = ? WHERE LOWER(indexer) = ?", ["1", "tvdb"]])
-        cl.append(["UPDATE tv_episodes SET indexer = ? WHERE LOWER(indexer) = ?", ["2", "tvrage"]])
-        cl.append(["UPDATE scene_numbering SET indexer = ? WHERE LOWER(indexer) = ?", ["1", "tvdb"]])
-        cl.append(["UPDATE scene_numbering SET indexer = ? WHERE LOWER(indexer) = ?", ["2", "tvrage"]])
+        logger.log(u'Converting Indexer to Integer ...', logger.MESSAGE)
+        cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']])
+        cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['2', 'tvrage']])
+        cl.append(['UPDATE tv_episodes SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']])
+        cl.append(['UPDATE tv_episodes SET indexer = ? WHERE LOWER(indexer) = ?', ['2', 'tvrage']])
+        cl.append(['UPDATE scene_numbering SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']])
+        cl.append(['UPDATE scene_numbering SET indexer = ? WHERE LOWER(indexer) = ?', ['2', 'tvrage']])
         self.connection.mass_action(cl)

         self.incDBVersion()
         return self.checkDBVersion()

@@ -739,18 +745,18 @@
 # 28 -> 29
 class AddRequireAndIgnoreWords(db.SchemaUpgrade):
-    """ Adding column rls_require_words and rls_ignore_words to tv_shows """
+    # Adding column rls_require_words and rls_ignore_words to tv_shows

     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column rls_require_words to tvshows")
-        if not self.hasColumn("tv_shows", "rls_require_words"):
-            self.addColumn("tv_shows", "rls_require_words", "TEXT", "")
+        logger.log(u'Adding column rls_require_words to tvshows')
+        if not self.hasColumn('tv_shows', 'rls_require_words'):
+            self.addColumn('tv_shows', 'rls_require_words', 'TEXT', '')

-        logger.log(u"Adding column rls_ignore_words to tvshows")
-        if not self.hasColumn("tv_shows", "rls_ignore_words"):
-            self.addColumn("tv_shows", "rls_ignore_words", "TEXT", "")
+        logger.log(u'Adding column rls_ignore_words to tvshows')
+        if not self.hasColumn('tv_shows', 'rls_ignore_words'):
+            self.addColumn('tv_shows', 'rls_ignore_words', 'TEXT', '')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -759,22 +765,22 @@
 # 29 -> 30
 class AddSportsOption(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column sports to tvshows")
-        if not self.hasColumn("tv_shows", "sports"):
-            self.addColumn("tv_shows", "sports", "NUMERIC", "0")
+        logger.log(u'Adding column sports to tvshows')
+        if not self.hasColumn('tv_shows', 'sports'):
+            self.addColumn('tv_shows', 'sports', 'NUMERIC', '0')

-        if self.hasColumn("tv_shows", "air_by_date") and self.hasColumn("tv_shows", "sports"):
+        if self.hasColumn('tv_shows', 'air_by_date') and self.hasColumn('tv_shows', 'sports'):
             # update sports column
-            logger.log(u"[4/4] Updating tv_shows to reflect the correct sports value...", logger.MESSAGE)
+            logger.log(u'[4/4] Updating tv_shows to reflect the correct sports value...', logger.MESSAGE)
             cl = []
-            historyQuality = self.connection.select(
-                "SELECT * FROM tv_shows WHERE LOWER(classification) = 'sports' AND air_by_date = 1 AND sports = 0")
-            for cur_entry in historyQuality:
-                cl.append(["UPDATE tv_shows SET sports = ? WHERE show_id = ?",
-                           [cur_entry["air_by_date"], cur_entry["show_id"]]])
-                cl.append(["UPDATE tv_shows SET air_by_date = 0 WHERE show_id = ?", [cur_entry["show_id"]]])
+            history_quality = self.connection.select(
+                'SELECT * FROM tv_shows WHERE LOWER(classification) = "sports" AND air_by_date = 1 AND sports = 0')
+            for cur_entry in history_quality:
+                cl.append(['UPDATE tv_shows SET sports = ? WHERE show_id = ?',
+                           [cur_entry['air_by_date'], cur_entry['show_id']]])
+                cl.append(['UPDATE tv_shows SET air_by_date = 0 WHERE show_id = ?', [cur_entry['show_id']]])
             self.connection.mass_action(cl)

         self.incDBVersion()

@@ -784,11 +790,11 @@
 # 30 -> 31
 class AddSceneNumberingToTvEpisodes(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column scene_season and scene_episode to tvepisodes")
-        self.addColumn("tv_episodes", "scene_season", "NUMERIC", "NULL")
-        self.addColumn("tv_episodes", "scene_episode", "NUMERIC", "NULL")
+        logger.log(u'Adding column scene_season and scene_episode to tvepisodes')
+        self.addColumn('tv_episodes', 'scene_season', 'NUMERIC', 'NULL')
+        self.addColumn('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL')

         self.incDBVersion()
         return self.incDBVersion()

@@ -797,10 +803,10 @@
 # 31 -> 32
 class AddAnimeTVShow(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column anime to tv_episodes")
-        self.addColumn("tv_shows", "anime", "NUMERIC", "0")
+        logger.log(u'Adding column anime to tv_episodes')
+        self.addColumn('tv_shows', 'anime', 'NUMERIC', '0')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -809,10 +815,10 @@
 # 32 -> 33
 class AddAbsoluteNumbering(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column absolute_number to tv_episodes")
-        self.addColumn("tv_episodes", "absolute_number", "NUMERIC", "0")
+        logger.log(u'Adding column absolute_number to tv_episodes')
+        self.addColumn('tv_episodes', 'absolute_number', 'NUMERIC', '0')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -821,11 +827,11 @@
 # 33 -> 34
 class AddSceneAbsoluteNumbering(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

-        logger.log(u"Adding column absolute_number and scene_absolute_number to scene_numbering")
-        self.addColumn("scene_numbering", "absolute_number", "NUMERIC", "0")
-        self.addColumn("scene_numbering", "scene_absolute_number", "NUMERIC", "0")
+        logger.log(u'Adding column absolute_number and scene_absolute_number to scene_numbering')
+        self.addColumn('scene_numbering', 'absolute_number', 'NUMERIC', '0')
+        self.addColumn('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0')

         self.incDBVersion()
         return self.checkDBVersion()

@@ -834,11 +840,11 @@
 # 34 -> 35
 class AddAnimeBlacklistWhitelist(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())

         cl = []
-        cl.append(["CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)"])
-        cl.append(["CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)"])
+        cl.append(['CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)'])
+        cl.append(['CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)'])
         self.connection.mass_action(cl)

         self.incDBVersion()
         return self.checkDBVersion()

@@ -848,10 +854,10 @@
 # 35 -> 36
 class AddSceneAbsoluteNumbering2(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
-        logger.log(u"Adding column scene_absolute_number to tv_episodes")
-        self.addColumn("tv_episodes", "scene_absolute_number", "NUMERIC", "0")
+        logger.log(u'Adding column scene_absolute_number to tv_episodes')
+        self.addColumn('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0')
 
         self.incDBVersion()
         return self.checkDBVersion()
@@ -860,11 +866,11 @@ class AddSceneAbsoluteNumbering2(db.SchemaUpgrade):
 # 36 -> 37
 class AddXemRefresh(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
-        logger.log(u"Creating table xem_refresh")
+        logger.log(u'Creating table xem_refresh')
         self.connection.action(
-            "CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
+            'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)')
 
         self.incDBVersion()
         return self.checkDBVersion()
@@ -873,10 +879,10 @@ class AddXemRefresh(db.SchemaUpgrade):
 # 37 -> 38
 class AddSceneToTvShows(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
-        logger.log(u"Adding column scene to tv_shows")
-        self.addColumn("tv_shows", "scene", "NUMERIC", "0")
+        logger.log(u'Adding column scene to tv_shows')
+        self.addColumn('tv_shows', 'scene', 'NUMERIC', '0')
 
         self.incDBVersion()
         return self.checkDBVersion()
@@ -885,14 +891,14 @@ class AddSceneToTvShows(db.SchemaUpgrade):
 # 38 -> 39
 class AddIndexerMapping(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
-        if self.hasTable("indexer_mapping"):
-            self.connection.action("DROP TABLE indexer_mapping")
+        if self.hasTable('indexer_mapping'):
+            self.connection.action('DROP TABLE indexer_mapping')
 
-        logger.log(u"Adding table indexer_mapping")
+        logger.log(u'Adding table indexer_mapping')
         self.connection.action(
-            "CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))")
+            'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))')
 
         self.incDBVersion()
         return self.checkDBVersion()
@@ -901,12 +907,12 @@ class AddIndexerMapping(db.SchemaUpgrade):
 # 39 -> 40
 class AddVersionToTvEpisodes(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
-        logger.log(u"Adding column version to tv_episodes and history")
-        self.addColumn("tv_episodes", "version", "NUMERIC", "-1")
-        self.addColumn("tv_episodes", "release_group", "TEXT", "")
-        self.addColumn("history", "version", "NUMERIC", "-1")
+        logger.log(u'Adding column version to tv_episodes and history')
+        self.addColumn('tv_episodes', 'version', 'NUMERIC', '-1')
+        self.addColumn('tv_episodes', 'release_group', 'TEXT', '')
+        self.addColumn('history', 'version', 'NUMERIC', '-1')
 
         self.incDBVersion()
         return self.checkDBVersion()
@@ -915,16 +921,17 @@ class AddVersionToTvEpisodes(db.SchemaUpgrade):
 # 40 -> 10000
 class BumpDatabaseVersion(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
         logger.log(u'Bumping database version')
 
         self.setDBVersion(10000)
         return self.checkDBVersion()
 
+
 # 41 -> 10001
 class Migrate41(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
         logger.log(u'Bumping database version')
@@ -935,20 +942,21 @@ class Migrate41(db.SchemaUpgrade):
 # 10000 -> 20000
 class SickGearDatabaseVersion(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
         logger.log('Bumping database version to new SickGear standards')
 
         self.setDBVersion(20000)
         return self.checkDBVersion()
 
+
 # 10001 -> 10000
 class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade):
     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        backup_database(self.checkDBVersion())
 
         logger.log(u'Dropping column default_ep_status from tv_shows')
         self.dropColumn('tv_shows', 'default_ep_status')
 
         self.setDBVersion(10000)
-        return self.checkDBVersion()
\ No newline at end of file
+        return self.checkDBVersion()
diff --git a/sickbeard/db.py b/sickbeard/db.py
index 2d521e1f..5f9ee77f 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -261,6 +261,14 @@ class DBConnection(object):
     def hasColumn(self, tableName, column):
         return column in self.tableInfo(tableName)
 
+    def hasIndex(self, tableName, index):
+        sqlResults = self.select('PRAGMA index_list([%s])' % tableName)
+        for result in sqlResults:
+            if result['name'] == index:
+                return True
+        return False
+
+
     def addColumn(self, table, column, type="NUMERIC", default=0):
         self.action("ALTER TABLE [%s] ADD %s %s" % (table, column, type))
         self.action("UPDATE [%s] SET %s = ?" % (table, column), (default,))
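
The new hasIndex helper simply scans the rows returned by PRAGMA index_list for a matching name column. The same check against a bare sqlite3 connection looks like this (a standalone sketch; the table and index names are made up):

    import sqlite3

    def has_index(connection, table_name, index):
        # PRAGMA index_list returns one row per index on the table;
        # the row's second field is the index name
        for row in connection.execute('PRAGMA index_list([%s])' % table_name):
            if row[1] == index:
                return True
        return False

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE example (url TEXT)')
    conn.execute('CREATE UNIQUE INDEX idx_example_url ON example (url)')
    print has_index(conn, 'example', 'idx_example_url')  # True
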
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index fc4dca28..681ab310 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -36,41 +36,44 @@
 import itertools
 
 class CacheDBConnection(db.DBConnection):
     def __init__(self, providerName):
-        db.DBConnection.__init__(self, "cache.db")
+        db.DBConnection.__init__(self, 'cache.db')
 
         # Create the table if it's not already there
         try:
             if not self.hasTable(providerName):
                 self.action(
-                    "CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT, release_group TEXT)")
-            else:
+                    'CREATE TABLE [' + providerName + '] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT, release_group TEXT)')
+                self.action(
+                    'CREATE UNIQUE INDEX IF NOT EXISTS [idx_' + providerName + '_url] ON [' + providerName + '] (url)')
+            elif not self.hasIndex(providerName, 'idx_%s_url' % providerName):
                 sqlResults = self.select(
-                    "SELECT url, COUNT(url) as count FROM [" + providerName + "] GROUP BY url HAVING count > 1")
+                    'SELECT url, COUNT(url) as count FROM [' + providerName + '] GROUP BY url HAVING count > 1')
 
                 for cur_dupe in sqlResults:
-                    self.action("DELETE FROM [" + providerName + "] WHERE url = ?", [cur_dupe["url"]])
+                    self.action('DELETE FROM [' + providerName + '] WHERE url = ?', [cur_dupe['url']])
+
+                self.action(
+                    'CREATE UNIQUE INDEX IF NOT EXISTS [idx_' + providerName + '_url] ON [' + providerName + '] (url)')
 
-                # add unique index to prevent further dupes from happening if one does not exist
-                self.action("CREATE UNIQUE INDEX IF NOT EXISTS idx_url ON [" + providerName + "] (url)")
             # add release_group column to table if missing
             if not self.hasColumn(providerName, 'release_group'):
-                self.addColumn(providerName, 'release_group', "TEXT", "")
+                self.addColumn(providerName, 'release_group', 'TEXT', '')
 
             # add version column to table if missing
             if not self.hasColumn(providerName, 'version'):
-                self.addColumn(providerName, 'version', "NUMERIC", "-1")
+                self.addColumn(providerName, 'version', 'NUMERIC', '-1')
 
         except Exception, e:
-            if str(e) != "table [" + providerName + "] already exists":
+            if str(e) != 'table [' + providerName + '] already exists':
                 raise
 
         # Create the table if it's not already there
         try:
             if not self.hasTable('lastUpdate'):
-                self.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")
+                self.action('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)')
         except Exception, e:
-            if str(e) != "table lastUpdate already exists":
+            if str(e) != 'table lastUpdate already exists':
                 raise
 
 class TVCache():
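
The order of operations in the constructor above matters: SQLite refuses to create a unique index while duplicate rows exist, so the duplicated urls are deleted first, and once the per-provider index is in place the INSERT OR IGNORE used by _addCacheEntry (further below) silently drops any repeat of a cached url. A self-contained sketch of that flow against a plain sqlite3 connection (the table name and urls are invented):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE provider (name TEXT, url TEXT)')

    # simulate the pre-fix state: the same url cached twice
    conn.execute("INSERT INTO provider VALUES ('Show.S01E01', 'http://example/nzb/1')")
    conn.execute("INSERT INTO provider VALUES ('Show.S01E01', 'http://example/nzb/1')")

    # duplicates must go first; CREATE UNIQUE INDEX fails while they exist
    dupes = conn.execute(
        'SELECT url, COUNT(url) as count FROM provider GROUP BY url HAVING count > 1').fetchall()
    for cur_dupe in dupes:
        conn.execute('DELETE FROM provider WHERE url = ?', [cur_dupe[0]])
    conn.execute('CREATE UNIQUE INDEX IF NOT EXISTS idx_provider_url ON provider (url)')

    # with the index in place, repeated inserts are silently dropped
    conn.execute("INSERT OR IGNORE INTO provider VALUES ('Show.S01E01', 'http://example/nzb/1')")
    conn.execute("INSERT OR IGNORE INTO provider VALUES ('Show.S01E01', 'http://example/nzb/1')")
    print conn.execute('SELECT COUNT(*) FROM provider').fetchone()[0]  # 1

Note that, like the patch, this deletes every copy of a duplicated url rather than keeping one; for a cache table that is harmless, because dropped entries are simply re-fetched on the next provider update.
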
@@ -91,7 +94,7 @@ class TVCache():
     def _clearCache(self):
         if self.shouldClearCache():
             myDB = self._getDB()
-            myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")
+            myDB.action('DELETE FROM [' + self.providerID + '] WHERE 1')
 
     def _get_title_and_url(self, item):
         # override this in the provider if daily search has a different data layout to backlog searches
@@ -151,22 +154,22 @@ class TVCache():
             title = self._translateTitle(title)
             url = self._translateLinkURL(url)
 
-            logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG)
+            logger.log(u'Attempting to add item to cache: ' + title, logger.DEBUG)
             return self._addCacheEntry(title, url)
 
         else:
             logger.log(
-                u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
+                u'The data returned from the ' + self.provider.name + ' feed is incomplete, this result is unusable',
                 logger.DEBUG)
             return None
 
     def _getLastUpdate(self):
         myDB = self._getDB()
-        sqlResults = myDB.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID])
+        sqlResults = myDB.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID])
 
         if sqlResults:
-            lastTime = int(sqlResults[0]["time"])
+            lastTime = int(sqlResults[0]['time'])
             if lastTime > int(time.mktime(datetime.datetime.today().timetuple())):
                 lastTime = 0
         else:
@@ -176,10 +179,10 @@
     def _getLastSearch(self):
         myDB = self._getDB()
-        sqlResults = myDB.select("SELECT time FROM lastSearch WHERE provider = ?", [self.providerID])
+        sqlResults = myDB.select('SELECT time FROM lastSearch WHERE provider = ?', [self.providerID])
 
         if sqlResults:
-            lastTime = int(sqlResults[0]["time"])
+            lastTime = int(sqlResults[0]['time'])
             if lastTime > int(time.mktime(datetime.datetime.today().timetuple())):
                 lastTime = 0
         else:
@@ -193,7 +196,7 @@
             toDate = datetime.datetime.today()
 
         myDB = self._getDB()
-        myDB.upsert("lastUpdate",
+        myDB.upsert('lastUpdate',
                     {'time': int(time.mktime(toDate.timetuple()))},
                     {'provider': self.providerID})
 
@@ -202,7 +205,7 @@
             toDate = datetime.datetime.today()
 
         myDB = self._getDB()
-        myDB.upsert("lastSearch",
+        myDB.upsert('lastSearch',
                     {'time': int(time.mktime(toDate.timetuple()))},
                     {'provider': self.providerID})
 
@@ -212,7 +215,7 @@
     def shouldUpdate(self):
         # if we've updated recently then skip the update
         if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
-            logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
+            logger.log(u'Last update was too soon, using old cache: today()-' + str(self.lastUpdate) + '<' + str(
                 datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
             return False
 
@@ -239,10 +242,10 @@
             myParser = NameParser(showObj=showObj, convert=True)
             parse_result = myParser.parse(name)
         except InvalidNameException:
-            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
+            logger.log(u'Unable to parse the filename ' + name + ' into a valid episode', logger.DEBUG)
             return None
         except InvalidShowException:
-            logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
+            logger.log(u'Unable to parse the filename ' + name + ' into a valid show', logger.DEBUG)
             return None
 
         if not parse_result or not parse_result.series_name:
@@ -254,7 +257,7 @@ class TVCache():
 
         if season and episodes:
             # store episodes as a seperated string
-            episodeText = "|" + "|".join(map(str, episodes)) + "|"
+            episodeText = '|' + '|'.join(map(str, episodes)) + '|'
 
             # get the current timestamp
             curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
@@ -271,10 +274,10 @@ class TVCache():
             # get version
             version = parse_result.version
 
-            logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
+            logger.log(u'Added RSS item: [' + name + '] to cache: [' + self.providerID + ']', logger.DEBUG)
 
             return [
-                "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
+                'INSERT OR IGNORE INTO [' + self.providerID + '] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)',
                 [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group,
                  version]]
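
_addCacheEntry stores the episode list as a pipe-delimited TEXT value, which is what findNeededEpisodes (further below) relies on when it matches a single episode with LIKE '%|n|%'. A quick illustration of the encoding, with invented episode numbers; the enclosing pipes are what stop episode 2 from also matching 12 or 21:

    episodes = [1, 2, 3]
    episodeText = '|' + '|'.join(map(str, episodes)) + '|'

    print episodeText                 # |1|2|3|
    print '|%s|' % 2 in episodeText   # True: episode 2 is cached
    print '|%s|' % 12 in episodeText  # False: no false match on 12
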
@@ -285,12 +288,12 @@
         else:
             return []
 
-    def listPropers(self, date=None, delimiter="."):
+    def listPropers(self, date=None, delimiter='.'):
         myDB = self._getDB()
         sql = "SELECT * FROM [" + self.providerID + "] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'"
 
         if date != None:
-            sql += " AND time >= " + str(int(time.mktime(date.timetuple())))
+            sql += ' AND time >= ' + str(int(time.mktime(date.timetuple())))
 
         return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))
 
@@ -302,14 +305,14 @@
         myDB = self._getDB()
         if type(episode) != list:
             sqlResults = myDB.select(
-                "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
-                [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"])
+                'SELECT * FROM [' + self.providerID + '] WHERE indexerid = ? AND season = ? AND episodes LIKE ?',
+                [episode.show.indexerid, episode.season, '%|' + str(episode.episode) + '|%'])
         else:
             for epObj in episode:
                 cl.append([
-                    "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ? "
-                    "AND quality IN (" + ",".join([str(x) for x in epObj.wantedQuality]) + ")",
-                    [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"]])
+                    'SELECT * FROM [' + self.providerID + '] WHERE indexerid = ? AND season = ? AND episodes LIKE ? '
+                    'AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
+                    [epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
 
             sqlResults = myDB.mass_action(cl, fetchall=True)
             sqlResults = list(itertools.chain(*sqlResults))
@@ -318,45 +321,45 @@
 
         for curResult in sqlResults:
 
             # skip non-tv crap
-            if not show_name_helpers.filterBadReleases(curResult["name"], parse=False):
+            if not show_name_helpers.filterBadReleases(curResult['name'], parse=False):
                 continue
 
             # get the show object, or if it's not one of our shows then ignore it
-            showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
+            showObj = helpers.findCertainShow(sickbeard.showList, int(curResult['indexerid']))
             if not showObj:
                 continue
 
             # skip if provider is anime only and show is not anime
             if self.provider.anime_only and not showObj.is_anime:
-                logger.log(u"" + str(showObj.name) + " is not an anime, skiping", logger.DEBUG)
+                logger.log(u'' + str(showObj.name) + ' is not an anime, skipping', logger.DEBUG)
                 continue
 
             # get season and ep data (ignoring multi-eps for now)
-            curSeason = int(curResult["season"])
+            curSeason = int(curResult['season'])
             if curSeason == -1:
                 continue
-            curEp = curResult["episodes"].split("|")[1]
+            curEp = curResult['episodes'].split('|')[1]
             if not curEp:
                 continue
             curEp = int(curEp)
-            curQuality = int(curResult["quality"])
-            curReleaseGroup = curResult["release_group"]
-            curVersion = curResult["version"]
+            curQuality = int(curResult['quality'])
+            curReleaseGroup = curResult['release_group']
+            curVersion = curResult['version']
 
             # if the show says we want that episode then add it to the list
             if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
-                logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
+                logger.log(u'Skipping ' + curResult['name'] + ' because we don\'t want an episode that\'s ' +
                            Quality.qualityStrings[curQuality], logger.DEBUG)
                 continue
 
             epObj = showObj.getEpisode(curSeason, curEp)
 
             # build a result object
-            title = curResult["name"]
-            url = curResult["url"]
+            title = curResult['name']
+            url = curResult['url']
 
-            logger.log(u"Found result " + title + " at " + url)
+            logger.log(u'Found result ' + title + ' at ' + url)
 
             result = self.provider.getResult([epObj])
             result.show = showObj