Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-01 08:53:37 +00:00
Merge pull request #385 from JackDandy/feature/ChangeFirstRunDB
Change first run after install to set up the main db …
This commit is contained in: commit 954bcb74be
4 changed files with 105 additions and 53 deletions
@@ -21,6 +21,8 @@
* Add handling for CloudFlare custom HTTP response codes
* Fix to correctly load local libraries instead of system installed libraries
* Update PyNMA to hybrid v1.0
* Change first run after install to set up the main db to the current schema instead of upgrading
* Change don't create a backup from an initial zero byte main database file, PEP8 and code tidy up

[develop changelog]
* Update Requests library 2.7.0 (ab1f493) to 2.7.0 (8b5e457)
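To make the headline change concrete ('Change first run after install to set up the main db to the current schema instead of upgrading'): on a fresh database the new InitialSchema creates the full current schema and stamps the latest version (20003) directly, instead of replaying every historical migration. A minimal standalone sketch of that decision, using plain sqlite3 rather than SickGear's db wrapper (setup_or_upgrade, CURRENT_SCHEMA and the upgrades mapping are illustrative names, not the project's API):

import sqlite3

CURRENT_VERSION = 20003  # illustrative; the version stamped by the new InitialSchema

CURRENT_SCHEMA = [
    'CREATE TABLE db_version (db_version INTEGER)',
    'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, show_name TEXT)',  # trimmed for brevity
    'INSERT INTO db_version (db_version) VALUES (%s)' % CURRENT_VERSION,
]

def setup_or_upgrade(path, upgrades):
    """Create the current schema on a fresh install, otherwise run incremental upgrades."""
    conn = sqlite3.connect(path)
    try:
        tables = set(row[0] for row in conn.execute(
            "SELECT name FROM sqlite_master WHERE type = 'table'"))
        if not set(['tv_shows', 'db_version']) & tables:
            # fresh install: build the schema at the latest version in one go
            for statement in CURRENT_SCHEMA:
                conn.execute(statement)
        else:
            # existing install: apply each pending migration in order
            (version,) = conn.execute('SELECT db_version FROM db_version').fetchone()
            for target, upgrade in sorted(upgrades.items()):
                if version < target:
                    upgrade(conn)
                    conn.execute('UPDATE db_version SET db_version = ?', (target,))
                    version = target
        conn.commit()
    finally:
        conn.close()

Under this scheme a brand-new install never passes through the 0 -> 31, 31 -> 32, ... chain; only databases created by an older build do.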
@@ -21,7 +21,7 @@ import datetime
import sickbeard
import os.path

from sickbeard import db, common, helpers, logger
from sickbeard import db, common, logger

from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
@@ -145,34 +145,36 @@ class MainSanityCheck(db.DBSanityCheck):
        logger.log(u'No UNAIRED episodes, check passed')


# ======================
# = Main DB Migrations =
# ======================
# Add new migrations at the bottom of the list; subclass the previous migration.


# 0 -> 31
# 0 -> 20003
class InitialSchema(db.SchemaUpgrade):
    def execute(self):
        db.backup_database('sickbeard.db', self.checkDBVersion())

        if not self.hasTable('tv_shows') and not self.hasTable('db_version'):
            queries = [
                # original sick beard tables
                'CREATE TABLE db_version (db_version INTEGER);',
                'CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT)',
                'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)',
                'CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT, version NUMERIC)',
                'CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)',
                'CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY(indexer_id, season, episode))',
                'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC);',
                'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC);',
                'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC, release_group TEXT, trakt_watched NUMERIC)',
                'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, overview TEXT, tag TEXT)',
                'CREATE INDEX idx_showid ON tv_episodes (showid)',
                'CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes (showid,airdate)',
                'CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)',
                'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))',
                'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)',
                'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, absolute_number NUMERIC, scene_absolute_number NUMERIC, PRIMARY KEY (indexer_id, season, episode))',
                'CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)',
                'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)',
                'CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id)',
                'CREATE INDEX idx_showid ON tv_episodes (showid);',
                'CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate);',
                'CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate);',
                'CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate);',
                'CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate)',
                'INSERT INTO db_version (db_version) VALUES (31);'
                'CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)',
                'CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)',
                'CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)',
                'INSERT INTO db_version (db_version) VALUES (20003)'
            ]
            for query in queries:
                self.connection.action(query)
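As in the hunk above, each statement in the queries list is applied in order via self.connection.action(query). A rough standalone equivalent using sqlite3 directly (apply_initial_schema is an illustrative helper, not part of SickGear's db wrapper):

import sqlite3

def apply_initial_schema(path, queries):
    """Run each CREATE/INSERT statement in order and report the stamped schema version."""
    conn = sqlite3.connect(path)
    try:
        with conn:  # statements are committed together when the block exits cleanly
            for query in queries:
                conn.execute(query)
        (db_version,) = conn.execute('SELECT db_version FROM db_version').fetchone()
        return db_version  # 20003 for the new InitialSchema above
    finally:
        conn.close()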
@@ -964,7 +966,7 @@ class BumpDatabaseVersion(db.SchemaUpgrade):
        return self.checkDBVersion()


# 41 -> 10001
# 41,42 -> 10001
class Migrate41(db.SchemaUpgrade):
    def execute(self):
        db.backup_database('sickbeard.db', self.checkDBVersion())
@@ -1034,6 +1036,7 @@ class AddTvShowOverview(db.SchemaUpgrade):
        self.setDBVersion(20002)
        return self.checkDBVersion()


# 20002 -> 20003
class AddTvShowTags(db.SchemaUpgrade):
    def execute(self):
@@ -650,28 +650,28 @@ def parse_xml(data, del_xmlns=False):


def backupVersionedFile(old_file, version):
    numTries = 0
    num_tries = 0

    new_file = old_file + '.' + 'v' + str(version)
    new_file = '%s.v%s' % (old_file, version)

    while not ek.ek(os.path.isfile, new_file):
        if not ek.ek(os.path.isfile, old_file):
            logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
        if not ek.ek(os.path.isfile, old_file) or 0 == get_size(old_file):
            logger.log(u'No need to create backup', logger.DEBUG)
            break

        try:
            logger.log(u"Trying to back up " + old_file + " to " + new_file, logger.DEBUG)
            logger.log(u'Trying to back up %s to %s' % (old_file, new_file), logger.DEBUG)
            shutil.copy(old_file, new_file)
            logger.log(u"Backup done", logger.DEBUG)
            logger.log(u'Backup done', logger.DEBUG)
            break
        except Exception, e:
            logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + ex(e), logger.WARNING)
            numTries += 1
            time.sleep(1)
            logger.log(u"Trying again.", logger.DEBUG)
            logger.log(u'Error while trying to back up %s to %s : %s' % (old_file, new_file, ex(e)), logger.WARNING)
            num_tries += 1
            time.sleep(3)
            logger.log(u'Trying again.', logger.DEBUG)

        if numTries >= 10:
            logger.log(u"Unable to back up " + old_file + " to " + new_file + " please do it manually.", logger.ERROR)
        if 3 <= num_tries:
            logger.log(u'Unable to back up %s to %s please do it manually.' % (old_file, new_file), logger.ERROR)
            return False

    return True
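The reworked backupVersionedFile above skips the backup when the source file is missing or zero bytes, retries the copy a few times (three attempts, three-second pauses), and only then gives up. A self-contained sketch of the same pattern using the standard library in place of SickGear's ek/logger helpers (backup_versioned_file and its keyword arguments are illustrative):

import os
import shutil
import time

def backup_versioned_file(old_file, version, max_tries=3, delay=3):
    """Copy old_file to old_file.v<version>, skipping missing or empty sources."""
    new_file = '%s.v%s' % (old_file, version)

    tries = 0
    while not os.path.isfile(new_file):
        if not os.path.isfile(old_file) or 0 == os.path.getsize(old_file):
            # nothing worth backing up (missing or zero byte file)
            break
        try:
            shutil.copy(old_file, new_file)
            break
        except (IOError, OSError):
            tries += 1
            if tries >= max_tries:
                # give up after a few failed attempts; the caller can back up manually
                return False
            time.sleep(delay)
    return True

Returning True for the skipped zero-byte case mirrors the new behaviour above: an empty initial database file is simply not worth backing up.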
@@ -5,7 +5,8 @@ import unittest
import test_lib as test
import sickbeard
from time import sleep
from sickbeard import db
from sickbeard import db, logger
from sickbeard.databases.mainDB import MIN_DB_VERSION, MAX_DB_VERSION

sys.path.insert(1, os.path.abspath('..'))
sys.path.insert(1, os.path.abspath('../lib'))
@@ -21,48 +22,94 @@ class MigrationBasicTests(test.SickbeardTestDBCase):
        pass

    def test_migrations(self):
        schema = {
            0: sickbeard.mainDB.InitialSchema,
            31: sickbeard.mainDB.AddAnimeTVShow,
            32: sickbeard.mainDB.AddAbsoluteNumbering,
            33: sickbeard.mainDB.AddSceneAbsoluteNumbering,
            34: sickbeard.mainDB.AddAnimeBlacklistWhitelist,
            35: sickbeard.mainDB.AddSceneAbsoluteNumbering2,
            36: sickbeard.mainDB.AddXemRefresh,
            37: sickbeard.mainDB.AddSceneToTvShows,
            38: sickbeard.mainDB.AddIndexerMapping,
            39: sickbeard.mainDB.AddVersionToTvEpisodes,
            41: AddDefaultEpStatusToTvShows,
        }
        schema = {0: OldInitialSchema, # sickbeard.mainDB.InitialSchema,
                  31: sickbeard.mainDB.AddAnimeTVShow,
                  32: sickbeard.mainDB.AddAbsoluteNumbering,
                  33: sickbeard.mainDB.AddSceneAbsoluteNumbering,
                  34: sickbeard.mainDB.AddAnimeBlacklistWhitelist,
                  35: sickbeard.mainDB.AddSceneAbsoluteNumbering2,
                  36: sickbeard.mainDB.AddXemRefresh,
                  37: sickbeard.mainDB.AddSceneToTvShows,
                  38: sickbeard.mainDB.AddIndexerMapping,
                  39: sickbeard.mainDB.AddVersionToTvEpisodes,
                  41: AddDefaultEpStatusToTvShows
                  }

        count = 1
        while count < len(schema.keys()):
            myDB = db.DBConnection()
            my_db = db.DBConnection()

            for version in sorted(schema.keys())[:count]:
                update = schema[version](myDB)
                update = schema[version](my_db)
                update.execute()
                sleep(0.1)

            db.MigrationCode(myDB)
            myDB.close()
            for filename in glob.glob(os.path.join(test.TESTDIR, test.TESTDBNAME) +'*'):
            db.MigrationCode(my_db)
            my_db.close()
            for filename in glob.glob(os.path.join(test.TESTDIR, test.TESTDBNAME) + '*'):
                os.remove(filename)

            sleep(0.1)
            count += 1


# 0 -> 31
class OldInitialSchema(db.SchemaUpgrade):
    def execute(self):
        db.backup_database('sickbeard.db', self.checkDBVersion())

        if not self.hasTable('tv_shows') and not self.hasTable('db_version'):
            queries = [
                'CREATE TABLE db_version (db_version INTEGER);',
                'CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT)',
                'CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)',
                'CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)',
                'CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY(indexer_id, season, episode))',
                'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC);',
                'CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC);',
                'CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id)',
                'CREATE INDEX idx_showid ON tv_episodes (showid);',
                'CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate);',
                'CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate);',
                'CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate);',
                'CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate)',
                'INSERT INTO db_version (db_version) VALUES (31);'
            ]
            for query in queries:
                self.connection.action(query)

        else:
            cur_db_version = self.checkDBVersion()

            if cur_db_version < MIN_DB_VERSION:
                logger.log_error_and_exit(u'Your database version ('
                                          + str(cur_db_version)
                                          + ') is too old to migrate from what this version of SickGear supports ('
                                          + str(MIN_DB_VERSION) + ').' + "\n"
                                          + 'Upgrade using a previous version (tag) build 496 to build 501 of SickGear first or remove database file to begin fresh.'
                                          )

            if cur_db_version > MAX_DB_VERSION:
                logger.log_error_and_exit(u'Your database version ('
                                          + str(cur_db_version)
                                          + ') has been incremented past what this version of SickGear supports ('
                                          + str(MAX_DB_VERSION) + ').' + "\n"
                                          + 'If you have used other forks of SickGear, your database may be unusable due to their modifications.'
                                          )

        return self.checkDBVersion()


class AddDefaultEpStatusToTvShows(db.SchemaUpgrade):
    def execute(self):
        self.addColumn("tv_shows", "default_ep_status", "TEXT", "")
        self.addColumn('tv_shows', 'default_ep_status', 'TEXT', '')
        self.setDBVersion(41)


if __name__ == '__main__':
    print "=================="
    print "STARTING - MIGRATION TESTS"
    print "=================="
    print "######################################################################"
    print '=================='
    print 'Starting - Migration Tests'
    print '=================='
    print '######################################################################'
    suite = unittest.TestLoader().loadTestsFromTestCase(MigrationBasicTests)
    unittest.TextTestRunner(verbosity=2).run(suite)
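The test above exercises the migration chain cumulatively: for each growing prefix of the version-to-upgrade mapping it applies those upgrades to a fresh database, then hands the result to the normal migration code and starts over. Stripped of the SickGear plumbing, the idea looks roughly like this (check_migration_chain, run_migrations and the schema callables are placeholders for the real classes and helpers):

import os
import sqlite3

def check_migration_chain(db_path, schema, run_migrations, current_version):
    """Apply each prefix of the upgrade map, then verify migrations reach the latest version."""
    for count in range(1, len(schema) + 1):
        conn = sqlite3.connect(db_path)
        try:
            for version in sorted(schema)[:count]:
                schema[version](conn)   # build up to an intermediate schema version
            run_migrations(conn)        # the code under test: migrate to the current schema
            (db_version,) = conn.execute('SELECT db_version FROM db_version').fetchone()
            assert db_version == current_version, db_version
        finally:
            conn.close()
            os.remove(db_path)          # start from scratch for the next prefix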