diff --git a/CHANGES.md b/CHANGES.md
index ac7cfe8..6d3281d 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -32,6 +32,7 @@
* Add startup loading page
* Change restart to use loading page
* Add upgrade messages for sickbeard, cache, and failed db upgrade processes to loading page
+* Add WorkingDirectory to the systemd unit to prevent a git issue at startup
[develop changelog]
diff --git a/gui/slick/interfaces/default/loading.tmpl b/gui/slick/interfaces/default/loading.tmpl
index a392481..f0d3dc9 100644
--- a/gui/slick/interfaces/default/loading.tmpl
+++ b/gui/slick/interfaces/default/loading.tmpl
@@ -41,7 +41,7 @@
@@ -52,9 +52,9 @@ body{padding-top:0 !important}.sglogo{display:block;width:138px;height:74px;marg
-
-$msg:
-#if i == msg_cnt##end if#
+
+$msg['msg']:
+
#end for
diff --git a/gui/slick/interfaces/default/restart.tmpl b/gui/slick/interfaces/default/restart.tmpl
index 0927d53..63f720f 100644
--- a/gui/slick/interfaces/default/restart.tmpl
+++ b/gui/slick/interfaces/default/restart.tmpl
@@ -76,7 +76,7 @@ body{padding-top:0 !important}.sglogo{display:block;width:138px;height:74px;marg
#if not $do_shutdown
-Waiting for SickGear home page:
+SickGear start phase:
#end if
diff --git a/gui/slick/js/loadingStartup.js b/gui/slick/js/loadingStartup.js
index deffcef..06e1b95 100644
--- a/gui/slick/js/loadingStartup.js
+++ b/gui/slick/js/loadingStartup.js
@@ -1,13 +1,11 @@
/** @namespace $.SickGear.Root */
var dev = !1,
- logInfo = dev && console.info.bind(window.console) || function(){},
- logErr = dev && console.error.bind(window.console) || function(){};
+ logInfo = dev && console.info.bind(window.console) || function (){},
+ logErr = dev && console.error.bind(window.console) || function (){};
$(function () {
-
ajaxConsumer.checkLoadNotifications();
-
});
var baseUrl = function () {
@@ -49,22 +47,36 @@ var ajaxConsumer = function () {
}();
function putMsg(msg) {
- var loading = '.loading-step', lastStep = $(loading).filter(':last');
- if (msg !== lastStep.find('.desc').attr('data-message')){
- lastStep.after(lastStep.clone());
- lastStep.find('.spinner').hide();
- lastStep.find('.hide-yes').removeClass('hide-yes');
- $(loading).filter(':last')
- .find('.desc')
- .attr('data-message', msg)
- .text(msg + ': ');
+ var loading = '.loading-step', lastStep$ = $(loading).filter(':last');
+ if (msg !== lastStep$.attr('data-message')) {
+ lastStep$.clone().insertAfter(lastStep$);
+
+ var result$ = lastStep$.find('.result');
+ lastStep$.find('.spinner').addClass('hide');
+ if (!lastStep$.find('.count').text().length) {
+ result$.removeClass('hide');
+ } else {
+ result$.addClass('hide');
+ }
+ lastStep$ = $(loading).filter(':last');
+ lastStep$.attr('data-message', msg);
+ lastStep$.find('.desc').text(msg + ': ');
+ lastStep$.find('.count').text('');
+ lastStep$.find('.spinner').removeClass('hide');
+ lastStep$.find('.result').addClass('hide');
}
}
function uiUpdateComplete(data) {
$.each(data, function (i, msg) {
- if (i >= $('.loading-step').length){
- putMsg(msg)
+ var loading = '.loading-step';
+ if (i >= $(loading).length) {
+ putMsg(msg.msg);
+ }
+ if (-1 !== msg.progress) {
+ var loading$ = $(loading + '[data-message="' + msg.msg + '"]');
+ loading$.find('.spinner, .result').addClass('hide');
+ loading$.find('.count').text(msg.progress);
}
});
}
diff --git a/init-scripts/init.systemd b/init-scripts/init.systemd
index 1bf7956..5f48daa 100755
--- a/init-scripts/init.systemd
+++ b/init-scripts/init.systemd
@@ -42,6 +42,7 @@ Environment=PYTHONUNBUFFERED=true
ExecStart=/usr/bin/python2 /opt/sickgear/app/sickgear.py --systemd --datadir=/opt/sickgear/data
KillMode=process
Restart=on-failure
+WorkingDirectory=/opt/sickgear
[Install]
WantedBy=multi-user.target
diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py
index fe0299a..25b1c4b 100644
--- a/sickbeard/databases/mainDB.py
+++ b/sickbeard/databases/mainDB.py
@@ -31,10 +31,6 @@ MAX_DB_VERSION = 20010
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
-def upgrade_log(to_log, log_level=logger.MESSAGE):
- logger.load_log('Upgrading main db', to_log, log_level)
-
-
class MainSanityCheck(db.DBSanityCheck):
def check(self):
self.fix_missing_table_indexes()
@@ -327,7 +323,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
ep_results = self.connection.select('SELECT episode_id, location, file_size FROM tv_episodes')
- upgrade_log(u'Adding file size to all episodes in DB, please be patient')
+ self.upgrade_log(u'Adding file size to all episodes in DB, please be patient')
for cur_ep in ep_results:
if not cur_ep['location']:
continue
@@ -341,7 +337,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
# check each snatch to see if we can use it to get a release name from
history_results = self.connection.select('SELECT * FROM history WHERE provider != -1 ORDER BY date ASC')
- upgrade_log(u'Adding release name to all episodes still in history')
+ self.upgrade_log(u'Adding release name to all episodes still in history')
for cur_result in history_results:
# find the associated download, if there isn't one then ignore it
download_results = self.connection.select(
@@ -350,7 +346,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
' WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?',
[cur_result['showid'], cur_result['season'], cur_result['episode'], cur_result['date']])
if not download_results:
- upgrade_log(u'Found a snatch in the history for ' + cur_result[
+ self.upgrade_log(u'Found a snatch in the history for ' + cur_result[
'resource'] + ' but couldn\'t find the associated download, skipping it', logger.DEBUG)
continue
@@ -400,7 +396,7 @@ class AddSizeAndSceneNameFields(db.SchemaUpgrade):
# check each snatch to see if we can use it to get a release name from
empty_results = self.connection.select('SELECT episode_id, location FROM tv_episodes WHERE release_name = ""')
- upgrade_log(u'Adding release name to all episodes with obvious scene filenames')
+ self.upgrade_log(u'Adding release name to all episodes with obvious scene filenames')
for cur_result in empty_results:
ep_file_name = ek.ek(os.path.basename, cur_result['location'])
@@ -545,7 +541,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
common.Quality.UNKNOWN], [])
# update qualities (including templates)
- upgrade_log(u'[1/4] Updating pre-defined templates and the quality for each show...', logger.MESSAGE)
+ self.upgrade_log(u'[1/4] Updating pre-defined templates and the quality for each show...')
cl = []
shows = self.connection.select('SELECT * FROM tv_shows')
for cur_show in shows:
@@ -560,7 +556,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
# update status that are are within the old hdwebdl
# (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
- upgrade_log(u'[2/4] Updating the status for the episodes within each show...', logger.MESSAGE)
+ self.upgrade_log(u'[2/4] Updating the status for the episodes within each show...')
cl = []
episodes = self.connection.select('SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800')
for cur_episode in episodes:
@@ -572,7 +568,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
# may not always coordinate together
# update previous history so it shows the correct action
- upgrade_log(u'[3/4] Updating history to reflect the correct action...', logger.MESSAGE)
+ self.upgrade_log(u'[3/4] Updating history to reflect the correct action...')
cl = []
history_action = self.connection.select('SELECT * FROM history WHERE action < 3276800 AND action >= 800')
for cur_entry in history_action:
@@ -581,7 +577,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
self.connection.mass_action(cl)
# update previous history so it shows the correct quality
- upgrade_log(u'[4/4] Updating history to reflect the correct quality...', logger.MESSAGE)
+ self.upgrade_log(u'[4/4] Updating history to reflect the correct quality...')
cl = []
history_quality = self.connection.select('SELECT * FROM history WHERE quality < 32768 AND quality >= 8')
for cur_entry in history_quality:
@@ -592,7 +588,7 @@ class Add1080pAndRawHDQualities(db.SchemaUpgrade):
self.incDBVersion()
# cleanup and reduce db if any previous data was removed
- upgrade_log(u'Performing a vacuum on the database.', logger.DEBUG)
+ self.upgrade_log(u'Performing a vacuum on the database.', logger.DEBUG)
self.connection.action('VACUUM')
return self.checkDBVersion()
@@ -604,10 +600,10 @@ class AddShowidTvdbidIndex(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Checking for duplicate shows before adding unique index.')
+ self.upgrade_log(u'Checking for duplicate shows before adding unique index.')
MainSanityCheck(self.connection).fix_duplicate_shows('tvdb_id')
- upgrade_log(u'Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.')
+ self.upgrade_log(u'Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.')
if not self.hasTable('idx_showid'):
self.connection.action('CREATE INDEX idx_showid ON tv_episodes (showid);')
if not self.hasTable('idx_tvdb_id'):
@@ -623,7 +619,7 @@ class AddLastUpdateTVDB(db.SchemaUpgrade):
def execute(self):
if not self.hasColumn('tv_shows', 'last_update_tvdb'):
- upgrade_log(u'Adding column last_update_tvdb to tv_shows')
+ self.upgrade_log(u'Adding column last_update_tvdb to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'last_update_tvdb', default=1)
@@ -636,7 +632,7 @@ class AddDBIncreaseTo15(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Bumping database version to v%s' % self.checkDBVersion())
+ self.upgrade_log(u'Bumping database version to v%s' % self.checkDBVersion())
self.incDBVersion()
return self.checkDBVersion()
@@ -647,7 +643,7 @@ class AddIMDbInfo(db.SchemaUpgrade):
db_backed_up = False
if not self.hasTable('imdb_info'):
- upgrade_log(u'Creating IMDb table imdb_info')
+ self.upgrade_log(u'Creating IMDb table imdb_info')
db.backup_database('sickbeard.db', self.checkDBVersion())
db_backed_up = True
@@ -657,7 +653,7 @@ class AddIMDbInfo(db.SchemaUpgrade):
' rating TEXT, votes INTEGER, last_update NUMERIC)')
if not self.hasColumn('tv_shows', 'imdb_id'):
- upgrade_log(u'Adding IMDb column imdb_id to tv_shows')
+ self.upgrade_log(u'Adding IMDb column imdb_id to tv_shows')
if not db_backed_up:
db.backup_database('sickbeard.db', self.checkDBVersion())
@@ -678,7 +674,7 @@ class AddProperNamingSupport(db.SchemaUpgrade):
return self.checkDBVersion()
if not self.hasColumn('tv_episodes', 'is_proper'):
- upgrade_log(u'Adding column is_proper to tv_episodes')
+ self.upgrade_log(u'Adding column is_proper to tv_episodes')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_episodes', 'is_proper')
@@ -698,7 +694,7 @@ class AddEmailSubscriptionTable(db.SchemaUpgrade):
return self.checkDBVersion()
if not self.hasColumn('tv_shows', 'notify_list'):
- upgrade_log(u'Adding column notify_list to tv_shows')
+ self.upgrade_log(u'Adding column notify_list to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'notify_list', 'TEXT', None)
@@ -722,7 +718,7 @@ class AddProperSearch(db.SchemaUpgrade):
return self.checkDBVersion()
if not self.hasColumn('info', 'last_proper_search'):
- upgrade_log(u'Adding column last_proper_search to info')
+ self.upgrade_log(u'Adding column last_proper_search to info')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('info', 'last_proper_search', default=1)
@@ -734,7 +730,7 @@ class AddProperSearch(db.SchemaUpgrade):
class AddDvdOrderOption(db.SchemaUpgrade):
def execute(self):
if not self.hasColumn('tv_shows', 'dvdorder'):
- upgrade_log(u'Adding column dvdorder to tv_shows')
+ self.upgrade_log(u'Adding column dvdorder to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'dvdorder', 'NUMERIC', '0')
@@ -746,7 +742,7 @@ class AddDvdOrderOption(db.SchemaUpgrade):
class AddSubtitlesSupport(db.SchemaUpgrade):
def execute(self):
if not self.hasColumn('tv_shows', 'subtitles'):
- upgrade_log(u'Adding subtitles to tv_shows and tv_episodes')
+ self.upgrade_log(u'Adding subtitles to tv_shows and tv_episodes')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'subtitles')
self.addColumn('tv_episodes', 'subtitles', 'TEXT', '')
@@ -762,10 +758,10 @@ class ConvertTVShowsToIndexerScheme(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Converting TV Shows table to Indexer Scheme...')
+ self.upgrade_log(u'Converting TV Shows table to Indexer Scheme...')
if self.hasTable('tmp_tv_shows'):
- upgrade_log(u'Removing temp tv show tables left behind from previous updates...')
+ self.upgrade_log(u'Removing temp tv show tables left behind from previous updates...')
self.connection.action('DROP TABLE tmp_tv_shows')
self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
@@ -798,10 +794,10 @@ class ConvertTVEpisodesToIndexerScheme(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Converting TV Episodes table to Indexer Scheme...')
+ self.upgrade_log(u'Converting TV Episodes table to Indexer Scheme...')
if self.hasTable('tmp_tv_episodes'):
- upgrade_log(u'Removing temp tv episode tables left behind from previous updates...')
+ self.upgrade_log(u'Removing temp tv episode tables left behind from previous updates...')
self.connection.action('DROP TABLE tmp_tv_episodes')
self.connection.action('ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes')
@@ -835,10 +831,10 @@ class ConvertIMDBInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Converting IMDB Info table to Indexer Scheme...')
+ self.upgrade_log(u'Converting IMDB Info table to Indexer Scheme...')
if self.hasTable('tmp_imdb_info'):
- upgrade_log(u'Removing temp imdb info tables left behind from previous updates...')
+ self.upgrade_log(u'Removing temp imdb info tables left behind from previous updates...')
self.connection.action('DROP TABLE tmp_imdb_info')
self.connection.action('ALTER TABLE imdb_info RENAME TO tmp_imdb_info')
@@ -861,10 +857,10 @@ class ConvertInfoToIndexerScheme(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Converting Info table to Indexer Scheme...')
+ self.upgrade_log(u'Converting Info table to Indexer Scheme...')
if self.hasTable('tmp_info'):
- upgrade_log(u'Removing temp info tables left behind from previous updates...')
+ self.upgrade_log(u'Removing temp info tables left behind from previous updates...')
self.connection.action('DROP TABLE tmp_info')
self.connection.action('ALTER TABLE info RENAME TO tmp_info')
@@ -885,7 +881,7 @@ class AddArchiveFirstMatchOption(db.SchemaUpgrade):
db.backup_database('sickbeard.db', self.checkDBVersion())
if not self.hasColumn('tv_shows', 'archive_firstmatch'):
- upgrade_log(u'Adding column archive_firstmatch to tv_shows')
+ self.upgrade_log(u'Adding column archive_firstmatch to tv_shows')
self.addColumn('tv_shows', 'archive_firstmatch', 'NUMERIC', '0')
self.incDBVersion()
@@ -900,7 +896,7 @@ class AddSceneNumbering(db.SchemaUpgrade):
if self.hasTable('scene_numbering'):
self.connection.action('DROP TABLE scene_numbering')
- upgrade_log(u'Upgrading table scene_numbering ...', logger.MESSAGE)
+ self.upgrade_log(u'Upgrading table scene_numbering ...')
self.connection.action(
'CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER,'
' scene_season INTEGER, scene_episode INTEGER,'
@@ -916,7 +912,7 @@ class ConvertIndexerToInteger(db.SchemaUpgrade):
db.backup_database('sickbeard.db', self.checkDBVersion())
cl = []
- upgrade_log(u'Converting Indexer to Integer ...', logger.MESSAGE)
+ self.upgrade_log(u'Converting Indexer to Integer ...')
cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']])
cl.append(['UPDATE tv_shows SET indexer = ? WHERE LOWER(indexer) = ?', ['2', 'tvrage']])
cl.append(['UPDATE tv_episodes SET indexer = ? WHERE LOWER(indexer) = ?', ['1', 'tvdb']])
@@ -940,13 +936,13 @@ class AddRequireAndIgnoreWords(db.SchemaUpgrade):
db_backed_up = False
if not self.hasColumn('tv_shows', 'rls_require_words'):
- upgrade_log(u'Adding column rls_require_words to tv_shows')
+ self.upgrade_log(u'Adding column rls_require_words to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
db_backed_up = True
self.addColumn('tv_shows', 'rls_require_words', 'TEXT', '')
if not self.hasColumn('tv_shows', 'rls_ignore_words'):
- upgrade_log(u'Adding column rls_ignore_words to tv_shows')
+ self.upgrade_log(u'Adding column rls_ignore_words to tv_shows')
if not db_backed_up:
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'rls_ignore_words', 'TEXT', '')
@@ -960,14 +956,14 @@ class AddSportsOption(db.SchemaUpgrade):
def execute(self):
db_backed_up = False
if not self.hasColumn('tv_shows', 'sports'):
- upgrade_log(u'Adding column sports to tv_shows')
+ self.upgrade_log(u'Adding column sports to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
db_backed_up = True
self.addColumn('tv_shows', 'sports', 'NUMERIC', '0')
if self.hasColumn('tv_shows', 'air_by_date') and self.hasColumn('tv_shows', 'sports'):
# update sports column
- upgrade_log(u'[4/4] Updating tv_shows to reflect the correct sports value...', logger.MESSAGE)
+ self.upgrade_log(u'[4/4] Updating tv_shows to reflect the correct sports value...')
if not db_backed_up:
db.backup_database('sickbeard.db', self.checkDBVersion())
cl = []
@@ -988,7 +984,7 @@ class AddSceneNumberingToTvEpisodes(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Adding columns scene_season and scene_episode to tvepisodes')
+ self.upgrade_log(u'Adding columns scene_season and scene_episode to tv_episodes')
self.addColumn('tv_episodes', 'scene_season', 'NUMERIC', 'NULL')
self.addColumn('tv_episodes', 'scene_episode', 'NUMERIC', 'NULL')
@@ -1001,7 +997,7 @@ class AddAnimeTVShow(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Adding column anime to tv_episodes')
+ self.upgrade_log(u'Adding column anime to tv_shows')
self.addColumn('tv_shows', 'anime', 'NUMERIC', '0')
self.incDBVersion()
@@ -1013,7 +1009,7 @@ class AddAbsoluteNumbering(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Adding column absolute_number to tv_episodes')
+ self.upgrade_log(u'Adding column absolute_number to tv_episodes')
self.addColumn('tv_episodes', 'absolute_number', 'NUMERIC', '0')
self.incDBVersion()
@@ -1025,7 +1021,7 @@ class AddSceneAbsoluteNumbering(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Adding columns absolute_number and scene_absolute_number to scene_numbering')
+ self.upgrade_log(u'Adding columns absolute_number and scene_absolute_number to scene_numbering')
self.addColumn('scene_numbering', 'absolute_number', 'NUMERIC', '0')
self.addColumn('scene_numbering', 'scene_absolute_number', 'NUMERIC', '0')
@@ -1040,7 +1036,7 @@ class AddAnimeBlacklistWhitelist(db.SchemaUpgrade):
cl = [['CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)'],
['CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)']]
- upgrade_log(u'Creating table blacklist whitelist')
+ self.upgrade_log(u'Creating table blacklist whitelist')
self.connection.mass_action(cl)
self.incDBVersion()
@@ -1052,7 +1048,7 @@ class AddSceneAbsoluteNumbering2(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Adding column scene_absolute_number to tv_episodes')
+ self.upgrade_log(u'Adding column scene_absolute_number to tv_episodes')
self.addColumn('tv_episodes', 'scene_absolute_number', 'NUMERIC', '0')
self.incDBVersion()
@@ -1064,7 +1060,7 @@ class AddXemRefresh(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Creating table xem_refresh')
+ self.upgrade_log(u'Creating table xem_refresh')
self.connection.action(
'CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)')
@@ -1077,7 +1073,7 @@ class AddSceneToTvShows(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Adding column scene to tv_shows')
+ self.upgrade_log(u'Adding column scene to tv_shows')
self.addColumn('tv_shows', 'scene', 'NUMERIC', '0')
self.incDBVersion()
@@ -1092,7 +1088,7 @@ class AddIndexerMapping(db.SchemaUpgrade):
if self.hasTable('indexer_mapping'):
self.connection.action('DROP TABLE indexer_mapping')
- upgrade_log(u'Adding table indexer_mapping')
+ self.upgrade_log(u'Adding table indexer_mapping')
self.connection.action(
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC,'
' PRIMARY KEY (indexer_id, indexer))')
@@ -1106,11 +1102,11 @@ class AddVersionToTvEpisodes(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Adding columns release_group and version to tv_episodes')
+ self.upgrade_log(u'Adding columns release_group and version to tv_episodes')
self.addColumn('tv_episodes', 'release_group', 'TEXT', '')
self.addColumn('tv_episodes', 'version', 'NUMERIC', '-1')
- upgrade_log(u'Adding column version to history')
+ self.upgrade_log(u'Adding column version to history')
self.addColumn('history', 'version', 'NUMERIC', '-1')
self.incDBVersion()
@@ -1122,7 +1118,7 @@ class BumpDatabaseVersion(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Bumping database version')
+ self.upgrade_log(u'Bumping database version')
self.setDBVersion(10000)
return self.checkDBVersion()
@@ -1133,7 +1129,7 @@ class Migrate41(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Bumping database version')
+ self.upgrade_log(u'Bumping database version')
self.setDBVersion(10001)
return self.checkDBVersion()
@@ -1149,7 +1145,7 @@ class Migrate43(db.SchemaUpgrade):
if self.hasTable(table):
db.backup_database('sickbeard.db', self.checkDBVersion())
db_backed_up = True
- upgrade_log(u'Dropping redundant table tmdb_info')
+ self.upgrade_log(u'Dropping redundant table tmdb_info')
self.connection.action('DROP TABLE [%s]' % table)
db_chg = True
@@ -1157,7 +1153,7 @@ class Migrate43(db.SchemaUpgrade):
if not db_backed_up:
db.backup_database('sickbeard.db', self.checkDBVersion())
db_backed_up = True
- upgrade_log(u'Dropping redundant tmdb_info refs')
+ self.upgrade_log(u'Dropping redundant tmdb_info refs')
self.dropColumn('tv_shows', 'tmdb_id')
db_chg = True
@@ -1169,7 +1165,7 @@ class Migrate43(db.SchemaUpgrade):
self.connection.action('INSERT INTO db_version (db_version) VALUES (0);')
if not db_chg:
- upgrade_log(u'Bumping database version')
+ self.upgrade_log(u'Bumping database version')
self.setDBVersion(10001)
return self.checkDBVersion()
@@ -1180,7 +1176,7 @@ class Migrate4301(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Bumping database version')
+ self.upgrade_log(u'Bumping database version')
self.setDBVersion(10002)
return self.checkDBVersion()
@@ -1191,7 +1187,7 @@ class Migrate4302(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Bumping database version')
+ self.upgrade_log(u'Bumping database version')
self.setDBVersion(10003)
return self.checkDBVersion()
@@ -1202,7 +1198,7 @@ class MigrateUpstream(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Migrate SickBeard DB v%s into v15' % str(self.checkDBVersion()).replace('58', ''))
+ self.upgrade_log(u'Migrate SickBeard db v%s into v15' % str(self.checkDBVersion()).replace('58', ''))
self.setDBVersion(15)
return self.checkDBVersion()
@@ -1213,7 +1209,7 @@ class SickGearDatabaseVersion(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Bumping database version to new SickGear standards')
+ self.upgrade_log(u'Bumping database version to new SickGear standards')
self.setDBVersion(20000)
return self.checkDBVersion()
@@ -1224,7 +1220,7 @@ class RemoveDefaultEpStatusFromTvShows(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Dropping redundant column default_ep_status from tv_shows')
+ self.upgrade_log(u'Dropping redundant column default_ep_status from tv_shows')
self.dropColumn('tv_shows', 'default_ep_status')
self.setDBVersion(10000)
@@ -1236,7 +1232,7 @@ class RemoveMinorDBVersion(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Dropping redundant column db_minor_version from db_version')
+ self.upgrade_log(u'Dropping redundant column db_minor_version from db_version')
self.dropColumn('db_version', 'db_minor_version')
self.setDBVersion(10001)
@@ -1247,7 +1243,7 @@ class RemoveMinorDBVersion(db.SchemaUpgrade):
class RemoveMetadataSub(db.SchemaUpgrade):
def execute(self):
if self.hasColumn('tv_shows', 'sub_use_sr_metadata'):
- upgrade_log(u'Dropping redundant column metadata sub')
+ self.upgrade_log(u'Dropping redundant column metadata sub')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.dropColumn('tv_shows', 'sub_use_sr_metadata')
@@ -1260,10 +1256,10 @@ class DBIncreaseTo20001(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
- upgrade_log(u'Bumping database version to force a backup before new database code')
+ self.upgrade_log(u'Bumping database version to force a backup before new database code')
self.connection.action('VACUUM')
- upgrade_log(u'Performed a vacuum on the database', logger.DEBUG)
+ self.upgrade_log(u'Performed a vacuum on the database', logger.DEBUG)
self.setDBVersion(20001)
return self.checkDBVersion()
@@ -1273,7 +1269,7 @@ class DBIncreaseTo20001(db.SchemaUpgrade):
class AddTvShowOverview(db.SchemaUpgrade):
def execute(self):
if not self.hasColumn('tv_shows', 'overview'):
- upgrade_log(u'Adding column overview to tv_shows')
+ self.upgrade_log(u'Adding column overview to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'overview', 'TEXT', '')
@@ -1285,7 +1281,7 @@ class AddTvShowOverview(db.SchemaUpgrade):
class AddTvShowTags(db.SchemaUpgrade):
def execute(self):
if not self.hasColumn('tv_shows', 'tag'):
- upgrade_log(u'Adding tag to tv_shows')
+ self.upgrade_log(u'Adding tag to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'tag', 'TEXT', 'Show List')
@@ -1302,7 +1298,7 @@ class ChangeMapIndexer(db.SchemaUpgrade):
if self.hasTable('indexer_mapping'):
self.connection.action('DROP TABLE indexer_mapping')
- upgrade_log(u'Changing table indexer_mapping')
+ self.upgrade_log(u'Changing table indexer_mapping')
self.connection.action(
'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL,'
' mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0,'
@@ -1311,10 +1307,10 @@ class ChangeMapIndexer(db.SchemaUpgrade):
self.connection.action('CREATE INDEX IF NOT EXISTS idx_mapping ON indexer_mapping (indexer_id, indexer)')
if not self.hasColumn('info', 'last_run_backlog'):
- upgrade_log('Adding last_run_backlog to info')
+ self.upgrade_log('Adding last_run_backlog to info')
self.addColumn('info', 'last_run_backlog', 'NUMERIC', 1)
- upgrade_log(u'Moving table scene_exceptions from cache.db to sickbeard.db')
+ self.upgrade_log(u'Moving table scene_exceptions from cache.db to sickbeard.db')
if self.hasTable('scene_exceptions_refresh'):
self.connection.action('DROP TABLE scene_exceptions_refresh')
self.connection.action('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)')
@@ -1357,7 +1353,7 @@ class ChangeMapIndexer(db.SchemaUpgrade):
class AddShowNotFoundCounter(db.SchemaUpgrade):
def execute(self):
if not self.hasTable('tv_shows_not_found'):
- upgrade_log(u'Adding table tv_shows_not_found')
+ self.upgrade_log(u'Adding table tv_shows_not_found')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.connection.action(
@@ -1373,7 +1369,7 @@ class AddShowNotFoundCounter(db.SchemaUpgrade):
class AddFlagTable(db.SchemaUpgrade):
def execute(self):
if not self.hasTable('flags'):
- upgrade_log(u'Adding table flags')
+ self.upgrade_log(u'Adding table flags')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.connection.action('CREATE TABLE flags (flag PRIMARY KEY NOT NULL)')
@@ -1386,7 +1382,7 @@ class AddFlagTable(db.SchemaUpgrade):
class DBIncreaseTo20007(db.SchemaUpgrade):
def execute(self):
- upgrade_log(u'Bumping database version')
+ self.upgrade_log(u'Bumping database version')
self.setDBVersion(20007)
return self.checkDBVersion()
@@ -1411,7 +1407,7 @@ class AddWatched(db.SchemaUpgrade):
self.connection.action('VACUUM')
if not self.hasTable('tv_episodes_watched'):
- upgrade_log(u'Adding table tv_episodes_watched')
+ self.upgrade_log(u'Adding table tv_episodes_watched')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.connection.action(
@@ -1429,7 +1425,7 @@ class AddPrune(db.SchemaUpgrade):
def execute(self):
if not self.hasColumn('tv_shows', 'prune'):
- upgrade_log('Adding prune to tv_shows')
+ self.upgrade_log('Adding prune to tv_shows')
db.backup_database('sickbeard.db', self.checkDBVersion())
self.addColumn('tv_shows', 'prune', 'INT', 0)
diff --git a/sickbeard/db.py b/sickbeard/db.py
index 5407a02..4c428c2 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -312,6 +312,9 @@ class DBConnection(object):
self.connection.close()
self.connection = None
+ def upgrade_log(self, to_log, log_level=logger.MESSAGE):
+ logger.load_log('Upgrading %s' % self.filename, to_log, log_level)
+
def sanityCheckDatabase(connection, sanity_check):
sanity_check(connection).check()
@@ -325,16 +328,12 @@ class DBSanityCheck(object):
pass
-def load_msg(db_name, progess_msg):
- logger.load_log('Upgrading %s' % db_name, progess_msg)
-
-
def upgradeDatabase(connection, schema):
logger.log(u'Checking database structure...', logger.MESSAGE)
connection.is_upgrading = False
_processUpgrade(connection, schema)
if connection.is_upgrading:
- load_msg(connection.filename, 'Finished')
+ connection.upgrade_log('Finished')
def prettyName(class_name):
@@ -355,7 +354,7 @@ def _processUpgrade(connection, upgradeClass):
logger.log(u'Checking %s database upgrade' % prettyName(upgradeClass.__name__), logger.DEBUG)
if not instance.test():
connection.is_upgrading = True
- load_msg(connection.filename, getattr(upgradeClass, 'pretty_name', None) or prettyName(upgradeClass.__name__))
+ connection.upgrade_log(getattr(upgradeClass, 'pretty_name', None) or prettyName(upgradeClass.__name__))
logger.log(u'Database upgrade required: %s' % prettyName(upgradeClass.__name__), logger.MESSAGE)
try:
instance.execute()
@@ -499,6 +498,9 @@ class SchemaUpgrade(object):
self.connection.action('VACUUM')
self.incDBVersion()
+ def upgrade_log(self, *args, **kwargs):
+ self.connection.upgrade_log(*args, **kwargs)
+
def MigrationCode(myDB):
schema = {
@@ -580,7 +582,7 @@ def MigrationCode(myDB):
else:
- load_msg('main db', 'Upgrading')
+ myDB.upgrade_log('Upgrading')
while db_version < sickbeard.mainDB.MAX_DB_VERSION:
if None is schema[db_version]: # skip placeholders used when multi PRs are updating DB
db_version += 1
@@ -597,7 +599,7 @@ def MigrationCode(myDB):
logger.log_error_and_exit(u'Successfully restored database version: %s' % db_version)
else:
logger.log_error_and_exit(u'Failed to restore database version: %s' % db_version)
- load_msg('main db', 'Finished')
+ myDB.upgrade_log('Finished')
def backup_database(filename, version):