@@ -2,6 +2,7 @@ import json
 import os
 import time
 import traceback
+from sqlite3 import OperationalError
 
 from CodernityDB.database import RecordNotFound
 from CodernityDB.index import IndexException, IndexNotFoundException, IndexConflict
@@ -9,7 +10,7 @@ from couchpotato import CPLog
 from couchpotato.api import addApiView
 from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
 from couchpotato.core.helpers.encoding import toUnicode, sp
-from couchpotato.core.helpers.variable import getImdb, tryInt
+from couchpotato.core.helpers.variable import getImdb, tryInt, randomString
 
 
 log = CPLog(__name__)
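The large hunk below reworks the SQLite-to-CodernityDB migration so the old
database is only renamed once the outcome is known: a rename_old flag starts
out False, the whole import runs inside one try block, and the flag is
flipped either after a completed pass or when a known-faulty table forces an
abort (so a broken source database is still moved out of the way instead of
being retried on every start). A minimal runnable sketch of that control
flow, using illustrative names (do_migration, old_db) rather than the actual
CouchPotato internals:

    import os
    import traceback
    from sqlite3 import OperationalError

    def migrate(old_db, do_migration):
        rename_old = False
        try:
            do_migration()
            rename_old = True  # full pass finished; safe to retire the old db
        except OperationalError:
            # unreadable or ancient schema; flag stays as the inner code left it
            print('Migrating from faulty database: %s' % traceback.format_exc())
        except:
            print('Migration failed: %s' % traceback.format_exc())

        if rename_old:
            os.rename(old_db, old_db + '.old')  # the real hunk adds a random suffix
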
@@ -311,312 +312,326 @@ class Database(object):
         }
 
         migrate_data = {}
+        rename_old = False
 
+        try:
+
             c = conn.cursor()
 
             for ml in migrate_list:
                 migrate_data[ml] = {}
                 rows = migrate_list[ml]
 
                 try:
                     c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
                 except:
                     # ignore faulty destination_id database
                     if ml == 'category':
                         migrate_data[ml] = {}
                     else:
+                        rename_old = True
                         raise
 
                 for p in c.fetchall():
                     columns = {}
                     for row in migrate_list[ml]:
                         columns[row] = p[rows.index(row)]
 
                     if not migrate_data[ml].get(p[0]):
                         migrate_data[ml][p[0]] = columns
                     else:
                         if not isinstance(migrate_data[ml][p[0]], list):
                             migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
                         migrate_data[ml][p[0]].append(columns)
 
             conn.close()
 
             log.info('Getting data took %s', time.time() - migrate_start)
 
             db = self.getDB()
             if not db.opened:
                 return
 
             # Use properties
             properties = migrate_data['properties']
             log.info('Importing %s properties', len(properties))
             for x in properties:
                 property = properties[x]
                 Env.prop(property.get('identifier'), property.get('value'))
 
             # Categories
             categories = migrate_data.get('category', [])
             log.info('Importing %s categories', len(categories))
             category_link = {}
             for x in categories:
                 c = categories[x]
 
                 new_c = db.insert({
                     '_t': 'category',
                     'order': c.get('order', 999),
                     'label': toUnicode(c.get('label', '')),
                     'ignored': toUnicode(c.get('ignored', '')),
                     'preferred': toUnicode(c.get('preferred', '')),
                     'required': toUnicode(c.get('required', '')),
                     'destination': toUnicode(c.get('destination', '')),
                 })
 
                 category_link[x] = new_c.get('_id')
 
             # Profiles
             log.info('Importing profiles')
             new_profiles = db.all('profile', with_doc = True)
             new_profiles_by_label = {}
             for x in new_profiles:
 
                 # Remove default non core profiles
                 if not x['doc'].get('core'):
                     db.delete(x['doc'])
                 else:
                     new_profiles_by_label[x['doc']['label']] = x['_id']
 
             profiles = migrate_data['profile']
             profile_link = {}
             for x in profiles:
                 p = profiles[x]
 
                 exists = new_profiles_by_label.get(p.get('label'))
 
                 # Update existing with order only
                 if exists and p.get('core'):
                     profile = db.get('id', exists)
                     profile['order'] = tryInt(p.get('order'))
                     profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
                     db.update(profile)
 
                     profile_link[x] = profile.get('_id')
                 else:
 
                     new_profile = {
                         '_t': 'profile',
                         'label': p.get('label'),
                         'order': int(p.get('order', 999)),
                         'core': p.get('core', False),
                         'qualities': [],
                         'wait_for': [],
                         'finish': []
                     }
 
                     types = migrate_data['profiletype']
                     for profile_type in types:
                         p_type = types[profile_type]
                         if types[profile_type]['profile_id'] == p['id']:
                             if p_type['quality_id']:
                                 new_profile['finish'].append(p_type['finish'])
                                 new_profile['wait_for'].append(p_type['wait_for'])
                                 new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])
 
                     if len(new_profile['qualities']) > 0:
                         new_profile.update(db.insert(new_profile))
                         profile_link[x] = new_profile.get('_id')
                     else:
                         log.error('Corrupt profile list for "%s", using default.', p.get('label'))
 
             # Qualities
             log.info('Importing quality sizes')
             new_qualities = db.all('quality', with_doc = True)
             new_qualities_by_identifier = {}
             for x in new_qualities:
                 new_qualities_by_identifier[x['doc']['identifier']] = x['_id']
 
             qualities = migrate_data['quality']
             quality_link = {}
             for x in qualities:
                 q = qualities[x]
                 q_id = new_qualities_by_identifier[q.get('identifier')]
 
                 quality = db.get('id', q_id)
                 quality['order'] = q.get('order')
                 quality['size_min'] = tryInt(q.get('size_min'))
                 quality['size_max'] = tryInt(q.get('size_max'))
                 db.update(quality)
 
                 quality_link[x] = quality
 
             # Titles
             titles = migrate_data['librarytitle']
             titles_by_library = {}
             for x in titles:
                 title = titles[x]
                 if title.get('default'):
                     titles_by_library[title.get('libraries_id')] = title.get('title')
 
             # Releases
             releaseinfos = migrate_data['releaseinfo']
             for x in releaseinfos:
                 info = releaseinfos[x]
 
                 # Skip if release doesn't exist for this info
                 if not migrate_data['release'].get(info.get('release_id')):
                     continue
 
                 if not migrate_data['release'][info.get('release_id')].get('info'):
                     migrate_data['release'][info.get('release_id')]['info'] = {}
 
                 migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')
 
             releases = migrate_data['release']
             releases_by_media = {}
             for x in releases:
                 release = releases[x]
                 if not releases_by_media.get(release.get('movie_id')):
                     releases_by_media[release.get('movie_id')] = []
 
                 releases_by_media[release.get('movie_id')].append(release)
 
             # Type ids
             types = migrate_data['filetype']
             type_by_id = {}
             for t in types:
                 type = types[t]
                 type_by_id[type.get('id')] = type
 
             # Media
             log.info('Importing %s media items', len(migrate_data['movie']))
             statuses = migrate_data['status']
             libraries = migrate_data['library']
             library_files = migrate_data['library_files__file_library']
             releases_files = migrate_data['release_files__file_release']
             all_files = migrate_data['file']
             poster_type = migrate_data['filetype']['poster']
             medias = migrate_data['movie']
             for x in medias:
                 m = medias[x]
 
                 status = statuses.get(m['status_id']).get('identifier')
                 l = libraries.get(m['library_id'])
 
                 # Only migrate wanted movies, Skip if no identifier present
                 if not l or not getImdb(l.get('identifier')): continue
 
                 profile_id = profile_link.get(m['profile_id'])
                 category_id = category_link.get(m['category_id'])
                 title = titles_by_library.get(m['library_id'])
                 releases = releases_by_media.get(x, [])
                 info = json.loads(l.get('info', ''))
 
                 files = library_files.get(m['library_id'], [])
                 if not isinstance(files, list):
                     files = [files]
 
                 added_media = fireEvent('movie.add', {
                     'info': info,
                     'identifier': l.get('identifier'),
                     'profile_id': profile_id,
                     'category_id': category_id,
                     'title': title
                 }, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)
 
                 if not added_media:
                     log.error('Failed adding media %s: %s', (l.get('identifier'), info))
                     continue
 
                 added_media['files'] = added_media.get('files', {})
                 for f in files:
                     ffile = all_files[f.get('file_id')]
 
                     # Only migrate posters
                     if ffile.get('type_id') == poster_type.get('id'):
                         if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
                             added_media['files']['image_poster'] = [ffile.get('path')]
                             break
 
                 if 'image_poster' in added_media['files']:
                     db.update(added_media)
 
                 for rel in releases:
 
                     empty_info = False
                     if not rel.get('info'):
                         empty_info = True
                         rel['info'] = {}
 
                     quality = quality_link.get(rel.get('quality_id'))
                     if not quality:
                         continue
 
                     release_status = statuses.get(rel.get('status_id')).get('identifier')
 
                     if rel['info'].get('download_id'):
                         status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
                         rel['info']['download_info'] = {
                             'id': rel['info'].get('download_id'),
                             'downloader': rel['info'].get('download_downloader'),
                             'status_support': status_support,
                         }
 
                     # Add status to keys
                     rel['info']['status'] = release_status
                     if not empty_info:
                         fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
                     else:
                         release = {
                             '_t': 'release',
                             'identifier': rel.get('identifier'),
                             'media_id': added_media.get('_id'),
                             'quality': quality.get('identifier'),
                             'status': release_status,
                             'last_edit': int(time.time()),
                             'files': {}
                         }
 
                         # Add downloader info if provided
                         try:
                             release['download_info'] = rel['info']['download_info']
                             del rel['download_info']
                         except:
                             pass
 
                         # Add files
                         release_files = releases_files.get(rel.get('id'), [])
                         if not isinstance(release_files, list):
                             release_files = [release_files]
 
                         if len(release_files) == 0:
                             continue
 
                         for f in release_files:
                             rfile = all_files.get(f.get('file_id'))
                             if not rfile:
                                 continue
 
                             file_type = type_by_id.get(rfile.get('type_id')).get('identifier')
 
                             if not release['files'].get(file_type):
                                 release['files'][file_type] = []
 
                             release['files'][file_type].append(rfile.get('path'))
 
                         try:
                             rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
                             rls.update(release)
                             db.update(rls)
                         except:
                             db.insert(release)
 
             log.info('Total migration took %s', time.time() - migrate_start)
             log.info('=' * 30)
 
+            rename_old = True
+
+        except OperationalError:
+            log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
+        except:
+            log.error('Migration failed: %s', traceback.format_exc())
+
         # rename old database
-        log.info('Renaming old database to %s ', old_db + '.old')
-        os.rename(old_db, old_db + '.old')
+        if rename_old:
+            random = randomString()
+            log.info('Renaming old database to %s ', '%s.%s_old' % (old_db, random))
+            os.rename(old_db, '%s.%s_old' % (old_db, random))
 
-        if os.path.isfile(old_db + '-wal'):
-            os.rename(old_db + '-wal', old_db + '-wal.old')
-        if os.path.isfile(old_db + '-shm'):
-            os.rename(old_db + '-shm', old_db + '-shm.old')
+            if os.path.isfile(old_db + '-wal'):
+                os.rename(old_db + '-wal', '%s-wal.%s_old' % (old_db, random))
+            if os.path.isfile(old_db + '-shm'):
+                os.rename(old_db + '-shm', '%s-shm.%s_old' % (old_db, random))
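
At the tail of the hunk, the unconditional rename to a fixed '.old' name is
replaced by a conditional rename with a random suffix: each run produces a
distinct '<db>.<random>_old' backup, so re-running after a failed migration
cannot clobber an earlier backup, and SQLite's -wal/-shm sidecar files move
with the same suffix. A runnable sketch of the pattern; this randomString is
a stand-in for couchpotato.core.helpers.variable.randomString, not its
actual implementation:

    import os
    import random
    import string

    def randomString(size = 8):
        return ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(size))

    def backup_old_database(old_db):
        suffix = randomString()
        os.rename(old_db, '%s.%s_old' % (old_db, suffix))

        # move the write-ahead log and shared-memory sidecars along with it
        for ext in ('-wal', '-shm'):
            if os.path.isfile(old_db + ext):
                os.rename(old_db + ext, '%s%s.%s_old' % (old_db, ext, suffix))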
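
For reference, the data-gathering loop at the top of the large hunk builds
migrate_data as {table: {first_selected_column: row_dict}}, promoting a
value to a list of row dicts when the first column (usually the id) repeats.
A self-contained sketch of that accumulation, with a made-up migrate_list
and pre-fetched rows standing in for the sqlite cursor:

    migrate_list = {'category': ['id', 'label'], 'quality': ['id', 'identifier']}
    fetched = {'category': [(1, 'HD'), (1, 'SD')], 'quality': [(2, '720p')]}

    migrate_data = {}
    for ml in migrate_list:
        migrate_data[ml] = {}
        rows = migrate_list[ml]
        for p in fetched[ml]:
            columns = dict((row, p[rows.index(row)]) for row in rows)
            if not migrate_data[ml].get(p[0]):
                migrate_data[ml][p[0]] = columns
            else:
                # repeated key: collect every row under a list
                if not isinstance(migrate_data[ml][p[0]], list):
                    migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
                migrate_data[ml][p[0]].append(columns)

    # migrate_data['category'][1] is now a list of two row dicts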