openmedialibrary/oml/update.py

628 lines
22 KiB
Python
Raw Normal View History

2014-08-05 11:47:16 +02:00
# -*- coding: utf-8 -*-
2014-09-03 00:32:44 +02:00
2014-08-05 11:47:16 +02:00
from contextlib import closing
import base64
2014-08-05 11:47:16 +02:00
import json
import os
import tarfile
2016-01-12 20:30:51 +05:30
from threading import Thread
2016-07-03 17:21:51 +05:30
import urllib.request
import urllib.error
import urllib.parse
2014-08-05 11:47:16 +02:00
import shutil
import subprocess
2015-11-30 17:49:33 +01:00
import sys
2016-01-12 20:30:51 +05:30
import time
2014-08-05 11:47:16 +02:00
import OpenSSL.crypto
2014-08-05 11:47:16 +02:00
import ox
2014-08-22 19:46:45 +02:00
from oxtornado import actions
2016-07-03 17:21:51 +05:30
from sqlalchemy.sql.expression import text
2014-08-05 11:47:16 +02:00
import settings
2016-02-01 13:15:34 +05:30
import utils
import db
2016-04-14 21:27:30 +02:00
from integration import get_trayicon_version
2014-08-05 11:47:16 +02:00
2015-11-30 17:49:33 +01:00
import logging
logger = logging.getLogger(__name__)
2014-08-22 19:46:45 +02:00
2016-07-03 17:21:51 +05:30
ENCODING = 'base64'
2014-08-05 11:47:16 +02:00
def verify(release):
    """Check the cryptographic signature of a release manifest.

    The signed payload is a newline-joined list of 'version/sha1' strings,
    one per module, in sorted module-name order.  Two schemes are supported:
    an X.509/TLS signature ('signature_<digest>' keys, strongest digest
    first) and a legacy ed25519 signature ('signature' key), which is only
    consulted when no TLS signature verified.  Returns True/False.
    """
    verified = False
    # Build the canonical signed payload from the manifest itself.
    value = []
    for module in sorted(release['modules']):
        value += [str('%s/%s' % (release['modules'][module]['version'], release['modules'][module]['sha1']))]
    value = '\n'.join(value)
    value = value.encode()
    # Try digests strongest-first; only the first one present is checked
    # (the `break` ends the loop after that attempt, pass or fail).
    for digest in ('sha512', 'sha256', 'sha1'):
        if 'signature_%s' % digest in release:
            tls_sig = base64.b64decode(release['signature_%s' % digest].encode())
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, settings.OML_UPDATE_CERT)
            try:
                OpenSSL.crypto.verify(cert, tls_sig, value, digest)
                verified = True
            except OpenSSL.crypto.Error:
                logger.debug('invalid tls signature')
                verified = False
            break
    # Legacy ed25519 fallback — only when the TLS path did not succeed.
    if 'signature' in release and not verified:
        import ed25519
        vk = ed25519.VerifyingKey(settings.OML_UPDATE_KEY, encoding=ENCODING)
        sig = release['signature'].encode()
        try:
            vk.verify(sig, value, encoding=ENCODING)
            verified = True
        except ed25519.BadSignatureError:
            verified = False
    return verified
2014-08-05 11:47:16 +02:00
2014-08-07 12:12:03 +02:00
def get(url, filename=None):
    """Fetch `url` with the OML user agent.

    Without `filename`, return the response body as bytes.  With
    `filename`, stream the body to that file (creating parent
    directories as needed) and return None.
    """
    request = urllib.request.Request(url, headers={
        'User-Agent': settings.USER_AGENT
    })
    with closing(urllib.request.urlopen(request)) as response:
        if not filename:
            return response.read()
        target_dir = os.path.dirname(filename)
        if target_dir and not os.path.exists(target_dir):
            os.makedirs(target_dir)
        with open(filename, 'wb') as out:
            # read() returns b'' at EOF, which ends the iteration
            for chunk in iter(lambda: response.read(4096), b''):
                out.write(chunk)
2014-08-05 11:47:16 +02:00
def check():
    """Return True if a verified, newer openmedialibrary release exists."""
    if not settings.release:
        return False
    url = settings.server.get('release_url',
                              'https://downloads.openmedialibrary.com/release.json')
    release = json.loads(get(url).decode('utf-8'))
    installed = current_version('openmedialibrary')
    available = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < available
2014-08-05 11:47:16 +02:00
2015-03-08 00:47:13 +05:30
def current_version(module):
    """Return the installed version string of `module`, '' when unknown."""
    try:
        return settings.release['modules'][module]['version']
    except KeyError:
        # missing 'modules' key, unknown module, or entry without 'version'
        return ''
2015-03-08 00:47:13 +05:30
2016-01-04 17:32:40 +05:30
def get_latest_release():
    """Download, verify and cache the latest release manifest.

    On success the raw bytes are written to updates_path/release.json and
    the parsed dict is returned; on any failure (network, JSON, bad
    signature) returns None.  This is deliberately best-effort.
    """
    try:
        release_data = get(settings.server.get('release_url'))
        release = json.loads(release_data.decode('utf-8'))
        if verify(release):
            ox.makedirs(settings.updates_path)
            with open(os.path.join(settings.updates_path, 'release.json'), 'wb') as fd:
                fd.write(release_data)
            return release
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed and traceback added for debugging
        logger.debug('failed to get latest release', exc_info=True)
2016-01-04 17:32:40 +05:30
2016-01-31 15:08:16 +05:30
def get_platform():
    """Return the OML platform tag for this host.

    'darwin64' on macOS; on Linux the CPU architecture picks
    linux_armv7l / linux_aarch64 / linux64, defaulting to 'linux32';
    any other platform returns sys.platform unchanged (e.g. 'win32').
    """
    if sys.platform.startswith('darwin'):
        return 'darwin64'
    if sys.platform.startswith('linux'):
        import platform
        machine_to_tag = {
            'armv7l': 'linux_armv7l',
            'aarch64': 'linux_aarch64',
            'x86_64': 'linux64',
        }
        return machine_to_tag.get(platform.machine(), 'linux32')
    return sys.platform
2016-01-04 17:32:40 +05:30
def download():
    """Fetch all module tarballs needed for the pending release.

    Returns True when everything needed is present (including the
    "nothing to do" cases), False when a downloaded tarball fails its
    sha1 check.  Stale files in updates_path are pruned afterwards.
    """
    # A missing data_path/release.json means this install never recorded
    # a release — nothing to compare against, treat as done.
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    release = get_latest_release()
    platform = get_platform()
    if release:
        ox.makedirs(settings.updates_path)
        # NOTE(review): chdir to the parent of base_dir — presumably for
        # later relative operations; confirm nothing else depends on cwd.
        os.chdir(os.path.dirname(settings.base_dir))
        current_files = {'release.json'}
        for module in release['modules']:
            module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
            # Only fetch modules for this platform that are newer than
            # what is installed (string comparison of date-based versions).
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                base_url = settings.server.get('release_url').rsplit('/', 1)[0]
                url = '/'.join([base_url, release['modules'][module]['name']])
                if not os.path.exists(module_tar):
                    logger.debug('download %s', os.path.basename(module_tar))
                    get(url, module_tar)
                # Verify both fresh downloads and cached tarballs; a bad
                # checksum aborts so install() never sees a corrupt file.
                if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
                    logger.debug('invalid checksum %s', os.path.basename(module_tar))
                    os.unlink(module_tar)
                    return False
                current_files.add(os.path.basename(module_tar))
        # Remove anything in updates_path not referenced by this release.
        for f in set(next(os.walk(settings.updates_path))[2])-current_files:
            os.unlink(os.path.join(settings.updates_path, f))
        return True
    return True
def install():
    """Unpack and swap in downloaded module updates.

    Returns True when the update was applied or there was nothing to do,
    False when a tarball is missing/corrupt (the bad file is deleted so
    the next download() run refetches it).
    """
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return True
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    # Re-verify the cached manifest; never install unsigned content.
    if verify(release) and old_version < new_version:
        base = os.path.dirname(settings.base_dir)
        os.chdir(base)
        platform = get_platform()
        for module in release['modules']:
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                if os.path.exists(module_tar) and ox.sha1sum(module_tar) == release['modules'][module]['sha1']:
                    #tar fails if old platform is moved before extract
                    # Extract into <module>_new first, then swap:
                    # module -> module_old, module_new/module -> module.
                    new = '%s_new' % module
                    ox.makedirs(new)
                    os.chdir(new)
                    tar = tarfile.open(module_tar)
                    tar.extractall()
                    tar.close()
                    os.chdir(base)
                    module_old = '%s_old' % module
                    if os.path.exists(module_old):
                        rmtree(module_old)
                    if os.path.exists(module):
                        move(module, module_old)
                    move(os.path.join(new, module), module)
                    # On win32 the old tree is kept (files may be locked
                    # by the running process); elsewhere clean up now.
                    if platform != 'win32' and os.path.exists(module_old):
                        rmtree(module_old)
                    rmtree(new)
                else:
                    # Missing or corrupt tarball: drop it and signal failure.
                    if os.path.exists(module_tar):
                        os.unlink(module_tar)
                    return False
        # Record the newly installed release as current.
        shutil.copy(os.path.join(settings.updates_path, 'release.json'), os.path.join(settings.data_path, 'release.json'))
        upgrade_app()
        # FIXME: still needed?
        if old_version < '20160112-651-de984a3' and platform != 'win32':
            subprocess.call(['./ctl', 'postupdate', '-o', old_version, '-n', new_version])
        return True
    return True
2014-08-22 19:46:45 +02:00
2016-02-01 16:46:35 +05:30
def move(src, dst):
    """shutil.move wrapper: log the failing pair, then re-raise."""
    try:
        shutil.move(src, dst)
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate without being logged as a move failure
        logger.debug('failed to move %s to %s', src, dst)
        raise
def rmtree(path):
    """shutil.rmtree wrapper: log the failing path, then re-raise."""
    try:
        shutil.rmtree(path)
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate without being logged as a removal failure
        logger.debug('failed to remove %s', path)
        raise
2016-01-12 20:30:51 +05:30
def update_available():
    """True when a database migration or a verified newer release is pending."""
    if settings.server.get('db_version', 0) < settings.DB_VERSION:
        return True
    pending_manifest = os.path.join(settings.updates_path, 'release.json')
    installed_manifest = os.path.join(settings.data_path, 'release.json')
    if not os.path.exists(pending_manifest):
        return False
    if not os.path.exists(installed_manifest):
        return False
    with open(pending_manifest) as fd:
        release = json.load(fd)
    installed = current_version('openmedialibrary')
    available = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < available
def restart_oml(update=False):
    """Restart the oml process via the ctl helper.

    When `update` is truthy, refresh the cached release manifest first so
    the restarted process sees the newest release.
    """
    if update:
        get_latest_release()
    utils.ctl('restart')
2016-01-12 20:30:51 +05:30
2015-11-30 17:49:33 +01:00
def get_app_version(app):
    """Read CFBundleShortVersionString from a macOS .app bundle.

    Returns the version as bytes (output of `defaults read`, stripped),
    or None when the bundle's Info.plist does not exist.
    """
    plist = app + '/Contents/Info.plist'
    if not os.path.exists(plist):
        return None
    output = subprocess.check_output(
        ['defaults', 'read', plist, 'CFBundleShortVersionString'])
    return output.strip()
def upgrade_app():
    """Replace the installed macOS app bundle with the bundled copy.

    On darwin, compare the version of /Applications/Open Media Library.app
    against the copy shipped inside platform_darwin64 and swap it in when
    they differ.  The win32 MSI path below is disabled (kept inside a
    triple-quoted string).
    """
    base = os.path.dirname(settings.base_dir)
    if sys.platform == 'darwin':
        bundled_app = os.path.join(base, 'platform_darwin64/Applications/Open Media Library.app')
        app = '/Applications/Open Media Library.app'
        version = get_app_version(app)
        current_version = get_app_version(bundled_app)
        # Only replace when both versions are readable and differ.
        if version and current_version and version != current_version:
            try:
                shutil.rmtree(app)
                shutil.copytree(bundled_app, app)
            except:
                # best effort — a failed swap leaves the old app in place
                logger.debug('Failed to update Application', exc_info=True)
    # Disabled win32 branch, intentionally kept as an inert string literal:
    '''
    elif sys.platform == 'win32':
        current_version = get_trayicon_version()
        if current_version != '0.2.0.0':
            msi = os.path.normpath(os.path.join(base, 'platform_win32', 'Open Media Library.msi'))
            cmd = ['msiexec.exe', '/qb', '/I', msi]
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            startupinfo.wShowWindow = subprocess.SW_HIDE
            subprocess.Popen(cmd, cwd=settings.base_dir, start_new_session=True, startupinfo=startupinfo)
    '''
2015-11-30 17:49:33 +01:00
2014-08-22 19:46:45 +02:00
def getVersion(data):
    '''
    check if new version is available

    Returns {'current': ..., 'version': ..., 'upgrade': ..., 'update': ...}
    where 'version' is the newest known release and 'update' says whether
    it is newer than 'current'.  Git checkouts never report an update.
    '''
    response = {
        'current': settings.MINOR_VERSION,
        'version': settings.MINOR_VERSION,
        'upgrade': False,
    }
    if settings.MINOR_VERSION == 'git':
        # Disabled remote-git comparison, kept as an inert string literal:
        '''
        cmd = ['git', 'rev-parse', '@']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        current = stdout.strip()
        cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/master']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        new = stdout.strip()[:40]
        response['update'] = len(new) == 40 and current != new
        '''
        response['update'] = False
    else:
        # Refresh the cached manifest, then compare cached vs installed.
        get_latest_release()
        if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
            return response
        if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
            return response
        with open(os.path.join(settings.updates_path, 'release.json')) as fd:
            release = json.load(fd)
        current = current_version('openmedialibrary')
        response['current'] = current
        new = release['modules']['openmedialibrary']['version']
        response['version'] = new
        response['update'] = current < new
    return response
actions.register(getVersion, cache=False)
actions.register(getVersion, cache=False, version='public')
2014-08-22 19:46:45 +02:00
def restart(data):
    '''
    restart (and upgrade if upgrades are available)

    `data['update']`, when truthy, refreshes the release manifest before
    restarting.  Always returns an empty dict to the API caller.
    '''
    restart_oml(data.get('update'))
    return {}
actions.register(restart, cache=False)
2016-01-12 20:30:51 +05:30
class Update(Thread):
    """Background updater: migrates the database, then downloads and
    installs pending releases, pushing progress to connected clients
    over the websocket.  The thread starts itself on construction.
    """
    # Last status pushed to clients; 'reload' tells the UI to refresh.
    _status = {
        'reload': False,
        'status': 'Updating Open Media Library...'
    }

    def __init__(self):
        Thread.__init__(self)
        self.daemon = True
        # self-starting thread: constructing Update() kicks off run()
        self.start()

    def status(self, status, reload=False):
        """Record and broadcast an 'updatestatus' event."""
        from websocket import trigger_event
        self._status = {
            'reload': reload,
            'status': status,
        }
        trigger_event('updatestatus', self._status)

    def install(self):
        """Download (with retries) and install while updates are pending.

        Returns True when an install attempt was made (and failed or
        partially completed), False when no update was available.
        """
        while update_available():
            self.status('Downloading...')
            max_retry = 5
            # retry download() up to 5 times, 5s apart
            while max_retry > 0 and not download():
                max_retry -= 1
                self.status('Download failed, retrying...')
                time.sleep(5)
                self.status('Downloading...')
            self.status('Installing...')
            if not install():
                self.status('Installation failed.')
                # NOTE(review): reconstructed placement — the blame view
                # lost indentation; confirm `return True` belongs to the
                # failure branch (success loops to re-check availability).
                return True
        return False

    def update_database(self):
        """Run every migration between the stored db_version and current."""
        db_version = settings.server.get('db_version', 0)
        if db_version < settings.DB_VERSION:
            self.status('Updating...')
            if db_version < 3:
                db_version = migrate_3()
            if db_version < 4:
                db_version = migrate_4()
            if db_version < 5:
                db_version = migrate_5()
            if db_version < 6:
                db_version = migrate_6()
            if db_version < 7:
                db_version = migrate_7()
            # NOTE(review): version 9 gates migrate_8 — looks intentional
            # (8 and 9 collapsed), but worth confirming in history.
            if db_version < 9:
                db_version = migrate_8()
            if db_version < 10:
                db_version = migrate_10()
            if db_version < 11:
                db_version = migrate_11()
            if db_version < 12:
                db_version = migrate_12()
            if db_version < 13:
                db_version = migrate_13()
        settings.server['db_version'] = settings.DB_VERSION

    def run(self):
        self.status('Checking for updates...')
        self.update_database()
        if self.install():
            # install attempt made (or failed): restart without reload
            restart_oml()
            return
        # nothing to install (db migration only): tell clients to reload
        self.status('Relaunching...', True)
        restart_oml()
def migrate_3():
    """Migration 3: backfill Item.info['pages'] from the item's first file.

    Walks all items whose mediastate is 'available'; items without any
    backing file get remove_file() called.
    """
    with db.session():
        import item.models
        for i in item.models.Item.find({
            'query': {
                'conditions': [{
                    'key':'mediastate',
                    'value':'available',
                    'operator': '=='
                }]
            }
        }):
            if not i.files.all():
                # NOTE(review): presumably clears the stale 'available'
                # state for an item with no files — confirm remove_file
                # semantics in item.models.
                i.remove_file()
            else:
                f = i.files.all()[0]
                if not 'pages' in i.info and 'pages' in f.info:
                    i.info['pages'] = f.info['pages']
                    i.save()
    return 3
def migrate_4():
    """Migration 4: normalize Item.meta['isbn'] from a list to one ISBN-13.

    Lists are converted element-wise with to_isbn13; the first valid
    result wins, otherwise the key is dropped.  Empty/falsy isbn values
    are removed as well.  Changed items are re-added to the session.
    """
    with db.session() as session:
        import item.models
        from meta.utils import to_isbn13
        for i in item.models.Item.query:
            update = False
            if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                isbns = [isbn for isbn in isbns if isbn]
                if isbns:
                    i.meta['isbn'] = isbns[0]
                    # keep info in sync with the normalized meta value
                    if 'isbn' in i.info:
                        i.info['isbn'] = i.meta['isbn']
                else:
                    del i.meta['isbn']
                    if 'isbn' in i.info:
                        del i.info['isbn']
                update = True
            # drop empty-string / falsy isbn leftovers
            if 'isbn' in i.meta and not i.meta['isbn']:
                del i.meta['isbn']
                update = True
            if update:
                session.add(i)
        session.commit()
    return 4
2016-01-19 15:35:16 +05:30
def migrate_5():
    """Migration 5: rebuild the user_metadata unique index, reset the
    sharemetadata flags, refresh Metadata hashes, strip the legacy
    '_from' key from Item.info and re-sync item metadata.
    """
    # fix: the original statement ended with a stray trailing comma
    # (`db.run_sql([...]),`), making it a pointless 1-tuple expression
    db.run_sql([
        'DROP INDEX IF EXISTS user_metadata_index',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'UPDATE sort SET sharemetadata = 0',
    ])
    with db.session() as session:
        import user.models
        # recompute and persist stale metadata hashes
        for m in user.models.Metadata.query:
            data_hash = m.get_hash()
            if m.data_hash != data_hash:
                m.data_hash = data_hash
                session.add(m)
        session.commit()
        import item.models
        # drop the obsolete '_from' marker from item info
        for i in item.models.Item.query:
            update = False
            if '_from' in i.info:
                del i.info['_from']
                update = True
            if update:
                session.add(i)
        session.commit()
        for i in item.models.Item.query:
            i.sync_metadata()
    return 5
2016-01-20 12:16:46 +05:30
def migrate_6():
    """Migration 6: recompute sort rows whose value was stored as ''."""
    with db.session() as session:
        import item.models
        # one pass per affected sort column; update_sort() recomputes all
        for column in ('author', 'publisher', 'language', 'place', 'isbn', 'date'):
            for row in item.models.Sort.query.filter_by(**{column: ''}):
                row.item.update_sort()
        session.commit()
    return 6
def migrate_7():
    """Migration 7: purge editmeta/resetmeta changelog entries, drop the
    obsolete metadata/scrape tables and compact the database file.
    """
    with db.session() as session:
        import changelog
        for entry in changelog.Changelog.query:
            if 'editmeta' in entry.data or 'resetmeta' in entry.data:
                session.delete(entry)
        session.commit()
    for statement in (
        'DROP TABLE IF EXISTS metadata',
        'DROP TABLE IF EXISTS scrape',
        'VACUUM',
    ):
        db.run_sql(statement)
    return 7
def migrate_8():
    """Migration 8 (recorded as db_version 9 by the caller): clean up
    obsolete server settings, drop the list cache, strip unknown meta
    keys from items, rebuild the local changelog and delete peers with
    malformed (non-16-char) ids.
    """
    for key in ('directory_service', 'meta_service', 'local_lookup', 'cert'):
        if key in settings.server:
            del settings.server[key]
    list_cache = os.path.join(settings.data_path, 'list_cache.json')
    if os.path.exists(list_cache):
        os.unlink(list_cache)
    with db.session() as session:
        import item.models
        # remove meta keys not in the item's declared key set
        for i in item.models.Item.query:
            delta = set(i.meta)-set(i.meta_keys)
            if delta:
                for key in delta:
                    del i.meta[key]
                session.add(i)
        session.commit()
        import changelog
        import user.models
        # wipe and rebuild this user's changelog from current state
        changelog.Changelog.query.delete()
        u = user.models.User.get(settings.USER_ID)
        u.rebuild_changelog()
        for peer in user.models.User.query:
            if peer.id != u.id:
                if len(peer.id) != 16:
                    session.delete(peer)
        session.commit()
    return 8
2016-01-25 23:02:04 +05:30
def migrate_10():
    """Migration 10: populate Find.sortvalue for all filterable keys.

    Person-typed keys go through get_sort_name (memoized in sort_names);
    values are then NFKD-normalized, sort-string'd and lowercased.  The
    updates are batched per (key, value) pair to avoid per-row writes.
    """
    with db.session() as session:
        from item.models import Item, Find
        from utils import get_by_id
        from item.person import get_sort_name
        import unicodedata
        sort_names = {}   # cache: raw person name -> sort name
        updates = {}      # key -> {value: sortvalue}
        for f in Find.query.filter(Find.key.in_(Item.filter_keys)):
            sort_type = get_by_id(settings.config['itemKeys'], f.key).get('sortType')
            if sort_type == 'person':
                if f.value in sort_names:
                    sortvalue = sort_names[f.value]
                else:
                    sortvalue = sort_names[f.value] = get_sort_name(f.value)
            else:
                sortvalue = f.value
            if sortvalue:
                sortvalue = ox.sort_string(unicodedata.normalize('NFKD', sortvalue)).lower()
            if not f.key in updates:
                updates[f.key] = {}
            updates[f.key][f.value] = sortvalue
        # apply one UPDATE per distinct (key, value) pair
        for key in updates:
            for value in updates[key]:
                Find.query.filter_by(key=key, value=value).update({'sortvalue': updates[key][value]})
        session.commit()
    return 10
2016-02-10 19:32:32 +05:30
def migrate_11():
    """Migration 11: move per-peer state out of the relational tables.

    For every peered user, copy changelog revision, named lists, list
    order and per-item metadata into the peer object (peer.info /
    peer.library), then delete the now-redundant Changelog and Metadata
    rows.  Finally, migrate rows from the old 'transfer' table into the
    downloads state store.
    """
    with db.session() as session:
        from user.models import User, Metadata, List
        from changelog import Changelog
        import utils
        for u in User.query.filter_by(peered=True):
            peer = utils.get_peer(u.id)
            # highest revision we have from this peer
            last = Changelog.query.filter_by(user_id=u.id).order_by(text('-revision')).first()
            if last:
                peer.info['revision'] = last.revision
            listorder = []
            for l in List.query.filter_by(user_id=u.id).order_by('index_'):
                if l.name:
                    peer.info['lists'][l.name] = [i.id for i in l.get_items()]
                    listorder.append(l.name)
            if 'listorder' not in peer.info:
                peer.info['listorder'] = listorder
            for m in Metadata.query.filter_by(user_id=u.id):
                peer.library[m.item_id] = {
                    'meta': dict(m.data),
                    'meta_hash': m.data_hash,
                    'modified': m.modified,
                }
            peer.library.commit()
            peer.sync_info()
            peer.sync_db()
            # old rows are now fully represented on the peer object
            Changelog.query.filter_by(user_id=u.id).delete()
            Metadata.query.filter_by(user_id=u.id).delete()
        session.commit()
    if db.table_exists('transfer'):
        import state
        import downloads
        # offline instantiation: Downloads() is used purely as a store here
        state.online = False
        state.downloads = downloads.Downloads()
        r = state.db.session.execute('SELECT item_id, added, progress FROM transfer')
        for t in r:
            item_id, added, progress = t
            if added:
                state.downloads.transfers[item_id] = {
                    'added': added,
                    'progress': progress
                }
        state.db.session.commit()
        state.downloads.transfers.commit()
        state.downloads = None
    return 11
2016-02-15 17:00:17 +05:30
def migrate_12():
    """Migration 12: drop the old 'transfer' table (its rows were moved
    into the downloads store by migrate_11)."""
    statements = ['DROP TABLE IF EXISTS transfer']
    db.run_sql(statements)
    return 12
2017-06-03 22:50:14 +02:00
def migrate_13():
    """Migration 13: export this user's changelog to a peers/<id>.log file.

    Each line is a JSON array [revision, timestamp, data], written in
    timestamp order.  The highest exported revision is recorded in
    settings.server['revision'].  Skipped if the log already exists.
    """
    import settings
    import changelog
    import os
    import json
    path = os.path.join(settings.data_path, 'peers', '%s.log' % settings.USER_ID)
    if not os.path.exists(path):
        folder = os.path.dirname(path)
        if not os.path.exists(folder):
            os.makedirs(folder)
        with db.session() as session:
            revision = -1
            qs = changelog.Changelog.query.filter_by(user_id=settings.USER_ID)
            with open(path, 'w') as fd:
                for c in qs.order_by('timestamp'):
                    # one JSON array per line; keep non-ASCII readable
                    data = json.dumps([c.revision, c.timestamp, json.loads(c.data)], ensure_ascii=False)
                    fd.write(data + '\n')
                    revision = c.revision
            if revision > -1:
                settings.server['revision'] = revision
    return 13
2019-01-31 22:04:47 +05:30
def migrate_14():
    """Migration 14: delete the special ':Public' list when it is empty."""
    from user.models import List
    with db.session():
        public_list = List.get(':Public')
        if public_list and not len(public_list.items):
            public_list.remove()
    return 14