openmedialibrary/oml/update.py

# -*- coding: utf-8 -*-

from contextlib import closing
from threading import Thread
import base64
import json
import os
import shutil
import subprocess
import sys
import tarfile
import time
import urllib.error
import urllib.parse
import urllib.request

import OpenSSL.crypto
import ox
from sqlalchemy.sql.expression import text

from oxtornado import actions
from integration import get_trayicon_version
import settings
import utils
import db

import logging
logger = logging.getLogger(__name__)


ENCODING = 'base64'


def verify(release):
    verified = False
    value = []
    for module in sorted(release['modules']):
        value += [str('%s/%s' % (release['modules'][module]['version'], release['modules'][module]['sha1']))]
    value = '\n'.join(value)
    value = value.encode()
    for digest in ('sha512', 'sha256', 'sha1'):
        if 'signature_%s' % digest in release:
            tls_sig = base64.b64decode(release['signature_%s' % digest].encode())
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, settings.OML_UPDATE_CERT)
            try:
                OpenSSL.crypto.verify(cert, tls_sig, value, digest)
                verified = True
            except OpenSSL.crypto.Error:
                logger.debug('invalid tls signature')
                verified = False
            break
    if 'signature' in release and not verified:
        import ed25519
        vk = ed25519.VerifyingKey(settings.OML_UPDATE_KEY, encoding=ENCODING)
        sig = release['signature'].encode()
        try:
            vk.verify(sig, value, encoding=ENCODING)
            verified = True
        except ed25519.BadSignatureError:
            verified = False
    return verified
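
# The message verified above is the newline-joined list of '<version>/<sha1>'
# pairs, one per module, in sorted module order. A sketch of what a signing
# side would have to produce, assuming the same python-ed25519 API used in
# verify() (the signing tool is not part of this module; key names are
# hypothetical):
#
#     value = '\n'.join(
#         '%s/%s' % (mods[m]['version'], mods[m]['sha1']) for m in sorted(mods)
#     ).encode()
#     sk = ed25519.SigningKey(seed, encoding='base64')  # hypothetical key
#     release['signature'] = sk.sign(value, encoding='base64').decode()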


def get(url, filename=None):
    request = urllib.request.Request(url, headers={
        'User-Agent': settings.USER_AGENT
    })
    with closing(urllib.request.urlopen(request)) as u:
        if not filename:
            data = u.read()
            return data
        else:
            dirname = os.path.dirname(filename)
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            with open(filename, 'wb') as fd:
                data = u.read(4096)
                while data:
                    fd.write(data)
                    data = u.read(4096)
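
# Usage sketch: get() either returns the response body or streams it to disk
# (the target path here is illustrative):
#
#     data = get(url)                      # fetch into memory
#     get(url, '/path/to/module.tar.gz')   # stream to file in 4 KB chunks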


def check():
    if settings.release:
        release_data = get(settings.server.get('release_url',
            'https://downloads.openmedialibrary.com/release.json'))
        release = json.loads(release_data.decode('utf-8'))
        old = current_version('openmedialibrary')
        new = release['modules']['openmedialibrary']['version']
        # version strings are date-prefixed (e.g. '20160112-651-de984a3'),
        # so plain string comparison orders them chronologically
        return verify(release) and old < new
    return False


def current_version(module):
    if 'modules' in settings.release \
            and module in settings.release['modules'] \
            and 'version' in settings.release['modules'][module]:
        version = settings.release['modules'][module]['version']
    else:
        version = ''
    return version


def get_latest_release():
    try:
        release_data = get(settings.server.get('release_url'))
        release = json.loads(release_data.decode('utf-8'))
        if verify(release):
            ox.makedirs(settings.updates_path)
            with open(os.path.join(settings.updates_path, 'release.json'), 'wb') as fd:
                fd.write(release_data)
            return release
    except:
        logger.debug('failed to get latest release')


def get_platform():
    name = sys.platform
    if name.startswith('darwin'):
        name = 'darwin64'
    elif name.startswith('linux'):
        import platform
        machine = platform.machine()
        if machine == 'armv7l':
            name = 'linux_armv7l'
        elif machine == 'aarch64':
            name = 'linux_aarch64'
        elif machine == 'x86_64':
            name = 'linux64'
        else:
            name = 'linux32'
    return name
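
# get_platform() maps sys.platform / platform.machine() onto the platform
# tags used in release.json: 'darwin64', 'linux_armv7l', 'linux_aarch64',
# 'linux64' or 'linux32'; anything else (e.g. 'win32') is returned unchanged.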


def download():
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    release = get_latest_release()
    platform = get_platform()
    if release:
        ox.makedirs(settings.updates_path)
        os.chdir(os.path.dirname(settings.base_dir))
        current_files = {'release.json'}
        for module in release['modules']:
            module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                base_url = settings.server.get('release_url').rsplit('/', 1)[0]
                url = '/'.join([base_url, release['modules'][module]['name']])
                if not os.path.exists(module_tar):
                    logger.debug('download %s', os.path.basename(module_tar))
                    get(url, module_tar)
                if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
                    logger.debug('invalid checksum %s', os.path.basename(module_tar))
                    os.unlink(module_tar)
                    return False
                current_files.add(os.path.basename(module_tar))
        # remove stale downloads that are no longer part of the release
        for f in set(next(os.walk(settings.updates_path))[2]) - current_files:
            os.unlink(os.path.join(settings.updates_path, f))
        return True
    return True
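
# Shape of the release.json manifest consumed by download() and install(),
# reconstructed from the fields accessed in this module (all values are
# illustrative, not actual release data):
#
#     {
#         "modules": {
#             "openmedialibrary": {
#                 "name": "openmedialibrary.tar.gz",
#                 "version": "20190223-...",
#                 "sha1": "..."
#             },
#             ...  # an optional per-module "platform" key restricts a module
#                  # to one platform tag
#         },
#         "signature": "...",            # base64 ed25519 signature
#         "signature_sha512": "..."      # optional TLS/X.509 signature variant
#     }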


def install():
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return True
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    if verify(release) and old_version < new_version:
        base = os.path.dirname(settings.base_dir)
        os.chdir(base)
        platform = get_platform()
        for module in release['modules']:
            if release['modules'][module].get('platform', platform) == platform and \
                    release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                if os.path.exists(module_tar) and ox.sha1sum(module_tar) == release['modules'][module]['sha1']:
                    # tar fails if the old module is moved away before
                    # extraction, so extract into '<module>_new' first
                    new = '%s_new' % module
                    ox.makedirs(new)
                    os.chdir(new)
                    tar = tarfile.open(module_tar)
                    tar.extractall()
                    tar.close()
                    os.chdir(base)
                    module_old = '%s_old' % module
                    if os.path.exists(module_old):
                        rmtree(module_old)
                    if os.path.exists(module):
                        move(module, module_old)
                    move(os.path.join(new, module), module)
                    if platform != 'win32' and os.path.exists(module_old):
                        rmtree(module_old)
                    rmtree(new)
                else:
                    if os.path.exists(module_tar):
                        os.unlink(module_tar)
                    return False
        shutil.copy(os.path.join(settings.updates_path, 'release.json'), os.path.join(settings.data_path, 'release.json'))
        upgrade_app()
        # FIXME: still needed?
        if old_version < '20160112-651-de984a3' and platform != 'win32':
            subprocess.call(['./ctl', 'postupdate', '-o', old_version, '-n', new_version])
        return True
    return True
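
# install() swaps each module in three steps: extract into '<module>_new',
# park the running copy as '<module>_old', then move the new tree into place.
# '<module>_old' is removed immediately except on win32, presumably because
# the running instance keeps its files locked on Windows (an assumption; the
# code only encodes the platform check).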


def move(src, dst):
    try:
        shutil.move(src, dst)
    except:
        logger.debug('failed to move %s to %s', src, dst)
        raise


def rmtree(path):
    try:
        shutil.rmtree(path)
    except:
        logger.debug('failed to remove %s', path)
        raise


def update_available():
    db_version = settings.server.get('db_version', 0)
    if db_version < settings.DB_VERSION:
        return True
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return False
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return False
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    return verify(release) and old_version < new_version


def restart_oml(update=False):
    if update:
        get_latest_release()
    utils.ctl('restart')


def get_app_version(app):
    # macOS only: read CFBundleShortVersionString from the app bundle's
    # Info.plist via `defaults`; returns bytes, or None if the plist is missing
    plist = app + '/Contents/Info.plist'
    if os.path.exists(plist):
        cmd = ['defaults', 'read', plist, 'CFBundleShortVersionString']
        return subprocess.check_output(cmd).strip()
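
# Usage sketch (macOS; returns the raw bytes printed by `defaults`, version
# value illustrative):
#
#     get_app_version('/Applications/Open Media Library.app')  # e.g. b'0.2.0'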


def upgrade_app():
    base = os.path.dirname(settings.base_dir)
    if sys.platform == 'darwin':
        bundled_app = os.path.join(base, 'platform_darwin64/Applications/Open Media Library.app')
        app = '/Applications/Open Media Library.app'
        version = get_app_version(app)
        current_version = get_app_version(bundled_app)
        if version and current_version and version != current_version:
            try:
                shutil.rmtree(app)
                shutil.copytree(bundled_app, app)
            except:
                logger.debug('Failed to update Application', exc_info=True)
    # the win32 branch is currently disabled:
    '''
    elif sys.platform == 'win32':
        current_version = get_trayicon_version()
        if current_version != '0.2.0.0':
            msi = os.path.normpath(os.path.join(base, 'platform_win32', 'Open Media Library.msi'))
            cmd = ['msiexec.exe', '/qb', '/I', msi]
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            startupinfo.wShowWindow = subprocess.SW_HIDE
            subprocess.Popen(cmd, cwd=settings.base_dir, start_new_session=True, startupinfo=startupinfo)
    '''


def getVersion(data):
    '''
    check if a new version is available
    '''
    response = {
        'current': settings.MINOR_VERSION,
        'version': settings.MINOR_VERSION,
        'upgrade': False,
    }
    if settings.MINOR_VERSION == 'git':
        '''
        cmd = ['git', 'rev-parse', '@']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        current = stdout.strip()
        cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/master']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        new = stdout.strip()[:40]
        response['update'] = len(new) == 40 and current != new
        '''
        response['update'] = False
    else:
        get_latest_release()
        if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
            return response
        if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
            return response
        with open(os.path.join(settings.updates_path, 'release.json')) as fd:
            release = json.load(fd)
        current = current_version('openmedialibrary')
        response['current'] = current
        new = release['modules']['openmedialibrary']['version']
        response['version'] = new
        response['update'] = current < new
    return response


actions.register(getVersion, cache=False)
actions.register(getVersion, cache=False, version='public')


def restart(data):
    '''
    restart (and upgrade if upgrades are available)
    '''
    restart_oml(data.get('update'))
    return {}


actions.register(restart, cache=False)


class Update(Thread):
    _status = {
        'reload': False,
        'status': 'Updating Open Media Library...'
    }

    def __init__(self):
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def status(self, status, reload=False):
        from websocket import trigger_event
        self._status = {
            'reload': reload,
            'status': status,
        }
        trigger_event('updatestatus', self._status)

    def install(self):
        while update_available():
            self.status('Downloading...')
            max_retry = 5
            while max_retry > 0 and not download():
                max_retry -= 1
                self.status('Download failed, retrying...')
                time.sleep(5)
                self.status('Downloading...')
            self.status('Installing...')
            if not install():
                self.status('Installation failed.')
                return True
        return False

    def update_database(self):
        db_version = settings.server.get('db_version', 0)
        if db_version < settings.DB_VERSION:
            self.status('Updating...')
            # note: there are no migrate_9 or migrate_14 steps,
            # the guards below skip those version numbers
            if db_version < 3:
                db_version = migrate_3()
            if db_version < 4:
                db_version = migrate_4()
            if db_version < 5:
                db_version = migrate_5()
            if db_version < 6:
                db_version = migrate_6()
            if db_version < 7:
                db_version = migrate_7()
            if db_version < 9:
                db_version = migrate_8()
            if db_version < 10:
                db_version = migrate_10()
            if db_version < 11:
                db_version = migrate_11()
            if db_version < 12:
                db_version = migrate_12()
            if db_version < 13:
                db_version = migrate_13()
            if db_version < 15:
                db_version = migrate_15()
            if db_version < 16:
                db_version = migrate_16()
            if db_version < 17:
                db_version = migrate_17()
            if db_version < 18:
                db_version = migrate_18()
            if db_version < 19:
                db_version = migrate_19()
            if db_version < 20:
                db_version = migrate_20()
            settings.server['db_version'] = db_version

    def run(self):
        self.status('Checking for updates...')
        self.update_database()
        if self.install():
            restart_oml()
            return
        self.status('Relaunching...', True)
        restart_oml()
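
# Update is self-starting: instantiating it spawns a daemon thread that runs
# pending database migrations, then downloads and installs any release update,
# and finally restarts OML. A minimal usage sketch (the actual call site lives
# outside this module):
#
#     Update()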


def migrate_3():
    with db.session():
        import item.models
        for i in item.models.Item.find({
            'query': {
                'conditions': [{
                    'key': 'mediastate',
                    'value': 'available',
                    'operator': '=='
                }]
            }
        }):
            if not i.files.all():
                i.remove_file()
            else:
                f = i.files.all()[0]
                if 'pages' not in i.info and 'pages' in f.info:
                    i.info['pages'] = f.info['pages']
                    i.save()
    return 3


def migrate_4():
    with db.session() as session:
        import item.models
        from meta.utils import to_isbn13
        for i in item.models.Item.query:
            update = False
            if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                isbns = [isbn for isbn in isbns if isbn]
                if isbns:
                    i.meta['isbn'] = isbns[0]
                    if 'isbn' in i.info:
                        i.info['isbn'] = i.meta['isbn']
                else:
                    del i.meta['isbn']
                    if 'isbn' in i.info:
                        del i.info['isbn']
                update = True
            if 'isbn' in i.meta and not i.meta['isbn']:
                del i.meta['isbn']
                update = True
            if update:
                session.add(i)
        session.commit()
    return 4


def migrate_5():
    db.run_sql([
        'DROP INDEX IF EXISTS user_metadata_index',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'UPDATE sort SET sharemetadata = 0',
    ])
    with db.session() as session:
        import user.models
        for m in user.models.Metadata.query:
            data_hash = m.get_hash()
            if m.data_hash != data_hash:
                m.data_hash = data_hash
                session.add(m)
        session.commit()
        import item.models
        for i in item.models.Item.query:
            update = False
            if '_from' in i.info:
                del i.info['_from']
                update = True
            if update:
                session.add(i)
        session.commit()
        for i in item.models.Item.query:
            i.sync_metadata()
    return 5


def migrate_6():
    with db.session() as session:
        import item.models
        for s in item.models.Sort.query.filter_by(author=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(publisher=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(language=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(place=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(isbn=''):
            s.item.update_sort()
        for s in item.models.Sort.query.filter_by(date=''):
            s.item.update_sort()
        session.commit()
    return 6


def migrate_7():
    with db.session() as session:
        import changelog
        for c in changelog.Changelog.query:
            if 'editmeta' in c.data or 'resetmeta' in c.data:
                session.delete(c)
        session.commit()
    db.run_sql('DROP TABLE IF EXISTS metadata')
    db.run_sql('DROP TABLE IF EXISTS scrape')
    # VACUUM rebuilds the database file, reclaiming the space freed above
    db.run_sql('VACUUM')
    return 7
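
# As the migrations above and below show, db.run_sql() accepts either a
# single SQL statement or a list of statements:
#
#     db.run_sql('VACUUM')
#     db.run_sql(['DROP TABLE IF EXISTS transfer'])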


def migrate_8():
    for key in ('directory_service', 'meta_service', 'local_lookup', 'cert'):
        if key in settings.server:
            del settings.server[key]
    list_cache = os.path.join(settings.data_path, 'list_cache.json')
    if os.path.exists(list_cache):
        os.unlink(list_cache)
    with db.session() as session:
        import item.models
        for i in item.models.Item.query:
            delta = set(i.meta) - set(i.meta_keys)
            if delta:
                for key in delta:
                    del i.meta[key]
                session.add(i)
        session.commit()
        import changelog
        import user.models
        changelog.Changelog.query.delete()
        u = user.models.User.get(settings.USER_ID)
        u.rebuild_changelog()
        for peer in user.models.User.query:
            if peer.id != u.id:
                if len(peer.id) != 16:
                    session.delete(peer)
        session.commit()
    return 8


def migrate_10():
    with db.session() as session:
        from item.models import Item, Find
        from utils import get_by_id
        from item.person import get_sort_name
        import unicodedata
        sort_names = {}
        updates = {}
        for f in Find.query.filter(Find.key.in_(Item.filter_keys)):
            sort_type = get_by_id(settings.config['itemKeys'], f.key).get('sortType')
            if sort_type == 'person':
                if f.value in sort_names:
                    sortvalue = sort_names[f.value]
                else:
                    sortvalue = sort_names[f.value] = get_sort_name(f.value)
            else:
                sortvalue = f.value
            if sortvalue:
                sortvalue = ox.sort_string(unicodedata.normalize('NFKD', sortvalue)).lower()
            if f.key not in updates:
                updates[f.key] = {}
            updates[f.key][f.value] = sortvalue
        for key in updates:
            for value in updates[key]:
                Find.query.filter_by(key=key, value=value).update({'sortvalue': updates[key][value]})
        session.commit()
    return 10


def migrate_11():
    with db.session() as session:
        from user.models import User, Metadata, List
        from changelog import Changelog
        import utils
        for u in User.query.filter_by(peered=True):
            peer = utils.get_peer(u.id)
            last = Changelog.query.filter_by(user_id=u.id).order_by(text('-revision')).first()
            if last:
                peer.info['revision'] = last.revision
            listorder = []
            for l in List.query.filter_by(user_id=u.id).order_by('index_'):
                if l.name:
                    peer.info['lists'][l.name] = [i.id for i in l.get_items()]
                    listorder.append(l.name)
            if 'listorder' not in peer.info:
                peer.info['listorder'] = listorder
            for m in Metadata.query.filter_by(user_id=u.id):
                peer.library[m.item_id] = {
                    'meta': dict(m.data),
                    'meta_hash': m.data_hash,
                    'modified': m.modified,
                }
            peer.library.commit()
            peer.sync_info()
            peer.sync_db()
            Changelog.query.filter_by(user_id=u.id).delete()
            Metadata.query.filter_by(user_id=u.id).delete()
        session.commit()
    if db.table_exists('transfer'):
        import state
        import downloads
        state.online = False
        state.downloads = downloads.Downloads()
        r = state.db.session.execute('SELECT item_id, added, progress FROM transfer')
        for t in r:
            item_id, added, progress = t
            if added:
                state.downloads.transfers[item_id] = {
                    'added': added,
                    'progress': progress
                }
        state.db.session.commit()
        state.downloads.transfers.commit()
        state.downloads = None
    return 11


def migrate_12():
    db.run_sql([
        'DROP TABLE IF EXISTS transfer'
    ])
    return 12


def migrate_13():
    import changelog
    path = os.path.join(settings.data_path, 'peers', '%s.log' % settings.USER_ID)
    if not os.path.exists(path):
        folder = os.path.dirname(path)
        if not os.path.exists(folder):
            os.makedirs(folder)
        with db.session():
            revision = -1
            qs = changelog.Changelog.query.filter_by(user_id=settings.USER_ID)
            with open(path, 'wb') as fd:
                for c in qs.order_by('timestamp'):
                    data = json.dumps([c.revision, c.timestamp, json.loads(c.data)], ensure_ascii=False).encode('utf-8')
                    fd.write(data + b'\n')
                    revision = c.revision
            if revision > -1:
                settings.server['revision'] = revision
    return 13


def migrate_15():
    from user.models import List, User
    with db.session():
        l = List.get(':Public')
        if l and not len(l.items):
            l.remove()
        for u in User.query:
            if 'local' in u.info:
                del u.info['local']
                u.save()
    return 15


def migrate_16():
    # SQLite cannot alter column definitions in place, so rebuild the user
    # table: create user2, copy the rows over, drop user, rename user2
    db.run_sql([
        '''CREATE TABLE user2 (
            created DATETIME,
            modified DATETIME,
            id VARCHAR(43) NOT NULL,
            info BLOB,
            nickname VARCHAR(256),
            pending VARCHAR(64),
            queued BOOLEAN,
            peered BOOLEAN,
            online BOOLEAN,
            PRIMARY KEY (id),
            CHECK (queued IN (0, 1)),
            CHECK (peered IN (0, 1)),
            CHECK (online IN (0, 1))
        )''',
        '''INSERT INTO user2 (created, modified, id, info, nickname, pending, queued, peered, online)
            SELECT created, modified, id, info, nickname, pending, queued, peered, online FROM user''',
        'DROP TABLE user',
        'ALTER TABLE user2 RENAME TO user',
        'CREATE INDEX IF NOT EXISTS ix_user_nickname ON user (nickname)'
    ])
    return 16


def migrate_17():
    from user.models import List, User
    from changelog import add_record
    with db.session():
        l = List.get(':Public')
        if not l:
            add_record('removelist', 'Public')
        lists = []
        for l in List.query.filter_by(user_id=settings.USER_ID).order_by('index_'):
            if l.type == 'static' and l.name not in ('', 'Inbox'):
                lists.append(l.name)
        add_record('orderlists', lists)
    return 17


def migrate_18():
    db.run_sql([
        '''CREATE TABLE annotation (
            _id INTEGER NOT NULL,
            id VARCHAR(43),
            created DATETIME,
            modified DATETIME,
            user_id VARCHAR(43),
            item_id VARCHAR(43),
            data BLOB,
            findquotes TEXT,
            findnotes TEXT,
            PRIMARY KEY (_id),
            FOREIGN KEY(user_id) REFERENCES user (id),
            FOREIGN KEY(item_id) REFERENCES item (id)
        )'''])
    db.run_sql([
        'CREATE INDEX ix_annotation_findquotes ON annotation (findquotes)',
        'CREATE INDEX ix_annotation_findnotes ON annotation (findnotes)'
    ])
    return 18


def migrate_19():
    from user.models import User
    with db.session():
        peers = [u for u in User.query.filter_by(peered=True)]
        peers.sort(key=lambda u: utils.user_sort_key(u.json()))
        for u in peers:
            peer = utils.get_peer(u.id)
            if not peer.info.get('revision') and os.path.exists(peer._logpath) and os.path.getsize(peer._logpath):
                logger.debug('try to apply pending logs for %s', u.id)
                try:
                    peer.apply_log()
                except:
                    logger.error('failed to apply log for %s', u.id)
    return 19


def migrate_20():
    from glob import glob
    changed = False
    for log in glob(os.path.join(settings.data_path, 'peers', '*.log')):
        with open(log, 'rb') as fd:
            data = fd.read()
        try:
            data.decode('utf-8')
        except UnicodeDecodeError:
            data = data.decode('Windows-1252')
            logger.error('convert %s to utf-8', log)
            with open(log, 'wb') as fd:
                fd.write(data.encode('utf-8'))
            changed = True
    if changed:
        migrate_19()
    return 20